/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011,
   2012 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */

/* The entry points in this file are fold, size_int_wide and size_binop.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "flags.h"
#include "tree.h"
#include "realmpfr.h"
#include "rtl.h"
#include "expr.h"
#include "tm_p.h"
#include "target.h"
#include "diagnostic-core.h"
#include "intl.h"
#include "ggc.h"
#include "hashtab.h"
#include "langhooks.h"
#include "md5.h"
#include "gimple.h"
#include "tree-flow.h"

/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;

/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
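
/* In this encoding, bit 0 stands for "less than", bit 1 for "equal",
   bit 2 for "greater than", and bit 3 for "unordered", so a combined
   relation is the inclusive OR of its parts.  For example,
   COMPCODE_LE == COMPCODE_LT | COMPCODE_EQ == 3, and
   COMPCODE_NE == COMPCODE_LT | COMPCODE_GT | COMPCODE_UNORD == 13.  */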

static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand_loc (location_t, tree, tree, tree);
static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (location_t, tree, tree,
                                HOST_WIDE_INT, HOST_WIDE_INT, int);
static tree optimize_bit_field_compare (location_t, enum tree_code,
                                        tree, tree, tree);
static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
                                    HOST_WIDE_INT *,
                                    enum machine_mode *, int *, int *,
                                    tree *, tree *);
static int all_ones_mask_p (const_tree, int);
static tree sign_bit_p (tree, const_tree);
static int simple_operand_p (const_tree);
static bool simple_operand_p_2 (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree optimize_minmax_comparison (location_t, enum tree_code,
                                        tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (location_t,
                                                 enum tree_code, tree,
                                                 tree, tree,
                                                 tree, tree, int);
static tree fold_mathfn_compare (location_t,
                                 enum built_in_function, enum tree_code,
                                 tree, tree, tree);
static tree fold_inf_compare (location_t, enum tree_code, tree, tree, tree);
static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
static bool reorder_operands_p (const_tree, const_tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (const_tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);

/* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
   Otherwise, return LOC.  */

static location_t
expr_location_or (tree t, location_t loc)
{
  location_t tloc = EXPR_LOCATION (t);
  return tloc != UNKNOWN_LOCATION ? tloc : loc;
}

/* Similar to protected_set_expr_location, but never modify x in place;
   if the location can and needs to be set, unshare it.  */

static inline tree
protected_set_expr_location_unshare (tree x, location_t loc)
{
  if (CAN_HAVE_LOCATION_P (x)
      && EXPR_LOCATION (x) != loc
      && !(TREE_CODE (x) == SAVE_EXPR
           || TREE_CODE (x) == TARGET_EXPR
           || TREE_CODE (x) == BIND_EXPR))
    {
      x = copy_node (x);
      SET_EXPR_LOCATION (x, loc);
    }
  return x;
}


/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.  */
#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
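
/* For example, using 8-bit values for brevity, 0x7f + 0x01 gives
   SUM == 0x80: A and B agree in sign (both non-negative) while A and
   SUM differ, so ~(A ^ B) & (A ^ SUM) has its sign bit set and the
   macro yields nonzero.  */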

/* If ARG2 divides ARG1 with zero remainder, carries out the division
   of type CODE and returns the quotient.
   Otherwise returns NULL_TREE.  */

tree
div_if_zero_remainder (enum tree_code code, const_tree arg1, const_tree arg2)
{
  double_int quo, rem;
  int uns;

  /* The sign of the division is according to operand two, which
     does the correct thing for POINTER_PLUS_EXPR where we want
     a signed division.  */
  uns = TYPE_UNSIGNED (TREE_TYPE (arg2));
  if (TREE_CODE (TREE_TYPE (arg2)) == INTEGER_TYPE
      && TYPE_IS_SIZETYPE (TREE_TYPE (arg2)))
    uns = false;

  quo = double_int_divmod (tree_to_double_int (arg1),
                           tree_to_double_int (arg2),
                           uns, code, &rem);

  if (double_int_zero_p (rem))
    return build_int_cst_wide (TREE_TYPE (arg1), quo.low, quo.high);

  return NULL_TREE;
}
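
/* For example, with CODE == EXACT_DIV_EXPR, dividing the INTEGER_CST 12
   by 4 yields 3, while dividing 13 by 4 yields NULL_TREE because the
   remainder is nonzero.  */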

/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;

/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}

/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
          && code != 0
          && code < (int) fold_deferred_overflow_code)
        fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (gimple_no_warning_p (stmt))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
}

/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}

/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}

/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

static void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
          || wc < fold_deferred_overflow_code)
        {
          fold_deferred_overflow_warning = gmsgid;
          fold_deferred_overflow_code = wc;
        }
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}

/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CASIN):
    CASE_FLT_FN (BUILT_IN_CASINH):
    CASE_FLT_FN (BUILT_IN_CATAN):
    CASE_FLT_FN (BUILT_IN_CATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_CPROJ):
    CASE_FLT_FN (BUILT_IN_CSIN):
    CASE_FLT_FN (BUILT_IN_CSINH):
    CASE_FLT_FN (BUILT_IN_CTAN):
    CASE_FLT_FN (BUILT_IN_CTANH):
    CASE_FLT_FN (BUILT_IN_ERF):
    CASE_FLT_FN (BUILT_IN_LLROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
    CASE_FLT_FN (BUILT_IN_TRUNC):
      return true;

    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}

/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (const_tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  prec = TYPE_PRECISION (type);
  if (prec > HOST_BITS_PER_WIDE_INT)
    {
      if (TREE_INT_CST_LOW (t) != 0)
        return true;
      prec -= HOST_BITS_PER_WIDE_INT;
      val = TREE_INT_CST_HIGH (t);
    }
  else
    val = TREE_INT_CST_LOW (t);
  if (prec < HOST_BITS_PER_WIDE_INT)
    val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
  return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
}
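
/* For example, with a 32-bit signed type the only value this rejects
   is the minimum value 0x80000000 (INT_MIN), whose negation is not
   representable; every other value, including INT_MAX, negates
   without overflow.  */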

/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_OVERFLOW_WRAPS (type))
        return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
              && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
    case NEGATE_EXPR:
      return true;

    case REAL_CST:
      /* We want to canonicalize to positive real constants.  Pretend
         that only negative ones can be easily negated.  */
      return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
             && negate_expr_p (TREE_IMAGPART (t));

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
             && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
          && reorder_operands_p (TREE_OPERAND (t, 0),
                                 TREE_OPERAND (t, 1)))
        return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
             && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
             && reorder_operands_p (TREE_OPERAND (t, 0),
                                    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        return negate_expr_p (TREE_OPERAND (t, 1))
               || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t))
          && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
        break;
      return negate_expr_p (TREE_OPERAND (t, 1))
             || negate_expr_p (TREE_OPERAND (t, 0));

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tree tem = strip_float_extensions (t);
          if (tem != t)
            return negate_expr_p (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
        return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            return true;
        }
      break;

    default:
      break;
    }
  return false;
}

/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
        return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
                                build_int_cst (type, 1));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
          || !TYPE_OVERFLOW_TRAPS (type))
        return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
        return tem;
      break;

    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
        tree rpart = negate_expr (TREE_REALPART (t));
        tree ipart = negate_expr (TREE_IMAGPART (t));

        if ((TREE_CODE (rpart) == REAL_CST
             && TREE_CODE (ipart) == REAL_CST)
            || (TREE_CODE (rpart) == INTEGER_CST
                && TREE_CODE (ipart) == INTEGER_CST))
          return build_complex (type, rpart, ipart);
      }
      break;

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
        return fold_build2_loc (loc, COMPLEX_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)),
                                fold_negate_expr (loc, TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
        return fold_build1_loc (loc, CONJ_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      return TREE_OPERAND (t, 0);

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        {
          /* -(A + B) -> (-B) - A.  */
          if (negate_expr_p (TREE_OPERAND (t, 1))
              && reorder_operands_p (TREE_OPERAND (t, 0),
                                     TREE_OPERAND (t, 1)))
            {
              tem = negate_expr (TREE_OPERAND (t, 1));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 0));
            }

          /* -(A + B) -> (-A) - B.  */
          if (negate_expr_p (TREE_OPERAND (t, 0)))
            {
              tem = negate_expr (TREE_OPERAND (t, 0));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 1));
            }
        }
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
          && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
        return fold_build2_loc (loc, MINUS_EXPR, type,
                                TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    TREE_OPERAND (t, 0), negate_expr (tem));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
        {
          const char * const warnmsg = G_("assuming signed overflow does not "
                                          "occur when negating a division");
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            {
              if (INTEGRAL_TYPE_P (type)
                  && (TREE_CODE (tem) != INTEGER_CST
                      || integer_onep (tem)))
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
              return fold_build2_loc (loc, TREE_CODE (t), type,
                                      TREE_OPERAND (t, 0), negate_expr (tem));
            }
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            {
              if (INTEGRAL_TYPE_P (type)
                  && (TREE_CODE (tem) != INTEGER_CST
                      || tree_int_cst_equal (tem, TYPE_MIN_VALUE (type))))
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
              return fold_build2_loc (loc, TREE_CODE (t), type,
                                      negate_expr (tem), TREE_OPERAND (t, 1));
            }
        }
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = strip_float_extensions (t);
          if (tem != t && negate_expr_p (tem))
            return fold_convert_loc (loc, type, negate_expr (tem));
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
          && negate_expr_p (CALL_EXPR_ARG (t, 0)))
        {
          tree fndecl, arg;

          fndecl = get_callee_fndecl (t);
          arg = negate_expr (CALL_EXPR_ARG (t, 0));
          return build_call_expr_loc (loc, fndecl, 1, arg);
        }
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            {
              tree ntype = TYPE_UNSIGNED (type)
                           ? signed_type_for (type)
                           : unsigned_type_for (type);
              tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
              temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
              return fold_convert_loc (loc, type, temp);
            }
        }
      break;

    default:
      break;
    }

  return NULL_TREE;
}

/* Like fold_negate_expr, but return a NEGATE_EXPR tree if T cannot be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;
  location_t loc;

  if (t == NULL_TREE)
    return NULL_TREE;

  loc = EXPR_LOCATION (t);
  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (loc, t);
  if (!tem)
    tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert_loc (loc, type, tem);
}

/* Split a tree IN into constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
            tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
               && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
          || TREE_CODE (op0) == FIXED_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
               || TREE_CODE (op1) == FIXED_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
        *conp = negate_expr (*conp);
      if (neg_var_p)
        var = negate_expr (var);
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
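
/* For example, with CODE == PLUS_EXPR, splitting the tree A + 4 sets
   *LITP to 4, leaves *CONP null, and returns A; splitting A - 4 sets
   *MINUS_LITP to 4 instead, since the literal was subtracted.  */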

/* Re-associate trees split by the above function.  T1 and T2 are
   either expressions to associate or null.  Return the new
   expression, if any.  LOC is the location of the new expression.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
        {
          if (TREE_CODE (t1) == NEGATE_EXPR)
            return build2_loc (loc, MINUS_EXPR, type,
                               fold_convert_loc (loc, type, t2),
                               fold_convert_loc (loc, type,
                                                 TREE_OPERAND (t1, 0)));
          else if (TREE_CODE (t2) == NEGATE_EXPR)
            return build2_loc (loc, MINUS_EXPR, type,
                               fold_convert_loc (loc, type, t1),
                               fold_convert_loc (loc, type,
                                                 TREE_OPERAND (t2, 0)));
          else if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }

      return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
                         fold_convert_loc (loc, type, t2));
    }

  return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
                          fold_convert_loc (loc, type, t2));
}

/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (TREE_CODE (type1) != INTEGER_TYPE && !POINTER_TYPE_P (type1))
    return false;
  if (TREE_CODE (type2) != INTEGER_TYPE && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
         && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
         && TYPE_MODE (type1) == TYPE_MODE (type2);
}


/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.  */

tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
{
  double_int op1, op2, res, tmp;
  tree t;
  tree type = TREE_TYPE (arg1);
  bool uns = TYPE_UNSIGNED (type);
  bool is_sizetype
    = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
  bool overflow = false;

  op1 = tree_to_double_int (arg1);
  op2 = tree_to_double_int (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      res = double_int_ior (op1, op2);
      break;

    case BIT_XOR_EXPR:
      res = double_int_xor (op1, op2);
      break;

    case BIT_AND_EXPR:
      res = double_int_and (op1, op2);
      break;

    case RSHIFT_EXPR:
      res = double_int_rshift (op1, double_int_to_shwi (op2),
                               TYPE_PRECISION (type), !uns);
      break;

    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
         The following code ignores overflow; perhaps a C standard
         interpretation ruling is needed.  */
      res = double_int_lshift (op1, double_int_to_shwi (op2),
                               TYPE_PRECISION (type), !uns);
      break;

    case RROTATE_EXPR:
      res = double_int_rrotate (op1, double_int_to_shwi (op2),
                                TYPE_PRECISION (type));
      break;

    case LROTATE_EXPR:
      res = double_int_lrotate (op1, double_int_to_shwi (op2),
                                TYPE_PRECISION (type));
      break;

    case PLUS_EXPR:
      overflow = add_double (op1.low, op1.high, op2.low, op2.high,
                             &res.low, &res.high);
      break;

    case MINUS_EXPR:
      neg_double (op2.low, op2.high, &res.low, &res.high);
      add_double (op1.low, op1.high, res.low, res.high,
                  &res.low, &res.high);
      overflow = OVERFLOW_SUM_SIGN (res.high, op2.high, op1.high);
      break;

    case MULT_EXPR:
      overflow = mul_double (op1.low, op1.high, op2.low, op2.high,
                             &res.low, &res.high);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
          && !TREE_OVERFLOW (arg1)
          && !TREE_OVERFLOW (arg2)
          && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
        {
          if (code == CEIL_DIV_EXPR)
            op1.low += op2.low - 1;

          res.low = op1.low / op2.low, res.high = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (double_int_zero_p (op2))
        return NULL_TREE;
      if (double_int_one_p (op2))
        {
          res = op1;
          break;
        }
      if (double_int_equal_p (op1, op2)
          && ! double_int_zero_p (op1))
        {
          res = double_int_one;
          break;
        }
      overflow = div_and_round_double (code, uns,
                                       op1.low, op1.high, op2.low, op2.high,
                                       &res.low, &res.high,
                                       &tmp.low, &tmp.high);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
          && !TREE_OVERFLOW (arg1)
          && !TREE_OVERFLOW (arg2)
          && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
        {
          if (code == CEIL_MOD_EXPR)
            op1.low += op2.low - 1;
          res.low = op1.low % op2.low, res.high = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      if (double_int_zero_p (op2))
        return NULL_TREE;
      overflow = div_and_round_double (code, uns,
                                       op1.low, op1.high, op2.low, op2.high,
                                       &tmp.low, &tmp.high,
                                       &res.low, &res.high);
      break;

    case MIN_EXPR:
      res = double_int_min (op1, op2, uns);
      break;

    case MAX_EXPR:
      res = double_int_max (op1, op2, uns);
      break;

    default:
      return NULL_TREE;
    }

  t = force_fit_type_double (TREE_TYPE (arg1), res, 1,
                             ((!uns || is_sizetype) && overflow)
                             | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));

  return t;
}
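
/* For example, int_const_binop (PLUS_EXPR, two, three), where TWO and
   THREE stand for INTEGER_CSTs of the same type, yields the INTEGER_CST
   5; if a signed addition wrapped, the result carries TREE_OVERFLOW.
   Division or modulus by zero yields NULL_TREE.  */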

/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case RDIV_EXPR:
        case MIN_EXPR:
        case MAX_EXPR:
          break;

        default:
          return NULL_TREE;
        }

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
         either operand is a NaN.  */
      if (HONOR_SNANS (mode)
          && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
        return NULL_TREE;

      /* Don't perform operation if it would raise a division
         by zero exception.  */
      if (code == RDIV_EXPR
          && REAL_VALUES_EQUAL (d2, dconst0)
          && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
        return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
         for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
        return arg1;
      else if (REAL_VALUE_ISNAN (d2))
        return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
         the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
          && MODE_HAS_INFINITIES (mode)
          && REAL_VALUE_ISINF (result)
          && !REAL_VALUE_ISINF (d1)
          && !REAL_VALUE_ISINF (d2))
        return NULL_TREE;

      /* Don't constant fold this floating point operation if the
         result may depend upon the run-time rounding mode and
         flag_rounding_math is set, or if GCC's software emulation
         is unable to accurately represent the result.  */
      if ((flag_rounding_math
           || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
          && (inexact || !real_identical (&result, &value)))
        return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      int sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case TRUNC_DIV_EXPR:
          f2 = TREE_FIXED_CST (arg2);
          break;

        case LSHIFT_EXPR:
        case RSHIFT_EXPR:
          f2.data.high = TREE_INT_CST_HIGH (arg2);
          f2.data.low = TREE_INT_CST_LOW (arg2);
          f2.mode = SImode;
          break;

        default:
          return NULL_TREE;
        }

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        TREE_OVERFLOW (t) = 1;
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
          real = const_binop (code, r1, r2);
          imag = const_binop (code, i1, i2);
          break;

        case MULT_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_mul);

          real = const_binop (MINUS_EXPR,
                              const_binop (MULT_EXPR, r1, r2),
                              const_binop (MULT_EXPR, i1, i2));
          imag = const_binop (PLUS_EXPR,
                              const_binop (MULT_EXPR, r1, i2),
                              const_binop (MULT_EXPR, i1, r2));
          break;

        case RDIV_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_div);
          /* Fallthru ... */
        case TRUNC_DIV_EXPR:
        case CEIL_DIV_EXPR:
        case FLOOR_DIV_EXPR:
        case ROUND_DIV_EXPR:
          if (flag_complex_method == 0)
            {
              /* Keep this algorithm in sync with
                 tree-complex.c:expand_complex_div_straight().

                 Expand complex division to scalars, straightforward algorithm.
                 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
                 t = br*br + bi*bi  */
              tree magsquared
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r2, r2),
                               const_binop (MULT_EXPR, i2, i2));
              tree t1
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r1, r2),
                               const_binop (MULT_EXPR, i1, i2));
              tree t2
                = const_binop (MINUS_EXPR,
                               const_binop (MULT_EXPR, i1, r2),
                               const_binop (MULT_EXPR, r1, i2));

              real = const_binop (code, t1, magsquared);
              imag = const_binop (code, t2, magsquared);
            }
          else
            {
              /* Keep this algorithm in sync with
                 tree-complex.c:expand_complex_div_wide().

                 Expand complex division to scalars, modified algorithm to
                 minimize overflow with wide input ranges.  */
              tree compare = fold_build2 (LT_EXPR, boolean_type_node,
                                          fold_abs_const (r2, TREE_TYPE (type)),
                                          fold_abs_const (i2, TREE_TYPE (type)));

              if (integer_nonzerop (compare))
                {
                  /* In the TRUE branch, we compute
                     ratio = br/bi;
                     div = (br * ratio) + bi;
                     tr = (ar * ratio) + ai;
                     ti = (ai * ratio) - ar;
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, r2, i2);
                  tree div = const_binop (PLUS_EXPR, i2,
                                          const_binop (MULT_EXPR, r2, ratio));
                  real = const_binop (MULT_EXPR, r1, ratio);
                  real = const_binop (PLUS_EXPR, real, i1);
                  real = const_binop (code, real, div);

                  imag = const_binop (MULT_EXPR, i1, ratio);
                  imag = const_binop (MINUS_EXPR, imag, r1);
                  imag = const_binop (code, imag, div);
                }
              else
                {
                  /* In the FALSE branch, we compute
                     ratio = d/c;
                     divisor = (d * ratio) + c;
                     tr = (b * ratio) + a;
                     ti = b - (a * ratio);
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, i2, r2);
                  tree div = const_binop (PLUS_EXPR, r2,
                                          const_binop (MULT_EXPR, i2, ratio));

                  real = const_binop (MULT_EXPR, i1, ratio);
                  real = const_binop (PLUS_EXPR, real, r1);
                  real = const_binop (code, real, div);

                  imag = const_binop (MULT_EXPR, r1, ratio);
                  imag = const_binop (MINUS_EXPR, i1, imag);
                  imag = const_binop (code, imag, div);
                }
            }
          break;

        default:
          return NULL_TREE;
        }

      if (real && imag)
        return build_complex (type, real, imag);
    }

  if (TREE_CODE (arg1) == VECTOR_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree elements1, elements2, list = NULL_TREE;

      if (TREE_CODE (arg2) != VECTOR_CST)
        return NULL_TREE;

      elements1 = TREE_VECTOR_CST_ELTS (arg1);
      elements2 = TREE_VECTOR_CST_ELTS (arg2);

      for (i = 0; i < count; i++)
        {
          tree elem1, elem2, elem;

          /* The trailing elements can be empty and should be treated as 0.  */
          if (!elements1)
            elem1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
                                        integer_zero_node);
          else
            {
              elem1 = TREE_VALUE (elements1);
              elements1 = TREE_CHAIN (elements1);
            }

          if (!elements2)
            elem2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
                                        integer_zero_node);
          else
            {
              elem2 = TREE_VALUE (elements2);
              elements2 = TREE_CHAIN (elements2);
            }

          elem = const_binop (code, elem1, elem2);

          /* It is possible that const_binop cannot handle the given
             code and returns NULL_TREE.  */
          if (elem == NULL_TREE)
            return NULL_TREE;

          list = tree_cons (NULL_TREE, elem, list);
        }
      return build_vector (type, nreverse (list));
    }
  return NULL_TREE;
}
1400 |
|
|
|
1401 |
|
|
/* Create a size type INT_CST node with NUMBER sign extended. KIND
|
1402 |
|
|
indicates which particular sizetype to create. */
|
1403 |
|
|
|
1404 |
|
|
tree
|
1405 |
|
|
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
|
1406 |
|
|
{
|
1407 |
|
|
return build_int_cst (sizetype_tab[(int) kind], number);
|
1408 |
|
|
}
|
1409 |
|
|
|
1410 |
|
|
/* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
|
1411 |
|
|
is a tree code. The type of the result is taken from the operands.
|
1412 |
|
|
Both must be equivalent integer types, ala int_binop_types_match_p.
|
1413 |
|
|
If the operands are constant, so is the result. */
|
1414 |
|
|
|
1415 |
|
|
tree
|
1416 |
|
|
size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
|
1417 |
|
|
{
|
1418 |
|
|
tree type = TREE_TYPE (arg0);
|
1419 |
|
|
|
1420 |
|
|
if (arg0 == error_mark_node || arg1 == error_mark_node)
|
1421 |
|
|
return error_mark_node;
|
1422 |
|
|
|
1423 |
|
|
gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
|
1424 |
|
|
TREE_TYPE (arg1)));
|
1425 |
|
|
|
1426 |
|
|
/* Handle the special case of two integer constants faster. */
|
1427 |
|
|
if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
|
1428 |
|
|
{
|
1429 |
|
|
/* And some specific cases even faster than that. */
|
1430 |
|
|
if (code == PLUS_EXPR)
|
1431 |
|
|
{
|
1432 |
|
|
if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
|
1433 |
|
|
return arg1;
|
1434 |
|
|
if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
|
1435 |
|
|
return arg0;
|
1436 |
|
|
}
|
1437 |
|
|
else if (code == MINUS_EXPR)
|
1438 |
|
|
{
|
1439 |
|
|
if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
|
1440 |
|
|
return arg0;
|
1441 |
|
|
}
|
1442 |
|
|
else if (code == MULT_EXPR)
|
1443 |
|
|
{
|
1444 |
|
|
if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
|
1445 |
|
|
return arg1;
|
1446 |
|
|
}
|
1447 |
|
|
|
1448 |
|
|
/* Handle general case of two integer constants. */
|
1449 |
|
|
return int_const_binop (code, arg0, arg1);
|
1450 |
|
|
}
|
1451 |
|
|
|
1452 |
|
|
return fold_build2_loc (loc, code, type, arg0, arg1);
|
1453 |
|
|
}

/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in the signed type corresponding to the type of the operands.  */

tree
size_diffop_loc (location_t loc, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
				       TREE_TYPE (arg1)));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);

  if (type == sizetype)
    ctype = ssizetype;
  else if (type == bitsizetype)
    ctype = sbitsizetype;
  else
    ctype = signed_type_for (type);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop_loc (loc, MINUS_EXPR,
			   fold_convert_loc (loc, ctype, arg0),
			   fold_convert_loc (loc, ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return build_int_cst (ctype, 0);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert_loc (loc, ctype,
			     size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
  else
    return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
			   fold_convert_loc (loc, ctype,
					     size_binop_loc (loc,
							     MINUS_EXPR,
							     arg1, arg0)));
}
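
/* E.g., for the sizetype constants 2 and 5, the code above computes
   5 - 2 == 3 in the unsigned type (which cannot overflow), converts
   that to ssizetype, and subtracts it from zero, yielding -3.  */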

/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, const_tree arg1)
{
  tree t;

  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type_double (type, tree_to_double_int (arg1),
			     !POINTER_TYPE_P (TREE_TYPE (arg1)),
			     (TREE_INT_CST_HIGH (arg1) < 0
			      && (TYPE_UNSIGNED (type)
				  < TYPE_UNSIGNED (TREE_TYPE (arg1))))
			     | TREE_OVERFLOW (arg1));

  return t;
}
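
/* For example, converting the INTEGER_CST 300 to an 8-bit unsigned
   type truncates it to its low eight bits, producing the constant 44
   (300 mod 256).  */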

/* A subroutine of fold_convert_const handling conversions of a
   REAL_CST to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
{
  int overflow = 0;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards, which simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */

  double_int val;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = 1;
      val = double_int_zero;
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (REAL_VALUES_LESS (r, l))
	{
	  overflow = 1;
	  val = tree_to_double_int (lt);
	}
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
	{
	  REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
	  if (REAL_VALUES_LESS (u, r))
	    {
	      overflow = 1;
	      val = tree_to_double_int (ut);
	    }
	}
    }

  if (! overflow)
    real_to_integer2 ((HOST_WIDE_INT *) &val.low, &val.high, &r);

  t = force_fit_type_double (type, val, -1, overflow | TREE_OVERFLOW (arg1));
  return t;
}
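
/* E.g., converting the REAL_CST 1.0e30 to a 32-bit signed integer
   type saturates to the type's maximum, 2147483647, and converting a
   NaN yields 0; in both cases TREE_OVERFLOW is set on the result.  */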

/* A subroutine of fold_convert_const handling conversions of a
   FIXED_CST to an integer type.  */

static tree
fold_convert_const_int_from_fixed (tree type, const_tree arg1)
{
  tree t;
  double_int temp, temp_trunc;
  unsigned int mode;

  /* Right shift FIXED_CST to temp by fbit.  */
  temp = TREE_FIXED_CST (arg1).data;
  mode = TREE_FIXED_CST (arg1).mode;
  if (GET_MODE_FBIT (mode) < 2 * HOST_BITS_PER_WIDE_INT)
    {
      temp = double_int_rshift (temp, GET_MODE_FBIT (mode),
				HOST_BITS_PER_DOUBLE_INT,
				SIGNED_FIXED_POINT_MODE_P (mode));

      /* Left shift temp to temp_trunc by fbit.  */
      temp_trunc = double_int_lshift (temp, GET_MODE_FBIT (mode),
				      HOST_BITS_PER_DOUBLE_INT,
				      SIGNED_FIXED_POINT_MODE_P (mode));
    }
  else
    {
      temp = double_int_zero;
      temp_trunc = double_int_zero;
    }

  /* If FIXED_CST is negative, we need to round the value toward 0.
     We do this by adding 1 to temp if any fractional bits were
     discarded, i.e. if shifting back does not reproduce the original
     value.  */
  if (SIGNED_FIXED_POINT_MODE_P (mode)
      && double_int_negative_p (temp_trunc)
      && !double_int_equal_p (TREE_FIXED_CST (arg1).data, temp_trunc))
    temp = double_int_add (temp, double_int_one);

  /* Given a fixed-point constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type_double (type, temp, -1,
			     (double_int_negative_p (temp)
			      && (TYPE_UNSIGNED (type)
				  < TYPE_UNSIGNED (TREE_TYPE (arg1))))
			     | TREE_OVERFLOW (arg1));

  return t;
}
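
/* For example, the signed fixed-point value -1.5 shifts right to -2
   (the arithmetic shift rounds toward negative infinity); since
   fractional bits were lost, 1 is added back, so the conversion
   correctly yields -1, rounding toward zero.  */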

/* A subroutine of fold_convert_const handling conversions of a
   REAL_CST to another floating point type.  */

static tree
fold_convert_const_real_from_real (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
  t = build_real (type, value);

  /* If converting an infinity or NaN to a representation that doesn't
     have one, set the overflow bit so that we can produce some kind of
     error message at the appropriate point if necessary.  It's not the
     most user-friendly message, but it's better than nothing.  */
  if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
      && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
	   && !MODE_HAS_NANS (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  /* Regular overflow, conversion produced an infinity in a mode that
     can't represent one.  */
  else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
	   && REAL_VALUE_ISINF (value)
	   && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
    TREE_OVERFLOW (t) = 1;
  else
    TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}

/* A subroutine of fold_convert_const handling conversions of a
   FIXED_CST to a floating point type.  */

static tree
fold_convert_const_real_from_fixed (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
  t = build_real (type, value);

  TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}

/* A subroutine of fold_convert_const handling conversions of a
   FIXED_CST to another fixed-point type.  */

static tree
fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
			      TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}

/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_int (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type),
				       TREE_INT_CST (arg1),
				       TYPE_UNSIGNED (TREE_TYPE (arg1)),
				       TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}

/* A subroutine of fold_convert_const handling conversions of a
   REAL_CST to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_real (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
					&TREE_REAL_CST (arg1),
					TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}

/* Attempt to fold type conversion operation CODE of expression ARG1 to
   type TYPE.  If no simplification can be done return NULL_TREE.  */

static tree
fold_convert_const (enum tree_code code, tree type, tree arg1)
{
  if (TREE_TYPE (arg1) == type)
    return arg1;

  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
      || TREE_CODE (type) == OFFSET_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
	return fold_convert_const_int_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
	return fold_convert_const_int_from_real (code, type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
	return fold_convert_const_int_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
	return build_real_from_int_cst (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
	return fold_convert_const_real_from_real (type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
	return fold_convert_const_real_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == FIXED_POINT_TYPE)
    {
      if (TREE_CODE (arg1) == FIXED_CST)
	return fold_convert_const_fixed_from_fixed (type, arg1);
      else if (TREE_CODE (arg1) == INTEGER_CST)
	return fold_convert_const_fixed_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
	return fold_convert_const_fixed_from_real (type, arg1);
    }
  return NULL_TREE;
}
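
/* E.g., fold_convert_const (FLOAT_EXPR, double_type_node,
   build_int_cst (integer_type_node, 3)) dispatches to
   build_real_from_int_cst and returns the REAL_CST 3.0.  */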

/* Construct a vector of zero elements of vector type TYPE.  */

static tree
build_zero_vector (tree type)
{
  tree t;

  t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
  return build_vector_from_val (type, t);
}
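
/* E.g., for a vector type with four 32-bit integer elements this
   yields the VECTOR_CST {0, 0, 0, 0}.  */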

/* Returns true if ARG is convertible to TYPE using a NOP_EXPR.  */

bool
fold_convertible_p (const_tree type, const_tree arg)
{
  tree orig = TREE_TYPE (arg);

  if (type == orig)
    return true;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return false;

  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
    return true;

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
    case OFFSET_TYPE:
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
	  || TREE_CODE (orig) == OFFSET_TYPE)
	return true;
      return (TREE_CODE (orig) == VECTOR_TYPE
	      && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));

    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case COMPLEX_TYPE:
    case VECTOR_TYPE:
    case VOID_TYPE:
      return TREE_CODE (type) == TREE_CODE (orig);

    default:
      return false;
    }
}

/* Convert expression ARG to type TYPE.  Used by the middle-end for
   simple conversions in preference to calling the front-end's convert.  */

tree
fold_convert_loc (location_t loc, tree type, tree arg)
{
  tree orig = TREE_TYPE (arg);
  tree tem;

  if (type == orig)
    return arg;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return error_mark_node;

  switch (TREE_CODE (type))
    {
    case POINTER_TYPE:
    case REFERENCE_TYPE:
      /* Handle conversions between pointers to different address spaces.  */
      if (POINTER_TYPE_P (orig)
	  && (TYPE_ADDR_SPACE (TREE_TYPE (type))
	      != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
	return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
      /* fall through */

    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case OFFSET_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
	{
	  tem = fold_convert_const (NOP_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
	  || TREE_CODE (orig) == OFFSET_TYPE)
	return fold_build1_loc (loc, NOP_EXPR, type, arg);
      if (TREE_CODE (orig) == COMPLEX_TYPE)
	return fold_convert_loc (loc, type,
				 fold_build1_loc (loc, REALPART_EXPR,
						  TREE_TYPE (orig), arg));
      gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
		  && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      return fold_build1_loc (loc, NOP_EXPR, type, arg);

    case REAL_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
	{
	  tem = fold_convert_const (FLOAT_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}
      else if (TREE_CODE (arg) == REAL_CST)
	{
	  tem = fold_convert_const (NOP_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}
      else if (TREE_CODE (arg) == FIXED_CST)
	{
	  tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}

      switch (TREE_CODE (orig))
	{
	case INTEGER_TYPE:
	case BOOLEAN_TYPE: case ENUMERAL_TYPE:
	case POINTER_TYPE: case REFERENCE_TYPE:
	  return fold_build1_loc (loc, FLOAT_EXPR, type, arg);

	case REAL_TYPE:
	  return fold_build1_loc (loc, NOP_EXPR, type, arg);

	case FIXED_POINT_TYPE:
	  return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);

	case COMPLEX_TYPE:
	  tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
	  return fold_convert_loc (loc, type, tem);

	default:
	  gcc_unreachable ();
	}

    case FIXED_POINT_TYPE:
      if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
	  || TREE_CODE (arg) == REAL_CST)
	{
	  tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    goto fold_convert_exit;
	}

      switch (TREE_CODE (orig))
	{
	case FIXED_POINT_TYPE:
	case INTEGER_TYPE:
	case ENUMERAL_TYPE:
	case BOOLEAN_TYPE:
	case REAL_TYPE:
	  return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);

	case COMPLEX_TYPE:
	  tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
	  return fold_convert_loc (loc, type, tem);

	default:
	  gcc_unreachable ();
	}

    case COMPLEX_TYPE:
      switch (TREE_CODE (orig))
	{
	case INTEGER_TYPE:
	case BOOLEAN_TYPE: case ENUMERAL_TYPE:
	case POINTER_TYPE: case REFERENCE_TYPE:
	case REAL_TYPE:
	case FIXED_POINT_TYPE:
	  return fold_build2_loc (loc, COMPLEX_EXPR, type,
				  fold_convert_loc (loc, TREE_TYPE (type), arg),
				  fold_convert_loc (loc, TREE_TYPE (type),
						    integer_zero_node));
	case COMPLEX_TYPE:
	  {
	    tree rpart, ipart;

	    if (TREE_CODE (arg) == COMPLEX_EXPR)
	      {
		rpart = fold_convert_loc (loc, TREE_TYPE (type),
					  TREE_OPERAND (arg, 0));
		ipart = fold_convert_loc (loc, TREE_TYPE (type),
					  TREE_OPERAND (arg, 1));
		return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
	      }

	    arg = save_expr (arg);
	    rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
	    ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
	    rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
	    ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
	    return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
	  }

	default:
	  gcc_unreachable ();
	}

    case VECTOR_TYPE:
      if (integer_zerop (arg))
	return build_zero_vector (type);
      gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
		  || TREE_CODE (orig) == VECTOR_TYPE);
      return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);

    case VOID_TYPE:
      tem = fold_ignored_result (arg);
      return fold_build1_loc (loc, NOP_EXPR, type, tem);

    default:
      if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
	return fold_build1_loc (loc, NOP_EXPR, type, arg);
      gcc_unreachable ();
    }
 fold_convert_exit:
  protected_set_expr_location_unshare (tem, loc);
  return tem;
}
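
/* For example, converting a COMPLEX_TYPE value to a REAL_TYPE first
   extracts the real part with REALPART_EXPR and then converts that,
   matching the C rule that complex-to-real conversion discards the
   imaginary part.  */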

/* Return false if expr can be assumed not to be an lvalue, true
   otherwise.  */

static bool
maybe_lvalue_p (const_tree x)
{
  /* We only need to wrap lvalue tree codes.  */
  switch (TREE_CODE (x))
    {
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
    case LABEL_DECL:
    case FUNCTION_DECL:
    case SSA_NAME:

    case COMPONENT_REF:
    case MEM_REF:
    case INDIRECT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case BIT_FIELD_REF:
    case OBJ_TYPE_REF:

    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case PREINCREMENT_EXPR:
    case PREDECREMENT_EXPR:
    case SAVE_EXPR:
    case TRY_CATCH_EXPR:
    case WITH_CLEANUP_EXPR:
    case COMPOUND_EXPR:
    case MODIFY_EXPR:
    case TARGET_EXPR:
    case COND_EXPR:
    case BIND_EXPR:
      break;

    default:
      /* Assume the worst for front-end tree codes.  */
      if ((int) TREE_CODE (x) >= NUM_TREE_CODES)
	break;
      return false;
    }

  return true;
}

/* Return an expr equal to X but certainly not valid as an lvalue.  */

tree
non_lvalue_loc (location_t loc, tree x)
{
  /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
     us.  */
  if (in_gimple_form)
    return x;

  if (! maybe_lvalue_p (x))
    return x;
  return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
}

/* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
   Zero means allow extended lvalues.  */

int pedantic_lvalues;

/* When pedantic, return an expr equal to X but certainly not valid as a
   pedantic lvalue.  Otherwise, return X.  */

static tree
pedantic_non_lvalue_loc (location_t loc, tree x)
{
  if (pedantic_lvalues)
    return non_lvalue_loc (loc, x);

  return protected_set_expr_location_unshare (x, loc);
}

/* Given a tree comparison code, return the code that is the logical inverse.
   It is generally not safe to do this for floating-point comparisons, except
   for EQ_EXPR and NE_EXPR, so we return ERROR_MARK in this case.  */

enum tree_code
invert_tree_comparison (enum tree_code code, bool honor_nans)
{
  if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR)
    return ERROR_MARK;

  switch (code)
    {
    case EQ_EXPR:
      return NE_EXPR;
    case NE_EXPR:
      return EQ_EXPR;
    case GT_EXPR:
      return honor_nans ? UNLE_EXPR : LE_EXPR;
    case GE_EXPR:
      return honor_nans ? UNLT_EXPR : LT_EXPR;
    case LT_EXPR:
      return honor_nans ? UNGE_EXPR : GE_EXPR;
    case LE_EXPR:
      return honor_nans ? UNGT_EXPR : GT_EXPR;
    case LTGT_EXPR:
      return UNEQ_EXPR;
    case UNEQ_EXPR:
      return LTGT_EXPR;
    case UNGT_EXPR:
      return LE_EXPR;
    case UNGE_EXPR:
      return LT_EXPR;
    case UNLT_EXPR:
      return GE_EXPR;
    case UNLE_EXPR:
      return GT_EXPR;
    case ORDERED_EXPR:
      return UNORDERED_EXPR;
    case UNORDERED_EXPR:
      return ORDERED_EXPR;
    default:
      gcc_unreachable ();
    }
}
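
/* E.g., with NaNs honored the inverse of x < y is x UNGE y, which is
   true when x >= y or when either operand is a NaN; plain x >= y would
   wrongly be false in the NaN case even though !(x < y) holds.  */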

/* Similar, but return the comparison that results if the operands are
   swapped.  This is safe for floating-point.  */

enum tree_code
swap_tree_comparison (enum tree_code code)
{
  switch (code)
    {
    case EQ_EXPR:
    case NE_EXPR:
    case ORDERED_EXPR:
    case UNORDERED_EXPR:
    case LTGT_EXPR:
    case UNEQ_EXPR:
      return code;
    case GT_EXPR:
      return LT_EXPR;
    case GE_EXPR:
      return LE_EXPR;
    case LT_EXPR:
      return GT_EXPR;
    case LE_EXPR:
      return GE_EXPR;
    case UNGT_EXPR:
      return UNLT_EXPR;
    case UNGE_EXPR:
      return UNLE_EXPR;
    case UNLT_EXPR:
      return UNGT_EXPR;
    case UNLE_EXPR:
      return UNGE_EXPR;
    default:
      gcc_unreachable ();
    }
}
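
/* E.g., x < y holds exactly when y > x does, even in the presence of
   NaNs (both are false then), which is why swapping is always safe
   where inversion is not.  */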

/* Convert a comparison tree code from an enum tree_code representation
   into a compcode bit-based encoding.  This function is the inverse of
   compcode_to_comparison.  */

static enum comparison_code
comparison_to_compcode (enum tree_code code)
{
  switch (code)
    {
    case LT_EXPR:
      return COMPCODE_LT;
    case EQ_EXPR:
      return COMPCODE_EQ;
    case LE_EXPR:
      return COMPCODE_LE;
    case GT_EXPR:
      return COMPCODE_GT;
    case NE_EXPR:
      return COMPCODE_NE;
    case GE_EXPR:
      return COMPCODE_GE;
    case ORDERED_EXPR:
      return COMPCODE_ORD;
    case UNORDERED_EXPR:
      return COMPCODE_UNORD;
    case UNLT_EXPR:
      return COMPCODE_UNLT;
    case UNEQ_EXPR:
      return COMPCODE_UNEQ;
    case UNLE_EXPR:
      return COMPCODE_UNLE;
    case UNGT_EXPR:
      return COMPCODE_UNGT;
    case LTGT_EXPR:
      return COMPCODE_LTGT;
    case UNGE_EXPR:
      return COMPCODE_UNGE;
    default:
      gcc_unreachable ();
    }
}

/* Convert a compcode bit-based encoding of a comparison operator back
   to GCC's enum tree_code representation.  This function is the
   inverse of comparison_to_compcode.  */

static enum tree_code
compcode_to_comparison (enum comparison_code code)
{
  switch (code)
    {
    case COMPCODE_LT:
      return LT_EXPR;
    case COMPCODE_EQ:
      return EQ_EXPR;
    case COMPCODE_LE:
      return LE_EXPR;
    case COMPCODE_GT:
      return GT_EXPR;
    case COMPCODE_NE:
      return NE_EXPR;
    case COMPCODE_GE:
      return GE_EXPR;
    case COMPCODE_ORD:
      return ORDERED_EXPR;
    case COMPCODE_UNORD:
      return UNORDERED_EXPR;
    case COMPCODE_UNLT:
      return UNLT_EXPR;
    case COMPCODE_UNEQ:
      return UNEQ_EXPR;
    case COMPCODE_UNLE:
      return UNLE_EXPR;
    case COMPCODE_UNGT:
      return UNGT_EXPR;
    case COMPCODE_LTGT:
      return LTGT_EXPR;
    case COMPCODE_UNGE:
      return UNGE_EXPR;
    default:
      gcc_unreachable ();
    }
}
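
/* The encoding makes combining comparisons a matter of bit arithmetic:
   e.g. COMPCODE_LT (1) | COMPCODE_EQ (2) == COMPCODE_LE (3), so
   "x < y or x == y" collapses to x <= y, while
   COMPCODE_LT (1) & COMPCODE_GT (4) == COMPCODE_FALSE (0), so
   "x < y and x > y" collapses to constant false.  */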

/* Return a tree for the comparison which is the combination of
   doing the AND or OR (depending on CODE) of the two operations LCODE
   and RCODE on the identical operands LL_ARG and LR_ARG.  Take into account
   the possibility of trapping if the mode has NaNs, and return NULL_TREE
   if this makes the transformation invalid.  */

tree
combine_comparisons (location_t loc,
		     enum tree_code code, enum tree_code lcode,
		     enum tree_code rcode, tree truth_type,
		     tree ll_arg, tree lr_arg)
{
  bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
  enum comparison_code lcompcode = comparison_to_compcode (lcode);
  enum comparison_code rcompcode = comparison_to_compcode (rcode);
  int compcode;

  switch (code)
    {
    case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
      compcode = lcompcode & rcompcode;
      break;

    case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
      compcode = lcompcode | rcompcode;
      break;

    default:
      return NULL_TREE;
    }

  if (!honor_nans)
    {
      /* Eliminate unordered comparisons, as well as LTGT and ORD
	 which are not used unless the mode has NaNs.  */
      compcode &= ~COMPCODE_UNORD;
      if (compcode == COMPCODE_LTGT)
	compcode = COMPCODE_NE;
      else if (compcode == COMPCODE_ORD)
	compcode = COMPCODE_TRUE;
    }
  else if (flag_trapping_math)
    {
      /* Check that the original operation and the optimized ones will trap
	 under the same condition.  */
      bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
		   && (lcompcode != COMPCODE_EQ)
		   && (lcompcode != COMPCODE_ORD);
      bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
		   && (rcompcode != COMPCODE_EQ)
		   && (rcompcode != COMPCODE_ORD);
      bool trap = (compcode & COMPCODE_UNORD) == 0
		  && (compcode != COMPCODE_EQ)
		  && (compcode != COMPCODE_ORD);

      /* In a short-circuited boolean expression the LHS might be
	 such that the RHS, if evaluated, will never trap.  For
	 example, in ORD (x, y) && (x < y), we evaluate the RHS only
	 if neither x nor y is NaN.  (This is a mixed blessing: for
	 example, the expression above will never trap, hence
	 optimizing it to x < y would be invalid).  */
      if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
	  || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
	rtrap = false;

      /* If the comparison was short-circuited, and only the RHS
	 trapped, we may now generate a spurious trap.  */
      if (rtrap && !ltrap
	  && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
	return NULL_TREE;

      /* If we changed the conditions that cause a trap, we lose.  */
      if ((ltrap || rtrap) != trap)
	return NULL_TREE;
    }

  if (compcode == COMPCODE_TRUE)
    return constant_boolean_node (true, truth_type);
  else if (compcode == COMPCODE_FALSE)
    return constant_boolean_node (false, truth_type);
  else
    {
      enum tree_code tcode;

      tcode = compcode_to_comparison ((enum comparison_code) compcode);
      return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
    }
}
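
/* E.g., called with code == TRUTH_ANDIF_EXPR, lcode == LT_EXPR and
   rcode == GT_EXPR on integer operands, this computes 1 & 4 ==
   COMPCODE_FALSE and (since NaNs are not honored for integer modes)
   returns constant false without any of the trap bookkeeping above.  */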

/* Return nonzero if two operands (typically of the same tree node)
   are necessarily equal.  If either argument has side-effects this
   function returns zero.  FLAGS modifies behavior as follows:

   If OEP_ONLY_CONST is set, only return nonzero for constants.
   This function tests whether the operands are indistinguishable;
   it does not test whether they are equal using C's == operation.
   The distinction is important for IEEE floating point, because
   (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
   (2) two NaNs may be indistinguishable, but NaN!=NaN.

   If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
   even though it may hold multiple values during a function.
   This is because a GCC tree node guarantees that nothing else is
   executed between the evaluation of its "operands" (which may often
   be evaluated in arbitrary order).  Hence if the operands themselves
   don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
   same value in each operand/subexpression.  Hence leaving OEP_ONLY_CONST
   unset means assuming isochronic (or instantaneous) tree equivalence.
   Unless comparing arbitrary expression trees, such as from different
   statements, this flag can usually be left unset.

   If OEP_PURE_SAME is set, then pure functions with identical arguments
   are considered the same.  It is used when the caller has other ways
   to ensure that global memory is unchanged in between.  */

int
operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
{
  /* If either is ERROR_MARK, they aren't equal.  */
  if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
      || TREE_TYPE (arg0) == error_mark_node
      || TREE_TYPE (arg1) == error_mark_node)
    return 0;

  /* Similar, if either does not have a type (like a released SSA name),
     they aren't equal.  */
  if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
    return 0;

  /* Check equality of integer constants before bailing out due to
     precision differences.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    return tree_int_cst_equal (arg0, arg1);

  /* If both types don't have the same signedness, then we can't consider
     them equal.  We must check this before the STRIP_NOPS calls
     because they may change the signedness of the arguments.  As pointers
     strictly don't have a signedness, require either two pointers or
     two non-pointers as well.  */
  if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
      || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
    return 0;

  /* We cannot consider pointers to different address space equal.  */
  if (POINTER_TYPE_P (TREE_TYPE (arg0)) && POINTER_TYPE_P (TREE_TYPE (arg1))
      && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
	  != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
    return 0;

  /* If both types don't have the same precision, then it is not safe
     to strip NOPs.  */
  if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
    return 0;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* In case both args are comparisons but with different comparison
     code, try to swap the comparison operands of one arg to produce
     a match and compare that variant.  */
  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      && COMPARISON_CLASS_P (arg0)
      && COMPARISON_CLASS_P (arg1))
    {
      enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));

      if (TREE_CODE (arg0) == swap_code)
	return operand_equal_p (TREE_OPERAND (arg0, 0),
				TREE_OPERAND (arg1, 1), flags)
	       && operand_equal_p (TREE_OPERAND (arg0, 1),
				   TREE_OPERAND (arg1, 0), flags);
    }

  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      /* This is needed for conversions and for COMPONENT_REF.
	 Might as well play it safe and always test this.  */
      || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
      || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
      || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
    return 0;

  /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
     We don't care about side effects in that case because the SAVE_EXPR
     takes care of that for us.  In all other cases, two expressions are
     equal if they have no side effects.  If we have two identical
     expressions with side effects that should be treated the same due
     to the only side effects being identical SAVE_EXPR's, that will
     be detected in the recursive calls below.
     If we are taking an invariant address of two identical objects
     they are necessarily equal as well.  */
  if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
      && (TREE_CODE (arg0) == SAVE_EXPR
	  || (flags & OEP_CONSTANT_ADDRESS_OF)
	  || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
    return 1;

  /* Next handle constant cases, those for which we can return 1 even
     if ONLY_CONST is set.  */
  if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
    switch (TREE_CODE (arg0))
      {
      case INTEGER_CST:
	return tree_int_cst_equal (arg0, arg1);

      case FIXED_CST:
	return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
				       TREE_FIXED_CST (arg1));

      case REAL_CST:
	if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
				   TREE_REAL_CST (arg1)))
	  return 1;

	if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
	  {
	    /* If we do not distinguish between signed and unsigned zero,
	       consider them equal.  */
	    if (real_zerop (arg0) && real_zerop (arg1))
	      return 1;
	  }
	return 0;

      case VECTOR_CST:
	{
	  tree v1, v2;

	  v1 = TREE_VECTOR_CST_ELTS (arg0);
	  v2 = TREE_VECTOR_CST_ELTS (arg1);
	  while (v1 && v2)
	    {
	      if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
				    flags))
		return 0;
	      v1 = TREE_CHAIN (v1);
	      v2 = TREE_CHAIN (v2);
	    }

	  return v1 == v2;
	}

      case COMPLEX_CST:
	return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
				 flags)
		&& operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
				    flags));

      case STRING_CST:
	return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
		&& ! memcmp (TREE_STRING_POINTER (arg0),
			     TREE_STRING_POINTER (arg1),
			     TREE_STRING_LENGTH (arg0)));

      case ADDR_EXPR:
	return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
				TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1)
				? OEP_CONSTANT_ADDRESS_OF : 0);
      default:
	break;
      }

  if (flags & OEP_ONLY_CONST)
    return 0;

/* Define macros to test an operand from arg0 and arg1 for equality and a
   variant that allows null and views null as being different from any
   non-null value.  In the latter case, if either is null, then both
   must be; otherwise, do the normal comparison.  */
#define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N),	\
				    TREE_OPERAND (arg1, N), flags)

#define OP_SAME_WITH_NULL(N)				\
  ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N))	\
   ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))

  switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
    {
    case tcc_unary:
      /* Two conversions are equal only if signedness and modes match.  */
      switch (TREE_CODE (arg0))
	{
	CASE_CONVERT:
	case FIX_TRUNC_EXPR:
	  if (TYPE_UNSIGNED (TREE_TYPE (arg0))
	      != TYPE_UNSIGNED (TREE_TYPE (arg1)))
	    return 0;
	  break;
	default:
	  break;
	}

      return OP_SAME (0);

    case tcc_comparison:
    case tcc_binary:
      if (OP_SAME (0) && OP_SAME (1))
	return 1;

      /* For commutative ops, allow the other order.  */
      return (commutative_tree_code (TREE_CODE (arg0))
	      && operand_equal_p (TREE_OPERAND (arg0, 0),
				  TREE_OPERAND (arg1, 1), flags)
	      && operand_equal_p (TREE_OPERAND (arg0, 1),
				  TREE_OPERAND (arg1, 0), flags));

    case tcc_reference:
      /* If either of the pointer (or reference) expressions we are
	 dereferencing contain a side effect, these cannot be equal.  */
      if (TREE_SIDE_EFFECTS (arg0)
	  || TREE_SIDE_EFFECTS (arg1))
	return 0;

      switch (TREE_CODE (arg0))
	{
	case INDIRECT_REF:
	case REALPART_EXPR:
	case IMAGPART_EXPR:
	  return OP_SAME (0);

	case MEM_REF:
	  /* Require equal access sizes, and similar pointer types.
	     We can have incomplete types for array references of
	     variable-sized arrays from the Fortran frontend
	     though.  */
	  return ((TYPE_SIZE (TREE_TYPE (arg0)) == TYPE_SIZE (TREE_TYPE (arg1))
		   || (TYPE_SIZE (TREE_TYPE (arg0))
		       && TYPE_SIZE (TREE_TYPE (arg1))
		       && operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
					   TYPE_SIZE (TREE_TYPE (arg1)),
					   flags)))
		  && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (arg0, 1)))
		      == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (arg1, 1))))
		  && OP_SAME (0) && OP_SAME (1));

	case ARRAY_REF:
	case ARRAY_RANGE_REF:
	  /* Operands 2 and 3 may be null.
	     Compare the array index by value if it is constant first as we
	     may have different types but same value here.  */
	  return (OP_SAME (0)
		  && (tree_int_cst_equal (TREE_OPERAND (arg0, 1),
					  TREE_OPERAND (arg1, 1))
		      || OP_SAME (1))
		  && OP_SAME_WITH_NULL (2)
		  && OP_SAME_WITH_NULL (3));

	case COMPONENT_REF:
	  /* Handle operand 2 the same as for ARRAY_REF.  Operand 0
	     may be NULL when we're called to compare MEM_EXPRs.  */
	  return OP_SAME_WITH_NULL (0)
		 && OP_SAME (1)
		 && OP_SAME_WITH_NULL (2);

	case BIT_FIELD_REF:
	  return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);

	default:
	  return 0;
	}

    case tcc_expression:
      switch (TREE_CODE (arg0))
	{
	case ADDR_EXPR:
	case TRUTH_NOT_EXPR:
	  return OP_SAME (0);

	case TRUTH_ANDIF_EXPR:
	case TRUTH_ORIF_EXPR:
	  return OP_SAME (0) && OP_SAME (1);

	case FMA_EXPR:
	case WIDEN_MULT_PLUS_EXPR:
	case WIDEN_MULT_MINUS_EXPR:
	  if (!OP_SAME (2))
	    return 0;
	  /* The multiplication operands are commutative.  */
	  /* FALLTHRU */

	case TRUTH_AND_EXPR:
	case TRUTH_OR_EXPR:
	case TRUTH_XOR_EXPR:
	  if (OP_SAME (0) && OP_SAME (1))
	    return 1;

	  /* Otherwise take into account this is a commutative operation.  */
	  return (operand_equal_p (TREE_OPERAND (arg0, 0),
				   TREE_OPERAND (arg1, 1), flags)
		  && operand_equal_p (TREE_OPERAND (arg0, 1),
				      TREE_OPERAND (arg1, 0), flags));

	case COND_EXPR:
	case VEC_COND_EXPR:
	case DOT_PROD_EXPR:
	  return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);

	default:
	  return 0;
	}

    case tcc_vl_exp:
      switch (TREE_CODE (arg0))
	{
	case CALL_EXPR:
	  /* If the CALL_EXPRs call different functions, then they
	     clearly cannot be equal.  */
	  if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
				 flags))
	    return 0;

	  {
	    unsigned int cef = call_expr_flags (arg0);
	    if (flags & OEP_PURE_SAME)
	      cef &= ECF_CONST | ECF_PURE;
	    else
	      cef &= ECF_CONST;
	    if (!cef)
	      return 0;
	  }

	  /* Now see if all the arguments are the same.  */
	  {
	    const_call_expr_arg_iterator iter0, iter1;
	    const_tree a0, a1;
	    for (a0 = first_const_call_expr_arg (arg0, &iter0),
		 a1 = first_const_call_expr_arg (arg1, &iter1);
		 a0 && a1;
		 a0 = next_const_call_expr_arg (&iter0),
		 a1 = next_const_call_expr_arg (&iter1))
	      if (! operand_equal_p (a0, a1, flags))
		return 0;

	    /* If we get here and both argument lists are exhausted
	       then the CALL_EXPRs are equal.  */
	    return ! (a0 || a1);
	  }
	default:
	  return 0;
	}

    case tcc_declaration:
      /* Consider __builtin_sqrt equal to sqrt.  */
      return (TREE_CODE (arg0) == FUNCTION_DECL
	      && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
	      && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
	      && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));

    default:
      return 0;
    }

#undef OP_SAME
#undef OP_SAME_WITH_NULL
}
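
/* For instance, a + b and b + a compare equal here (PLUS_EXPR is
   commutative, so the tcc_binary case accepts the reversed operand
   order), whereas two textually identical calls to a function that is
   neither const nor pure never do, since the calls could return
   different values.  */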

/* Similar to operand_equal_p, but see if ARG0 might have been made by
   shorten_compare from ARG1 when ARG1 was being compared with OTHER.

   When in doubt, return 0.  */

static int
operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
{
  int unsignedp1, unsignedpo;
  tree primarg0, primarg1, primother;
  unsigned int correct_width;

  if (operand_equal_p (arg0, arg1, 0))
    return 1;

  if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
      || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
    return 0;

  /* Discard any conversions that don't change the modes of ARG0 and ARG1
     and see if the inner values are the same.  This removes any
     signedness comparison, which doesn't matter here.  */
  primarg0 = arg0, primarg1 = arg1;
  STRIP_NOPS (primarg0);
  STRIP_NOPS (primarg1);
  if (operand_equal_p (primarg0, primarg1, 0))
    return 1;

  /* Duplicate what shorten_compare does to ARG1 and see if that gives the
     actual comparison operand, ARG0.

     First throw away any conversions to wider types
     already present in the operands.  */

  primarg1 = get_narrower (arg1, &unsignedp1);
  primother = get_narrower (other, &unsignedpo);

  correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
  if (unsignedp1 == unsignedpo
      && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
      && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
    {
      tree type = TREE_TYPE (arg0);

      /* Make sure shorter operand is extended the right way
	 to match the longer operand.  */
      primarg1 = fold_convert (signed_or_unsigned_type_for
			       (unsignedp1, TREE_TYPE (primarg1)), primarg1);

      if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
	return 1;
    }

  return 0;
}

/* See if ARG is an expression that is either a comparison or is performing
   arithmetic on comparisons.  The comparisons must only be comparing
   two different values, which will be stored in *CVAL1 and *CVAL2; if
   they are nonzero it means that some operands have already been found.
   No variables may be used anywhere else in the expression except in the
   comparisons.  If SAVE_P is true it means we removed a SAVE_EXPR around
   the expression and save_expr needs to be called with CVAL1 and CVAL2.

   If this is true, return 1.  Otherwise, return zero.  */

static int
twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
{
  enum tree_code code = TREE_CODE (arg);
  enum tree_code_class tclass = TREE_CODE_CLASS (code);

  /* We can handle some of the tcc_expression cases here.  */
  if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
    tclass = tcc_unary;
  else if (tclass == tcc_expression
	   && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
	       || code == COMPOUND_EXPR))
    tclass = tcc_binary;

  else if (tclass == tcc_expression && code == SAVE_EXPR
	   && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
    {
      /* If we've already found a CVAL1 or CVAL2, this expression is
	 too complex to handle.  */
      if (*cval1 || *cval2)
	return 0;

      tclass = tcc_unary;
      *save_p = 1;
    }

  switch (tclass)
    {
    case tcc_unary:
      return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);

    case tcc_binary:
      return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
	      && twoval_comparison_p (TREE_OPERAND (arg, 1),
				      cval1, cval2, save_p));

    case tcc_constant:
      return 1;

    case tcc_expression:
      if (code == COND_EXPR)
	return (twoval_comparison_p (TREE_OPERAND (arg, 0),
				     cval1, cval2, save_p)
		&& twoval_comparison_p (TREE_OPERAND (arg, 1),
					cval1, cval2, save_p)
		&& twoval_comparison_p (TREE_OPERAND (arg, 2),
					cval1, cval2, save_p));
      return 0;

    case tcc_comparison:
      /* First see if we can handle the first operand, then the second.  For
	 the second operand, we know *CVAL1 can't be zero.  It must be that
	 one side of the comparison is each of the values; test for the
	 case where this isn't true by failing if the two operands
	 are the same.  */

      if (operand_equal_p (TREE_OPERAND (arg, 0),
			   TREE_OPERAND (arg, 1), 0))
	return 0;

      if (*cval1 == 0)
	*cval1 = TREE_OPERAND (arg, 0);
      else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
	;
      else if (*cval2 == 0)
	*cval2 = TREE_OPERAND (arg, 0);
      else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
	;
      else
	return 0;

      if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
	;
      else if (*cval2 == 0)
	*cval2 = TREE_OPERAND (arg, 1);
      else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
	;
      else
	return 0;

      return 1;

    default:
      return 0;
    }
}
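
/* E.g., for (x < y) || (x == y) this returns 1 with *CVAL1 == x and
   *CVAL2 == y, but for (x < y) && (y < z) it returns 0 because three
   distinct values appear in the comparisons.  */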

/* ARG is a tree that is known to contain just arithmetic operations and
   comparisons.  Evaluate the operations in the tree substituting NEW0 for
   any occurrence of OLD0 as an operand of a comparison and likewise for
   NEW1 and OLD1.  */

static tree
eval_subst (location_t loc, tree arg, tree old0, tree new0,
	    tree old1, tree new1)
{
  tree type = TREE_TYPE (arg);
  enum tree_code code = TREE_CODE (arg);
  enum tree_code_class tclass = TREE_CODE_CLASS (code);

  /* We can handle some of the tcc_expression cases here.  */
  if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
    tclass = tcc_unary;
  else if (tclass == tcc_expression
	   && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
    tclass = tcc_binary;

  switch (tclass)
    {
    case tcc_unary:
      return fold_build1_loc (loc, code, type,
			      eval_subst (loc, TREE_OPERAND (arg, 0),
					  old0, new0, old1, new1));

    case tcc_binary:
      return fold_build2_loc (loc, code, type,
			      eval_subst (loc, TREE_OPERAND (arg, 0),
					  old0, new0, old1, new1),
			      eval_subst (loc, TREE_OPERAND (arg, 1),
					  old0, new0, old1, new1));

    case tcc_expression:
      switch (code)
	{
	case SAVE_EXPR:
	  return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
			     old1, new1);

	case COMPOUND_EXPR:
	  return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
			     old1, new1);

	case COND_EXPR:
	  return fold_build3_loc (loc, code, type,
				  eval_subst (loc, TREE_OPERAND (arg, 0),
					      old0, new0, old1, new1),
				  eval_subst (loc, TREE_OPERAND (arg, 1),
					      old0, new0, old1, new1),
				  eval_subst (loc, TREE_OPERAND (arg, 2),
					      old0, new0, old1, new1));
	default:
	  break;
	}
      /* Fall through - ???  */

    case tcc_comparison:
      {
	tree arg0 = TREE_OPERAND (arg, 0);
	tree arg1 = TREE_OPERAND (arg, 1);

	/* We need to check both for exact equality and tree equality.  The
	   former will be true if the operand has a side-effect.  In that
	   case, we know the operand occurred exactly once.  */

	if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
	  arg0 = new0;
	else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
	  arg0 = new1;

	if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
	  arg1 = new0;
	else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
	  arg1 = new1;

	return fold_build2_loc (loc, code, type, arg0, arg1);
      }

    default:
      return arg;
    }
}

/* Return a tree for the case when the result of an expression is RESULT
   converted to TYPE and OMITTED was previously an operand of the expression
   but is now not needed (e.g., we folded OMITTED * 0).

   If OMITTED has side effects, we must evaluate it.  Otherwise, just do
   the conversion of RESULT to TYPE.  */

tree
omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
{
  tree t = fold_convert_loc (loc, type, result);

  /* If the resulting operand is an empty statement, just return the omitted
     statement cast to void.  */
  if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
    return build1_loc (loc, NOP_EXPR, void_type_node,
		       fold_ignored_result (omitted));

  if (TREE_SIDE_EFFECTS (omitted))
    return build2_loc (loc, COMPOUND_EXPR, type,
		       fold_ignored_result (omitted), t);

  return non_lvalue_loc (loc, t);
}

/* Similar, but call pedantic_non_lvalue instead of non_lvalue.  */

static tree
pedantic_omit_one_operand_loc (location_t loc, tree type, tree result,
			       tree omitted)
{
  tree t = fold_convert_loc (loc, type, result);

  /* If the resulting operand is an empty statement, just return the omitted
     statement cast to void.  */
  if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
    return build1_loc (loc, NOP_EXPR, void_type_node,
		       fold_ignored_result (omitted));

  if (TREE_SIDE_EFFECTS (omitted))
    return build2_loc (loc, COMPOUND_EXPR, type,
		       fold_ignored_result (omitted), t);

  return pedantic_non_lvalue_loc (loc, t);
}

/* Return a tree for the case when the result of an expression is RESULT
   converted to TYPE and OMITTED1 and OMITTED2 were previously operands
   of the expression but are now not needed.

   If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
   If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
   evaluated before OMITTED2.  Otherwise, if neither has side effects,
   just do the conversion of RESULT to TYPE.  */

tree
omit_two_operands_loc (location_t loc, tree type, tree result,
		       tree omitted1, tree omitted2)
{
  tree t = fold_convert_loc (loc, type, result);

  if (TREE_SIDE_EFFECTS (omitted2))
    t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
  if (TREE_SIDE_EFFECTS (omitted1))
    t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);

  return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
}
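
/* Illustrative sketch (hypothetical, not part of this file): a plain-C
   analogue of why an omitted operand must still be evaluated.  Folding
   g () * 0 to 0 may not drop the call, so the fold corresponds to the
   comma expression (g (), 0) -- the COMPOUND_EXPR built above -- rather
   than to the bare constant.  */

static int call_count;

static int
g (void)
{
  return ++call_count;	/* A side effect the fold must preserve.  */
}

static int
folded_g_times_zero (void)
{
  return (g (), 0);	/* Evaluate g () for its effect, then yield 0.  */
}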

/* Return a simplified tree node for the truth-negation of ARG.  This
   never alters ARG itself.  We assume that ARG is an operation that
   returns a truth value (0 or 1).

   FIXME: one would think we would fold the result, but it causes
   problems with the dominator optimizer.  */

tree
fold_truth_not_expr (location_t loc, tree arg)
{
  tree type = TREE_TYPE (arg);
  enum tree_code code = TREE_CODE (arg);
  location_t loc1, loc2;

  /* If this is a comparison, we can simply invert it, except for
     floating-point non-equality comparisons, in which case we just
     enclose a TRUTH_NOT_EXPR around what we have.  */

  if (TREE_CODE_CLASS (code) == tcc_comparison)
    {
      tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
      if (FLOAT_TYPE_P (op_type)
	  && flag_trapping_math
	  && code != ORDERED_EXPR && code != UNORDERED_EXPR
	  && code != NE_EXPR && code != EQ_EXPR)
	return NULL_TREE;

      code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (op_type)));
      if (code == ERROR_MARK)
	return NULL_TREE;

      return build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
			 TREE_OPERAND (arg, 1));
    }

  switch (code)
    {
    case INTEGER_CST:
      return constant_boolean_node (integer_zerop (arg), type);

    case TRUTH_AND_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
      return build2_loc (loc, TRUTH_OR_EXPR, type,
			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
			 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));

    case TRUTH_OR_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
      return build2_loc (loc, TRUTH_AND_EXPR, type,
			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
			 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));

    case TRUTH_XOR_EXPR:
      /* Here we can invert either operand.  We invert the first operand
	 unless the second operand is a TRUTH_NOT_EXPR in which case our
	 result is the XOR of the first operand with the inside of the
	 negation of the second operand.  */

      if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
	return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
			   TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
      else
	return build2_loc (loc, TRUTH_XOR_EXPR, type,
			   invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
			   TREE_OPERAND (arg, 1));

    case TRUTH_ANDIF_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
      return build2_loc (loc, TRUTH_ORIF_EXPR, type,
			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
			 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));

    case TRUTH_ORIF_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
      return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
			 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));

    case TRUTH_NOT_EXPR:
      return TREE_OPERAND (arg, 0);

    case COND_EXPR:
      {
	tree arg1 = TREE_OPERAND (arg, 1);
	tree arg2 = TREE_OPERAND (arg, 2);

	loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
	loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);

	/* A COND_EXPR may have a throw as one operand, which
	   then has void type.  Just leave void operands
	   as they are.  */
	return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
			   VOID_TYPE_P (TREE_TYPE (arg1))
			   ? arg1 : invert_truthvalue_loc (loc1, arg1),
			   VOID_TYPE_P (TREE_TYPE (arg2))
			   ? arg2 : invert_truthvalue_loc (loc2, arg2));
      }

    case COMPOUND_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
      return build2_loc (loc, COMPOUND_EXPR, type,
			 TREE_OPERAND (arg, 0),
			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));

    case NON_LVALUE_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));

    CASE_CONVERT:
      if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
	return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);

      /* ... fall through ...  */

    case FLOAT_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      return build1_loc (loc, TREE_CODE (arg), type,
			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));

    case BIT_AND_EXPR:
      if (!integer_onep (TREE_OPERAND (arg, 1)))
	return NULL_TREE;
      return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));

    case SAVE_EXPR:
      return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);

    case CLEANUP_POINT_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      return build1_loc (loc, CLEANUP_POINT_EXPR, type,
			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));

    default:
      return NULL_TREE;
    }
}
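
/* Illustrative sketch (hypothetical, not part of this file): the
   TRUTH_ANDIF_EXPR and TRUTH_ORIF_EXPR cases above are De Morgan's laws.
   In plain C, the two functions below agree for all inputs; the second
   form is what the negation is rewritten into.  */

static int
not_of_and (int a, int b)
{
  return !(a && b);
}

static int
or_of_nots (int a, int b)
{
  return !a || !b;
}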

/* Return a simplified tree node for the truth-negation of ARG.  This
   never alters ARG itself.  We assume that ARG is an operation that
   returns a truth value (0 or 1).

   FIXME: one would think we would fold the result, but it causes
   problems with the dominator optimizer.  */

tree
invert_truthvalue_loc (location_t loc, tree arg)
{
  tree tem;

  if (TREE_CODE (arg) == ERROR_MARK)
    return arg;

  tem = fold_truth_not_expr (loc, arg);
  if (!tem)
    tem = build1_loc (loc, TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);

  return tem;
}

/* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
   operands are another bit-wise operation with a common input.  If so,
   distribute the bit operations to save an operation and possibly two if
   constants are involved.  For example, convert
	(A | B) & (A | C) into A | (B & C)
   Further simplification will occur if B and C are constants.

   If this optimization cannot be done, 0 will be returned.  */

static tree
distribute_bit_expr (location_t loc, enum tree_code code, tree type,
		     tree arg0, tree arg1)
{
  tree common;
  tree left, right;

  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      || TREE_CODE (arg0) == code
      || (TREE_CODE (arg0) != BIT_AND_EXPR
	  && TREE_CODE (arg0) != BIT_IOR_EXPR))
    return 0;

  if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
    {
      common = TREE_OPERAND (arg0, 0);
      left = TREE_OPERAND (arg0, 1);
      right = TREE_OPERAND (arg1, 1);
    }
  else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
    {
      common = TREE_OPERAND (arg0, 0);
      left = TREE_OPERAND (arg0, 1);
      right = TREE_OPERAND (arg1, 0);
    }
  else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
    {
      common = TREE_OPERAND (arg0, 1);
      left = TREE_OPERAND (arg0, 0);
      right = TREE_OPERAND (arg1, 1);
    }
  else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
    {
      common = TREE_OPERAND (arg0, 1);
      left = TREE_OPERAND (arg0, 0);
      right = TREE_OPERAND (arg1, 0);
    }
  else
    return 0;

  common = fold_convert_loc (loc, type, common);
  left = fold_convert_loc (loc, type, left);
  right = fold_convert_loc (loc, type, right);
  return fold_build2_loc (loc, TREE_CODE (arg0), type, common,
			  fold_build2_loc (loc, code, type, left, right));
}
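
/* Illustrative sketch (hypothetical, not part of this file): the
   distribution above written as plain C on unsigned values.  By the
   distributive law, (x | b) & (x | c) == x | (b & c) for all inputs,
   and the second form needs one fewer bitwise operation.  */

static unsigned int
before_distribution (unsigned int x, unsigned int b, unsigned int c)
{
  return (x | b) & (x | c);
}

static unsigned int
after_distribution (unsigned int x, unsigned int b, unsigned int c)
{
  return x | (b & c);
}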

/* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
   with code CODE.  This optimization is unsafe.  */
static tree
distribute_real_division (location_t loc, enum tree_code code, tree type,
			  tree arg0, tree arg1)
{
  bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
  bool mul1 = TREE_CODE (arg1) == MULT_EXPR;

  /* (A / C) +- (B / C) -> (A +- B) / C.  */
  if (mul0 == mul1
      && operand_equal_p (TREE_OPERAND (arg0, 1),
			  TREE_OPERAND (arg1, 1), 0))
    return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
			    fold_build2_loc (loc, code, type,
					     TREE_OPERAND (arg0, 0),
					     TREE_OPERAND (arg1, 0)),
			    TREE_OPERAND (arg0, 1));

  /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2).  */
  if (operand_equal_p (TREE_OPERAND (arg0, 0),
		       TREE_OPERAND (arg1, 0), 0)
      && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
      && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
    {
      REAL_VALUE_TYPE r0, r1;
      r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
      r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
      if (!mul0)
	real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
      if (!mul1)
	real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
      real_arithmetic (&r0, code, &r0, &r1);
      return fold_build2_loc (loc, MULT_EXPR, type,
			      TREE_OPERAND (arg0, 0),
			      build_real (type, r0));
    }

  return NULL_TREE;
}
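
/* Illustrative sketch (hypothetical, not part of this file): the first
   transformation above in plain C.  It is unsafe because the two forms
   can round differently: a / c + b / c performs two divisions and two
   roundings, while (a + b) / c performs one of each.  */

static double
two_divisions (double a, double b, double c)
{
  return a / c + b / c;
}

static double
one_division (double a, double b, double c)
{
  return (a + b) / c;	/* Equal in value only under unsafe-math rules.  */
}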

/* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
   starting at BITPOS.  The field is unsigned if UNSIGNEDP is nonzero.  */

static tree
make_bit_field_ref (location_t loc, tree inner, tree type,
		    HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
{
  tree result, bftype;

  if (bitpos == 0)
    {
      tree size = TYPE_SIZE (TREE_TYPE (inner));
      if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
	   || POINTER_TYPE_P (TREE_TYPE (inner)))
	  && host_integerp (size, 0)
	  && tree_low_cst (size, 0) == bitsize)
	return fold_convert_loc (loc, type, inner);
    }

  bftype = type;
  if (TYPE_PRECISION (bftype) != bitsize
      || TYPE_UNSIGNED (bftype) == !unsignedp)
    bftype = build_nonstandard_integer_type (bitsize, 0);

  result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
		       size_int (bitsize), bitsize_int (bitpos));

  if (bftype != type)
    result = fold_convert_loc (loc, type, result);

  return result;
}
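
/* Illustrative sketch (hypothetical, not part of this file): a plain-C
   analogue of the reference built above -- BITSIZE bits of a 32-bit word
   starting at BITPOS, zero-extended.  BITSIZE is assumed to be in
   [1, 32] and BITPOS + BITSIZE at most 32.  */

static unsigned int
extract_bit_field (unsigned int word, int bitpos, int bitsize)
{
  return (word >> bitpos) & (~0u >> (32 - bitsize));
}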

/* Optimize a bit-field compare.

   There are two cases:  First is a compare against a constant and the
   second is a comparison of two items where the fields are at the same
   bit position relative to the start of a chunk (byte, halfword, word)
   large enough to contain it.  In these cases we can avoid the shift
   implicit in bitfield extractions.

   For constants, we emit a compare of the shifted constant with the
   BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
   compared.  For two fields at the same position, we do the ANDs with the
   similar mask and compare the result of the ANDs.

   CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
   COMPARE_TYPE is the type of the comparison, and LHS and RHS
   are the left and right operands of the comparison, respectively.

   If the optimization described above can be done, we return the resulting
   tree.  Otherwise we return zero.  */

static tree
optimize_bit_field_compare (location_t loc, enum tree_code code,
			    tree compare_type, tree lhs, tree rhs)
{
  HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
  tree type = TREE_TYPE (lhs);
  tree signed_type, unsigned_type;
  int const_p = TREE_CODE (rhs) == INTEGER_CST;
  enum machine_mode lmode, rmode, nmode;
  int lunsignedp, runsignedp;
  int lvolatilep = 0, rvolatilep = 0;
  tree linner, rinner = NULL_TREE;
  tree mask;
  tree offset;

  /* Get all the information about the extractions being done.  If the bit size
     is the same as the size of the underlying object, we aren't doing an
     extraction at all and so can do nothing.  We also don't want to
     do anything if the inner expression is a PLACEHOLDER_EXPR since we
     then will no longer be able to replace it.  */
  linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
				&lunsignedp, &lvolatilep, false);
  if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
      || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
    return 0;

  if (!const_p)
    {
      /* If this is not a constant, we can only do something if bit positions,
	 sizes, and signedness are the same.  */
      rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
				    &runsignedp, &rvolatilep, false);

      if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
	  || lunsignedp != runsignedp || offset != 0
	  || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
	return 0;
    }

  /* See if we can find a mode to refer to this field.  We should be able to,
     but fail if we can't.  */
  if (lvolatilep
      && GET_MODE_BITSIZE (lmode) > 0
      && flag_strict_volatile_bitfields > 0)
    nmode = lmode;
  else
    nmode = get_best_mode (lbitsize, lbitpos, 0, 0,
			   const_p ? TYPE_ALIGN (TREE_TYPE (linner))
			   : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
				  TYPE_ALIGN (TREE_TYPE (rinner))),
			   word_mode, lvolatilep || rvolatilep);
  if (nmode == VOIDmode)
    return 0;

  /* Set signed and unsigned types of the precision of this mode for the
     shifts below.  */
  signed_type = lang_hooks.types.type_for_mode (nmode, 0);
  unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);

  /* Compute the bit position and size for the new reference and our offset
     within it.  If the new reference is the same size as the original, we
     won't optimize anything, so return zero.  */
  nbitsize = GET_MODE_BITSIZE (nmode);
  nbitpos = lbitpos & ~ (nbitsize - 1);
  lbitpos -= nbitpos;
  if (nbitsize == lbitsize)
    return 0;

  if (BYTES_BIG_ENDIAN)
    lbitpos = nbitsize - lbitsize - lbitpos;

  /* Make the mask to be used against the extracted field.  */
  mask = build_int_cst_type (unsigned_type, -1);
  mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
  mask = const_binop (RSHIFT_EXPR, mask,
		      size_int (nbitsize - lbitsize - lbitpos));

  if (! const_p)
    /* If not comparing with constant, just rework the comparison
       and return.  */
    return fold_build2_loc (loc, code, compare_type,
			    fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
					     make_bit_field_ref (loc, linner,
								 unsigned_type,
								 nbitsize, nbitpos,
								 1),
					     mask),
			    fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
					     make_bit_field_ref (loc, rinner,
								 unsigned_type,
								 nbitsize, nbitpos,
								 1),
					     mask));

  /* Otherwise, we are handling the constant case.  See if the constant is too
     big for the field.  Warn and return a tree for 0 (false) if so.  We do
     this not only for its own sake, but to avoid having to test for this
     error case below.  If we didn't, we might generate wrong code.

     For unsigned fields, the constant shifted right by the field length should
     be all zero.  For signed fields, the high-order bits should agree with
     the sign bit.  */

  if (lunsignedp)
    {
      if (! integer_zerop (const_binop (RSHIFT_EXPR,
					fold_convert_loc (loc,
							  unsigned_type, rhs),
					size_int (lbitsize))))
	{
	  warning (0, "comparison is always %d due to width of bit-field",
		   code == NE_EXPR);
	  return constant_boolean_node (code == NE_EXPR, compare_type);
	}
    }
  else
    {
      tree tem = const_binop (RSHIFT_EXPR,
			      fold_convert_loc (loc, signed_type, rhs),
			      size_int (lbitsize - 1));
      if (! integer_zerop (tem) && ! integer_all_onesp (tem))
	{
	  warning (0, "comparison is always %d due to width of bit-field",
		   code == NE_EXPR);
	  return constant_boolean_node (code == NE_EXPR, compare_type);
	}
    }

  /* Single-bit compares should always be against zero.  */
  if (lbitsize == 1 && ! integer_zerop (rhs))
    {
      code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
      rhs = build_int_cst (type, 0);
    }

  /* Make a new bitfield reference, shift the constant over the
     appropriate number of bits and mask it with the computed mask
     (in case this was a signed field).  If we changed it, make a new one.  */
  lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);
  if (lvolatilep)
    {
      TREE_SIDE_EFFECTS (lhs) = 1;
      TREE_THIS_VOLATILE (lhs) = 1;
    }

  rhs = const_binop (BIT_AND_EXPR,
		     const_binop (LSHIFT_EXPR,
				  fold_convert_loc (loc, unsigned_type, rhs),
				  size_int (lbitpos)),
		     mask);

  lhs = build2_loc (loc, code, compare_type,
		    build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
  return lhs;
}
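
/* Illustrative sketch (hypothetical layout, not part of this file): the
   constant case above, hand-written.  On a target where a 3-bit field
   happens to sit at bits 2..4 of its byte, comparing the field against 5
   needs no runtime shift: the constant is shifted at compile time and
   the containing byte is only masked.  */

static int
field_equals_5 (unsigned char byte)
{
  /* 7 << 2 is the mask selecting the field; 5 << 2 is the shifted
     constant.  */
  return (byte & (7u << 2)) == (5u << 2);
}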

/* Subroutine for fold_truth_andor_1: decode a field reference.

   If EXP is a comparison reference, we return the innermost reference.

   *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
   set to the starting bit number.

   If the innermost field can be completely contained in a mode-sized
   unit, *PMODE is set to that mode.  Otherwise, it is set to VOIDmode.

   *PVOLATILEP is set to 1 if any expression encountered is volatile;
   otherwise it is not changed.

   *PUNSIGNEDP is set to the signedness of the field.

   *PMASK is set to the mask used.  This is either contained in a
   BIT_AND_EXPR or derived from the width of the field.

   *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.

   Return 0 if this is not a component reference or is one that we can't
   do anything with.  */

static tree
decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
			HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
			int *punsignedp, int *pvolatilep,
			tree *pmask, tree *pand_mask)
{
  tree outer_type = 0;
  tree and_mask = 0;
  tree mask, inner, offset;
  tree unsigned_type;
  unsigned int precision;

  /* All the optimizations using this function assume integer fields.
     There are problems with FP fields since the type_for_size call
     below can fail for, e.g., XFmode.  */
  if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
    return 0;

  /* We are interested in the bare arrangement of bits, so strip everything
     that doesn't affect the machine mode.  However, record the type of the
     outermost expression if it may matter below.  */
  if (CONVERT_EXPR_P (exp)
      || TREE_CODE (exp) == NON_LVALUE_EXPR)
    outer_type = TREE_TYPE (exp);
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == BIT_AND_EXPR)
    {
      and_mask = TREE_OPERAND (exp, 1);
      exp = TREE_OPERAND (exp, 0);
      STRIP_NOPS (exp); STRIP_NOPS (and_mask);
      if (TREE_CODE (and_mask) != INTEGER_CST)
	return 0;
    }

  inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
			       punsignedp, pvolatilep, false);
  if ((inner == exp && and_mask == 0)
      || *pbitsize < 0 || offset != 0
      || TREE_CODE (inner) == PLACEHOLDER_EXPR)
    return 0;

  /* If the number of bits in the reference is the same as the bitsize of
     the outer type, then the outer type gives the signedness.  Otherwise
     (in case of a small bitfield) the signedness is unchanged.  */
  if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
    *punsignedp = TYPE_UNSIGNED (outer_type);

  /* Compute the mask to access the bitfield.  */
  unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
  precision = TYPE_PRECISION (unsigned_type);

  mask = build_int_cst_type (unsigned_type, -1);

  mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
  mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));

  /* Merge it with the mask we found in the BIT_AND_EXPR, if any.  */
  if (and_mask != 0)
    mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
			    fold_convert_loc (loc, unsigned_type, and_mask),
			    mask);

  *pmask = mask;
  *pand_mask = and_mask;
  return inner;
}
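
/* Illustrative sketch (hypothetical, not part of this file): how the
   width-derived mask and a mask from an explicit BIT_AND_EXPR combine.
   For a 12-bit field that the program has already masked with 0x0f0,
   the effective mask is their intersection.  */

static unsigned int
merged_mask (void)
{
  unsigned int width_mask = ~0u >> (32 - 12);	/* 0x00000fff */
  unsigned int and_mask = 0x0f0;		/* from the BIT_AND_EXPR */
  return width_mask & and_mask;			/* 0x000000f0 */
}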

/* Return nonzero if MASK represents a mask of SIZE ones in the low-order
   bit positions.  */

static int
all_ones_mask_p (const_tree mask, int size)
{
  tree type = TREE_TYPE (mask);
  unsigned int precision = TYPE_PRECISION (type);
  tree tmask;

  tmask = build_int_cst_type (signed_type_for (type), -1);

  return
    tree_int_cst_equal (mask,
			const_binop (RSHIFT_EXPR,
				     const_binop (LSHIFT_EXPR, tmask,
						  size_int (precision - size)),
				     size_int (precision - size)));
}
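
/* Illustrative sketch (hypothetical, not part of this file): what
   all_ones_mask_p checks, as a plain-C predicate on a 32-bit word.
   SIZE is assumed to be in [1, 32].  */

static int
is_low_ones_mask (unsigned int mask, int size)
{
  return mask == (~0u >> (32 - size));	/* e.g. size 3 matches 0x7.  */
}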

/* Subroutine for fold: determine if VAL is the INTEGER_CST that
   represents the sign bit of EXP's type.  If EXP represents a sign
   or zero extension, also test VAL against the unextended type.
   The return value is the (sub)expression whose sign bit is VAL,
   or NULL_TREE otherwise.  */

static tree
sign_bit_p (tree exp, const_tree val)
{
  unsigned HOST_WIDE_INT mask_lo, lo;
  HOST_WIDE_INT mask_hi, hi;
  int width;
  tree t;

  /* Tree EXP must have an integral type.  */
  t = TREE_TYPE (exp);
  if (! INTEGRAL_TYPE_P (t))
    return NULL_TREE;

  /* Tree VAL must be an integer constant.  */
  if (TREE_CODE (val) != INTEGER_CST
      || TREE_OVERFLOW (val))
    return NULL_TREE;

  width = TYPE_PRECISION (t);
  if (width > HOST_BITS_PER_WIDE_INT)
    {
      hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
      lo = 0;

      mask_hi = ((unsigned HOST_WIDE_INT) -1
		 >> (2 * HOST_BITS_PER_WIDE_INT - width));
      mask_lo = -1;
    }
  else
    {
      hi = 0;
      lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);

      mask_hi = 0;
      mask_lo = ((unsigned HOST_WIDE_INT) -1
		 >> (HOST_BITS_PER_WIDE_INT - width));
    }

  /* We mask off those bits beyond TREE_TYPE (exp) so that we can
     treat VAL as if it were unsigned.  */
  if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
      && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
    return exp;

  /* Handle extension from a narrower type.  */
  if (TREE_CODE (exp) == NOP_EXPR
      && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
    return sign_bit_p (TREE_OPERAND (exp, 0), val);

  return NULL_TREE;
}
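
/* Illustrative sketch (hypothetical, not part of this file): the rewrite
   that recognizing the sign-bit constant enables.  For the usual 32-bit
   two's-complement int, both functions below compute the same truth
   value; the second form avoids the AND entirely.  */

static int
sign_test_by_mask (int x)
{
  return (x & (1u << 31)) != 0;	/* The mask is the type's sign bit.  */
}

static int
sign_test_by_compare (int x)
{
  return x < 0;
}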

/* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
   to be evaluated unconditionally.  */

static int
simple_operand_p (const_tree exp)
{
  /* Strip any conversions that don't change the machine mode.  */
  STRIP_NOPS (exp);

  return (CONSTANT_CLASS_P (exp)
	  || TREE_CODE (exp) == SSA_NAME
	  || (DECL_P (exp)
	      && ! TREE_ADDRESSABLE (exp)
	      && ! TREE_THIS_VOLATILE (exp)
	      && ! DECL_NONLOCAL (exp)
	      /* Don't regard global variables as simple.  They may be
		 allocated in ways unknown to the compiler (shared memory,
		 #pragma weak, etc).  */
	      && ! TREE_PUBLIC (exp)
	      && ! DECL_EXTERNAL (exp)
	      /* Loading a static variable is unduly expensive, but global
		 registers aren't expensive.  */
	      && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
}

/* Subroutine for fold_truth_andor: determine if an operand is simple enough
   to be evaluated unconditionally.
   In addition to simple_operand_p, we assume that comparisons, conversions,
   and logic-not operations are simple, if their operands are simple, too.  */

static bool
simple_operand_p_2 (tree exp)
{
  enum tree_code code;

  if (TREE_SIDE_EFFECTS (exp)
      || tree_could_trap_p (exp))
    return false;

  while (CONVERT_EXPR_P (exp))
    exp = TREE_OPERAND (exp, 0);

  code = TREE_CODE (exp);

  if (TREE_CODE_CLASS (code) == tcc_comparison)
    return (simple_operand_p (TREE_OPERAND (exp, 0))
	    && simple_operand_p (TREE_OPERAND (exp, 1)));

  if (code == TRUTH_NOT_EXPR)
    return simple_operand_p_2 (TREE_OPERAND (exp, 0));

  return simple_operand_p (exp);
}

/* The following functions are subroutines to fold_range_test and allow it to
   try to change a logical combination of comparisons into a range test.

   For example, both
	X == 2 || X == 3 || X == 4 || X == 5
   and
	X >= 2 && X <= 5
   are converted to
	(unsigned) (X - 2) <= 3

   We describe each set of comparisons as being either inside or outside
   a range, using a variable named like IN_P, and then describe the
   range with a lower and upper bound.  If one of the bounds is omitted,
   it represents either the highest or lowest value of the type.

   In the comments below, we represent a range by two numbers in brackets
   preceded by a "+" to designate being inside that range, or a "-" to
   designate being outside that range, so the condition can be inverted by
   flipping the prefix.  An omitted bound is represented by a "-".  For
   example, "- [-, 10]" means being outside the range starting at the lowest
   possible value and ending at 10, in other words, being greater than 10.
   The range "+ [-, -]" is always true and hence the range "- [-, -]" is
   always false.

   We set up things so that the missing bounds are handled in a consistent
   manner so neither a missing bound nor "true" and "false" need to be
   handled using a special case.  */
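
/* Illustrative sketch (hypothetical, not part of this file): the example
   from the comment above, in plain C.  All three functions agree for
   every X; the last needs a single comparison.  The subtraction is done
   on the unsigned value so the sketch avoids signed overflow at the
   type's minimum.  */

static int
test_as_chain (int x)
{
  return x == 2 || x == 3 || x == 4 || x == 5;
}

static int
test_as_pair (int x)
{
  return x >= 2 && x <= 5;
}

static int
test_as_range (int x)
{
  return (unsigned int) x - 2u <= 3u;
}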

/* Return the result of applying CODE to ARG0 and ARG1, but handle the case
   of ARG0 and/or ARG1 being omitted, meaning an unlimited range.  UPPER0_P
   and UPPER1_P are nonzero if the respective argument is an upper bound
   and zero for a lower.  TYPE, if nonzero, is the type of the result; it
   must be specified for a comparison.  ARG1 will be converted to ARG0's
   type if both are specified.  */

static tree
range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
	     tree arg1, int upper1_p)
{
  tree tem;
  int result;
  int sgn0, sgn1;

  /* If neither arg represents infinity, do the normal operation.
     Else, if not a comparison, return infinity.  Else handle the special
     comparison rules.  Note that most of the cases below won't occur, but
     are handled for consistency.  */

  if (arg0 != 0 && arg1 != 0)
    {
      tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
			 arg0, fold_convert (TREE_TYPE (arg0), arg1));
      STRIP_NOPS (tem);
      return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
    }

  if (TREE_CODE_CLASS (code) != tcc_comparison)
    return 0;

  /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
     for neither.  In real maths, we cannot assume open ended ranges are
     the same.  But, this is computer arithmetic, where numbers are finite.
     We can therefore make the transformation of any unbounded range with
     the value Z, Z being greater than any representable number.  This permits
     us to treat unbounded ranges as equal.  */
  sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
  sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
  switch (code)
    {
    case EQ_EXPR:
      result = sgn0 == sgn1;
      break;
    case NE_EXPR:
      result = sgn0 != sgn1;
      break;
    case LT_EXPR:
      result = sgn0 < sgn1;
      break;
    case LE_EXPR:
      result = sgn0 <= sgn1;
      break;
    case GT_EXPR:
      result = sgn0 > sgn1;
      break;
    case GE_EXPR:
      result = sgn0 >= sgn1;
      break;
    default:
      gcc_unreachable ();
    }

  return constant_boolean_node (result, type);
}

/* Helper routine for make_range.  Perform one step for it, return
   new expression if the loop should continue or NULL_TREE if it should
   stop.  */

tree
make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
		 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
		 bool *strict_overflow_p)
{
  tree arg0_type = TREE_TYPE (arg0);
  tree n_low, n_high, low = *p_low, high = *p_high;
  int in_p = *p_in_p, n_in_p;

  switch (code)
    {
    case TRUTH_NOT_EXPR:
      *p_in_p = ! in_p;
      return arg0;

    case EQ_EXPR: case NE_EXPR:
    case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
      /* We can only do something if the range is testing for zero
	 and if the second operand is an integer constant.  Note that
	 saying something is "in" the range we make is done by
	 complementing IN_P since it will set in the initial case of
	 being not equal to zero; "out" is leaving it alone.  */
      if (low == NULL_TREE || high == NULL_TREE
	  || ! integer_zerop (low) || ! integer_zerop (high)
	  || TREE_CODE (arg1) != INTEGER_CST)
	return NULL_TREE;

      switch (code)
	{
	case NE_EXPR:  /* - [c, c]  */
	  low = high = arg1;
	  break;
	case EQ_EXPR:  /* + [c, c]  */
	  in_p = ! in_p, low = high = arg1;
	  break;
	case GT_EXPR:  /* - [-, c] */
	  low = 0, high = arg1;
	  break;
	case GE_EXPR:  /* + [c, -] */
	  in_p = ! in_p, low = arg1, high = 0;
	  break;
	case LT_EXPR:  /* - [c, -] */
	  low = arg1, high = 0;
	  break;
	case LE_EXPR:  /* + [-, c] */
	  in_p = ! in_p, low = 0, high = arg1;
	  break;
	default:
	  gcc_unreachable ();
	}

      /* If this is an unsigned comparison, we also know that EXP is
	 greater than or equal to zero.  We base the range tests we make
	 on that fact, so we record it here so we can parse existing
	 range tests.  We test arg0_type since often the return type
	 of, e.g. EQ_EXPR, is boolean.  */
      if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
	{
	  if (! merge_ranges (&n_in_p, &n_low, &n_high,
			      in_p, low, high, 1,
			      build_int_cst (arg0_type, 0),
			      NULL_TREE))
	    return NULL_TREE;

	  in_p = n_in_p, low = n_low, high = n_high;

	  /* If the high bound is missing, but we have a nonzero low
	     bound, reverse the range so it goes from zero to the low bound
	     minus 1.  */
	  if (high == 0 && low && ! integer_zerop (low))
	    {
	      in_p = ! in_p;
	      high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
				  integer_one_node, 0);
	      low = build_int_cst (arg0_type, 0);
	    }
	}

      *p_low = low;
      *p_high = high;
      *p_in_p = in_p;
      return arg0;

    case NEGATE_EXPR:
      /* (-x) IN [a,b] -> x in [-b, -a]  */
      n_low = range_binop (MINUS_EXPR, exp_type,
			   build_int_cst (exp_type, 0),
			   0, high, 1);
      n_high = range_binop (MINUS_EXPR, exp_type,
			    build_int_cst (exp_type, 0),
			    0, low, 0);
      if (n_high != 0 && TREE_OVERFLOW (n_high))
	return NULL_TREE;
      goto normalize;

    case BIT_NOT_EXPR:
      /* ~ X -> -X - 1  */
      return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
			 build_int_cst (exp_type, 1));

    case PLUS_EXPR:
    case MINUS_EXPR:
      if (TREE_CODE (arg1) != INTEGER_CST)
	return NULL_TREE;

      /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
	 move a constant to the other side.  */
      if (!TYPE_UNSIGNED (arg0_type)
	  && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
	return NULL_TREE;

      /* If EXP is signed, any overflow in the computation is undefined,
	 so we don't worry about it so long as our computations on
	 the bounds don't overflow.  For unsigned, overflow is defined
	 and this is exactly the right thing.  */
      n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
			   arg0_type, low, 0, arg1, 0);
      n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
			    arg0_type, high, 1, arg1, 0);
      if ((n_low != 0 && TREE_OVERFLOW (n_low))
	  || (n_high != 0 && TREE_OVERFLOW (n_high)))
	return NULL_TREE;

      if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
	*strict_overflow_p = true;

    normalize:
      /* Check for an unsigned range which has wrapped around the maximum
	 value thus making n_high < n_low, and normalize it.  */
      if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
	{
	  low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
			     integer_one_node, 0);
	  high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
			      integer_one_node, 0);

	  /* If the range is of the form +/- [ x+1, x ], we won't
	     be able to normalize it.  But then, it represents the
	     whole range or the empty set, so make it
	     +/- [ -, - ].  */
	  if (tree_int_cst_equal (n_low, low)
	      && tree_int_cst_equal (n_high, high))
	    low = high = 0;
	  else
	    in_p = ! in_p;
	}
      else
	low = n_low, high = n_high;

      *p_low = low;
      *p_high = high;
      *p_in_p = in_p;
      return arg0;

    CASE_CONVERT:
    case NON_LVALUE_EXPR:
      if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
	return NULL_TREE;

      if (! INTEGRAL_TYPE_P (arg0_type)
	  || (low != 0 && ! int_fits_type_p (low, arg0_type))
	  || (high != 0 && ! int_fits_type_p (high, arg0_type)))
	return NULL_TREE;

      n_low = low, n_high = high;

      if (n_low != 0)
	n_low = fold_convert_loc (loc, arg0_type, n_low);

      if (n_high != 0)
	n_high = fold_convert_loc (loc, arg0_type, n_high);

      /* If we're converting arg0 from an unsigned type, to exp,
	 a signed type, we will be doing the comparison as unsigned.
	 The tests above have already verified that LOW and HIGH
	 are both positive.

	 So we have to ensure that we will handle large unsigned
	 values the same way that the current signed bounds treat
	 negative values.  */

      if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
	{
	  tree high_positive;
	  tree equiv_type;
	  /* For fixed-point modes, we need to pass the saturating flag
	     as the 2nd parameter.  */
	  if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
	    equiv_type
	      = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
						TYPE_SATURATING (arg0_type));
	  else
	    equiv_type
	      = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);

	  /* A range without an upper bound is, naturally, unbounded.
	     Since convert would have cropped a very large value, use
	     the max value for the destination type.  */
	  high_positive
	    = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
	      : TYPE_MAX_VALUE (arg0_type);

	  if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
	    high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
					     fold_convert_loc (loc, arg0_type,
							       high_positive),
					     build_int_cst (arg0_type, 1));

	  /* If the low bound is specified, "and" the range with the
	     range for which the original unsigned value will be
	     positive.  */
	  if (low != 0)
	    {
	      if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
				  1, fold_convert_loc (loc, arg0_type,
						       integer_zero_node),
				  high_positive))
		return NULL_TREE;

	      in_p = (n_in_p == in_p);
	    }
	  else
	    {
	      /* Otherwise, "or" the range with the range of the input
		 that will be interpreted as negative.  */
	      if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
				  1, fold_convert_loc (loc, arg0_type,
						       integer_zero_node),
				  high_positive))
		return NULL_TREE;

	      in_p = (in_p != n_in_p);
	    }
	}

      *p_low = n_low;
      *p_high = n_high;
      *p_in_p = in_p;
      return arg0;

    default:
      return NULL_TREE;
    }
}

/* Given EXP, a logical expression, set the range it is testing into
   variables denoted by PIN_P, PLOW, and PHIGH.  Return the expression
   actually being tested.  *PLOW and *PHIGH will be made of the same
   type as the returned expression.  If EXP is not a comparison, we
   will most likely not be returning a useful value and range.  Set
   *STRICT_OVERFLOW_P to true if the return value is only valid
   because signed overflow is undefined; otherwise, do not change
   *STRICT_OVERFLOW_P.  */

tree
make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
	    bool *strict_overflow_p)
{
  enum tree_code code;
  tree arg0, arg1 = NULL_TREE;
  tree exp_type, nexp;
  int in_p;
  tree low, high;
  location_t loc = EXPR_LOCATION (exp);

  /* Start with simply saying "EXP != 0" and then look at the code of EXP
     and see if we can refine the range.  Some of the cases below may not
     happen, but it doesn't seem worth worrying about this.  We "continue"
     the outer loop when we've changed something; otherwise we "break"
     the switch, which will "break" the while.  */

  in_p = 0;
  low = high = build_int_cst (TREE_TYPE (exp), 0);

  while (1)
    {
      code = TREE_CODE (exp);
      exp_type = TREE_TYPE (exp);
      arg0 = NULL_TREE;

      if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
	{
	  if (TREE_OPERAND_LENGTH (exp) > 0)
	    arg0 = TREE_OPERAND (exp, 0);
	  if (TREE_CODE_CLASS (code) == tcc_binary
	      || TREE_CODE_CLASS (code) == tcc_comparison
	      || (TREE_CODE_CLASS (code) == tcc_expression
		  && TREE_OPERAND_LENGTH (exp) > 1))
	    arg1 = TREE_OPERAND (exp, 1);
	}
      if (arg0 == NULL_TREE)
	break;

      nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
			      &high, &in_p, strict_overflow_p);
      if (nexp == NULL_TREE)
	break;
      exp = nexp;
    }

  /* If EXP is a constant, we can evaluate whether this is true or false.  */
  if (TREE_CODE (exp) == INTEGER_CST)
    {
      in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
						 exp, 0, low, 0))
		      && integer_onep (range_binop (LE_EXPR, integer_type_node,
						    exp, 1, high, 1)));
      low = high = 0;
      exp = 0;
    }

  *pin_p = in_p, *plow = low, *phigh = high;
  return exp;
}

/* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
   type, TYPE, return an expression to test if EXP is in (or out of, depending
   on IN_P) the range.  Return 0 if the test couldn't be created.  */

tree
build_range_check (location_t loc, tree type, tree exp, int in_p,
		   tree low, tree high)
{
  tree etype = TREE_TYPE (exp), value;

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* Disable this optimization for function pointer expressions
     on targets that require function pointer canonicalization.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (etype) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
    return NULL_TREE;
#endif

  if (! in_p)
    {
      value = build_range_check (loc, type, exp, 1, low, high);
      if (value != 0)
	return invert_truthvalue_loc (loc, value);

      return 0;
    }

  if (low == 0 && high == 0)
    return build_int_cst (type, 1);

  if (low == 0)
    return fold_build2_loc (loc, LE_EXPR, type, exp,
			    fold_convert_loc (loc, etype, high));

  if (high == 0)
    return fold_build2_loc (loc, GE_EXPR, type, exp,
			    fold_convert_loc (loc, etype, low));

  if (operand_equal_p (low, high, 0))
    return fold_build2_loc (loc, EQ_EXPR, type, exp,
			    fold_convert_loc (loc, etype, low));

  if (integer_zerop (low))
    {
      if (! TYPE_UNSIGNED (etype))
	{
	  etype = unsigned_type_for (etype);
	  high = fold_convert_loc (loc, etype, high);
	  exp = fold_convert_loc (loc, etype, exp);
	}
      return build_range_check (loc, type, exp, 1, 0, high);
    }

  /* Optimize (c>=1) && (c<=127) into (signed char)c > 0.  */
  if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
    {
      unsigned HOST_WIDE_INT lo;
      HOST_WIDE_INT hi;
      int prec;

      prec = TYPE_PRECISION (etype);
      if (prec <= HOST_BITS_PER_WIDE_INT)
	{
	  hi = 0;
	  lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
	}
      else
	{
	  hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
	  lo = (unsigned HOST_WIDE_INT) -1;
	}

      if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
	{
	  if (TYPE_UNSIGNED (etype))
	    {
	      tree signed_etype = signed_type_for (etype);
	      if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
		etype
		  = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
	      else
		etype = signed_etype;
	      exp = fold_convert_loc (loc, etype, exp);
	    }
	  return fold_build2_loc (loc, GT_EXPR, type, exp,
				  build_int_cst (etype, 0));
	}
    }

  /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
     This requires wrap-around arithmetic for the type of the expression.
     First make sure that arithmetic in this type is valid, then make sure
     that it wraps around.  */
  if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
    etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
					    TYPE_UNSIGNED (etype));

  if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
    {
      tree utype, minv, maxv;

      /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
	 for the type in question, as we rely on this here.  */
      utype = unsigned_type_for (etype);
      maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
      maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
			  integer_one_node, 1);
      minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));

      if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
				      minv, 1, maxv, 1)))
	etype = utype;
      else
	return 0;
    }

  high = fold_convert_loc (loc, etype, high);
  low = fold_convert_loc (loc, etype, low);
  exp = fold_convert_loc (loc, etype, exp);

  value = const_binop (MINUS_EXPR, high, low);

  if (POINTER_TYPE_P (etype))
    {
      if (value != 0 && !TREE_OVERFLOW (value))
	{
	  low = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (low), low);
	  return build_range_check (loc, type,
				    fold_build_pointer_plus_loc (loc, exp, low),
				    1, build_int_cst (etype, 0), value);
	}
      return 0;
    }

  if (value != 0 && !TREE_OVERFLOW (value))
    return build_range_check (loc, type,
			      fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
			      1, build_int_cst (etype, 0), value);

  return 0;
}
|
4281 |
|
|
|
4282 |
|
|
/* Return the predecessor of VAL in its type, handling the infinite case. */
|
4283 |
|
|
|
4284 |
|
|
static tree
|
4285 |
|
|
range_predecessor (tree val)
|
4286 |
|
|
{
|
4287 |
|
|
tree type = TREE_TYPE (val);
|
4288 |
|
|
|
4289 |
|
|
if (INTEGRAL_TYPE_P (type)
|
4290 |
|
|
&& operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
|
4291 |
|
|
return 0;
|
4292 |
|
|
else
|
4293 |
|
|
return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
|
4294 |
|
|
}
|
4295 |
|
|
|
4296 |
|
|
/* Return the successor of VAL in its type, handling the infinite case. */
|
4297 |
|
|
|
4298 |
|
|
static tree
|
4299 |
|
|
range_successor (tree val)
|
4300 |
|
|
{
|
4301 |
|
|
tree type = TREE_TYPE (val);
|
4302 |
|
|
|
4303 |
|
|
if (INTEGRAL_TYPE_P (type)
|
4304 |
|
|
&& operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
|
4305 |
|
|
return 0;
|
4306 |
|
|
else
|
4307 |
|
|
return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
|
4308 |
|
|
}
|
4309 |
|
|
|
4310 |
|
|
/* Given two ranges, see if we can merge them into one. Return 1 if we
|
4311 |
|
|
can, 0 if we can't. Set the output range into the specified parameters. */
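/* For example, merging the two "in" ranges [0, 9] and [5, 15] yields the
   "in" range [5, 9], and merging the "in" range [0, 9] with the "out"
   range [5, 15] yields the "in" range [0, 4].  Not every combination can
   be described as a single range; in those cases we return 0.  */
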
bool
merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
              tree high0, int in1_p, tree low1, tree high1)
{
  int no_overlap;
  int subset;
  int temp;
  tree tem;
  int in_p;
  tree low, high;
  int lowequal = ((low0 == 0 && low1 == 0)
                  || integer_onep (range_binop (EQ_EXPR, integer_type_node,
                                                low0, 0, low1, 0)));
  int highequal = ((high0 == 0 && high1 == 0)
                   || integer_onep (range_binop (EQ_EXPR, integer_type_node,
                                                 high0, 1, high1, 1)));

  /* Make range 0 be the range that starts first, or ends last if they
     start at the same value.  Swap them if it isn't.  */
  if (integer_onep (range_binop (GT_EXPR, integer_type_node,
                                 low0, 0, low1, 0))
      || (lowequal
          && integer_onep (range_binop (GT_EXPR, integer_type_node,
                                        high1, 1, high0, 1))))
    {
      temp = in0_p, in0_p = in1_p, in1_p = temp;
      tem = low0, low0 = low1, low1 = tem;
      tem = high0, high0 = high1, high1 = tem;
    }

  /* Now flag two cases, whether the ranges are disjoint or whether the
     second range is totally subsumed in the first.  Note that the tests
     below are simplified by the ones above.  */
  no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
                                          high0, 1, low1, 0));
  subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
                                      high1, 1, high0, 1));

  /* We now have four cases, depending on whether we are including or
     excluding the two ranges.  */
  if (in0_p && in1_p)
    {
      /* If they don't overlap, the result is false.  If the second range
         is a subset it is the result.  Otherwise, the range is from the start
         of the second to the end of the first.  */
      if (no_overlap)
        in_p = 0, low = high = 0;
      else if (subset)
        in_p = 1, low = low1, high = high1;
      else
        in_p = 1, low = low1, high = high0;
    }

  else if (in0_p && ! in1_p)
    {
      /* If they don't overlap, the result is the first range.  If they are
         equal, the result is false.  If the second range is a subset of the
         first, and the ranges begin at the same place, we go from just after
         the end of the second range to the end of the first.  If the second
         range is not a subset of the first, or if it is a subset and both
         ranges end at the same place, the range starts at the start of the
         first range and ends just before the second range.
         Otherwise, we can't describe this as a single range.  */
      if (no_overlap)
        in_p = 1, low = low0, high = high0;
      else if (lowequal && highequal)
        in_p = 0, low = high = 0;
      else if (subset && lowequal)
        {
          low = range_successor (high1);
          high = high0;
          in_p = 1;
          if (low == 0)
            {
              /* We are in the weird situation where high0 > high1 but
                 high1 has no successor.  Punt.  */
              return 0;
            }
        }
      else if (! subset || highequal)
        {
          low = low0;
          high = range_predecessor (low1);
          in_p = 1;
          if (high == 0)
            {
              /* low0 < low1 but low1 has no predecessor.  Punt.  */
              return 0;
            }
        }
      else
        return 0;
    }

  else if (! in0_p && in1_p)
    {
      /* If they don't overlap, the result is the second range.  If the second
         is a subset of the first, the result is false.  Otherwise,
         the range starts just after the first range and ends at the
         end of the second.  */
      if (no_overlap)
        in_p = 1, low = low1, high = high1;
      else if (subset || highequal)
        in_p = 0, low = high = 0;
      else
        {
          low = range_successor (high0);
          high = high1;
          in_p = 1;
          if (low == 0)
            {
              /* high1 > high0 but high0 has no successor.  Punt.  */
              return 0;
            }
        }
    }

  else
    {
      /* The case where we are excluding both ranges.  Here the complex case
         is if they don't overlap.  In that case, the only time we have a
         range is if they are adjacent.  If the second is a subset of the
         first, the result is the first.  Otherwise, the range to exclude
         starts at the beginning of the first range and ends at the end of the
         second.  */
      if (no_overlap)
        {
          if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
                                         range_successor (high0),
                                         1, low1, 0)))
            in_p = 0, low = low0, high = high1;
          else
            {
              /* Canonicalize - [min, x] into - [-, x].  */
              if (low0 && TREE_CODE (low0) == INTEGER_CST)
                switch (TREE_CODE (TREE_TYPE (low0)))
                  {
                  case ENUMERAL_TYPE:
                    if (TYPE_PRECISION (TREE_TYPE (low0))
                        != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
                      break;
                    /* FALLTHROUGH */
                  case INTEGER_TYPE:
                    if (tree_int_cst_equal (low0,
                                            TYPE_MIN_VALUE (TREE_TYPE (low0))))
                      low0 = 0;
                    break;
                  case POINTER_TYPE:
                    if (TYPE_UNSIGNED (TREE_TYPE (low0))
                        && integer_zerop (low0))
                      low0 = 0;
                    break;
                  default:
                    break;
                  }

              /* Canonicalize - [x, max] into - [x, -].  */
              if (high1 && TREE_CODE (high1) == INTEGER_CST)
                switch (TREE_CODE (TREE_TYPE (high1)))
                  {
                  case ENUMERAL_TYPE:
                    if (TYPE_PRECISION (TREE_TYPE (high1))
                        != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
                      break;
                    /* FALLTHROUGH */
                  case INTEGER_TYPE:
                    if (tree_int_cst_equal (high1,
                                            TYPE_MAX_VALUE (TREE_TYPE (high1))))
                      high1 = 0;
                    break;
                  case POINTER_TYPE:
                    if (TYPE_UNSIGNED (TREE_TYPE (high1))
                        && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
                                                       high1, 1,
                                                       integer_one_node, 1)))
                      high1 = 0;
                    break;
                  default:
                    break;
                  }

              /* The ranges might be also adjacent between the maximum and
                 minimum values of the given type.  For
                 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
                 return + [x + 1, y - 1].  */
              if (low0 == 0 && high1 == 0)
                {
                  low = range_successor (high0);
                  high = range_predecessor (low1);
                  if (low == 0 || high == 0)
                    return 0;

                  in_p = 1;
                }
              else
                return 0;
            }
        }
      else if (subset)
        in_p = 0, low = low0, high = high0;
      else
        in_p = 0, low = low0, high = high1;
    }

  *pin_p = in_p, *plow = low, *phigh = high;
  return 1;
}

/* Subroutine of fold, looking inside expressions of the form
   A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
   of the COND_EXPR.  This function is also used to optimize
   A op B ? C : A, by reversing the comparison first.

   Return a folded expression whose code is not a COND_EXPR
   anymore, or NULL_TREE if no folding opportunity is found.  */
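/* For example, x < y ? y : x is handled by first reversing the comparison,
   giving x >= y ? x : y, which the MIN/MAX logic below then folds to
   MAX_EXPR <x, y> (when NaNs need not be honored).  */
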
static tree
fold_cond_expr_with_comparison (location_t loc, tree type,
                                tree arg0, tree arg1, tree arg2)
{
  enum tree_code comp_code = TREE_CODE (arg0);
  tree arg00 = TREE_OPERAND (arg0, 0);
  tree arg01 = TREE_OPERAND (arg0, 1);
  tree arg1_type = TREE_TYPE (arg1);
  tree tem;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  /* If we have A op 0 ? A : -A, consider applying the following
     transformations:

     A == 0? A : -A    same as -A
     A != 0? A : -A    same as A
     A >= 0? A : -A    same as abs (A)
     A > 0?  A : -A    same as abs (A)
     A <= 0? A : -A    same as -abs (A)
     A < 0?  A : -A    same as -abs (A)

     None of these transformations work for modes with signed
     zeros.  If A is +/-0, the first two transformations will
     change the sign of the result (from +0 to -0, or vice
     versa).  The last four will fix the sign of the result,
     even though the original expressions could be positive or
     negative, depending on the sign of A.

     Note that all these transformations are correct if A is
     NaN, since the two alternatives (A and -A) are also NaNs.  */
  if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
      && (FLOAT_TYPE_P (TREE_TYPE (arg01))
          ? real_zerop (arg01)
          : integer_zerop (arg01))
      && ((TREE_CODE (arg2) == NEGATE_EXPR
           && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
          /* In the case that A is of the form X-Y, '-A' (arg2) may
             have already been folded to Y-X; check for that.  */
          || (TREE_CODE (arg1) == MINUS_EXPR
              && TREE_CODE (arg2) == MINUS_EXPR
              && operand_equal_p (TREE_OPERAND (arg1, 0),
                                  TREE_OPERAND (arg2, 1), 0)
              && operand_equal_p (TREE_OPERAND (arg1, 1),
                                  TREE_OPERAND (arg2, 0), 0))))
    switch (comp_code)
      {
      case EQ_EXPR:
      case UNEQ_EXPR:
        tem = fold_convert_loc (loc, arg1_type, arg1);
        return pedantic_non_lvalue_loc (loc,
                                        fold_convert_loc (loc, type,
                                                          negate_expr (tem)));
      case NE_EXPR:
      case LTGT_EXPR:
        return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
      case UNGE_EXPR:
      case UNGT_EXPR:
        if (flag_trapping_math)
          break;
        /* Fall through.  */
      case GE_EXPR:
      case GT_EXPR:
        if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
          arg1 = fold_convert_loc (loc, signed_type_for
                                   (TREE_TYPE (arg1)), arg1);
        tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
        return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
      case UNLE_EXPR:
      case UNLT_EXPR:
        if (flag_trapping_math)
          break;
        /* Fall through.  */
      case LE_EXPR:
      case LT_EXPR:
        if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
          arg1 = fold_convert_loc (loc, signed_type_for
                                   (TREE_TYPE (arg1)), arg1);
        tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
        return negate_expr (fold_convert_loc (loc, type, tem));
      default:
        gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
        break;
      }

  /* A != 0 ? A : 0 is simply A, unless A is -0.  Likewise
     A == 0 ? A : 0 is always 0 unless A is -0.  Note that
     both transformations are correct when A is NaN: A != 0
     is then true, and A == 0 is false.  */

  if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
      && integer_zerop (arg01) && integer_zerop (arg2))
    {
      if (comp_code == NE_EXPR)
        return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
      else if (comp_code == EQ_EXPR)
        return build_int_cst (type, 0);
    }

  /* Try some transformations of A op B ? A : B.

     A == B? A : B    same as B
     A != B? A : B    same as A
     A >= B? A : B    same as max (A, B)
     A > B?  A : B    same as max (B, A)
     A <= B? A : B    same as min (A, B)
     A < B?  A : B    same as min (B, A)

     As above, these transformations don't work in the presence
     of signed zeros.  For example, if A and B are zeros of
     opposite sign, the first two transformations will change
     the sign of the result.  In the last four, the original
     expressions give different results for (A=+0, B=-0) and
     (A=-0, B=+0), but the transformed expressions do not.

     The first two transformations are correct if either A or B
     is a NaN.  In the first transformation, the condition will
     be false, and B will indeed be chosen.  In the case of the
     second transformation, the condition A != B will be true,
     and A will be chosen.

     The conversions to max() and min() are not correct if B is
     a number and A is not.  The conditions in the original
     expressions will be false, so all four give B.  The min()
     and max() versions would give a NaN instead.  */
  if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
      && operand_equal_for_comparison_p (arg01, arg2, arg00)
      /* Avoid these transformations if the COND_EXPR may be used
         as an lvalue in the C++ front-end.  PR c++/19199.  */
      && (in_gimple_form
          || (strcmp (lang_hooks.name, "GNU C++") != 0
              && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
          || ! maybe_lvalue_p (arg1)
          || ! maybe_lvalue_p (arg2)))
    {
      tree comp_op0 = arg00;
      tree comp_op1 = arg01;
      tree comp_type = TREE_TYPE (comp_op0);

      /* Avoid adding NOP_EXPRs in case this is an lvalue.  */
      if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
        {
          comp_type = type;
          comp_op0 = arg1;
          comp_op1 = arg2;
        }

      switch (comp_code)
        {
        case EQ_EXPR:
          return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
        case NE_EXPR:
          return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
        case LE_EXPR:
        case LT_EXPR:
        case UNLE_EXPR:
        case UNLT_EXPR:
          /* In C++ a ?: expression can be an lvalue, so put the
             operand which will be used if they are equal first
             so that we can convert this back to the
             corresponding COND_EXPR.  */
          if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
            {
              comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
              comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
              tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
                    ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
                    : fold_build2_loc (loc, MIN_EXPR, comp_type,
                                       comp_op1, comp_op0);
              return pedantic_non_lvalue_loc (loc,
                                              fold_convert_loc (loc, type, tem));
            }
          break;
        case GE_EXPR:
        case GT_EXPR:
        case UNGE_EXPR:
        case UNGT_EXPR:
          if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
            {
              comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
              comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
              tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
                    ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
                    : fold_build2_loc (loc, MAX_EXPR, comp_type,
                                       comp_op1, comp_op0);
              return pedantic_non_lvalue_loc (loc,
                                              fold_convert_loc (loc, type, tem));
            }
          break;
        case UNEQ_EXPR:
          if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
            return pedantic_non_lvalue_loc (loc,
                                            fold_convert_loc (loc, type, arg2));
          break;
        case LTGT_EXPR:
          if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
            return pedantic_non_lvalue_loc (loc,
                                            fold_convert_loc (loc, type, arg1));
          break;
        default:
          gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
          break;
        }
    }

  /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
     we might still be able to simplify this.  For example,
     if C1 is one less or one more than C2, this might have started
     out as a MIN or MAX and been transformed by this function.
     Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE.  */
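  /* For instance, x < 4 ? x : 3 tests x against C1 == 4 with C2 == 3; since
     C1 == C2 + 1, it is equivalent to MIN_EXPR <x, 3>, and the LT_EXPR case
     below rebuilds exactly that.  */
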
  if (INTEGRAL_TYPE_P (type)
      && TREE_CODE (arg01) == INTEGER_CST
      && TREE_CODE (arg2) == INTEGER_CST)
    switch (comp_code)
      {
      case EQ_EXPR:
        if (TREE_CODE (arg1) == INTEGER_CST)
          break;
        /* We can replace A with C1 in this case.  */
        arg1 = fold_convert_loc (loc, type, arg01);
        return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);

      case LT_EXPR:
        /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
           MIN_EXPR, to preserve the signedness of the comparison.  */
        if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
                               OEP_ONLY_CONST)
            && operand_equal_p (arg01,
                                const_binop (PLUS_EXPR, arg2,
                                             build_int_cst (type, 1)),
                                OEP_ONLY_CONST))
          {
            tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
                                   fold_convert_loc (loc, TREE_TYPE (arg00),
                                                     arg2));
            return pedantic_non_lvalue_loc (loc,
                                            fold_convert_loc (loc, type, tem));
          }
        break;

      case LE_EXPR:
        /* If C1 is C2 - 1, this is min(A, C2), with the same care
           as above.  */
        if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
                               OEP_ONLY_CONST)
            && operand_equal_p (arg01,
                                const_binop (MINUS_EXPR, arg2,
                                             build_int_cst (type, 1)),
                                OEP_ONLY_CONST))
          {
            tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
                                   fold_convert_loc (loc, TREE_TYPE (arg00),
                                                     arg2));
            return pedantic_non_lvalue_loc (loc,
                                            fold_convert_loc (loc, type, tem));
          }
        break;

      case GT_EXPR:
        /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
           MAX_EXPR, to preserve the signedness of the comparison.  */
        if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
                               OEP_ONLY_CONST)
            && operand_equal_p (arg01,
                                const_binop (MINUS_EXPR, arg2,
                                             build_int_cst (type, 1)),
                                OEP_ONLY_CONST))
          {
            tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
                                   fold_convert_loc (loc, TREE_TYPE (arg00),
                                                     arg2));
            return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
          }
        break;

      case GE_EXPR:
        /* If C1 is C2 + 1, this is max(A, C2), with the same care as above.  */
        if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
                               OEP_ONLY_CONST)
            && operand_equal_p (arg01,
                                const_binop (PLUS_EXPR, arg2,
                                             build_int_cst (type, 1)),
                                OEP_ONLY_CONST))
          {
            tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
                                   fold_convert_loc (loc, TREE_TYPE (arg00),
                                                     arg2));
            return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
          }
        break;
      case NE_EXPR:
        break;
      default:
        gcc_unreachable ();
      }

  return NULL_TREE;
}


#ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
#define LOGICAL_OP_NON_SHORT_CIRCUIT \
  (BRANCH_COST (optimize_function_for_speed_p (cfun), \
                false) >= 2)
#endif

/* EXP is some logical combination of boolean tests.  See if we can
   merge it into some range test.  Return the new tree if so.  */
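/* For example, the two comparisons in "ch >= '0' && ch <= '9'" describe a
   single range, so they can be merged and rebuilt by build_range_check as,
   in effect, one unsigned comparison of ch - '0' against 9.  */
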
static tree
fold_range_test (location_t loc, enum tree_code code, tree type,
                 tree op0, tree op1)
{
  int or_op = (code == TRUTH_ORIF_EXPR
               || code == TRUTH_OR_EXPR);
  int in0_p, in1_p, in_p;
  tree low0, low1, low, high0, high1, high;
  bool strict_overflow_p = false;
  tree lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
  tree rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
  tree tem;
  const char * const warnmsg = G_("assuming signed overflow does not occur "
                                  "when simplifying range test");

  /* If this is an OR operation, invert both sides; we will invert
     again at the end.  */
  if (or_op)
    in0_p = ! in0_p, in1_p = ! in1_p;

  /* If both expressions are the same, if we can merge the ranges, and we
     can build the range test, return it or its inversion.  If one of the
     ranges is always true or always false, consider it to be the same
     expression as the other.  */
  if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
      && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
                       in1_p, low1, high1)
      && 0 != (tem = (build_range_check (loc, type,
                                         lhs != 0 ? lhs
                                         : rhs != 0 ? rhs : integer_zero_node,
                                         in_p, low, high))))
    {
      if (strict_overflow_p)
        fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
      return or_op ? invert_truthvalue_loc (loc, tem) : tem;
    }

  /* On machines where branches are expensive, if this is a
     short-circuited branch and the underlying object on both sides
     is the same, make a non-short-circuit operation.  */
  else if (LOGICAL_OP_NON_SHORT_CIRCUIT
           && lhs != 0 && rhs != 0
           && (code == TRUTH_ANDIF_EXPR
               || code == TRUTH_ORIF_EXPR)
           && operand_equal_p (lhs, rhs, 0))
    {
      /* If simple enough, just rewrite.  Otherwise, make a SAVE_EXPR
         unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
         which cases we can't do this.  */
      if (simple_operand_p (lhs))
        return build2_loc (loc, code == TRUTH_ANDIF_EXPR
                           ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
                           type, op0, op1);

      else if (!lang_hooks.decls.global_bindings_p ()
               && !CONTAINS_PLACEHOLDER_P (lhs))
        {
          tree common = save_expr (lhs);

          if (0 != (lhs = build_range_check (loc, type, common,
                                             or_op ? ! in0_p : in0_p,
                                             low0, high0))
              && (0 != (rhs = build_range_check (loc, type, common,
                                                 or_op ? ! in1_p : in1_p,
                                                 low1, high1))))
            {
              if (strict_overflow_p)
                fold_overflow_warning (warnmsg,
                                       WARN_STRICT_OVERFLOW_COMPARISON);
              return build2_loc (loc, code == TRUTH_ANDIF_EXPR
                                 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
                                 type, lhs, rhs);
            }
        }
    }

  return 0;
}

/* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
   bit value.  Arrange things so the extra bits will be set to zero if and
   only if C is sign-extended to its full width.  If MASK is nonzero,
   it is an INTEGER_CST that should be AND'ed with the extra bits.  */
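/* For instance, with P == 4 in a 16-bit type and C == 9 (binary 1001, sign
   bit of the 4-bit field set), the steps below turn the sign bit into
   0xfff0, and C ^ 0xfff0 == 0xfff9 leaves the extra bits nonzero; had C
   already been the sign-extended value 0xfff9, the result 0x0009 would
   have them all zero.  */
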
static tree
unextend (tree c, int p, int unsignedp, tree mask)
{
  tree type = TREE_TYPE (c);
  int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
  tree temp;

  if (p == modesize || unsignedp)
    return c;

  /* We work by getting just the sign bit into the low-order bit, then
     into the high-order bit, then sign-extend.  We then XOR that value
     with C.  */
  temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1));
  temp = const_binop (BIT_AND_EXPR, temp, size_int (1));

  /* We must use a signed type in order to get an arithmetic right shift.
     However, we must also avoid introducing accidental overflows, so that
     a subsequent call to integer_zerop will work.  Hence we must
     do the type conversion here.  At this point, the constant is either
     zero or one, and the conversion to a signed type can never overflow.
     We could get an overflow if this conversion is done anywhere else.  */
  if (TYPE_UNSIGNED (type))
    temp = fold_convert (signed_type_for (type), temp);

  temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
  temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
  if (mask != 0)
    temp = const_binop (BIT_AND_EXPR, temp,
                        fold_convert (TREE_TYPE (c), mask));
  /* If necessary, convert the type back to match the type of C.  */
  if (TYPE_UNSIGNED (type))
    temp = fold_convert (type, temp);

  return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
}

/* For an expression that has the form
     (A && B) || ~B
   or
     (A || B) && ~B,
   we can drop one of the inner expressions and simplify to
     A || ~B
   or
     A && ~B
   LOC is the location of the resulting expression.  OP is the inner
   logical operation (the left-hand side in the examples above), and CMPOP
   is the right-hand side.  RHS_ONLY is used to prevent us from accidentally
   removing a condition that guards another, as in
     (A != NULL && A->...) || A == NULL
   which we must not transform.  If RHS_ONLY is true, only eliminate the
   right-most operand of the inner logical operation.  */
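/* For example, (a > 5 && b != 2) || b == 2 simplifies to a > 5 || b == 2,
   because b == 2 is the inverse of the inner operand b != 2, so the inner
   test is redundant on the only path where the right-hand side matters.  */
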
static tree
merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
                                 bool rhs_only)
{
  tree type = TREE_TYPE (cmpop);
  enum tree_code code = TREE_CODE (cmpop);
  enum tree_code truthop_code = TREE_CODE (op);
  tree lhs = TREE_OPERAND (op, 0);
  tree rhs = TREE_OPERAND (op, 1);
  tree orig_lhs = lhs, orig_rhs = rhs;
  enum tree_code rhs_code = TREE_CODE (rhs);
  enum tree_code lhs_code = TREE_CODE (lhs);
  enum tree_code inv_code;

  if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
    return NULL_TREE;

  if (TREE_CODE_CLASS (code) != tcc_comparison)
    return NULL_TREE;

  if (rhs_code == truthop_code)
    {
      tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
      if (newrhs != NULL_TREE)
        {
          rhs = newrhs;
          rhs_code = TREE_CODE (rhs);
        }
    }
  if (lhs_code == truthop_code && !rhs_only)
    {
      tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
      if (newlhs != NULL_TREE)
        {
          lhs = newlhs;
          lhs_code = TREE_CODE (lhs);
        }
    }

  inv_code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (type)));
  if (inv_code == rhs_code
      && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
      && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
    return lhs;
  if (!rhs_only && inv_code == lhs_code
      && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
      && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
    return rhs;
  if (rhs != orig_rhs || lhs != orig_lhs)
    return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
                            lhs, rhs);
  return NULL_TREE;
}

/* Find ways of folding logical expressions of LHS and RHS:

   Try to merge two comparisons to the same innermost item.
   Look for range tests like "ch >= '0' && ch <= '9'".
   Look for combinations of simple terms on machines with expensive branches
   and evaluate the RHS unconditionally.

   For example, if we have p->a == 2 && p->b == 4 and we can make an
   object large enough to span both A and B, we can do this with a comparison
   against the object ANDed with a mask.

   If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
   operations to do this with one comparison.

   We check for both normal comparisons and the BIT_AND_EXPRs made by this
   function and the one above.

   CODE is the logical operation being done.  It can be TRUTH_ANDIF_EXPR,
   TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.

   TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
   two operands.

   We return the simplified tree or 0 if no optimization is possible.  */
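/* As an illustration of the bitfield merging: with

     struct s { unsigned char a, b; } *p;

   the test p->a == 2 && p->b == 4 can, on a target where the two bytes
   share a 16-bit word, become a single 16-bit load compared against the
   constant that has 2 and 4 in the corresponding byte positions.  */
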
static tree
fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
                    tree lhs, tree rhs)
{
  /* If this is the "or" of two comparisons, we can do something if
     the comparisons are NE_EXPR.  If this is the "and", we can do something
     if the comparisons are EQ_EXPR.  I.e.,
        (a->b == 2 && a->c == 4) can become (a->new == NEW).

     WANTED_CODE is this operation code.  For single bit fields, we can
     convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
     comparison for one-bit fields.  */

  enum tree_code wanted_code;
  enum tree_code lcode, rcode;
  tree ll_arg, lr_arg, rl_arg, rr_arg;
  tree ll_inner, lr_inner, rl_inner, rr_inner;
  HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
  HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
  HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
  HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
  int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
  enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
  enum machine_mode lnmode, rnmode;
  tree ll_mask, lr_mask, rl_mask, rr_mask;
  tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
  tree l_const, r_const;
  tree lntype, rntype, result;
  HOST_WIDE_INT first_bit, end_bit;
  int volatilep;

  /* Start by getting the comparison codes.  Fail if anything is volatile.
     If one operand is a BIT_AND_EXPR with the constant one, treat it as if
     it were surrounded with a NE_EXPR.  */

  if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
    return 0;

  lcode = TREE_CODE (lhs);
  rcode = TREE_CODE (rhs);

  if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
    {
      lhs = build2 (NE_EXPR, truth_type, lhs,
                    build_int_cst (TREE_TYPE (lhs), 0));
      lcode = NE_EXPR;
    }

  if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
    {
      rhs = build2 (NE_EXPR, truth_type, rhs,
                    build_int_cst (TREE_TYPE (rhs), 0));
      rcode = NE_EXPR;
    }

  if (TREE_CODE_CLASS (lcode) != tcc_comparison
      || TREE_CODE_CLASS (rcode) != tcc_comparison)
    return 0;

  ll_arg = TREE_OPERAND (lhs, 0);
  lr_arg = TREE_OPERAND (lhs, 1);
  rl_arg = TREE_OPERAND (rhs, 0);
  rr_arg = TREE_OPERAND (rhs, 1);

  /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations.  */
  if (simple_operand_p (ll_arg)
      && simple_operand_p (lr_arg))
    {
      if (operand_equal_p (ll_arg, rl_arg, 0)
          && operand_equal_p (lr_arg, rr_arg, 0))
        {
          result = combine_comparisons (loc, code, lcode, rcode,
                                        truth_type, ll_arg, lr_arg);
          if (result)
            return result;
        }
      else if (operand_equal_p (ll_arg, rr_arg, 0)
               && operand_equal_p (lr_arg, rl_arg, 0))
        {
          result = combine_comparisons (loc, code, lcode,
                                        swap_tree_comparison (rcode),
                                        truth_type, ll_arg, lr_arg);
          if (result)
            return result;
        }
    }

  code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
          ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);

  /* If the RHS can be evaluated unconditionally and its operands are
     simple, it wins to evaluate the RHS unconditionally on machines
     with expensive branches.  In this case, this isn't a comparison
     that can be merged.  */

  if (BRANCH_COST (optimize_function_for_speed_p (cfun),
                   false) >= 2
      && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
      && simple_operand_p (rl_arg)
      && simple_operand_p (rr_arg))
    {
      /* Convert (a != 0) || (b != 0) into (a | b) != 0.  */
      if (code == TRUTH_OR_EXPR
          && lcode == NE_EXPR && integer_zerop (lr_arg)
          && rcode == NE_EXPR && integer_zerop (rr_arg)
          && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
          && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
        return build2_loc (loc, NE_EXPR, truth_type,
                           build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
                                   ll_arg, rl_arg),
                           build_int_cst (TREE_TYPE (ll_arg), 0));

      /* Convert (a == 0) && (b == 0) into (a | b) == 0.  */
      if (code == TRUTH_AND_EXPR
          && lcode == EQ_EXPR && integer_zerop (lr_arg)
          && rcode == EQ_EXPR && integer_zerop (rr_arg)
          && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
          && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
        return build2_loc (loc, EQ_EXPR, truth_type,
                           build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
                                   ll_arg, rl_arg),
                           build_int_cst (TREE_TYPE (ll_arg), 0));
    }

  /* See if the comparisons can be merged.  Then get all the parameters for
     each side.  */

  if ((lcode != EQ_EXPR && lcode != NE_EXPR)
      || (rcode != EQ_EXPR && rcode != NE_EXPR))
    return 0;

  volatilep = 0;
  ll_inner = decode_field_reference (loc, ll_arg,
                                     &ll_bitsize, &ll_bitpos, &ll_mode,
                                     &ll_unsignedp, &volatilep, &ll_mask,
                                     &ll_and_mask);
  lr_inner = decode_field_reference (loc, lr_arg,
                                     &lr_bitsize, &lr_bitpos, &lr_mode,
                                     &lr_unsignedp, &volatilep, &lr_mask,
                                     &lr_and_mask);
  rl_inner = decode_field_reference (loc, rl_arg,
                                     &rl_bitsize, &rl_bitpos, &rl_mode,
                                     &rl_unsignedp, &volatilep, &rl_mask,
                                     &rl_and_mask);
  rr_inner = decode_field_reference (loc, rr_arg,
                                     &rr_bitsize, &rr_bitpos, &rr_mode,
                                     &rr_unsignedp, &volatilep, &rr_mask,
                                     &rr_and_mask);

  /* The inner operation on the lhs of each comparison must be the same
     if we are to be able to do anything.  Then see if we have constants.
     If not, the same must be true for the rhs's.  */
  if (volatilep || ll_inner == 0 || rl_inner == 0
      || ! operand_equal_p (ll_inner, rl_inner, 0))
    return 0;

  if (TREE_CODE (lr_arg) == INTEGER_CST
      && TREE_CODE (rr_arg) == INTEGER_CST)
    l_const = lr_arg, r_const = rr_arg;
  else if (lr_inner == 0 || rr_inner == 0
           || ! operand_equal_p (lr_inner, rr_inner, 0))
    return 0;
  else
    l_const = r_const = 0;

  /* If either comparison code is not correct for our logical operation,
     fail.  However, we can convert a one-bit comparison against zero into
     the opposite comparison against that bit being set in the field.  */

  wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
  if (lcode != wanted_code)
    {
      if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
        {
          /* Make the left operand unsigned, since we are only interested
             in the value of one bit.  Otherwise we are doing the wrong
             thing below.  */
          ll_unsignedp = 1;
          l_const = ll_mask;
        }
      else
        return 0;
    }

  /* This is analogous to the code for l_const above.  */
  if (rcode != wanted_code)
    {
      if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
        {
          rl_unsignedp = 1;
          r_const = rl_mask;
        }
      else
        return 0;
    }

  /* See if we can find a mode that contains both fields being compared on
     the left.  If we can't, fail.  Otherwise, update all constants and masks
     to be relative to a field of that size.  */
  first_bit = MIN (ll_bitpos, rl_bitpos);
  end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
  lnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
                          TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
                          volatilep);
  if (lnmode == VOIDmode)
    return 0;

  lnbitsize = GET_MODE_BITSIZE (lnmode);
  lnbitpos = first_bit & ~ (lnbitsize - 1);
  lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
  xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;

  if (BYTES_BIG_ENDIAN)
    {
      xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
      xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
    }

  ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
                         size_int (xll_bitpos));
  rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
                         size_int (xrl_bitpos));

  if (l_const)
    {
      l_const = fold_convert_loc (loc, lntype, l_const);
      l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
      l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
      if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
                                        fold_build1_loc (loc, BIT_NOT_EXPR,
                                                         lntype, ll_mask))))
        {
          warning (0, "comparison is always %d", wanted_code == NE_EXPR);

          return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
        }
    }
  if (r_const)
    {
      r_const = fold_convert_loc (loc, lntype, r_const);
      r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
      r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
      if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
                                        fold_build1_loc (loc, BIT_NOT_EXPR,
                                                         lntype, rl_mask))))
        {
          warning (0, "comparison is always %d", wanted_code == NE_EXPR);

          return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
        }
    }

  /* If the right sides are not constant, do the same for them.  Also,
     disallow this optimization if a size or signedness mismatch occurs
     between the left and right sides.  */
  if (l_const == 0)
    {
      if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
          || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
          /* Make sure the two fields on the right
             correspond to the left without being swapped.  */
          || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
        return 0;

      first_bit = MIN (lr_bitpos, rr_bitpos);
      end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
      rnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
                              TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
                              volatilep);
      if (rnmode == VOIDmode)
        return 0;

      rnbitsize = GET_MODE_BITSIZE (rnmode);
      rnbitpos = first_bit & ~ (rnbitsize - 1);
      rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
      xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;

      if (BYTES_BIG_ENDIAN)
        {
          xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
          xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
        }

      lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
                                                            rntype, lr_mask),
                             size_int (xlr_bitpos));
      rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
                                                            rntype, rr_mask),
                             size_int (xrr_bitpos));

      /* Make a mask that corresponds to both fields being compared.
         Do this for both items being compared.  If the operands are the
         same size and the bits being compared are in the same position
         then we can do this by masking both and comparing the masked
         results.  */
      ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
      lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
      if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
        {
          lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
                                    ll_unsignedp || rl_unsignedp);
          if (! all_ones_mask_p (ll_mask, lnbitsize))
            lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);

          rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
                                    lr_unsignedp || rr_unsignedp);
          if (! all_ones_mask_p (lr_mask, rnbitsize))
            rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);

          return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
        }

      /* There is still another way we can do something:  If both pairs of
         fields being compared are adjacent, we may be able to make a wider
         field containing them both.

         Note that we still must mask the lhs/rhs expressions.  Furthermore,
         the mask must be shifted to account for the shift done by
         make_bit_field_ref.  */
      if ((ll_bitsize + ll_bitpos == rl_bitpos
           && lr_bitsize + lr_bitpos == rr_bitpos)
          || (ll_bitpos == rl_bitpos + rl_bitsize
              && lr_bitpos == rr_bitpos + rr_bitsize))
        {
          tree type;

          lhs = make_bit_field_ref (loc, ll_inner, lntype,
                                    ll_bitsize + rl_bitsize,
                                    MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
          rhs = make_bit_field_ref (loc, lr_inner, rntype,
                                    lr_bitsize + rr_bitsize,
                                    MIN (lr_bitpos, rr_bitpos), lr_unsignedp);

          ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
                                 size_int (MIN (xll_bitpos, xrl_bitpos)));
          lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
                                 size_int (MIN (xlr_bitpos, xrr_bitpos)));

          /* Convert to the smaller type before masking out unwanted bits.  */
          type = lntype;
          if (lntype != rntype)
            {
              if (lnbitsize > rnbitsize)
                {
                  lhs = fold_convert_loc (loc, rntype, lhs);
                  ll_mask = fold_convert_loc (loc, rntype, ll_mask);
                  type = rntype;
                }
              else if (lnbitsize < rnbitsize)
                {
                  rhs = fold_convert_loc (loc, lntype, rhs);
                  lr_mask = fold_convert_loc (loc, lntype, lr_mask);
                  type = lntype;
                }
            }

          if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
            lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);

          if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
            rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);

          return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
        }

      return 0;
    }

  /* Handle the case of comparisons with constants.  If there is something in
     common between the masks, those bits of the constants must be the same.
     If not, the condition is always false.  Test for this to avoid generating
     incorrect code below.  */
  result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
  if (! integer_zerop (result)
      && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
                           const_binop (BIT_AND_EXPR, result, r_const)) != 1)
    {
      if (wanted_code == NE_EXPR)
        {
          warning (0, "%<or%> of unmatched not-equal tests is always 1");
          return constant_boolean_node (true, truth_type);
        }
      else
        {
          warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
          return constant_boolean_node (false, truth_type);
        }
    }

  /* Construct the expression we will return.  First get the component
     reference we will make.  Unless the mask is all ones the width of
     that field, perform the mask operation.  Then compare with the
     merged constant.  */
  result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
                               ll_unsignedp || rl_unsignedp);

  ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
  if (! all_ones_mask_p (ll_mask, lnbitsize))
    result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);

  return build2_loc (loc, wanted_code, truth_type, result,
                     const_binop (BIT_IOR_EXPR, l_const, r_const));
}

/* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
   constant.  */

static tree
optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
                            tree op0, tree op1)
{
  tree arg0 = op0;
  enum tree_code op_code;
  tree comp_const;
  tree minmax_const;
  int consts_equal, consts_lt;
  tree inner;

  STRIP_SIGN_NOPS (arg0);

  op_code = TREE_CODE (arg0);
  minmax_const = TREE_OPERAND (arg0, 1);
  comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
  consts_equal = tree_int_cst_equal (minmax_const, comp_const);
  consts_lt = tree_int_cst_lt (minmax_const, comp_const);
  inner = TREE_OPERAND (arg0, 0);

  /* If something does not permit us to optimize, return NULL_TREE.  */
  if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
      || TREE_CODE (comp_const) != INTEGER_CST
      || TREE_OVERFLOW (comp_const)
      || TREE_CODE (minmax_const) != INTEGER_CST
      || TREE_OVERFLOW (minmax_const))
    return NULL_TREE;

  /* Now handle all the various comparison codes.  We only handle EQ_EXPR
     and GT_EXPR, doing the rest with recursive calls using logical
     simplifications.  */
  switch (code)
    {
    case NE_EXPR:  case LT_EXPR:  case LE_EXPR:
      {
        tree tem
          = optimize_minmax_comparison (loc,
                                        invert_tree_comparison (code, false),
                                        type, op0, op1);
        if (tem)
          return invert_truthvalue_loc (loc, tem);
        return NULL_TREE;
      }

    case GE_EXPR:
      return
        fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
                         optimize_minmax_comparison
                         (loc, EQ_EXPR, type, arg0, comp_const),
                         optimize_minmax_comparison
                         (loc, GT_EXPR, type, arg0, comp_const));

    case EQ_EXPR:
      if (op_code == MAX_EXPR && consts_equal)
        /* MAX (X, 0) == 0  ->  X <= 0  */
        return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);

      else if (op_code == MAX_EXPR && consts_lt)
        /* MAX (X, 0) == 5  ->  X == 5   */
        return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);

      else if (op_code == MAX_EXPR)
        /* MAX (X, 0) == -1  ->  false  */
        return omit_one_operand_loc (loc, type, integer_zero_node, inner);

      else if (consts_equal)
        /* MIN (X, 0) == 0  ->  X >= 0  */
        return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);

      else if (consts_lt)
        /* MIN (X, 0) == 5  ->  false  */
        return omit_one_operand_loc (loc, type, integer_zero_node, inner);

      else
        /* MIN (X, 0) == -1  ->  X == -1  */
        return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);

    case GT_EXPR:
      if (op_code == MAX_EXPR && (consts_equal || consts_lt))
        /* MAX (X, 0) > 0  ->  X > 0
           MAX (X, 0) > 5  ->  X > 5  */
        return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);

      else if (op_code == MAX_EXPR)
        /* MAX (X, 0) > -1  ->  true  */
        return omit_one_operand_loc (loc, type, integer_one_node, inner);

      else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
        /* MIN (X, 0) > 0  ->  false
           MIN (X, 0) > 5  ->  false  */
        return omit_one_operand_loc (loc, type, integer_zero_node, inner);

      else
        /* MIN (X, 0) > -1  ->  X > -1  */
        return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);

    default:
      return NULL_TREE;
    }
}

/* T is an integer expression that is being multiplied by, divided by, or
   taken modulo a constant C (CODE says which operation, and what kind of
   division or modulus).  See if we can eliminate that operation by folding
   it with other operations already in T.  WIDE_TYPE, if non-null, is a type
   that should be used for the computation if wider than our type.

   For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
   (X * 2) + (Y * 4).  We must, however, be assured that either the original
   expression would not overflow or that overflow is undefined for the type
   in the language in question.

   If we return a non-null expression, it is an equivalent form of the
   original computation, but need not be in the original type.

   We set *STRICT_OVERFLOW_P to true if the return value depends on
   signed overflow being undefined.  Otherwise we do not change
   *STRICT_OVERFLOW_P.  */
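/* For instance, extracting the division by 4 from (X * 4) + 8 yields X + 2:
   each addend is a multiple of the constant, so the division can be
   distributed over the sum (subject to the overflow caveats above).  */
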
|
5582 |
|
|
|
5583 |
|
|
static tree
|
5584 |
|
|
extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
|
5585 |
|
|
bool *strict_overflow_p)
|
5586 |
|
|
{
|
5587 |
|
|
/* To avoid exponential search depth, refuse to allow recursion past
|
5588 |
|
|
three levels. Beyond that (1) it's highly unlikely that we'll find
|
5589 |
|
|
something interesting and (2) we've probably processed it before
|
5590 |
|
|
when we built the inner expression. */
|
5591 |
|
|
|
5592 |
|
|
static int depth;
|
5593 |
|
|
tree ret;
|
5594 |
|
|
|
5595 |
|
|
if (depth > 3)
|
5596 |
|
|
return NULL;
|
5597 |
|
|
|
5598 |
|
|
depth++;
|
5599 |
|
|
ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
|
5600 |
|
|
depth--;
|
5601 |
|
|
|
5602 |
|
|
return ret;
|
5603 |
|
|
}
|
5604 |
|
|
|
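/* Illustrative sketch, not part of the original source: the kind of
   source-level rewrite extract_muldiv enables.  Assuming the signed
   arithmetic does not overflow (or that such overflow is undefined),
   the two forms compute the same value (helper names hypothetical).  */
static int example_muldiv_before (int x, int y) { return (x * 8 + y * 16) / 4; }
static int example_muldiv_after  (int x, int y) { return x * 2 + y * 4; }
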
static tree
extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
                  bool *strict_overflow_p)
{
  tree type = TREE_TYPE (t);
  enum tree_code tcode = TREE_CODE (t);
  tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
                                   > GET_MODE_SIZE (TYPE_MODE (type)))
                ? wide_type : type);
  tree t1, t2;
  int same_p = tcode == code;
  tree op0 = NULL_TREE, op1 = NULL_TREE;
  bool sub_strict_overflow_p;

  /* Don't deal with constants of zero here; they confuse the code below.  */
  if (integer_zerop (c))
    return NULL_TREE;

  if (TREE_CODE_CLASS (tcode) == tcc_unary)
    op0 = TREE_OPERAND (t, 0);

  if (TREE_CODE_CLASS (tcode) == tcc_binary)
    op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);

  /* Note that we need not handle conditional operations here since fold
     already handles those cases.  So just do arithmetic here.  */
  switch (tcode)
    {
    case INTEGER_CST:
      /* For a constant, we can always simplify if we are a multiply
         or (for divide and modulus) if it is a multiple of our constant.  */
      if (code == MULT_EXPR
          || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c)))
        return const_binop (code, fold_convert (ctype, t),
                            fold_convert (ctype, c));
      break;

    CASE_CONVERT: case NON_LVALUE_EXPR:
      /* If op0 is an expression ...  */
      if ((COMPARISON_CLASS_P (op0)
           || UNARY_CLASS_P (op0)
           || BINARY_CLASS_P (op0)
           || VL_EXP_CLASS_P (op0)
           || EXPRESSION_CLASS_P (op0))
          /* ... and has wrapping overflow, and its type is smaller
             than ctype, then we cannot pass through as widening.  */
          && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
               && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
                     && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
               && (TYPE_PRECISION (ctype)
                   > TYPE_PRECISION (TREE_TYPE (op0))))
              /* ... or this is a truncation (t is narrower than op0),
                 then we cannot pass through this narrowing.  */
              || (TYPE_PRECISION (type)
                  < TYPE_PRECISION (TREE_TYPE (op0)))
              /* ... or signedness changes for division or modulus,
                 then we cannot pass through this conversion.  */
              || (code != MULT_EXPR
                  && (TYPE_UNSIGNED (ctype)
                      != TYPE_UNSIGNED (TREE_TYPE (op0))))
              /* ... or has undefined overflow while the converted to
                 type has not, we cannot do the operation in the inner type
                 as that would introduce undefined overflow.  */
              || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
                  && !TYPE_OVERFLOW_UNDEFINED (type))))
        break;

      /* Pass the constant down and see if we can make a simplification.  If
         we can, replace this expression with the inner simplification for
         possible later conversion to our or some other type.  */
      if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
          && TREE_CODE (t2) == INTEGER_CST
          && !TREE_OVERFLOW (t2)
          && (0 != (t1 = extract_muldiv (op0, t2, code,
                                         code == MULT_EXPR
                                         ? ctype : NULL_TREE,
                                         strict_overflow_p))))
        return t1;
      break;

    case ABS_EXPR:
      /* If widening the type changes it from signed to unsigned, then we
         must avoid building ABS_EXPR itself as unsigned.  */
      if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
        {
          tree cstype = (*signed_type_for) (ctype);
          if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
              != 0)
            {
              t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
              return fold_convert (ctype, t1);
            }
          break;
        }
      /* If the constant is negative, we cannot simplify this.  */
      if (tree_int_cst_sgn (c) == -1)
        break;
      /* FALLTHROUGH */
    case NEGATE_EXPR:
      if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
          != 0)
        return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
      break;

    case MIN_EXPR:  case MAX_EXPR:
      /* If widening the type changes the signedness, then we can't perform
         this optimization as that changes the result.  */
      if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
        break;

      /* MIN (a, b) / 5 -> MIN (a / 5, b / 5)  */
      sub_strict_overflow_p = false;
      if ((t1 = extract_muldiv (op0, c, code, wide_type,
                                &sub_strict_overflow_p)) != 0
          && (t2 = extract_muldiv (op1, c, code, wide_type,
                                   &sub_strict_overflow_p)) != 0)
        {
          if (tree_int_cst_sgn (c) < 0)
            tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
          if (sub_strict_overflow_p)
            *strict_overflow_p = true;
          return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
                              fold_convert (ctype, t2));
        }
      break;

    case LSHIFT_EXPR:  case RSHIFT_EXPR:
      /* If the second operand is constant, this is a multiplication
         or floor division, by a power of two, so we can treat it that
         way unless the multiplier or divisor overflows.  Signed
         left-shift overflow is implementation-defined rather than
         undefined in C90, so do not convert signed left shift into
         multiplication.  */
      if (TREE_CODE (op1) == INTEGER_CST
          && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
          /* const_binop may not detect overflow correctly,
             so check for it explicitly here.  */
          && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
          && TREE_INT_CST_HIGH (op1) == 0
          && 0 != (t1 = fold_convert (ctype,
                                      const_binop (LSHIFT_EXPR,
                                                   size_one_node,
                                                   op1)))
          && !TREE_OVERFLOW (t1))
        return extract_muldiv (build2 (tcode == LSHIFT_EXPR
                                       ? MULT_EXPR : FLOOR_DIV_EXPR,
                                       ctype,
                                       fold_convert (ctype, op0),
                                       t1),
                               c, code, wide_type, strict_overflow_p);
      break;

    case PLUS_EXPR:  case MINUS_EXPR:
      /* See if we can eliminate the operation on both sides.  If we can, we
         can return a new PLUS or MINUS.  If we can't, the only remaining
         cases where we can do anything are if the second operand is a
         constant.  */
      sub_strict_overflow_p = false;
      t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
      t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
      if (t1 != 0 && t2 != 0
          && (code == MULT_EXPR
              /* If not multiplication, we can only do this if both operands
                 are divisible by c.  */
              || (multiple_of_p (ctype, op0, c)
                  && multiple_of_p (ctype, op1, c))))
        {
          if (sub_strict_overflow_p)
            *strict_overflow_p = true;
          return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
                              fold_convert (ctype, t2));
        }

      /* If this was a subtraction, negate OP1 and set it to be an addition.
         This simplifies the logic below.  */
      if (tcode == MINUS_EXPR)
        {
          tcode = PLUS_EXPR, op1 = negate_expr (op1);
          /* If OP1 was not easily negatable, the constant may be OP0.  */
          if (TREE_CODE (op0) == INTEGER_CST)
            {
              tree tem = op0;
              op0 = op1;
              op1 = tem;
              tem = t1;
              t1 = t2;
              t2 = tem;
            }
        }

      if (TREE_CODE (op1) != INTEGER_CST)
        break;

      /* If either OP1 or C are negative, this optimization is not safe for
         some of the division and remainder types while for others we need
         to change the code.  */
      if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
        {
          if (code == CEIL_DIV_EXPR)
            code = FLOOR_DIV_EXPR;
          else if (code == FLOOR_DIV_EXPR)
            code = CEIL_DIV_EXPR;
          else if (code != MULT_EXPR
                   && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
            break;
        }

      /* If it's a multiply or a division/modulus operation of a multiple
         of our constant, do the operation and verify it doesn't overflow.  */
      if (code == MULT_EXPR
          || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
        {
          op1 = const_binop (code, fold_convert (ctype, op1),
                             fold_convert (ctype, c));
          /* We allow the constant to overflow with wrapping semantics.  */
          if (op1 == 0
              || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
            break;
        }
      else
        break;

      /* If we have an unsigned type that is not a sizetype, we cannot widen
         the operation since it will change the result if the original
         computation overflowed.  */
      if (TYPE_UNSIGNED (ctype)
          && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
          && ctype != type)
        break;

      /* If we were able to eliminate our operation from the first side,
         apply our operation to the second side and reform the PLUS.  */
      if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
        return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);

      /* The last case is if we are a multiply.  In that case, we can
         apply the distributive law to commute the multiply and addition
         if the multiplication of the constants doesn't overflow.  */
      if (code == MULT_EXPR)
        return fold_build2 (tcode, ctype,
                            fold_build2 (code, ctype,
                                         fold_convert (ctype, op0),
                                         fold_convert (ctype, c)),
                            op1);

      break;

    case MULT_EXPR:
      /* We have a special case here if we are doing something like
         (C * 8) % 4 since we know that's zero.  */
      if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
           || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
          /* If the multiplication can overflow we cannot optimize this.
             ??? Until we can properly mark individual operations as
             not overflowing we need to treat sizetype special here as
             stor-layout relies on this optimization to make
             DECL_FIELD_BIT_OFFSET always a constant.  */
          && (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
              || (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
                  && TYPE_IS_SIZETYPE (TREE_TYPE (t))))
          && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
          && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
        {
          *strict_overflow_p = true;
          return omit_one_operand (type, integer_zero_node, op0);
        }

      /* ... fall through ...  */

    case TRUNC_DIV_EXPR:  case CEIL_DIV_EXPR:  case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:  case EXACT_DIV_EXPR:
      /* If we can extract our operation from the LHS, do so and return a
         new operation.  Likewise for the RHS from a MULT_EXPR.  Otherwise,
         do something only if the second operand is a constant.  */
      if (same_p
          && (t1 = extract_muldiv (op0, c, code, wide_type,
                                   strict_overflow_p)) != 0)
        return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
                            fold_convert (ctype, op1));
      else if (tcode == MULT_EXPR && code == MULT_EXPR
               && (t1 = extract_muldiv (op1, c, code, wide_type,
                                        strict_overflow_p)) != 0)
        return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
                            fold_convert (ctype, t1));
      else if (TREE_CODE (op1) != INTEGER_CST)
        return 0;

      /* If these are the same operation types, we can associate them
         assuming no overflow.  */
      if (tcode == code)
        {
          double_int mul;
          int overflow_p;
          mul = double_int_mul_with_sign
                  (double_int_ext
                     (tree_to_double_int (op1),
                      TYPE_PRECISION (ctype), TYPE_UNSIGNED (ctype)),
                   double_int_ext
                     (tree_to_double_int (c),
                      TYPE_PRECISION (ctype), TYPE_UNSIGNED (ctype)),
                   false, &overflow_p);
          overflow_p = (((!TYPE_UNSIGNED (ctype)
                          || (TREE_CODE (ctype) == INTEGER_TYPE
                              && TYPE_IS_SIZETYPE (ctype)))
                         && overflow_p)
                        | TREE_OVERFLOW (c) | TREE_OVERFLOW (op1));
          if (!double_int_fits_to_tree_p (ctype, mul)
              && ((TYPE_UNSIGNED (ctype) && tcode != MULT_EXPR)
                  || !TYPE_UNSIGNED (ctype)
                  || (TREE_CODE (ctype) == INTEGER_TYPE
                      && TYPE_IS_SIZETYPE (ctype))))
            overflow_p = 1;
          if (!overflow_p)
            return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
                                double_int_to_tree (ctype, mul));
        }

      /* If these operations "cancel" each other, we have the main
         optimizations of this pass, which occur when either constant is a
         multiple of the other, in which case we replace this with either
         an operation of CODE or TCODE.

         If we have an unsigned type that is not a sizetype, we cannot do
         this since it will change the result if the original computation
         overflowed.  */
      if ((TYPE_OVERFLOW_UNDEFINED (ctype)
           || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
          && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
              || (tcode == MULT_EXPR
                  && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
                  && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
                  && code != MULT_EXPR)))
        {
          if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
            {
              if (TYPE_OVERFLOW_UNDEFINED (ctype))
                *strict_overflow_p = true;
              return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
                                  fold_convert (ctype,
                                                const_binop (TRUNC_DIV_EXPR,
                                                             op1, c)));
            }
          else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1)))
            {
              if (TYPE_OVERFLOW_UNDEFINED (ctype))
                *strict_overflow_p = true;
              return fold_build2 (code, ctype, fold_convert (ctype, op0),
                                  fold_convert (ctype,
                                                const_binop (TRUNC_DIV_EXPR,
                                                             c, op1)));
            }
        }
      break;

    default:
      break;
    }

  return 0;
}

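/* Illustrative sketch, not part of the original source: the shift
   handling above treats a right shift by a constant as a floor division
   by a power of two, so with undefined signed overflow (i.e. assuming
   x * 4 does not wrap) these two forms agree (helper names
   hypothetical).  */
static int example_shr_before (int x) { return (x * 4) >> 2; }
static int example_shr_after  (int x) { return x; }
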
/* Return a node which has the indicated constant VALUE (either 0 or
   1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
   and is of the indicated TYPE.  */

tree
constant_boolean_node (bool value, tree type)
{
  if (type == integer_type_node)
    return value ? integer_one_node : integer_zero_node;
  else if (type == boolean_type_node)
    return value ? boolean_true_node : boolean_false_node;
  else if (TREE_CODE (type) == VECTOR_TYPE)
    return build_vector_from_val (type,
                                  build_int_cst (TREE_TYPE (type),
                                                 value ? -1 : 0));
  else
    return fold_convert (type, value ? integer_one_node : integer_zero_node);
}


/* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
   Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'.  Here
   CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
   expression, and ARG to `a'.  If COND_FIRST_P is nonzero, then the
   COND is the first argument to CODE; otherwise (as in the example
   given here), it is the second argument.  TYPE is the type of the
   original expression.  Return NULL_TREE if no simplification is
   possible.  */

static tree
fold_binary_op_with_conditional_arg (location_t loc,
                                     enum tree_code code,
                                     tree type, tree op0, tree op1,
                                     tree cond, tree arg, int cond_first_p)
{
  tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
  tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
  tree test, true_value, false_value;
  tree lhs = NULL_TREE;
  tree rhs = NULL_TREE;

  if (TREE_CODE (cond) == COND_EXPR)
    {
      test = TREE_OPERAND (cond, 0);
      true_value = TREE_OPERAND (cond, 1);
      false_value = TREE_OPERAND (cond, 2);
      /* If this operand throws an exception, then it does not make
         sense to try to perform a logical or arithmetic operation
         involving it.  */
      if (VOID_TYPE_P (TREE_TYPE (true_value)))
        lhs = true_value;
      if (VOID_TYPE_P (TREE_TYPE (false_value)))
        rhs = false_value;
    }
  else
    {
      tree testtype = TREE_TYPE (cond);
      test = cond;
      true_value = constant_boolean_node (true, testtype);
      false_value = constant_boolean_node (false, testtype);
    }

  /* This transformation is only worthwhile if we don't have to wrap ARG
     in a SAVE_EXPR and the operation can be simplified on at least one
     of the branches once it's pushed inside the COND_EXPR.  */
  if (!TREE_CONSTANT (arg)
      && (TREE_SIDE_EFFECTS (arg)
          || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
    return NULL_TREE;

  arg = fold_convert_loc (loc, arg_type, arg);
  if (lhs == 0)
    {
      true_value = fold_convert_loc (loc, cond_type, true_value);
      if (cond_first_p)
        lhs = fold_build2_loc (loc, code, type, true_value, arg);
      else
        lhs = fold_build2_loc (loc, code, type, arg, true_value);
    }
  if (rhs == 0)
    {
      false_value = fold_convert_loc (loc, cond_type, false_value);
      if (cond_first_p)
        rhs = fold_build2_loc (loc, code, type, false_value, arg);
      else
        rhs = fold_build2_loc (loc, code, type, arg, false_value);
    }

  /* Check that we have simplified at least one of the branches.  */
  if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
    return NULL_TREE;

  return fold_build3_loc (loc, COND_EXPR, type, test, lhs, rhs);
}


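/* Illustrative sketch, not part of the original source: the
   distribution performed above, at the C level.  Pushing the addition
   into the conditional pays off when a branch then simplifies, e.g.
   when one arm is a constant (helper names hypothetical).  */
static int example_cond_before (int a, int b) { return a + (b ? 1 : 0); }
static int example_cond_after  (int a, int b) { return b ? a + 1 : a + 0; }
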
/* Subroutine of fold() that checks for the addition of +/- 0.0.

   If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
   TYPE, X + ADDEND is the same as X.  If NEGATE, return true if X -
   ADDEND is the same as X.

   X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
   and finite.  The problematic cases are when X is zero, and its mode
   has signed zeros.  In the case of rounding towards -infinity,
   X - 0 is not the same as X because 0 - 0 is -0.  In other rounding
   modes, X + 0 is not the same as X because -0 + 0 is 0.  */

bool
fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
{
  if (!real_zerop (addend))
    return false;

  /* Don't allow the fold with -fsignaling-nans.  */
  if (HONOR_SNANS (TYPE_MODE (type)))
    return false;

  /* Allow the fold if zeros aren't signed, or their sign isn't important.  */
  if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
    return true;

  /* Treat x + -0 as x - 0 and x - -0 as x + 0.  */
  if (TREE_CODE (addend) == REAL_CST
      && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
    negate = !negate;

  /* The mode has signed zeros, and we have to honor their sign.
     In this situation, there is only one case we can return true for.
     X - 0 is the same as X unless rounding towards -infinity is
     supported.  */
  return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
}

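/* Illustrative sketch, not part of the original source: why X + 0.0 is
   not folded to X under default (round-to-nearest) IEEE semantics.
   With x == -0.0, x + 0.0 evaluates to +0.0, so the fold would flip
   the sign of a zero result; x - 0.0 stays -0.0 and is safe.  */
static double example_add_zero (double x) { return x + 0.0; }  /* not simply x  */
static double example_sub_zero (double x) { return x - 0.0; }  /* foldable to x  */
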
/* Subroutine of fold() that checks comparisons of built-in math
   functions against real constants.

   FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
   operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR.  TYPE
   is the type of the result and ARG0 and ARG1 are the operands of the
   comparison.  ARG1 must be a TREE_REAL_CST.

   The function returns the constant folded tree if a simplification
   can be made, and NULL_TREE otherwise.  */

static tree
fold_mathfn_compare (location_t loc,
                     enum built_in_function fcode, enum tree_code code,
                     tree type, tree arg0, tree arg1)
{
  REAL_VALUE_TYPE c;

  if (BUILTIN_SQRT_P (fcode))
    {
      tree arg = CALL_EXPR_ARG (arg0, 0);
      enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));

      c = TREE_REAL_CST (arg1);
      if (REAL_VALUE_NEGATIVE (c))
        {
          /* sqrt(x) < y is always false, if y is negative.  */
          if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
            return omit_one_operand_loc (loc, type, integer_zero_node, arg);

          /* sqrt(x) > y is always true, if y is negative and we
             don't care about NaNs, i.e. negative values of x.  */
          if (code == NE_EXPR || !HONOR_NANS (mode))
            return omit_one_operand_loc (loc, type, integer_one_node, arg);

          /* sqrt(x) > y is the same as x >= 0, if y is negative.  */
          return fold_build2_loc (loc, GE_EXPR, type, arg,
                                  build_real (TREE_TYPE (arg), dconst0));
        }
      else if (code == GT_EXPR || code == GE_EXPR)
        {
          REAL_VALUE_TYPE c2;

          REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
          real_convert (&c2, mode, &c2);

          if (REAL_VALUE_ISINF (c2))
            {
              /* sqrt(x) > y is x == +Inf, when y is very large.  */
              if (HONOR_INFINITIES (mode))
                return fold_build2_loc (loc, EQ_EXPR, type, arg,
                                        build_real (TREE_TYPE (arg), c2));

              /* sqrt(x) > y is always false, when y is very large
                 and we don't care about infinities.  */
              return omit_one_operand_loc (loc, type, integer_zero_node, arg);
            }

          /* sqrt(x) > c is the same as x > c*c.  */
          return fold_build2_loc (loc, code, type, arg,
                                  build_real (TREE_TYPE (arg), c2));
        }
      else if (code == LT_EXPR || code == LE_EXPR)
        {
          REAL_VALUE_TYPE c2;

          REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
          real_convert (&c2, mode, &c2);

          if (REAL_VALUE_ISINF (c2))
            {
              /* sqrt(x) < y is always true, when y is a very large
                 value and we don't care about NaNs or Infinities.  */
              if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
                return omit_one_operand_loc (loc, type, integer_one_node, arg);

              /* sqrt(x) < y is x != +Inf when y is very large and we
                 don't care about NaNs.  */
              if (! HONOR_NANS (mode))
                return fold_build2_loc (loc, NE_EXPR, type, arg,
                                        build_real (TREE_TYPE (arg), c2));

              /* sqrt(x) < y is x >= 0 when y is very large and we
                 don't care about Infinities.  */
              if (! HONOR_INFINITIES (mode))
                return fold_build2_loc (loc, GE_EXPR, type, arg,
                                        build_real (TREE_TYPE (arg), dconst0));

              /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large.  */
              arg = save_expr (arg);
              return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
                                      fold_build2_loc (loc, GE_EXPR, type, arg,
                                                       build_real (TREE_TYPE (arg),
                                                                   dconst0)),
                                      fold_build2_loc (loc, NE_EXPR, type, arg,
                                                       build_real (TREE_TYPE (arg),
                                                                   c2)));
            }

          /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs.  */
          if (! HONOR_NANS (mode))
            return fold_build2_loc (loc, code, type, arg,
                                    build_real (TREE_TYPE (arg), c2));

          /* sqrt(x) < c is the same as x >= 0 && x < c*c.  */
          arg = save_expr (arg);
          return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
                                  fold_build2_loc (loc, GE_EXPR, type, arg,
                                                   build_real (TREE_TYPE (arg),
                                                               dconst0)),
                                  fold_build2_loc (loc, code, type, arg,
                                                   build_real (TREE_TYPE (arg),
                                                               c2)));
        }
    }

  return NULL_TREE;
}

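/* Illustrative sketch, not part of the original source: the sqrt
   rewrite above at the C level.  Because sqrt is monotone and correctly
   rounded, and 3.0 and 9.0 are exact, the comparison can be done on the
   argument, avoiding the sqrt call (helper names hypothetical).  */
static int example_sqrt_before (double x) { return __builtin_sqrt (x) > 3.0; }
static int example_sqrt_after  (double x) { return x > 9.0; }
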
/* Subroutine of fold() that optimizes comparisons against Infinities,
   either +Inf or -Inf.

   CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
   GE_EXPR or LE_EXPR.  TYPE is the type of the result and ARG0 and ARG1
   are the operands of the comparison.  ARG1 must be a TREE_REAL_CST.

   The function returns the constant folded tree if a simplification
   can be made, and NULL_TREE otherwise.  */

static tree
fold_inf_compare (location_t loc, enum tree_code code, tree type,
                  tree arg0, tree arg1)
{
  enum machine_mode mode;
  REAL_VALUE_TYPE max;
  tree temp;
  bool neg;

  mode = TYPE_MODE (TREE_TYPE (arg0));

  /* For negative infinity swap the sense of the comparison.  */
  neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
  if (neg)
    code = swap_tree_comparison (code);

  switch (code)
    {
    case GT_EXPR:
      /* x > +Inf is always false, if we ignore sNaNs.  */
      if (HONOR_SNANS (mode))
        return NULL_TREE;
      return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

    case LE_EXPR:
      /* x <= +Inf is always true, if we don't care about NaNs.  */
      if (! HONOR_NANS (mode))
        return omit_one_operand_loc (loc, type, integer_one_node, arg0);

      /* x <= +Inf is the same as x == x, i.e. !isnan(x).  */
      arg0 = save_expr (arg0);
      return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg0);

    case EQ_EXPR:
    case GE_EXPR:
      /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX.  */
      real_maxval (&max, neg, mode);
      return fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
                              arg0, build_real (TREE_TYPE (arg0), max));

    case LT_EXPR:
      /* x < +Inf is always equal to x <= DBL_MAX.  */
      real_maxval (&max, neg, mode);
      return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
                              arg0, build_real (TREE_TYPE (arg0), max));

    case NE_EXPR:
      /* x != +Inf is always equal to !(x > DBL_MAX).  */
      real_maxval (&max, neg, mode);
      if (! HONOR_NANS (mode))
        return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
                                arg0, build_real (TREE_TYPE (arg0), max));

      temp = fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
                              arg0, build_real (TREE_TYPE (arg0), max));
      return fold_build1_loc (loc, TRUTH_NOT_EXPR, type, temp);

    default:
      break;
    }

  return NULL_TREE;
}

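/* Illustrative sketch, not part of the original source: the rewrite of
   a +Inf comparison into a DBL_MAX comparison, at the C level.  Both
   forms are false for NaN and +Inf and true otherwise, so they agree
   even when NaNs are honored (helper names hypothetical).  */
static int example_inf_before (double x) { return x < __builtin_inf (); }
static int example_inf_after  (double x) { return x <= 1.7976931348623157e+308; }
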
/* Subroutine of fold() that optimizes comparisons of a division by
   a nonzero integer constant against an integer constant, i.e.
   X/C1 op C2.

   CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
   GE_EXPR or LE_EXPR.  TYPE is the type of the result and ARG0 and ARG1
   are the operands of the comparison.  ARG1 must be an INTEGER_CST.

   The function returns the constant folded tree if a simplification
   can be made, and NULL_TREE otherwise.  */

static tree
fold_div_compare (location_t loc,
                  enum tree_code code, tree type, tree arg0, tree arg1)
{
  tree prod, tmp, hi, lo;
  tree arg00 = TREE_OPERAND (arg0, 0);
  tree arg01 = TREE_OPERAND (arg0, 1);
  double_int val;
  bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
  bool neg_overflow;
  int overflow;

  /* We have to do this the hard way to detect unsigned overflow.
     prod = int_const_binop (MULT_EXPR, arg01, arg1);  */
  overflow = mul_double_with_sign (TREE_INT_CST_LOW (arg01),
                                   TREE_INT_CST_HIGH (arg01),
                                   TREE_INT_CST_LOW (arg1),
                                   TREE_INT_CST_HIGH (arg1),
                                   &val.low, &val.high, unsigned_p);
  prod = force_fit_type_double (TREE_TYPE (arg00), val, -1, overflow);
  neg_overflow = false;

  if (unsigned_p)
    {
      tmp = int_const_binop (MINUS_EXPR, arg01,
                             build_int_cst (TREE_TYPE (arg01), 1));
      lo = prod;

      /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp).  */
      overflow = add_double_with_sign (TREE_INT_CST_LOW (prod),
                                       TREE_INT_CST_HIGH (prod),
                                       TREE_INT_CST_LOW (tmp),
                                       TREE_INT_CST_HIGH (tmp),
                                       &val.low, &val.high, unsigned_p);
      hi = force_fit_type_double (TREE_TYPE (arg00), val,
                                  -1, overflow | TREE_OVERFLOW (prod));
    }
  else if (tree_int_cst_sgn (arg01) >= 0)
    {
      tmp = int_const_binop (MINUS_EXPR, arg01,
                             build_int_cst (TREE_TYPE (arg01), 1));
      switch (tree_int_cst_sgn (arg1))
        {
        case -1:
          neg_overflow = true;
          lo = int_const_binop (MINUS_EXPR, prod, tmp);
          hi = prod;
          break;

        case 0:
          lo = fold_negate_const (tmp, TREE_TYPE (arg0));
          hi = tmp;
          break;

        case 1:
          hi = int_const_binop (PLUS_EXPR, prod, tmp);
          lo = prod;
          break;

        default:
          gcc_unreachable ();
        }
    }
  else
    {
      /* A negative divisor reverses the relational operators.  */
      code = swap_tree_comparison (code);

      tmp = int_const_binop (PLUS_EXPR, arg01,
                             build_int_cst (TREE_TYPE (arg01), 1));
      switch (tree_int_cst_sgn (arg1))
        {
        case -1:
          hi = int_const_binop (MINUS_EXPR, prod, tmp);
          lo = prod;
          break;

        case 0:
          hi = fold_negate_const (tmp, TREE_TYPE (arg0));
          lo = tmp;
          break;

        case 1:
          neg_overflow = true;
          lo = int_const_binop (PLUS_EXPR, prod, tmp);
          hi = prod;
          break;

        default:
          gcc_unreachable ();
        }
    }

  switch (code)
    {
    case EQ_EXPR:
      if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
        return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
      if (TREE_OVERFLOW (hi))
        return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
      if (TREE_OVERFLOW (lo))
        return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
      return build_range_check (loc, type, arg00, 1, lo, hi);

    case NE_EXPR:
      if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
        return omit_one_operand_loc (loc, type, integer_one_node, arg00);
      if (TREE_OVERFLOW (hi))
        return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
      if (TREE_OVERFLOW (lo))
        return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
      return build_range_check (loc, type, arg00, 0, lo, hi);

    case LT_EXPR:
      if (TREE_OVERFLOW (lo))
        {
          tmp = neg_overflow ? integer_zero_node : integer_one_node;
          return omit_one_operand_loc (loc, type, tmp, arg00);
        }
      return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);

    case LE_EXPR:
      if (TREE_OVERFLOW (hi))
        {
          tmp = neg_overflow ? integer_zero_node : integer_one_node;
          return omit_one_operand_loc (loc, type, tmp, arg00);
        }
      return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);

    case GT_EXPR:
      if (TREE_OVERFLOW (hi))
        {
          tmp = neg_overflow ? integer_one_node : integer_zero_node;
          return omit_one_operand_loc (loc, type, tmp, arg00);
        }
      return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);

    case GE_EXPR:
      if (TREE_OVERFLOW (lo))
        {
          tmp = neg_overflow ? integer_one_node : integer_zero_node;
          return omit_one_operand_loc (loc, type, tmp, arg00);
        }
      return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);

    default:
      break;
    }

  return NULL_TREE;
}


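/* Illustrative sketch, not part of the original source: the division
   comparison above rewritten as a range check.  For unsigned x,
   x / 3 == 5 holds exactly when x is in [15, 17], i.e. when
   x - 15 <= 2 in unsigned arithmetic (helper names hypothetical).  */
static int example_div_before (unsigned x) { return x / 3 == 5; }
static int example_div_after  (unsigned x) { return x - 15u <= 2u; }
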
/* If CODE with arguments ARG0 and ARG1 represents a single bit
   equality/inequality test, then return a simplified form of the test
   using a sign test.  Otherwise return NULL.  TYPE is the desired
   result type.  */

static tree
fold_single_bit_test_into_sign_test (location_t loc,
                                     enum tree_code code, tree arg0, tree arg1,
                                     tree result_type)
{
  /* If this is testing a single bit, we can optimize the test.  */
  if ((code == NE_EXPR || code == EQ_EXPR)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      /* If we have (A & C) != 0 where C is the sign bit of A, convert
         this into A < 0.  Similarly for (A & C) == 0 into A >= 0.  */
      tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));

      if (arg00 != NULL_TREE
          /* This is only a win if casting to a signed type is cheap,
             i.e. when arg00's type is not a partial mode.  */
          && TYPE_PRECISION (TREE_TYPE (arg00))
             == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
        {
          tree stype = signed_type_for (TREE_TYPE (arg00));
          return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
                                  result_type,
                                  fold_convert_loc (loc, stype, arg00),
                                  build_int_cst (stype, 0));
        }
    }

  return NULL_TREE;
}

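/* Illustrative sketch, not part of the original source: the sign-bit
   rewrite above at the C level, assuming 32-bit int and GCC's
   modulo-2^N unsigned-to-signed conversion (helper names
   hypothetical).  */
static int example_signbit_before (unsigned x) { return (x & 0x80000000u) != 0; }
static int example_signbit_after  (unsigned x) { return (int) x < 0; }
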
/* If CODE with arguments ARG0 and ARG1 represents a single bit
   equality/inequality test, then return a simplified form of
   the test using shifts and logical operations.  Otherwise return
   NULL.  TYPE is the desired result type.  */

tree
fold_single_bit_test (location_t loc, enum tree_code code,
                      tree arg0, tree arg1, tree result_type)
{
  /* If this is testing a single bit, we can optimize the test.  */
  if ((code == NE_EXPR || code == EQ_EXPR)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree inner = TREE_OPERAND (arg0, 0);
      tree type = TREE_TYPE (arg0);
      int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
      enum machine_mode operand_mode = TYPE_MODE (type);
      int ops_unsigned;
      tree signed_type, unsigned_type, intermediate_type;
      tree tem, one;

      /* First, see if we can fold the single bit test into a sign-bit
         test.  */
      tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
                                                 result_type);
      if (tem)
        return tem;

      /* Otherwise we have (A & C) != 0 where C is a single bit,
         convert that into ((A >> C2) & 1), where C2 = log2(C).
         Similarly for (A & C) == 0.  */

      /* If INNER is a right shift of a constant and it plus BITNUM does
         not overflow, adjust BITNUM and INNER.  */
      if (TREE_CODE (inner) == RSHIFT_EXPR
          && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
          && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
          && bitnum < TYPE_PRECISION (type)
          && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
                                   bitnum - TYPE_PRECISION (type)))
        {
          bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
          inner = TREE_OPERAND (inner, 0);
        }

      /* If we are going to be able to omit the AND below, we must do our
         operations as unsigned.  If we must use the AND, we have a choice.
         Normally unsigned is faster, but for some machines signed is.  */
#ifdef LOAD_EXTEND_OP
      ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
                      && !flag_syntax_only) ? 0 : 1;
#else
      ops_unsigned = 1;
#endif

      signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
      unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
      intermediate_type = ops_unsigned ? unsigned_type : signed_type;
      inner = fold_convert_loc (loc, intermediate_type, inner);

      if (bitnum != 0)
        inner = build2 (RSHIFT_EXPR, intermediate_type,
                        inner, size_int (bitnum));

      one = build_int_cst (intermediate_type, 1);

      if (code == EQ_EXPR)
        inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);

      /* Put the AND last so it can combine with more things.  */
      inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);

      /* Make sure to return the proper type.  */
      inner = fold_convert_loc (loc, result_type, inner);

      return inner;
    }
  return NULL_TREE;
}

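/* Illustrative sketch, not part of the original source: the shift form
   of a single-bit test produced above (helper names hypothetical).  */
static int example_bit_before (unsigned a) { return (a & 8u) != 0; }
static int example_bit_after  (unsigned a) { return (a >> 3) & 1u; }
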
/* Check whether we are allowed to reorder operands arg0 and arg1,
   such that the evaluation of arg1 occurs before arg0.  */

static bool
reorder_operands_p (const_tree arg0, const_tree arg1)
{
  if (! flag_evaluation_order)
    return true;
  if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
    return true;
  return ! TREE_SIDE_EFFECTS (arg0)
         && ! TREE_SIDE_EFFECTS (arg1);
}

/* Test whether it is preferable to swap two operands, ARG0 and
   ARG1, for example because ARG0 is an integer constant and ARG1
   isn't.  If REORDER is true, only recommend swapping if we can
   evaluate the operands in reverse order.  */

bool
tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
{
  STRIP_SIGN_NOPS (arg0);
  STRIP_SIGN_NOPS (arg1);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return 0;
  if (TREE_CODE (arg0) == INTEGER_CST)
    return 1;

  if (TREE_CODE (arg1) == REAL_CST)
    return 0;
  if (TREE_CODE (arg0) == REAL_CST)
    return 1;

  if (TREE_CODE (arg1) == FIXED_CST)
    return 0;
  if (TREE_CODE (arg0) == FIXED_CST)
    return 1;

  if (TREE_CODE (arg1) == COMPLEX_CST)
    return 0;
  if (TREE_CODE (arg0) == COMPLEX_CST)
    return 1;

  if (TREE_CONSTANT (arg1))
    return 0;
  if (TREE_CONSTANT (arg0))
    return 1;

  if (optimize_function_for_size_p (cfun))
    return 0;

  if (reorder && flag_evaluation_order
      && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
    return 0;

  /* It is preferable to swap two SSA_NAMEs to ensure a canonical form
     for commutative and comparison operators.  Ensuring a canonical
     form allows the optimizers to find additional redundancies without
     having to explicitly check for both orderings.  */
  if (TREE_CODE (arg0) == SSA_NAME
      && TREE_CODE (arg1) == SSA_NAME
      && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
    return 1;

  /* Put SSA_NAMEs last.  */
  if (TREE_CODE (arg1) == SSA_NAME)
    return 0;
  if (TREE_CODE (arg0) == SSA_NAME)
    return 1;

  /* Put variables last.  */
  if (DECL_P (arg1))
    return 0;
  if (DECL_P (arg0))
    return 1;

  return 0;
}

/* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
   ARG0 is extended to a wider type.  */

static tree
fold_widened_comparison (location_t loc, enum tree_code code,
                         tree type, tree arg0, tree arg1)
{
  tree arg0_unw = get_unwidened (arg0, NULL_TREE);
  tree arg1_unw;
  tree shorter_type, outer_type;
  tree min, max;
  bool above, below;

  if (arg0_unw == arg0)
    return NULL_TREE;
  shorter_type = TREE_TYPE (arg0_unw);

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* Disable this optimization if we're casting a function pointer
     type on targets that require function pointer canonicalization.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (shorter_type) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
    return NULL_TREE;
#endif

  if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
    return NULL_TREE;

  arg1_unw = get_unwidened (arg1, NULL_TREE);

  /* If possible, express the comparison in the shorter mode.  */
  if ((code == EQ_EXPR || code == NE_EXPR
       || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
      && (TREE_TYPE (arg1_unw) == shorter_type
          || ((TYPE_PRECISION (shorter_type)
               >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
              && (TYPE_UNSIGNED (shorter_type)
                  == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
          || (TREE_CODE (arg1_unw) == INTEGER_CST
              && (TREE_CODE (shorter_type) == INTEGER_TYPE
                  || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
              && int_fits_type_p (arg1_unw, shorter_type))))
    return fold_build2_loc (loc, code, type, arg0_unw,
                            fold_convert_loc (loc, shorter_type, arg1_unw));

  if (TREE_CODE (arg1_unw) != INTEGER_CST
      || TREE_CODE (shorter_type) != INTEGER_TYPE
      || !int_fits_type_p (arg1_unw, shorter_type))
    return NULL_TREE;

  /* If we are comparing with an integer that does not fit into the range
     of the shorter type, the result is known.  */
  outer_type = TREE_TYPE (arg1_unw);
  min = lower_bound_in_type (outer_type, shorter_type);
  max = upper_bound_in_type (outer_type, shorter_type);

  above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
                                                   max, arg1_unw));
  below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
                                                   arg1_unw, min));

  switch (code)
    {
    case EQ_EXPR:
      if (above || below)
        return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
      break;

    case NE_EXPR:
      if (above || below)
        return omit_one_operand_loc (loc, type, integer_one_node, arg0);
      break;

    case LT_EXPR:
    case LE_EXPR:
      if (above)
        return omit_one_operand_loc (loc, type, integer_one_node, arg0);
      else if (below)
        return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

    case GT_EXPR:
    case GE_EXPR:
      if (above)
        return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
      else if (below)
        return omit_one_operand_loc (loc, type, integer_one_node, arg0);

    default:
      break;
    }

  return NULL_TREE;
}

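/* Illustrative sketch, not part of the original source: a widened
   comparison folded back to the narrow type.  Since 10 fits in the
   range of unsigned char, the explicit widening cannot change the
   outcome and is dropped (helper names hypothetical).  */
static int example_widen_before (unsigned char c) { return (long) c == 10L; }
static int example_widen_after  (unsigned char c) { return c == 10; }
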
/* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
   ARG0 just the signedness is changed.  */

static tree
fold_sign_changed_comparison (location_t loc, enum tree_code code, tree type,
                              tree arg0, tree arg1)
{
  tree arg0_inner;
  tree inner_type, outer_type;

  if (!CONVERT_EXPR_P (arg0))
    return NULL_TREE;

  outer_type = TREE_TYPE (arg0);
  arg0_inner = TREE_OPERAND (arg0, 0);
  inner_type = TREE_TYPE (arg0_inner);

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* Disable this optimization if we're casting a function pointer
     type on targets that require function pointer canonicalization.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (inner_type) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
    return NULL_TREE;
#endif

  if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
    return NULL_TREE;

  if (TREE_CODE (arg1) != INTEGER_CST
      && !(CONVERT_EXPR_P (arg1)
           && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
    return NULL_TREE;

  if ((TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
       || POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
      && code != NE_EXPR
      && code != EQ_EXPR)
    return NULL_TREE;

  if (TREE_CODE (arg1) == INTEGER_CST)
    arg1 = force_fit_type_double (inner_type, tree_to_double_int (arg1),
                                  0, TREE_OVERFLOW (arg1));
  else
    arg1 = fold_convert_loc (loc, inner_type, arg1);

  return fold_build2_loc (loc, code, type, arg0_inner, arg1);
}

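/* Illustrative sketch, not part of the original source: an equality
   comparison through a sign-changing cast is performed directly on the
   inner operand, since == and != do not depend on signedness for
   same-width types (helper names hypothetical).  */
static int example_signchange_before (unsigned x) { return (int) x == 5; }
static int example_signchange_after  (unsigned x) { return x == 5u; }
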
/* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is
   the step of the array.  Reconstructs s and delta in the case of s *
   delta being an integer constant (and thus already folded).  ADDR is
   the address.  MULT is the multiplicative expression.  If the
   function succeeds, the new address expression is returned.
   Otherwise NULL_TREE is returned.  LOC is the location of the
   resulting expression.  */

static tree
try_move_mult_to_index (location_t loc, tree addr, tree op1)
{
  tree s, delta, step;
  tree ref = TREE_OPERAND (addr, 0), pref;
  tree ret, pos;
  tree itype;
  bool mdim = false;

  /* Strip the nops that might be added when converting op1 to sizetype.  */
  STRIP_NOPS (op1);

  /* Canonicalize op1 into a possibly non-constant delta
     and an INTEGER_CST s.  */
  if (TREE_CODE (op1) == MULT_EXPR)
    {
      tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);

      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      if (TREE_CODE (arg0) == INTEGER_CST)
        {
          s = arg0;
          delta = arg1;
        }
      else if (TREE_CODE (arg1) == INTEGER_CST)
        {
          s = arg1;
          delta = arg0;
        }
      else
        return NULL_TREE;
    }
  else if (TREE_CODE (op1) == INTEGER_CST)
    {
      delta = op1;
      s = NULL_TREE;
    }
  else
    {
      /* Simulate we are delta * 1.  */
      delta = op1;
      s = integer_one_node;
    }

  for (;; ref = TREE_OPERAND (ref, 0))
    {
      if (TREE_CODE (ref) == ARRAY_REF)
        {
          tree domain;

          /* Remember if this was a multi-dimensional array.  */
          if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
            mdim = true;

          domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
          if (! domain)
            continue;
          itype = TREE_TYPE (domain);

          step = array_ref_element_size (ref);
          if (TREE_CODE (step) != INTEGER_CST)
            continue;

          if (s)
            {
              if (! tree_int_cst_equal (step, s))
                continue;
            }
          else
            {
              /* See if delta is a multiple of step.  */
              tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
              if (! tmp)
                continue;
              delta = tmp;
            }

          /* Only fold here if we can verify we do not overflow one
             dimension of a multi-dimensional array.  */
          if (mdim)
            {
              tree tmp;

              if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
                  || !TYPE_MAX_VALUE (domain)
                  || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
                continue;

              tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
                                     fold_convert_loc (loc, itype,
                                                       TREE_OPERAND (ref, 1)),
                                     fold_convert_loc (loc, itype, delta));
              if (!tmp
                  || TREE_CODE (tmp) != INTEGER_CST
                  || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
                continue;
            }

          break;
        }
      else if (TREE_CODE (ref) == COMPONENT_REF
               && TREE_CODE (TREE_TYPE (ref)) == ARRAY_TYPE)
        {
          tree domain;

          /* Remember if this was a multi-dimensional array.  */
          if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
            mdim = true;

          domain = TYPE_DOMAIN (TREE_TYPE (ref));
          if (! domain)
            continue;
          itype = TREE_TYPE (domain);

          step = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ref)));
          if (TREE_CODE (step) != INTEGER_CST)
            continue;

          if (s)
            {
              if (! tree_int_cst_equal (step, s))
                continue;
            }
          else
            {
              /* See if delta is a multiple of step.  */
              tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
              if (! tmp)
                continue;
              delta = tmp;
            }

          /* Only fold here if we can verify we do not overflow one
             dimension of a multi-dimensional array.  */
          if (mdim)
            {
              tree tmp;

              if (!TYPE_MIN_VALUE (domain)
                  || !TYPE_MAX_VALUE (domain)
                  || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
                continue;

              tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
                                     fold_convert_loc (loc, itype,
                                                       TYPE_MIN_VALUE (domain)),
                                     fold_convert_loc (loc, itype, delta));
              if (TREE_CODE (tmp) != INTEGER_CST
                  || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
                continue;
            }

          break;
        }
      else
        mdim = false;

      if (!handled_component_p (ref))
        return NULL_TREE;
    }

  /* We found the suitable array reference.  So copy everything up to it,
     and replace the index.  */

  pref = TREE_OPERAND (addr, 0);
  ret = copy_node (pref);
  SET_EXPR_LOCATION (ret, loc);
  pos = ret;

  while (pref != ref)
    {
      pref = TREE_OPERAND (pref, 0);
      TREE_OPERAND (pos, 0) = copy_node (pref);
      pos = TREE_OPERAND (pos, 0);
    }

  if (TREE_CODE (ref) == ARRAY_REF)
    {
      TREE_OPERAND (pos, 1)
        = fold_build2_loc (loc, PLUS_EXPR, itype,
                           fold_convert_loc (loc, itype, TREE_OPERAND (pos, 1)),
                           fold_convert_loc (loc, itype, delta));
      return fold_build1_loc (loc, ADDR_EXPR, TREE_TYPE (addr), ret);
    }
  else if (TREE_CODE (ref) == COMPONENT_REF)
    {
      gcc_assert (ret == pos);
      ret = build4_loc (loc, ARRAY_REF, TREE_TYPE (TREE_TYPE (ref)), ret,
                        fold_build2_loc
                          (loc, PLUS_EXPR, itype,
                           fold_convert_loc (loc, itype,
                                             TYPE_MIN_VALUE
                                               (TYPE_DOMAIN (TREE_TYPE (ref)))),
                           fold_convert_loc (loc, itype, delta)),
                        NULL_TREE, NULL_TREE);
      return build_fold_addr_expr_loc (loc, ret);
    }
  else
    gcc_unreachable ();
}


7011 |
|
|
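/* A worked example (the array and indices are invented for this
   illustration): for 'int a[8][4]', the byte offset in '&a[1][1] + 2'
   is 2 * sizeof (int) = 8, which divides evenly by the element size,
   so the offset is moved into the index to give '&a[1][3]'.  The mdim
   check above is what makes this safe: 1 + 2 = 3 still lies within
   the inner dimension's domain [0, 3], so no row boundary is
   crossed.  */
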
/* Fold A < X && A + 1 > Y to A < X && A >= Y.  Normally A + 1 > Y
   means A >= Y && A != MAX, but in this case we know that
   A < X <= MAX.  INEQ is A + 1 > Y, BOUND is A < X.  */

static tree
fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
{
  tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;

  if (TREE_CODE (bound) == LT_EXPR)
    a = TREE_OPERAND (bound, 0);
  else if (TREE_CODE (bound) == GT_EXPR)
    a = TREE_OPERAND (bound, 1);
  else
    return NULL_TREE;

  typea = TREE_TYPE (a);
  if (!INTEGRAL_TYPE_P (typea)
      && !POINTER_TYPE_P (typea))
    return NULL_TREE;

  if (TREE_CODE (ineq) == LT_EXPR)
    {
      a1 = TREE_OPERAND (ineq, 1);
      y = TREE_OPERAND (ineq, 0);
    }
  else if (TREE_CODE (ineq) == GT_EXPR)
    {
      a1 = TREE_OPERAND (ineq, 0);
      y = TREE_OPERAND (ineq, 1);
    }
  else
    return NULL_TREE;

  if (TREE_TYPE (a1) != typea)
    return NULL_TREE;

  if (POINTER_TYPE_P (typea))
    {
      /* Convert the pointer types to integers before taking the
	 difference.  */
      tree ta = fold_convert_loc (loc, ssizetype, a);
      tree ta1 = fold_convert_loc (loc, ssizetype, a1);
      diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
    }
  else
    diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);

  if (!diff || !integer_onep (diff))
    return NULL_TREE;

  return fold_build2_loc (loc, GE_EXPR, type, a, y);
}

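/* A worked example (values invented for illustration): for signed int
   A, the conjunction 'A < X && A + 1 > Y' folds to 'A < X && A >= Y'.
   On its own, 'A + 1 > Y' is weaker than 'A >= Y' only at A == MAX,
   where A + 1 would overflow; the bound 'A < X' excludes exactly that
   case, since it implies A != MAX.  */
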
/* Fold a sum or difference of at least one multiplication.
   Returns the folded tree or NULL if no simplification could be made.  */

static tree
fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
			  tree arg0, tree arg1)
{
  tree arg00, arg01, arg10, arg11;
  tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;

  /* (A * C) +- (B * C) -> (A+-B) * C.
     (A * C) +- A -> A * (C+-1).
     We are most concerned about the case where C is a constant,
     but other combinations show up during loop reduction.  Since
     it is not difficult, try all four possibilities.  */

  if (TREE_CODE (arg0) == MULT_EXPR)
    {
      arg00 = TREE_OPERAND (arg0, 0);
      arg01 = TREE_OPERAND (arg0, 1);
    }
  else if (TREE_CODE (arg0) == INTEGER_CST)
    {
      arg00 = build_one_cst (type);
      arg01 = arg0;
    }
  else
    {
      /* We cannot generate constant 1 for fract.  */
      if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
	return NULL_TREE;
      arg00 = arg0;
      arg01 = build_one_cst (type);
    }
  if (TREE_CODE (arg1) == MULT_EXPR)
    {
      arg10 = TREE_OPERAND (arg1, 0);
      arg11 = TREE_OPERAND (arg1, 1);
    }
  else if (TREE_CODE (arg1) == INTEGER_CST)
    {
      arg10 = build_one_cst (type);
      /* As we canonicalize A - 2 to A + -2, get rid of that sign for
	 the purpose of this canonicalization.  */
      if (TREE_INT_CST_HIGH (arg1) == -1
	  && negate_expr_p (arg1)
	  && code == PLUS_EXPR)
	{
	  arg11 = negate_expr (arg1);
	  code = MINUS_EXPR;
	}
      else
	arg11 = arg1;
    }
  else
    {
      /* We cannot generate constant 1 for fract.  */
      if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
	return NULL_TREE;
      arg10 = arg1;
      arg11 = build_one_cst (type);
    }
  same = NULL_TREE;

  if (operand_equal_p (arg01, arg11, 0))
    same = arg01, alt0 = arg00, alt1 = arg10;
  else if (operand_equal_p (arg00, arg10, 0))
    same = arg00, alt0 = arg01, alt1 = arg11;
  else if (operand_equal_p (arg00, arg11, 0))
    same = arg00, alt0 = arg01, alt1 = arg10;
  else if (operand_equal_p (arg01, arg10, 0))
    same = arg01, alt0 = arg00, alt1 = arg11;

  /* No identical multiplicands; see if we can find a common
     power-of-two factor in non-power-of-two multiplies.  This
     can help in multi-dimensional array access.  */
  else if (host_integerp (arg01, 0)
	   && host_integerp (arg11, 0))
    {
      HOST_WIDE_INT int01, int11, tmp;
      bool swap = false;
      tree maybe_same;
      int01 = TREE_INT_CST_LOW (arg01);
      int11 = TREE_INT_CST_LOW (arg11);

      /* Move min of absolute values to int11.  */
      if (absu_hwi (int01) < absu_hwi (int11))
	{
	  tmp = int01, int01 = int11, int11 = tmp;
	  alt0 = arg00, arg00 = arg10, arg10 = alt0;
	  maybe_same = arg01;
	  swap = true;
	}
      else
	maybe_same = arg11;

      if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
	  /* The remainder should not be a constant, otherwise we
	     end up folding i * 4 + 2 to (i * 2 + 1) * 2, which
	     increases the number of multiplications necessary.  */
	  && TREE_CODE (arg10) != INTEGER_CST)
	{
	  alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
				  build_int_cst (TREE_TYPE (arg00),
						 int01 / int11));
	  alt1 = arg10;
	  same = maybe_same;
	  if (swap)
	    maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
	}
    }

  if (same)
    return fold_build2_loc (loc, MULT_EXPR, type,
			    fold_build2_loc (loc, code, type,
					     fold_convert_loc (loc, type, alt0),
					     fold_convert_loc (loc, type, alt1)),
			    fold_convert_loc (loc, type, same));

  return NULL_TREE;
}

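/* Worked examples (expressions invented for illustration):
   'x * 3 + x' matches the (A * C) +- A pattern and folds to 'x * 4';
   'i * 8 + j * 4' has no identical multiplicand, but 4 is a power of
   two dividing 8, so the factoring branch above yields
   '(i * 2 + j) * 4'.  */
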
/* Subroutine of native_encode_expr.  Encode the INTEGER_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_int (const_tree expr, unsigned char *ptr, int len)
{
  tree type = TREE_TYPE (expr);
  int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  int byte, offset, word, words;
  unsigned char value;

  if (total_bytes > len)
    return 0;
  words = total_bytes / UNITS_PER_WORD;

  for (byte = 0; byte < total_bytes; byte++)
    {
      int bitpos = byte * BITS_PER_UNIT;
      if (bitpos < HOST_BITS_PER_WIDE_INT)
	value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
      else
	value = (unsigned char) (TREE_INT_CST_HIGH (expr)
				 >> (bitpos - HOST_BITS_PER_WIDE_INT));

      if (total_bytes > UNITS_PER_WORD)
	{
	  word = byte / UNITS_PER_WORD;
	  if (WORDS_BIG_ENDIAN)
	    word = (words - 1) - word;
	  offset = word * UNITS_PER_WORD;
	  if (BYTES_BIG_ENDIAN)
	    offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
	  else
	    offset += byte % UNITS_PER_WORD;
	}
      else
	offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
      ptr[offset] = value;
    }
  return total_bytes;
}

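/* For illustration: encoding the 32-bit INTEGER_CST 0x11223344 yields
   the bytes { 0x44, 0x33, 0x22, 0x11 } on a little-endian target and
   { 0x11, 0x22, 0x33, 0x44 } on a big-endian one; WORDS_BIG_ENDIAN
   additionally permutes whole words when the value is wider than
   UNITS_PER_WORD.  */
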
/* Subroutine of native_encode_expr.  Encode the REAL_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_real (const_tree expr, unsigned char *ptr, int len)
{
  tree type = TREE_TYPE (expr);
  int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  int byte, offset, word, words, bitpos;
  unsigned char value;

  /* There are always 32 bits in each long, no matter the size of
     the host's long.  We handle floating point representations with
     up to 192 bits.  */
  long tmp[6];

  if (total_bytes > len)
    return 0;
  words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;

  real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));

  for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
       bitpos += BITS_PER_UNIT)
    {
      byte = (bitpos / BITS_PER_UNIT) & 3;
      value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));

      if (UNITS_PER_WORD < 4)
	{
	  word = byte / UNITS_PER_WORD;
	  if (WORDS_BIG_ENDIAN)
	    word = (words - 1) - word;
	  offset = word * UNITS_PER_WORD;
	  if (BYTES_BIG_ENDIAN)
	    offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
	  else
	    offset += byte % UNITS_PER_WORD;
	}
      else
	offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
      ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
    }
  return total_bytes;
}

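/* For illustration: on a little-endian IEEE target the REAL_CST 1.0
   of type double is emitted as the byte image
   { 0, 0, 0, 0, 0, 0, 0xf0, 0x3f } -- the target representation
   produced by real_to_target, independent of the host's own floating
   point format.  */
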
/* Subroutine of native_encode_expr.  Encode the COMPLEX_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_complex (const_tree expr, unsigned char *ptr, int len)
{
  int rsize, isize;
  tree part;

  part = TREE_REALPART (expr);
  rsize = native_encode_expr (part, ptr, len);
  if (rsize == 0)
    return 0;
  part = TREE_IMAGPART (expr);
  isize = native_encode_expr (part, ptr+rsize, len-rsize);
  if (isize != rsize)
    return 0;
  return rsize + isize;
}

/* Subroutine of native_encode_expr.  Encode the VECTOR_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_vector (const_tree expr, unsigned char *ptr, int len)
{
  int i, size, offset, count;
  tree itype, elem, elements;

  offset = 0;
  elements = TREE_VECTOR_CST_ELTS (expr);
  count = TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr));
  itype = TREE_TYPE (TREE_TYPE (expr));
  size = GET_MODE_SIZE (TYPE_MODE (itype));
  for (i = 0; i < count; i++)
    {
      if (elements)
	{
	  elem = TREE_VALUE (elements);
	  elements = TREE_CHAIN (elements);
	}
      else
	elem = NULL_TREE;

      if (elem)
	{
	  if (native_encode_expr (elem, ptr+offset, len-offset) != size)
	    return 0;
	}
      else
	{
	  if (offset + size > len)
	    return 0;
	  memset (ptr+offset, 0, size);
	}
      offset += size;
    }
  return offset;
}

/* Subroutine of native_encode_expr.  Encode the STRING_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_string (const_tree expr, unsigned char *ptr, int len)
{
  tree type = TREE_TYPE (expr);
  HOST_WIDE_INT total_bytes;

  if (TREE_CODE (type) != ARRAY_TYPE
      || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
      || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
      || !host_integerp (TYPE_SIZE_UNIT (type), 0))
    return 0;
  total_bytes = tree_low_cst (TYPE_SIZE_UNIT (type), 0);
  if (total_bytes > len)
    return 0;
  if (TREE_STRING_LENGTH (expr) < total_bytes)
    {
      memcpy (ptr, TREE_STRING_POINTER (expr), TREE_STRING_LENGTH (expr));
      memset (ptr + TREE_STRING_LENGTH (expr), 0,
	      total_bytes - TREE_STRING_LENGTH (expr));
    }
  else
    memcpy (ptr, TREE_STRING_POINTER (expr), total_bytes);
  return total_bytes;
}

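/* For illustration: encoding the STRING_CST "ab" as a 'char[4]' copies
   the string's own bytes (including its trailing NUL) and zero-fills
   the rest of the array, giving { 'a', 'b', 0, 0 }.  */
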
/* Subroutine of fold_view_convert_expr.  Encode the INTEGER_CST,
   REAL_CST, COMPLEX_CST, VECTOR_CST or STRING_CST specified by EXPR
   into the buffer PTR of length LEN bytes.  Return the number of
   bytes placed in the buffer, or zero upon failure.  */

int
native_encode_expr (const_tree expr, unsigned char *ptr, int len)
{
  switch (TREE_CODE (expr))
    {
    case INTEGER_CST:
      return native_encode_int (expr, ptr, len);

    case REAL_CST:
      return native_encode_real (expr, ptr, len);

    case COMPLEX_CST:
      return native_encode_complex (expr, ptr, len);

    case VECTOR_CST:
      return native_encode_vector (expr, ptr, len);

    case STRING_CST:
      return native_encode_string (expr, ptr, len);

    default:
      return 0;
    }
}

/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_int (tree type, const unsigned char *ptr, int len)
{
  int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  int byte, offset, word, words;
  unsigned char value;
  double_int result;

  if (total_bytes > len)
    return NULL_TREE;
  if (total_bytes * BITS_PER_UNIT > 2 * HOST_BITS_PER_WIDE_INT)
    return NULL_TREE;

  result = double_int_zero;
  words = total_bytes / UNITS_PER_WORD;

  for (byte = 0; byte < total_bytes; byte++)
    {
      int bitpos = byte * BITS_PER_UNIT;
      if (total_bytes > UNITS_PER_WORD)
	{
	  word = byte / UNITS_PER_WORD;
	  if (WORDS_BIG_ENDIAN)
	    word = (words - 1) - word;
	  offset = word * UNITS_PER_WORD;
	  if (BYTES_BIG_ENDIAN)
	    offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
	  else
	    offset += byte % UNITS_PER_WORD;
	}
      else
	offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
      value = ptr[offset];

      if (bitpos < HOST_BITS_PER_WIDE_INT)
	result.low |= (unsigned HOST_WIDE_INT) value << bitpos;
      else
	result.high |= (unsigned HOST_WIDE_INT) value
		       << (bitpos - HOST_BITS_PER_WIDE_INT);
    }

  return double_int_to_tree (type, result);
}

/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as a REAL_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_real (tree type, const unsigned char *ptr, int len)
{
  enum machine_mode mode = TYPE_MODE (type);
  int total_bytes = GET_MODE_SIZE (mode);
  int byte, offset, word, words, bitpos;
  unsigned char value;
  /* There are always 32 bits in each long, no matter the size of
     the host's long.  We handle floating point representations with
     up to 192 bits.  */
  REAL_VALUE_TYPE r;
  long tmp[6];

  total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  if (total_bytes > len || total_bytes > 24)
    return NULL_TREE;
  words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;

  memset (tmp, 0, sizeof (tmp));
  for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
       bitpos += BITS_PER_UNIT)
    {
      byte = (bitpos / BITS_PER_UNIT) & 3;
      if (UNITS_PER_WORD < 4)
	{
	  word = byte / UNITS_PER_WORD;
	  if (WORDS_BIG_ENDIAN)
	    word = (words - 1) - word;
	  offset = word * UNITS_PER_WORD;
	  if (BYTES_BIG_ENDIAN)
	    offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
	  else
	    offset += byte % UNITS_PER_WORD;
	}
      else
	offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
      value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];

      tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
    }

  real_from_target (&r, tmp, mode);
  return build_real (type, r);
}

/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_complex (tree type, const unsigned char *ptr, int len)
{
  tree etype, rpart, ipart;
  int size;

  etype = TREE_TYPE (type);
  size = GET_MODE_SIZE (TYPE_MODE (etype));
  if (size * 2 > len)
    return NULL_TREE;
  rpart = native_interpret_expr (etype, ptr, size);
  if (!rpart)
    return NULL_TREE;
  ipart = native_interpret_expr (etype, ptr+size, size);
  if (!ipart)
    return NULL_TREE;
  return build_complex (type, rpart, ipart);
}

/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_vector (tree type, const unsigned char *ptr, int len)
{
  tree etype, elem, elements;
  int i, size, count;

  etype = TREE_TYPE (type);
  size = GET_MODE_SIZE (TYPE_MODE (etype));
  count = TYPE_VECTOR_SUBPARTS (type);
  if (size * count > len)
    return NULL_TREE;

  elements = NULL_TREE;
  for (i = count - 1; i >= 0; i--)
    {
      elem = native_interpret_expr (etype, ptr+(i*size), size);
      if (!elem)
	return NULL_TREE;
      elements = tree_cons (NULL_TREE, elem, elements);
    }
  return build_vector (type, elements);
}

/* Subroutine of fold_view_convert_expr.  Interpret the contents of
   the buffer PTR of length LEN as a constant of type TYPE.  For
   INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
   we return a REAL_CST, etc...  If the buffer cannot be interpreted,
   return NULL_TREE.  */

tree
native_interpret_expr (tree type, const unsigned char *ptr, int len)
{
  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
      return native_interpret_int (type, ptr, len);

    case REAL_TYPE:
      return native_interpret_real (type, ptr, len);

    case COMPLEX_TYPE:
      return native_interpret_complex (type, ptr, len);

    case VECTOR_TYPE:
      return native_interpret_vector (type, ptr, len);

    default:
      return NULL_TREE;
    }
}

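/* A usage sketch (hypothetical helper, compiled out): the encode and
   interpret entry points above are designed to compose, so a constant
   can be round-tripped through its target byte image, or reinterpreted
   as a different type of the same size.  */
#if 0
static tree
example_reinterpret (tree cst, tree as_type)
{
  unsigned char buf[64];
  /* Serialize CST into its target byte representation.  */
  int len = native_encode_expr (cst, buf, sizeof (buf));
  if (len == 0)
    return NULL_TREE;
  /* Read the same bytes back as a constant of AS_TYPE.  */
  return native_interpret_expr (as_type, buf, len);
}
#endif
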
/* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
   TYPE at compile-time.  If we're unable to perform the conversion
   return NULL_TREE.  */

static tree
fold_view_convert_expr (tree type, tree expr)
{
  /* We support up to 512-bit values (for V8DFmode).  */
  unsigned char buffer[64];
  int len;

  /* Check that the host and target are sane.  */
  if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
    return NULL_TREE;

  len = native_encode_expr (expr, buffer, sizeof (buffer));
  if (len == 0)
    return NULL_TREE;

  return native_interpret_expr (type, buffer, len);
}

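/* For illustration: on an IEEE target with 32-bit int, folding
   VIEW_CONVERT_EXPR<int>(1.0f) through the routine above produces the
   INTEGER_CST 0x3f800000 -- the bit pattern of the float, not a value
   conversion (which would give 1).  */
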
/* Build an expression for the address of T.  Folds away INDIRECT_REF
   to avoid confusing the gimplify process.  */

tree
build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
{
  /* The size of the object is not relevant when talking about its
     address.  */
  if (TREE_CODE (t) == WITH_SIZE_EXPR)
    t = TREE_OPERAND (t, 0);

  if (TREE_CODE (t) == INDIRECT_REF)
    {
      t = TREE_OPERAND (t, 0);

      if (TREE_TYPE (t) != ptrtype)
	t = build1_loc (loc, NOP_EXPR, ptrtype, t);
    }
  else if (TREE_CODE (t) == MEM_REF
	   && integer_zerop (TREE_OPERAND (t, 1)))
    return TREE_OPERAND (t, 0);
  else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
    {
      t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));

      if (TREE_TYPE (t) != ptrtype)
	t = fold_convert_loc (loc, ptrtype, t);
    }
  else
    t = build1_loc (loc, ADDR_EXPR, ptrtype, t);

  return t;
}

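/* For illustration: taking the address of '*p' yields 'p' itself
   (possibly behind a no-op cast to the requested pointer type), and
   the address of 'MEM[p + 0]' likewise folds back to 'p'; only
   genuine objects get a fresh ADDR_EXPR.  */
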
/* Build an expression for the address of T.  */

tree
build_fold_addr_expr_loc (location_t loc, tree t)
{
  tree ptrtype = build_pointer_type (TREE_TYPE (t));

  return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
}

static bool vec_cst_ctor_to_array (tree, tree *);

/* Fold a unary expression of code CODE and type TYPE with operand
   OP0.  Return the folded expression if folding is successful.
   Otherwise, return NULL_TREE.  */

tree
fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
{
  tree tem;
  tree arg0;
  enum tree_code_class kind = TREE_CODE_CLASS (code);

  gcc_assert (IS_EXPR_CODE_CLASS (kind)
	      && TREE_CODE_LENGTH (code) == 1);

  arg0 = op0;
  if (arg0)
    {
      if (CONVERT_EXPR_CODE_P (code)
	  || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
	{
	  /* Don't use STRIP_NOPS, because signedness of argument type
	     matters.  */
	  STRIP_SIGN_NOPS (arg0);
	}
      else
	{
	  /* Strip any conversions that don't change the mode.  This
	     is safe for every expression, except for a comparison
	     expression because its signedness is derived from its
	     operands.

	     Note that this is done as an internal manipulation within
	     the constant folder, in order to find the simplest
	     representation of the arguments so that their form can be
	     studied.  In any case, the appropriate type conversions
	     should be put back in the tree that will get out of the
	     constant folder.  */
	  STRIP_NOPS (arg0);
	}
    }

  if (TREE_CODE_CLASS (code) == tcc_unary)
    {
      if (TREE_CODE (arg0) == COMPOUND_EXPR)
	return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
		       fold_build1_loc (loc, code, type,
					fold_convert_loc (loc, TREE_TYPE (op0),
							  TREE_OPERAND (arg0, 1))));
      else if (TREE_CODE (arg0) == COND_EXPR)
	{
	  tree arg01 = TREE_OPERAND (arg0, 1);
	  tree arg02 = TREE_OPERAND (arg0, 2);
	  if (! VOID_TYPE_P (TREE_TYPE (arg01)))
	    arg01 = fold_build1_loc (loc, code, type,
				     fold_convert_loc (loc,
						       TREE_TYPE (op0), arg01));
	  if (! VOID_TYPE_P (TREE_TYPE (arg02)))
	    arg02 = fold_build1_loc (loc, code, type,
				     fold_convert_loc (loc,
						       TREE_TYPE (op0), arg02));
	  tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
				 arg01, arg02);

	  /* If this was a conversion, and all we did was to move it
	     inside the COND_EXPR, bring it back out.  But leave it if
	     it is a conversion from integer to integer and the
	     result precision is no wider than a word since such a
	     conversion is cheap and may be optimized away by combine,
	     while it couldn't if it were outside the COND_EXPR.  Then return
	     so we don't get into an infinite recursion loop taking the
	     conversion out and then back in.  */

	  if ((CONVERT_EXPR_CODE_P (code)
	       || code == NON_LVALUE_EXPR)
	      && TREE_CODE (tem) == COND_EXPR
	      && TREE_CODE (TREE_OPERAND (tem, 1)) == code
	      && TREE_CODE (TREE_OPERAND (tem, 2)) == code
	      && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
	      && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
	      && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
		  == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
	      && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
		     && (INTEGRAL_TYPE_P
			 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
		     && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
		  || flag_syntax_only))
	    tem = build1_loc (loc, code, type,
			      build3 (COND_EXPR,
				      TREE_TYPE (TREE_OPERAND
						 (TREE_OPERAND (tem, 1), 0)),
				      TREE_OPERAND (tem, 0),
				      TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
				      TREE_OPERAND (TREE_OPERAND (tem, 2),
						    0)));
	  return tem;
	}
    }

  switch (code)
    {
    case PAREN_EXPR:
      /* Re-association barriers around constants and other re-association
	 barriers can be removed.  */
      if (CONSTANT_CLASS_P (op0)
	  || TREE_CODE (op0) == PAREN_EXPR)
	return fold_convert_loc (loc, type, op0);
      return NULL_TREE;

    CASE_CONVERT:
    case FLOAT_EXPR:
    case FIX_TRUNC_EXPR:
      if (TREE_TYPE (op0) == type)
	return op0;

      if (COMPARISON_CLASS_P (op0))
	{
	  /* If we have (type) (a CMP b) and type is an integral type, return
	     new expression involving the new type.  Canonicalize
	     (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
	     non-integral type.
	     Do not fold the result as that would not simplify further, also
	     folding again results in recursions.  */
	  if (TREE_CODE (type) == BOOLEAN_TYPE)
	    return build2_loc (loc, TREE_CODE (op0), type,
			       TREE_OPERAND (op0, 0),
			       TREE_OPERAND (op0, 1));
	  else if (!INTEGRAL_TYPE_P (type))
	    return build3_loc (loc, COND_EXPR, type, op0,
			       constant_boolean_node (true, type),
			       constant_boolean_node (false, type));
	}

      /* Handle cases of two conversions in a row.  */
      if (CONVERT_EXPR_P (op0))
	{
	  tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
	  tree inter_type = TREE_TYPE (op0);
	  int inside_int = INTEGRAL_TYPE_P (inside_type);
	  int inside_ptr = POINTER_TYPE_P (inside_type);
	  int inside_float = FLOAT_TYPE_P (inside_type);
	  int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
	  unsigned int inside_prec = TYPE_PRECISION (inside_type);
	  int inside_unsignedp = TYPE_UNSIGNED (inside_type);
	  int inter_int = INTEGRAL_TYPE_P (inter_type);
	  int inter_ptr = POINTER_TYPE_P (inter_type);
	  int inter_float = FLOAT_TYPE_P (inter_type);
	  int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
	  unsigned int inter_prec = TYPE_PRECISION (inter_type);
	  int inter_unsignedp = TYPE_UNSIGNED (inter_type);
	  int final_int = INTEGRAL_TYPE_P (type);
	  int final_ptr = POINTER_TYPE_P (type);
	  int final_float = FLOAT_TYPE_P (type);
	  int final_vec = TREE_CODE (type) == VECTOR_TYPE;
	  unsigned int final_prec = TYPE_PRECISION (type);
	  int final_unsignedp = TYPE_UNSIGNED (type);

	  /* In addition to the cases of two conversions in a row
	     handled below, if we are converting something to its own
	     type via an object of identical or wider precision, neither
	     conversion is needed.  */
	  if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
	      && (((inter_int || inter_ptr) && final_int)
		  || (inter_float && final_float))
	      && inter_prec >= final_prec)
	    return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));

	  /* Likewise, if the intermediate and initial types are either both
	     float or both integer, we don't need the middle conversion if the
	     former is wider than the latter and doesn't change the signedness
	     (for integers).  Avoid this if the final type is a pointer since
	     then we sometimes need the middle conversion.  Likewise if the
	     final type has a precision not equal to the size of its mode.  */
	  if (((inter_int && inside_int)
	       || (inter_float && inside_float)
	       || (inter_vec && inside_vec))
	      && inter_prec >= inside_prec
	      && (inter_float || inter_vec
		  || inter_unsignedp == inside_unsignedp)
	      && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
		    && TYPE_MODE (type) == TYPE_MODE (inter_type))
	      && ! final_ptr
	      && (! final_vec || inter_prec == inside_prec))
	    return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));

	  /* If we have a sign-extension of a zero-extended value, we can
	     replace that by a single zero-extension.  */
	  if (inside_int && inter_int && final_int
	      && inside_prec < inter_prec && inter_prec < final_prec
	      && inside_unsignedp && !inter_unsignedp)
	    return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));

	  /* Two conversions in a row are not needed unless:
	     - some conversion is floating-point (overstrict for now), or
	     - some conversion is a vector (overstrict for now), or
	     - the intermediate type is narrower than both initial and
	       final, or
	     - the intermediate type and innermost type differ in signedness,
	       and the outermost type is wider than the intermediate, or
	     - the initial type is a pointer type and the precisions of the
	       intermediate and final types differ, or
	     - the final type is a pointer type and the precisions of the
	       initial and intermediate types differ.  */
	  if (! inside_float && ! inter_float && ! final_float
	      && ! inside_vec && ! inter_vec && ! final_vec
	      && (inter_prec >= inside_prec || inter_prec >= final_prec)
	      && ! (inside_int && inter_int
		    && inter_unsignedp != inside_unsignedp
		    && inter_prec < final_prec)
	      && ((inter_unsignedp && inter_prec > inside_prec)
		  == (final_unsignedp && final_prec > inter_prec))
	      && ! (inside_ptr && inter_prec != final_prec)
	      && ! (final_ptr && inside_prec != inter_prec)
	      && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
		    && TYPE_MODE (type) == TYPE_MODE (inter_type)))
	    return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
	}

      /* Handle (T *)&A.B.C for A being of type T and B and C
	 living at offset zero.  This occurs frequently in
	 C++ upcasting and then accessing the base.  */
      if (TREE_CODE (op0) == ADDR_EXPR
	  && POINTER_TYPE_P (type)
	  && handled_component_p (TREE_OPERAND (op0, 0)))
	{
	  HOST_WIDE_INT bitsize, bitpos;
	  tree offset;
	  enum machine_mode mode;
	  int unsignedp, volatilep;
	  tree base = TREE_OPERAND (op0, 0);
	  base = get_inner_reference (base, &bitsize, &bitpos, &offset,
				      &mode, &unsignedp, &volatilep, false);
	  /* If the reference was to a (constant) zero offset, we can use
	     the address of the base if it has the same base type
	     as the result type and the pointer type is unqualified.  */
	  if (! offset && bitpos == 0
	      && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
		  == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
	      && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
	    return fold_convert_loc (loc, type,
				     build_fold_addr_expr_loc (loc, base));
	}

      if (TREE_CODE (op0) == MODIFY_EXPR
	  && TREE_CONSTANT (TREE_OPERAND (op0, 1))
	  /* Detect assigning a bitfield.  */
	  && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
	       && DECL_BIT_FIELD
		  (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
	{
	  /* Don't leave an assignment inside a conversion
	     unless assigning a bitfield.  */
	  tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
	  /* First do the assignment, then return converted constant.  */
	  tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
	  TREE_NO_WARNING (tem) = 1;
	  TREE_USED (tem) = 1;
	  return tem;
	}

      /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
	 constant (if x has signed type, the sign bit cannot be set
	 in c).  This folds extension into the BIT_AND_EXPR.
	 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
	 very likely don't have maximal range for their precision and this
	 transformation effectively doesn't preserve non-maximal ranges.  */
      if (TREE_CODE (type) == INTEGER_TYPE
	  && TREE_CODE (op0) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
	{
	  tree and_expr = op0;
	  tree and0 = TREE_OPERAND (and_expr, 0);
	  tree and1 = TREE_OPERAND (and_expr, 1);
	  int change = 0;

	  if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
	      || (TYPE_PRECISION (type)
		  <= TYPE_PRECISION (TREE_TYPE (and_expr))))
	    change = 1;
	  else if (TYPE_PRECISION (TREE_TYPE (and1))
		   <= HOST_BITS_PER_WIDE_INT
		   && host_integerp (and1, 1))
	    {
	      unsigned HOST_WIDE_INT cst;

	      cst = tree_low_cst (and1, 1);
	      cst &= (HOST_WIDE_INT) -1
		     << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
	      change = (cst == 0);
#ifdef LOAD_EXTEND_OP
	      if (change
		  && !flag_syntax_only
		  && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
		      == ZERO_EXTEND))
		{
		  tree uns = unsigned_type_for (TREE_TYPE (and0));
		  and0 = fold_convert_loc (loc, uns, and0);
		  and1 = fold_convert_loc (loc, uns, and1);
		}
#endif
	    }
	  if (change)
	    {
	      tem = force_fit_type_double (type, tree_to_double_int (and1),
					   0, TREE_OVERFLOW (and1));
	      return fold_build2_loc (loc, BIT_AND_EXPR, type,
				      fold_convert_loc (loc, type, and0), tem);
	    }
	}

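      /* For illustration (types invented for this example): with
	 32-bit int and 64-bit long, '(long) (x & 0xff)' passes the
	 test above because the sign bit of the mask is clear, so it
	 becomes '(long) x & 0xff' and the widening is folded into
	 the BIT_AND_EXPR.  */
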
      /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
	 when one of the new casts will fold away.  Conservatively we assume
	 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST.  */
      if (POINTER_TYPE_P (type)
	  && TREE_CODE (arg0) == POINTER_PLUS_EXPR
	  && (!TYPE_RESTRICT (type) || TYPE_RESTRICT (TREE_TYPE (arg0)))
	  && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	      || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
	      || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
	{
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  tree arg01 = TREE_OPERAND (arg0, 1);

	  return fold_build_pointer_plus_loc
		   (loc, fold_convert_loc (loc, type, arg00), arg01);
	}

      /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
	 of the same precision, and X is an integer type not narrower than
	 types T1 or T2, i.e. the cast (T2)X isn't an extension.  */
      if (INTEGRAL_TYPE_P (type)
	  && TREE_CODE (op0) == BIT_NOT_EXPR
	  && INTEGRAL_TYPE_P (TREE_TYPE (op0))
	  && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
	  && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
	{
	  tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
	  if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
	      && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
	    return fold_build1_loc (loc, BIT_NOT_EXPR, type,
				    fold_convert_loc (loc, type, tem));
	}

      /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
	 type of X and Y (integer types only).  */
      if (INTEGRAL_TYPE_P (type)
	  && TREE_CODE (op0) == MULT_EXPR
	  && INTEGRAL_TYPE_P (TREE_TYPE (op0))
	  && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
	{
	  /* Be careful not to introduce new overflows.  */
	  tree mult_type;
	  if (TYPE_OVERFLOW_WRAPS (type))
	    mult_type = type;
	  else
	    mult_type = unsigned_type_for (type);

	  if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
	    {
	      tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
				     fold_convert_loc (loc, mult_type,
						       TREE_OPERAND (op0, 0)),
				     fold_convert_loc (loc, mult_type,
						       TREE_OPERAND (op0, 1)));
	      return fold_convert_loc (loc, type, tem);
	    }
	}

      tem = fold_convert_const (code, type, op0);
      return tem ? tem : NULL_TREE;

    case ADDR_SPACE_CONVERT_EXPR:
      if (integer_zerop (arg0))
	return fold_convert_const (code, type, arg0);
      return NULL_TREE;

    case FIXED_CONVERT_EXPR:
      tem = fold_convert_const (code, type, arg0);
      return tem ? tem : NULL_TREE;

    case VIEW_CONVERT_EXPR:
      if (TREE_TYPE (op0) == type)
	return op0;
      if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
	return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
				type, TREE_OPERAND (op0, 0));
      if (TREE_CODE (op0) == MEM_REF)
	return fold_build2_loc (loc, MEM_REF, type,
				TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));

      /* For integral conversions with the same precision or pointer
	 conversions use a NOP_EXPR instead.  */
      if ((INTEGRAL_TYPE_P (type)
	   || POINTER_TYPE_P (type))
	  && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
	      || POINTER_TYPE_P (TREE_TYPE (op0)))
	  && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
	return fold_convert_loc (loc, type, op0);

      /* Strip inner integral conversions that do not change the
	 precision.  */
      if (CONVERT_EXPR_P (op0)
	  && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
	      || POINTER_TYPE_P (TREE_TYPE (op0)))
	  && (INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
	      || POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0))))
	  && (TYPE_PRECISION (TREE_TYPE (op0))
	      == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0, 0)))))
	return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
				type, TREE_OPERAND (op0, 0));

      return fold_view_convert_expr (type, op0);

    case NEGATE_EXPR:
      tem = fold_negate_expr (loc, arg0);
      if (tem)
	return fold_convert_loc (loc, type, tem);
      return NULL_TREE;

    case ABS_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
	return fold_abs_const (arg0, type);
      else if (TREE_CODE (arg0) == NEGATE_EXPR)
	return fold_build1_loc (loc, ABS_EXPR, type, TREE_OPERAND (arg0, 0));
      /* Convert fabs((double)float) into (double)fabsf(float).  */
      else if (TREE_CODE (arg0) == NOP_EXPR
	       && TREE_CODE (type) == REAL_TYPE)
	{
	  tree targ0 = strip_float_extensions (arg0);
	  if (targ0 != arg0)
	    return fold_convert_loc (loc, type,
				     fold_build1_loc (loc, ABS_EXPR,
						      TREE_TYPE (targ0),
						      targ0));
	}
      /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on.  */
      else if (TREE_CODE (arg0) == ABS_EXPR)
	return arg0;
      else if (tree_expr_nonnegative_p (arg0))
	return arg0;

      /* Strip sign ops from argument.  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tem = fold_strip_sign_ops (arg0);
	  if (tem)
	    return fold_build1_loc (loc, ABS_EXPR, type,
				    fold_convert_loc (loc, type, tem));
	}
      return NULL_TREE;

    case CONJ_EXPR:
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
	return fold_convert_loc (loc, type, arg0);
      if (TREE_CODE (arg0) == COMPLEX_EXPR)
	{
	  tree itype = TREE_TYPE (type);
	  tree rpart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 0));
	  tree ipart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 1));
	  return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart,
				  negate_expr (ipart));
	}
      if (TREE_CODE (arg0) == COMPLEX_CST)
	{
	  tree itype = TREE_TYPE (type);
	  tree rpart = fold_convert_loc (loc, itype, TREE_REALPART (arg0));
	  tree ipart = fold_convert_loc (loc, itype, TREE_IMAGPART (arg0));
	  return build_complex (type, rpart, negate_expr (ipart));
	}
      if (TREE_CODE (arg0) == CONJ_EXPR)
	return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
      return NULL_TREE;

    case BIT_NOT_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST)
	return fold_not_const (arg0, type);
      else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
	return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
      /* Convert ~ (-A) to A - 1.  */
      else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
	return fold_build2_loc (loc, MINUS_EXPR, type,
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg0, 0)),
				build_int_cst (type, 1));
      /* Convert ~ (A - 1) or ~ (A + -1) to -A.  */
      else if (INTEGRAL_TYPE_P (type)
	       && ((TREE_CODE (arg0) == MINUS_EXPR
		    && integer_onep (TREE_OPERAND (arg0, 1)))
		   || (TREE_CODE (arg0) == PLUS_EXPR
		       && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
	return fold_build1_loc (loc, NEGATE_EXPR, type,
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg0, 0)));
      /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify.  */
      else if (TREE_CODE (arg0) == BIT_XOR_EXPR
	       && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
					 fold_convert_loc (loc, type,
							   TREE_OPERAND (arg0, 0)))))
	return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg0, 1)));
      else if (TREE_CODE (arg0) == BIT_XOR_EXPR
	       && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
					 fold_convert_loc (loc, type,
							   TREE_OPERAND (arg0, 1)))))
	return fold_build2_loc (loc, BIT_XOR_EXPR, type,
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg0, 0)), tem);
      /* Perform BIT_NOT_EXPR on each element individually.  */
      else if (TREE_CODE (arg0) == VECTOR_CST)
	{
	  tree elements = TREE_VECTOR_CST_ELTS (arg0), elem, list = NULL_TREE;
	  int count = TYPE_VECTOR_SUBPARTS (type), i;

	  for (i = 0; i < count; i++)
	    {
	      if (elements)
		{
		  elem = TREE_VALUE (elements);
		  elem = fold_unary_loc (loc, BIT_NOT_EXPR, TREE_TYPE (type), elem);
		  if (elem == NULL_TREE)
		    break;
		  elements = TREE_CHAIN (elements);
		}
	      else
		elem = build_int_cst (TREE_TYPE (type), -1);
	      list = tree_cons (NULL_TREE, elem, list);
	    }
	  if (i == count)
	    return build_vector (type, nreverse (list));
	}

      return NULL_TREE;

    case TRUTH_NOT_EXPR:
      /* The argument to invert_truthvalue must have Boolean type.  */
      if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
	arg0 = fold_convert_loc (loc, boolean_type_node, arg0);

      /* Note that the operand of this must be an int
	 and its values must be 0 or 1.
	 ("true" is a fixed value perhaps depending on the language,
	 but we don't handle values other than 1 correctly yet.)  */
      tem = fold_truth_not_expr (loc, arg0);
      if (!tem)
	return NULL_TREE;
      return fold_convert_loc (loc, type, tem);

    case REALPART_EXPR:
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
	return fold_convert_loc (loc, type, arg0);
      if (TREE_CODE (arg0) == COMPLEX_EXPR)
	return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
				     TREE_OPERAND (arg0, 1));
      if (TREE_CODE (arg0) == COMPLEX_CST)
	return fold_convert_loc (loc, type, TREE_REALPART (arg0));
      if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
	{
	  tree itype = TREE_TYPE (TREE_TYPE (arg0));
	  tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
				 fold_build1_loc (loc, REALPART_EXPR, itype,
						  TREE_OPERAND (arg0, 0)),
				 fold_build1_loc (loc, REALPART_EXPR, itype,
						  TREE_OPERAND (arg0, 1)));
	  return fold_convert_loc (loc, type, tem);
	}
      if (TREE_CODE (arg0) == CONJ_EXPR)
	{
	  tree itype = TREE_TYPE (TREE_TYPE (arg0));
	  tem = fold_build1_loc (loc, REALPART_EXPR, itype,
				 TREE_OPERAND (arg0, 0));
	  return fold_convert_loc (loc, type, tem);
	}
      if (TREE_CODE (arg0) == CALL_EXPR)
	{
	  tree fn = get_callee_fndecl (arg0);
	  if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
	    switch (DECL_FUNCTION_CODE (fn))
	      {
	      CASE_FLT_FN (BUILT_IN_CEXPI):
		fn = mathfn_built_in (type, BUILT_IN_COS);
		if (fn)
		  return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
		break;

	      default:
		break;
	      }
	}
      return NULL_TREE;

    case IMAGPART_EXPR:
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
	return build_zero_cst (type);
      if (TREE_CODE (arg0) == COMPLEX_EXPR)
	return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 1),
				     TREE_OPERAND (arg0, 0));
      if (TREE_CODE (arg0) == COMPLEX_CST)
	return fold_convert_loc (loc, type, TREE_IMAGPART (arg0));
      if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
	{
	  tree itype = TREE_TYPE (TREE_TYPE (arg0));
	  tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
				 fold_build1_loc (loc, IMAGPART_EXPR, itype,
						  TREE_OPERAND (arg0, 0)),
				 fold_build1_loc (loc, IMAGPART_EXPR, itype,
						  TREE_OPERAND (arg0, 1)));
	  return fold_convert_loc (loc, type, tem);
	}
      if (TREE_CODE (arg0) == CONJ_EXPR)
	{
	  tree itype = TREE_TYPE (TREE_TYPE (arg0));
	  tem = fold_build1_loc (loc, IMAGPART_EXPR, itype,
				 TREE_OPERAND (arg0, 0));
	  return fold_convert_loc (loc, type, negate_expr (tem));
	}
      if (TREE_CODE (arg0) == CALL_EXPR)
	{
	  tree fn = get_callee_fndecl (arg0);
	  if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
	    switch (DECL_FUNCTION_CODE (fn))
	      {
	      CASE_FLT_FN (BUILT_IN_CEXPI):
		fn = mathfn_built_in (type, BUILT_IN_SIN);
		if (fn)
		  return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
		break;

	      default:
		break;
	      }
	}
      return NULL_TREE;

    case INDIRECT_REF:
      /* Fold *&X to X if X is an lvalue.  */
      if (TREE_CODE (op0) == ADDR_EXPR)
	{
	  tree op00 = TREE_OPERAND (op0, 0);
	  if ((TREE_CODE (op00) == VAR_DECL
	       || TREE_CODE (op00) == PARM_DECL
	       || TREE_CODE (op00) == RESULT_DECL)
	      && !TREE_READONLY (op00))
	    return op00;
	}
      return NULL_TREE;

    case VEC_UNPACK_LO_EXPR:
    case VEC_UNPACK_HI_EXPR:
    case VEC_UNPACK_FLOAT_LO_EXPR:
    case VEC_UNPACK_FLOAT_HI_EXPR:
      {
	unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
	tree *elts, vals = NULL_TREE;
	enum tree_code subcode;

	gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2);
	if (TREE_CODE (arg0) != VECTOR_CST)
	  return NULL_TREE;

	elts = XALLOCAVEC (tree, nelts * 2);
	if (!vec_cst_ctor_to_array (arg0, elts))
	  return NULL_TREE;

	if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
				   || code == VEC_UNPACK_FLOAT_LO_EXPR))
	  elts += nelts;

	if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
	  subcode = NOP_EXPR;
	else
	  subcode = FLOAT_EXPR;

	for (i = 0; i < nelts; i++)
	  {
	    elts[i] = fold_convert_const (subcode, TREE_TYPE (type), elts[i]);
	    if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
	      return NULL_TREE;
	  }

	for (i = 0; i < nelts; i++)
	  vals = tree_cons (NULL_TREE, elts[nelts - i - 1], vals);
	return build_vector (type, vals);
      }

    default:
      return NULL_TREE;
    } /* switch (code) */
}

/* If the operation was a conversion do _not_ mark a resulting constant
   with TREE_OVERFLOW if the original constant was not.  These conversions
   have implementation-defined behavior and retaining the TREE_OVERFLOW
   flag here would confuse later passes such as VRP.  */

tree
fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
                                tree type, tree op0)
{
  tree res = fold_unary_loc (loc, code, type, op0);
  if (res
      && TREE_CODE (res) == INTEGER_CST
      && TREE_CODE (op0) == INTEGER_CST
      && CONVERT_EXPR_CODE_P (code))
    TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);

  return res;
}

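/* Illustrative example (editorial): on a target where signed char is
   8 bits, folding the conversion (signed char) 200 yields -56, and
   fold_unary_loc marks that constant with TREE_OVERFLOW.  Since the
   narrowing conversion merely has implementation-defined behavior, the
   wrapper above copies the (unset) flag from the operand instead, so a
   later pass such as VRP does not treat the result as an overflow.  */
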
/* Fold a binary bitwise/truth expression of code CODE and type TYPE with
   operands OP0 and OP1.  LOC is the location of the resulting expression.
   ARG0 and ARG1 are the NOP_STRIPed results of OP0 and OP1.
   Return the folded expression if folding is successful.  Otherwise,
   return NULL_TREE.  */

static tree
fold_truth_andor (location_t loc, enum tree_code code, tree type,
                  tree arg0, tree arg1, tree op0, tree op1)
{
  tree tem;

  /* We only do these simplifications if we are optimizing.  */
  if (!optimize)
    return NULL_TREE;

  /* Check for things like (A || B) && (A || C).  We can convert this
     to A || (B && C).  Note that either operator can be any of the four
     truth and/or operations and the transformation will still be
     valid.  Also note that we only care about order for the
     ANDIF and ORIF operators.  If B contains side effects, this
     might change the truth-value of A.  */
  if (TREE_CODE (arg0) == TREE_CODE (arg1)
      && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
          || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
          || TREE_CODE (arg0) == TRUTH_AND_EXPR
          || TREE_CODE (arg0) == TRUTH_OR_EXPR)
      && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
    {
      tree a00 = TREE_OPERAND (arg0, 0);
      tree a01 = TREE_OPERAND (arg0, 1);
      tree a10 = TREE_OPERAND (arg1, 0);
      tree a11 = TREE_OPERAND (arg1, 1);
      int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
                          || TREE_CODE (arg0) == TRUTH_AND_EXPR)
                         && (code == TRUTH_AND_EXPR
                             || code == TRUTH_OR_EXPR));

      if (operand_equal_p (a00, a10, 0))
        return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
                                fold_build2_loc (loc, code, type, a01, a11));
      else if (commutative && operand_equal_p (a00, a11, 0))
        return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
                                fold_build2_loc (loc, code, type, a01, a10));
      else if (commutative && operand_equal_p (a01, a10, 0))
        return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
                                fold_build2_loc (loc, code, type, a00, a11));

      /* This case is tricky because we must either have commutative
         operators or else A10 must not have side-effects.  */

      else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
               && operand_equal_p (a01, a11, 0))
        return fold_build2_loc (loc, TREE_CODE (arg0), type,
                                fold_build2_loc (loc, code, type, a00, a10),
                                a01);
    }

  /* See if we can build a range comparison.  */
  if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
    return tem;

  if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
      || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
    {
      tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
      if (tem)
        return fold_build2_loc (loc, code, type, tem, arg1);
    }

  if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
      || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
    {
      tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
      if (tem)
        return fold_build2_loc (loc, code, type, arg0, tem);
    }

  /* Check for the possibility of merging component references.  If our
     lhs is another similar operation, try to merge its rhs with our
     rhs.  Then try to merge our lhs and rhs.  */
  if (TREE_CODE (arg0) == code
      && 0 != (tem = fold_truth_andor_1 (loc, code, type,
                                         TREE_OPERAND (arg0, 1), arg1)))
    return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);

  if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
    return tem;

  if ((BRANCH_COST (optimize_function_for_speed_p (cfun),
                    false) >= 2)
      && LOGICAL_OP_NON_SHORT_CIRCUIT
      && (code == TRUTH_AND_EXPR
          || code == TRUTH_ANDIF_EXPR
          || code == TRUTH_OR_EXPR
          || code == TRUTH_ORIF_EXPR))
    {
      enum tree_code ncode, icode;

      ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
              ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
      icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;

      /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
         or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C)).
         We don't want to pack more than two leaves to a non-IF AND/OR
         expression.
         If the tree code of the left-hand operand isn't an AND/OR-IF code
         and isn't equal to IF-CODE, we don't want to add the right-hand
         operand.
         If the inner right-hand side of the left-hand operand has
         side-effects, or isn't simple, then we can't add to it,
         as otherwise we might destroy the if-sequence.  */
      if (TREE_CODE (arg0) == icode
          && simple_operand_p_2 (arg1)
          /* Needed for sequence points to handle trappings, and
             side-effects.  */
          && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
        {
          tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
                                 arg1);
          return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
                                  tem);
        }
      /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
         or (A OR[-IF] (B OR-IF C)) -> ((A OR B) OR-IF C).  */
      else if (TREE_CODE (arg1) == icode
               && simple_operand_p_2 (arg0)
               /* Needed for sequence points to handle trappings, and
                  side-effects.  */
               && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
        {
          tem = fold_build2_loc (loc, ncode, type,
                                 arg0, TREE_OPERAND (arg1, 0));
          return fold_build2_loc (loc, icode, type, tem,
                                  TREE_OPERAND (arg1, 1));
        }
      /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
         into (A OR B).
         For sequence point consistency, we need to check for trapping,
         and side-effects.  */
      else if (code == icode && simple_operand_p_2 (arg0)
               && simple_operand_p_2 (arg1))
        return fold_build2_loc (loc, ncode, type, arg0, arg1);
    }

  return NULL_TREE;
}

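/* For example, fold_truth_andor rewrites

     (a || b) && (a || c)   into   a || (b && c)

   provided b has no side effects.  On targets where
   LOGICAL_OP_NON_SHORT_CIRCUIT holds and the branch cost is at least 2,
   it also turns the short-circuit a && b into the non-short-circuit
   TRUTH_AND_EXPR form when both operands are simple and cannot trap,
   trading a conditional branch for an unconditional evaluation.  */
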
/* Fold a binary expression of code CODE and type TYPE with operands
   OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
   Return the folded expression if folding is successful.  Otherwise,
   return NULL_TREE.  */

static tree
fold_minmax (location_t loc, enum tree_code code, tree type, tree op0, tree op1)
{
  enum tree_code compl_code;

  if (code == MIN_EXPR)
    compl_code = MAX_EXPR;
  else if (code == MAX_EXPR)
    compl_code = MIN_EXPR;
  else
    gcc_unreachable ();

  /* MIN (MAX (a, b), b) == b.  */
  if (TREE_CODE (op0) == compl_code
      && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
    return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0));

  /* MIN (MAX (b, a), b) == b.  */
  if (TREE_CODE (op0) == compl_code
      && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
      && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
    return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1));

  /* MIN (a, MAX (a, b)) == a.  */
  if (TREE_CODE (op1) == compl_code
      && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
      && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
    return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1));

  /* MIN (a, MAX (b, a)) == a.  */
  if (TREE_CODE (op1) == compl_code
      && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
      && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
    return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0));

  return NULL_TREE;
}

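/* A quick sanity check of the first identity above: with a = 3, b = 7,
   MIN (MAX (a, b), b) = MIN (7, 7) = 7 = b, and with a = 9, b = 7,
   MIN (MAX (a, b), b) = MIN (9, 7) = 7 = b, so operand A can be dropped.
   omit_one_operand_loc is used so that any side effects of the dropped
   operand are still preserved.  */
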
/* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
   by changing CODE to reduce the magnitude of constants involved in
   ARG0 of the comparison.
   Returns a canonicalized comparison tree if a simplification was
   possible, otherwise returns NULL_TREE.
   Set *STRICT_OVERFLOW_P to true if the canonicalization is only
   valid if signed overflow is undefined.  */

static tree
maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
                                 tree arg0, tree arg1,
                                 bool *strict_overflow_p)
{
  enum tree_code code0 = TREE_CODE (arg0);
  tree t, cst0 = NULL_TREE;
  int sgn0;
  bool swap = false;

  /* Match A +- CST code arg1 and CST code arg1.  We can change the
     first form only if overflow is undefined.  */
  if (!((TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
         /* In principle pointers also have undefined overflow behavior,
            but that causes problems elsewhere.  */
         && !POINTER_TYPE_P (TREE_TYPE (arg0))
         && (code0 == MINUS_EXPR
             || code0 == PLUS_EXPR)
         && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        || code0 == INTEGER_CST))
    return NULL_TREE;

  /* Identify the constant in arg0 and its sign.  */
  if (code0 == INTEGER_CST)
    cst0 = arg0;
  else
    cst0 = TREE_OPERAND (arg0, 1);
  sgn0 = tree_int_cst_sgn (cst0);

  /* Overflowed constants and zero will cause problems.  */
  if (integer_zerop (cst0)
      || TREE_OVERFLOW (cst0))
    return NULL_TREE;

  /* See if we can reduce the magnitude of the constant in
     arg0 by changing the comparison code.  */
  if (code0 == INTEGER_CST)
    {
      /* CST <= arg1  ->  CST-1 < arg1.  */
      if (code == LE_EXPR && sgn0 == 1)
        code = LT_EXPR;
      /* -CST < arg1  ->  -CST-1 <= arg1.  */
      else if (code == LT_EXPR && sgn0 == -1)
        code = LE_EXPR;
      /* CST > arg1  ->  CST-1 >= arg1.  */
      else if (code == GT_EXPR && sgn0 == 1)
        code = GE_EXPR;
      /* -CST >= arg1  ->  -CST-1 > arg1.  */
      else if (code == GE_EXPR && sgn0 == -1)
        code = GT_EXPR;
      else
        return NULL_TREE;
      /* arg1 code' CST' might be more canonical.  */
      swap = true;
    }
  else
    {
      /* A - CST < arg1  ->  A - CST-1 <= arg1.  */
      if (code == LT_EXPR
          && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
        code = LE_EXPR;
      /* A + CST > arg1  ->  A + CST-1 >= arg1.  */
      else if (code == GT_EXPR
               && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
        code = GE_EXPR;
      /* A + CST <= arg1  ->  A + CST-1 < arg1.  */
      else if (code == LE_EXPR
               && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
        code = LT_EXPR;
      /* A - CST >= arg1  ->  A - CST-1 > arg1.  */
      else if (code == GE_EXPR
               && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
        code = GT_EXPR;
      else
        return NULL_TREE;
      *strict_overflow_p = true;
    }

  /* Now build the constant reduced in magnitude.  But not if that
     would produce one outside of its type's range.  */
  if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
      && ((sgn0 == 1
           && TYPE_MIN_VALUE (TREE_TYPE (cst0))
           && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
          || (sgn0 == -1
              && TYPE_MAX_VALUE (TREE_TYPE (cst0))
              && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
    /* We cannot swap the comparison here as that would cause us to
       endlessly recurse.  */
    return NULL_TREE;

  t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
                       cst0, build_int_cst (TREE_TYPE (cst0), 1));
  if (code0 != INTEGER_CST)
    t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
  t = fold_convert (TREE_TYPE (arg1), t);

  /* If swapping might yield a more canonical form, do so.  */
  if (swap)
    return fold_build2_loc (loc, swap_tree_comparison (code), type, arg1, t);
  else
    return fold_build2_loc (loc, code, type, t, arg1);
}

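/* Worked example: for signed x with undefined signed overflow,
   x + 3 > y is canonicalized to x + 2 >= y (the A + CST > arg1 case,
   which also sets *STRICT_OVERFLOW_P), and the constant-only form
   5 <= y first becomes 4 < y and is then swapped to the more
   canonical y > 4.  */
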
/* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
   overflow further.  Try to decrease the magnitude of constants involved
   by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
   and put sole constants at the second argument position.
   Returns the canonicalized tree if changed, otherwise NULL_TREE.  */

static tree
maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
                               tree arg0, tree arg1)
{
  tree t;
  bool strict_overflow_p;
  const char * const warnmsg = G_("assuming signed overflow does not occur "
                                  "when reducing constant in comparison");

  /* Try canonicalization by simplifying arg0.  */
  strict_overflow_p = false;
  t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
                                       &strict_overflow_p);
  if (t)
    {
      if (strict_overflow_p)
        fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
      return t;
    }

  /* Try canonicalization by simplifying arg1 using the swapped
     comparison.  */
  code = swap_tree_comparison (code);
  strict_overflow_p = false;
  t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
                                       &strict_overflow_p);
  if (t && strict_overflow_p)
    fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
  return t;
}

/* Return whether BASE + OFFSET + BITPOS may wrap around the address
   space.  This is used to avoid issuing overflow warnings for
   expressions like &p->x which cannot wrap.  */

static bool
pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
{
  unsigned HOST_WIDE_INT offset_low, total_low;
  HOST_WIDE_INT size, offset_high, total_high;

  if (!POINTER_TYPE_P (TREE_TYPE (base)))
    return true;

  if (bitpos < 0)
    return true;

  if (offset == NULL_TREE)
    {
      offset_low = 0;
      offset_high = 0;
    }
  else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
    return true;
  else
    {
      offset_low = TREE_INT_CST_LOW (offset);
      offset_high = TREE_INT_CST_HIGH (offset);
    }

  if (add_double_with_sign (offset_low, offset_high,
                            bitpos / BITS_PER_UNIT, 0,
                            &total_low, &total_high,
                            true))
    return true;

  if (total_high != 0)
    return true;

  size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
  if (size <= 0)
    return true;

  /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
     array.  */
  if (TREE_CODE (base) == ADDR_EXPR)
    {
      HOST_WIDE_INT base_size;

      base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
      if (base_size > 0 && size < base_size)
        size = base_size;
    }

  return total_low > (unsigned HOST_WIDE_INT) size;
}

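/* For instance, for struct S { int a; int b; } *p on a typical
   32-bit-int target, the address &p->b has a NULL offset and bitpos 32,
   so the function compares the 4-byte total against sizeof (struct S)
   and returns false: forming &p->b cannot wrap around the address
   space, and no overflow warning needs to be issued for it.  */
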
/* Subroutine of fold_binary.  This routine performs all of the
   transformations that are common to the equality/inequality
   operators (EQ_EXPR and NE_EXPR) and the ordering operators
   (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR).  Callers other than
   fold_binary should call fold_binary rather than this function
   directly.  Fold a comparison with tree code CODE and type TYPE
   with operands OP0 and OP1.  Return the folded comparison or
   NULL_TREE.  */

static tree
fold_comparison (location_t loc, enum tree_code code, tree type,
                 tree op0, tree op1)
{
  tree arg0, arg1, tem;

  arg0 = op0;
  arg1 = op1;

  STRIP_SIGN_NOPS (arg0);
  STRIP_SIGN_NOPS (arg1);

  tem = fold_relational_const (code, type, arg0, arg1);
  if (tem != NULL_TREE)
    return tem;

  /* If one arg is a real or integer constant, put it last.  */
  if (tree_swap_operands_p (arg0, arg1, true))
    return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);

  /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1.  */
  if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
      && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
          && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
          && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
      && (TREE_CODE (arg1) == INTEGER_CST
          && !TREE_OVERFLOW (arg1)))
    {
      tree const1 = TREE_OPERAND (arg0, 1);
      tree const2 = arg1;
      tree variable = TREE_OPERAND (arg0, 0);
      tree lhs;
      int lhs_add;
      lhs_add = TREE_CODE (arg0) != PLUS_EXPR;

      lhs = fold_build2_loc (loc, lhs_add ? PLUS_EXPR : MINUS_EXPR,
                             TREE_TYPE (arg1), const2, const1);

      /* If the constant operation overflowed this can be
         simplified as a comparison against INT_MAX/INT_MIN.  */
      if (TREE_CODE (lhs) == INTEGER_CST
          && TREE_OVERFLOW (lhs))
        {
          int const1_sgn = tree_int_cst_sgn (const1);
          enum tree_code code2 = code;

          /* Get the sign of the constant on the lhs if the
             operation were VARIABLE + CONST1.  */
          if (TREE_CODE (arg0) == MINUS_EXPR)
            const1_sgn = -const1_sgn;

          /* The sign of the constant determines if we overflowed
             INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
             Canonicalize to the INT_MIN overflow by swapping the comparison
             if necessary.  */
          if (const1_sgn == -1)
            code2 = swap_tree_comparison (code);

          /* We now can look at the canonicalized case
               VARIABLE + 1  CODE2  INT_MIN
             and decide on the result.  */
          if (code2 == LT_EXPR
              || code2 == LE_EXPR
              || code2 == EQ_EXPR)
            return omit_one_operand_loc (loc, type, boolean_false_node, variable);
          else if (code2 == NE_EXPR
                   || code2 == GE_EXPR
                   || code2 == GT_EXPR)
            return omit_one_operand_loc (loc, type, boolean_true_node, variable);
        }

      if (TREE_CODE (lhs) == TREE_CODE (arg1)
          && (TREE_CODE (lhs) != INTEGER_CST
              || !TREE_OVERFLOW (lhs)))
        {
          if (code != EQ_EXPR && code != NE_EXPR)
            fold_overflow_warning ("assuming signed overflow does not occur "
                                   "when changing X +- C1 cmp C2 to "
                                   "X cmp C1 +- C2",
                                   WARN_STRICT_OVERFLOW_COMPARISON);
          return fold_build2_loc (loc, code, type, variable, lhs);
        }
    }

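  /* For example, x + 10 < 5 with signed x becomes x < -5: the revised
     constant 5 - 10 = -5 does not overflow, so the fold fires (and,
     for ordering comparisons, records that it assumes signed overflow
     is undefined).  When the constant operation does overflow, the
     branch above instead resolves the comparison against INT_MAX or
     INT_MIN outright.  */
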
  /* For comparisons of pointers we can decompose them to a compile time
     comparison of the base objects and the offsets into the object.
     This requires at least one operand being an ADDR_EXPR or a
     POINTER_PLUS_EXPR to do more than the operand_equal_p test below.  */
  if (POINTER_TYPE_P (TREE_TYPE (arg0))
      && (TREE_CODE (arg0) == ADDR_EXPR
          || TREE_CODE (arg1) == ADDR_EXPR
          || TREE_CODE (arg0) == POINTER_PLUS_EXPR
          || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
    {
      tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
      HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
      enum machine_mode mode;
      int volatilep, unsignedp;
      bool indirect_base0 = false, indirect_base1 = false;

      /* Get base and offset for the access.  Strip ADDR_EXPR for
         get_inner_reference, but put it back by stripping INDIRECT_REF
         off the base object if possible.  indirect_baseN will be true
         if baseN is not an address but refers to the object itself.  */
      base0 = arg0;
      if (TREE_CODE (arg0) == ADDR_EXPR)
        {
          base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
                                       &bitsize, &bitpos0, &offset0, &mode,
                                       &unsignedp, &volatilep, false);
          if (TREE_CODE (base0) == INDIRECT_REF)
            base0 = TREE_OPERAND (base0, 0);
          else
            indirect_base0 = true;
        }
      else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
        {
          base0 = TREE_OPERAND (arg0, 0);
          STRIP_SIGN_NOPS (base0);
          if (TREE_CODE (base0) == ADDR_EXPR)
            {
              base0 = TREE_OPERAND (base0, 0);
              indirect_base0 = true;
            }
          offset0 = TREE_OPERAND (arg0, 1);
          if (host_integerp (offset0, 0))
            {
              HOST_WIDE_INT off = size_low_cst (offset0);
              if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
                                   * BITS_PER_UNIT)
                  / BITS_PER_UNIT == (HOST_WIDE_INT) off)
                {
                  bitpos0 = off * BITS_PER_UNIT;
                  offset0 = NULL_TREE;
                }
            }
        }

      base1 = arg1;
      if (TREE_CODE (arg1) == ADDR_EXPR)
        {
          base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
                                       &bitsize, &bitpos1, &offset1, &mode,
                                       &unsignedp, &volatilep, false);
          if (TREE_CODE (base1) == INDIRECT_REF)
            base1 = TREE_OPERAND (base1, 0);
          else
            indirect_base1 = true;
        }
      else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
        {
          base1 = TREE_OPERAND (arg1, 0);
          STRIP_SIGN_NOPS (base1);
          if (TREE_CODE (base1) == ADDR_EXPR)
            {
              base1 = TREE_OPERAND (base1, 0);
              indirect_base1 = true;
            }
          offset1 = TREE_OPERAND (arg1, 1);
          if (host_integerp (offset1, 0))
            {
              HOST_WIDE_INT off = size_low_cst (offset1);
              if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
                                   * BITS_PER_UNIT)
                  / BITS_PER_UNIT == (HOST_WIDE_INT) off)
                {
                  bitpos1 = off * BITS_PER_UNIT;
                  offset1 = NULL_TREE;
                }
            }
        }

      /* A local variable can never be pointed to by
         the default SSA name of an incoming parameter.  */
      if ((TREE_CODE (arg0) == ADDR_EXPR
           && indirect_base0
           && TREE_CODE (base0) == VAR_DECL
           && auto_var_in_fn_p (base0, current_function_decl)
           && !indirect_base1
           && TREE_CODE (base1) == SSA_NAME
           && TREE_CODE (SSA_NAME_VAR (base1)) == PARM_DECL
           && SSA_NAME_IS_DEFAULT_DEF (base1))
          || (TREE_CODE (arg1) == ADDR_EXPR
              && indirect_base1
              && TREE_CODE (base1) == VAR_DECL
              && auto_var_in_fn_p (base1, current_function_decl)
              && !indirect_base0
              && TREE_CODE (base0) == SSA_NAME
              && TREE_CODE (SSA_NAME_VAR (base0)) == PARM_DECL
              && SSA_NAME_IS_DEFAULT_DEF (base0)))
        {
          if (code == NE_EXPR)
            return constant_boolean_node (1, type);
          else if (code == EQ_EXPR)
            return constant_boolean_node (0, type);
        }
      /* If we have equivalent bases we might be able to simplify.  */
      else if (indirect_base0 == indirect_base1
               && operand_equal_p (base0, base1, 0))
        {
          /* We can fold this expression to a constant if the non-constant
             offset parts are equal.  */
          if ((offset0 == offset1
               || (offset0 && offset1
                   && operand_equal_p (offset0, offset1, 0)))
              && (code == EQ_EXPR
                  || code == NE_EXPR
                  || (indirect_base0 && DECL_P (base0))
                  || POINTER_TYPE_OVERFLOW_UNDEFINED))
            {
              if (code != EQ_EXPR
                  && code != NE_EXPR
                  && bitpos0 != bitpos1
                  && (pointer_may_wrap_p (base0, offset0, bitpos0)
                      || pointer_may_wrap_p (base1, offset1, bitpos1)))
                fold_overflow_warning (("assuming pointer wraparound does not "
                                        "occur when comparing P +- C1 with "
                                        "P +- C2"),
                                       WARN_STRICT_OVERFLOW_CONDITIONAL);

              switch (code)
                {
                case EQ_EXPR:
                  return constant_boolean_node (bitpos0 == bitpos1, type);
                case NE_EXPR:
                  return constant_boolean_node (bitpos0 != bitpos1, type);
                case LT_EXPR:
                  return constant_boolean_node (bitpos0 < bitpos1, type);
                case LE_EXPR:
                  return constant_boolean_node (bitpos0 <= bitpos1, type);
                case GE_EXPR:
                  return constant_boolean_node (bitpos0 >= bitpos1, type);
                case GT_EXPR:
                  return constant_boolean_node (bitpos0 > bitpos1, type);
                default:;
                }
            }
          /* We can simplify the comparison to a comparison of the variable
             offset parts if the constant offset parts are equal.
             Be careful to use signed size type here because otherwise we
             mess with array offsets in the wrong way.  This is possible
             because pointer arithmetic is restricted to remain within an
             object and overflow on pointer differences is undefined as of
             6.5.6/8 and /9 with respect to the signed ptrdiff_t.  */
          else if (bitpos0 == bitpos1
                   && ((code == EQ_EXPR || code == NE_EXPR)
                       || (indirect_base0 && DECL_P (base0))
                       || POINTER_TYPE_OVERFLOW_UNDEFINED))
            {
              /* By converting to signed size type we cover middle-end pointer
                 arithmetic which operates on unsigned pointer types of size
                 type size and ARRAY_REF offsets which are properly sign or
                 zero extended from their type in case it is narrower than
                 size type.  */
              if (offset0 == NULL_TREE)
                offset0 = build_int_cst (ssizetype, 0);
              else
                offset0 = fold_convert_loc (loc, ssizetype, offset0);
              if (offset1 == NULL_TREE)
                offset1 = build_int_cst (ssizetype, 0);
              else
                offset1 = fold_convert_loc (loc, ssizetype, offset1);

              if (code != EQ_EXPR
                  && code != NE_EXPR
                  && (pointer_may_wrap_p (base0, offset0, bitpos0)
                      || pointer_may_wrap_p (base1, offset1, bitpos1)))
                fold_overflow_warning (("assuming pointer wraparound does not "
                                        "occur when comparing P +- C1 with "
                                        "P +- C2"),
                                       WARN_STRICT_OVERFLOW_COMPARISON);

              return fold_build2_loc (loc, code, type, offset0, offset1);
            }
        }
      /* For non-equal bases we can simplify if they are addresses
         of local binding decls or constants.  */
      else if (indirect_base0 && indirect_base1
               /* We know that !operand_equal_p (base0, base1, 0)
                  because the if condition was false.  But make
                  sure two decls are not the same.  */
               && base0 != base1
               && TREE_CODE (arg0) == ADDR_EXPR
               && TREE_CODE (arg1) == ADDR_EXPR
               && (((TREE_CODE (base0) == VAR_DECL
                     || TREE_CODE (base0) == PARM_DECL)
                    && (targetm.binds_local_p (base0)
                        || CONSTANT_CLASS_P (base1)))
                   || CONSTANT_CLASS_P (base0))
               && (((TREE_CODE (base1) == VAR_DECL
                     || TREE_CODE (base1) == PARM_DECL)
                    && (targetm.binds_local_p (base1)
                        || CONSTANT_CLASS_P (base0)))
                   || CONSTANT_CLASS_P (base1)))
        {
          if (code == EQ_EXPR)
            return omit_two_operands_loc (loc, type, boolean_false_node,
                                          arg0, arg1);
          else if (code == NE_EXPR)
            return omit_two_operands_loc (loc, type, boolean_true_node,
                                          arg0, arg1);
        }
      /* For equal offsets we can simplify to a comparison of the
         base addresses.  */
      else if (bitpos0 == bitpos1
               && (indirect_base0
                   ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
               && (indirect_base1
                   ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
               && ((offset0 == offset1)
                   || (offset0 && offset1
                       && operand_equal_p (offset0, offset1, 0))))
        {
          if (indirect_base0)
            base0 = build_fold_addr_expr_loc (loc, base0);
          if (indirect_base1)
            base1 = build_fold_addr_expr_loc (loc, base1);
          return fold_build2_loc (loc, code, type, base0, base1);
        }
    }

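  /* For example, with int a[10] on a 32-bit-int target, the comparison
     &a[1] < &a[3] has equal bases (the VAR_DECL a), NULL variable
     offsets, and bit positions 32 and 96, so it folds to the constant
     true via the bitpos0 < bitpos1 case above.  */
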
  /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
     X CMP Y +- C2 +- C1 for signed X, Y.  This is valid if
     the resulting offset is smaller in absolute value than the
     original one.  */
  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
      && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
      && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
          && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
      && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
      && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
          && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
    {
      tree const1 = TREE_OPERAND (arg0, 1);
      tree const2 = TREE_OPERAND (arg1, 1);
      tree variable1 = TREE_OPERAND (arg0, 0);
      tree variable2 = TREE_OPERAND (arg1, 0);
      tree cst;
      const char * const warnmsg = G_("assuming signed overflow does not "
                                      "occur when combining constants around "
                                      "a comparison");

      /* Put the constant on the side where it doesn't overflow and is
         of lower absolute value than before.  */
      cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
                             ? MINUS_EXPR : PLUS_EXPR,
                             const2, const1);
      if (!TREE_OVERFLOW (cst)
          && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
        {
          fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
          return fold_build2_loc (loc, code, type,
                                  variable1,
                                  fold_build2_loc (loc,
                                                   TREE_CODE (arg1), TREE_TYPE (arg1),
                                                   variable2, cst));
        }

      cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
                             ? MINUS_EXPR : PLUS_EXPR,
                             const1, const2);
      if (!TREE_OVERFLOW (cst)
          && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
        {
          fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
          return fold_build2_loc (loc, code, type,
                                  fold_build2_loc (loc, TREE_CODE (arg0), TREE_TYPE (arg0),
                                                   variable1, cst),
                                  variable2);
        }
    }

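  /* For example, x + 10 < y + 3 with signed x and y becomes x < y - 7:
     the combined constant 3 - 10 = -7 does not overflow and passes the
     magnitude check, so the fold fires after recording (via
     fold_overflow_warning) that it assumes signed overflow is
     undefined.  */
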
  /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
     signed arithmetic case.  That form is created by the compiler
     often enough for folding it to be of value.  One example is in
     computing loop trip counts after Operator Strength Reduction.  */
  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
      && TREE_CODE (arg0) == MULT_EXPR
      && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
          && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
      && integer_zerop (arg1))
    {
      tree const1 = TREE_OPERAND (arg0, 1);
      tree const2 = arg1;                       /* zero */
      tree variable1 = TREE_OPERAND (arg0, 0);
      enum tree_code cmp_code = code;

      /* Handle unfolded multiplication by zero.  */
      if (integer_zerop (const1))
        return fold_build2_loc (loc, cmp_code, type, const1, const2);

      fold_overflow_warning (("assuming signed overflow does not occur when "
                              "eliminating multiplication in comparison "
                              "with zero"),
                             WARN_STRICT_OVERFLOW_COMPARISON);

      /* If const1 is negative we swap the sense of the comparison.  */
      if (tree_int_cst_sgn (const1) < 0)
        cmp_code = swap_tree_comparison (cmp_code);

      return fold_build2_loc (loc, cmp_code, type, variable1, const2);
    }

  tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
  if (tem)
    return tem;

  if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
    {
      tree targ0 = strip_float_extensions (arg0);
      tree targ1 = strip_float_extensions (arg1);
      tree newtype = TREE_TYPE (targ0);

      if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
        newtype = TREE_TYPE (targ1);

      /* Fold (double)float1 CMP (double)float2 into float1 CMP float2.  */
      if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
        return fold_build2_loc (loc, code, type,
                                fold_convert_loc (loc, newtype, targ0),
                                fold_convert_loc (loc, newtype, targ1));

      /* (-a) CMP (-b) -> b CMP a  */
      if (TREE_CODE (arg0) == NEGATE_EXPR
          && TREE_CODE (arg1) == NEGATE_EXPR)
        return fold_build2_loc (loc, code, type, TREE_OPERAND (arg1, 0),
                                TREE_OPERAND (arg0, 0));

      if (TREE_CODE (arg1) == REAL_CST)
        {
          REAL_VALUE_TYPE cst;
          cst = TREE_REAL_CST (arg1);

          /* (-a) CMP CST -> a swap(CMP) (-CST)  */
          if (TREE_CODE (arg0) == NEGATE_EXPR)
            return fold_build2_loc (loc, swap_tree_comparison (code), type,
                                    TREE_OPERAND (arg0, 0),
                                    build_real (TREE_TYPE (arg1),
                                                real_value_negate (&cst)));

          /* IEEE doesn't distinguish +0 and -0 in comparisons.  */
          /* a CMP (-0) -> a CMP 0  */
          if (REAL_VALUE_MINUS_ZERO (cst))
            return fold_build2_loc (loc, code, type, arg0,
                                    build_real (TREE_TYPE (arg1), dconst0));

          /* x != NaN is always true, other ops are always false.  */
          if (REAL_VALUE_ISNAN (cst)
              && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
            {
              tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
              return omit_one_operand_loc (loc, type, tem, arg0);
            }

          /* Fold comparisons against infinity.  */
          if (REAL_VALUE_ISINF (cst)
              && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))))
            {
              tem = fold_inf_compare (loc, code, type, arg0, arg1);
              if (tem != NULL_TREE)
                return tem;
            }
        }

      /* If this is a comparison of a real constant with a PLUS_EXPR
         or a MINUS_EXPR of a real constant, we can convert it into a
         comparison with a revised real constant as long as no overflow
         occurs when unsafe_math_optimizations are enabled.  */
      if (flag_unsafe_math_optimizations
          && TREE_CODE (arg1) == REAL_CST
          && (TREE_CODE (arg0) == PLUS_EXPR
              || TREE_CODE (arg0) == MINUS_EXPR)
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
          && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
                                      ? MINUS_EXPR : PLUS_EXPR,
                                      arg1, TREE_OPERAND (arg0, 1)))
          && !TREE_OVERFLOW (tem))
        return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);

      /* Likewise, we can simplify a comparison of a real constant with
         a MINUS_EXPR whose first operand is also a real constant, i.e.
         (c1 - x) < c2 becomes x > c1-c2.  Reordering is allowed on
         floating-point types only if -fassociative-math is set.  */
      if (flag_associative_math
          && TREE_CODE (arg1) == REAL_CST
          && TREE_CODE (arg0) == MINUS_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
          && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
                                      arg1))
          && !TREE_OVERFLOW (tem))
        return fold_build2_loc (loc, swap_tree_comparison (code), type,
                                TREE_OPERAND (arg0, 1), tem);

      /* Fold comparisons against built-in math functions.  */
      if (TREE_CODE (arg1) == REAL_CST
          && flag_unsafe_math_optimizations
          && ! flag_errno_math)
        {
          enum built_in_function fcode = builtin_mathfn_code (arg0);

          if (fcode != END_BUILTINS)
            {
              tem = fold_mathfn_compare (loc, fcode, code, type, arg0, arg1);
              if (tem != NULL_TREE)
                return tem;
            }
        }
    }

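  /* Two examples from the folds above: with undefined signed overflow,
     x * 4 > 0 simplifies to x > 0 and x * -4 > 0 to x < 0 (the sense
     of the comparison flips for a negative multiplier); and for
     float f1, f2, the widened comparison (double) f1 < (double) f2 is
     done as f1 < f2 directly in the narrower type.  */
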
  if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
      && CONVERT_EXPR_P (arg0))
    {
      /* If we are widening one operand of an integer comparison,
         see if the other operand is similarly being widened.  Perhaps we
         can do the comparison in the narrower type.  */
      tem = fold_widened_comparison (loc, code, type, arg0, arg1);
      if (tem)
        return tem;

      /* Or if we are changing signedness.  */
      tem = fold_sign_changed_comparison (loc, code, type, arg0, arg1);
      if (tem)
        return tem;
    }

  /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
     constant, we can simplify it.  */
  if (TREE_CODE (arg1) == INTEGER_CST
      && (TREE_CODE (arg0) == MIN_EXPR
          || TREE_CODE (arg0) == MAX_EXPR)
      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
    {
      tem = optimize_minmax_comparison (loc, code, type, op0, op1);
      if (tem)
        return tem;
    }

  /* Simplify comparison of something with itself.  (For IEEE
     floating-point, we can only do some of these simplifications.)  */
  if (operand_equal_p (arg0, arg1, 0))
    {
      switch (code)
        {
        case EQ_EXPR:
          if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
              || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
            return constant_boolean_node (1, type);
          break;

        case GE_EXPR:
        case LE_EXPR:
          if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
              || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
            return constant_boolean_node (1, type);
          return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg1);

        case NE_EXPR:
          /* For NE, we can only do this simplification if the operands
             are integer or we don't honor IEEE floating-point NaNs.  */
          if (FLOAT_TYPE_P (TREE_TYPE (arg0))
              && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
            break;
          /* ... fall through ...  */
        case GT_EXPR:
        case LT_EXPR:
          return constant_boolean_node (0, type);
        default:
          gcc_unreachable ();
        }
    }

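  /* For example, x >= x folds to the constant true for integers, while
     for IEEE floats that honor NaNs it instead folds to x == x, which
     still evaluates to false when x is a NaN.  */
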
  /* If we are comparing an expression that just has comparisons
     of two integer values, arithmetic expressions of those comparisons,
     and constants, we can simplify it.  There are only three cases
     to check: the two values can either be equal, the first can be
     greater, or the second can be greater.  Fold the expression for
     those three values.  Since each value must be 0 or 1, we have
     eight possibilities, each of which corresponds to the constant 0
     or 1 or one of the six possible comparisons.

     This handles common cases like (a > b) == 0 but also handles
     expressions like ((x > y) - (y > x)) > 0, which supposedly
     occur in macroized code.  */

  if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
    {
      tree cval1 = 0, cval2 = 0;
      int save_p = 0;

      if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
          /* Don't handle degenerate cases here; they should already
             have been handled anyway.  */
          && cval1 != 0 && cval2 != 0
          && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
          && TREE_TYPE (cval1) == TREE_TYPE (cval2)
          && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
          && TYPE_MAX_VALUE (TREE_TYPE (cval1))
          && TYPE_MAX_VALUE (TREE_TYPE (cval2))
          && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
                                TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
        {
          tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
          tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));

          /* We can't just pass T to eval_subst in case cval1 or cval2
             was the same as ARG1.  */

          tree high_result
            = fold_build2_loc (loc, code, type,
                               eval_subst (loc, arg0, cval1, maxval,
                                           cval2, minval),
                               arg1);
          tree equal_result
            = fold_build2_loc (loc, code, type,
                               eval_subst (loc, arg0, cval1, maxval,
                                           cval2, maxval),
                               arg1);
          tree low_result
            = fold_build2_loc (loc, code, type,
                               eval_subst (loc, arg0, cval1, minval,
                                           cval2, maxval),
                               arg1);

          /* All three of these results should be 0 or 1.  Confirm they are.
             Then use those values to select the proper code to use.  */

          if (TREE_CODE (high_result) == INTEGER_CST
              && TREE_CODE (equal_result) == INTEGER_CST
              && TREE_CODE (low_result) == INTEGER_CST)
            {
              /* Make a 3-bit mask with the high-order bit being the
                 value for `>', the next for `=', and the low for `<'.  */
              switch ((integer_onep (high_result) * 4)
                      + (integer_onep (equal_result) * 2)
                      + integer_onep (low_result))
                {
                case 0:
                  /* Always false.  */
                  return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
                case 1:
                  code = LT_EXPR;
                  break;
                case 2:
                  code = EQ_EXPR;
                  break;
                case 3:
                  code = LE_EXPR;
                  break;
                case 4:
                  code = GT_EXPR;
                  break;
                case 5:
                  code = NE_EXPR;
                  break;
                case 6:
                  code = GE_EXPR;
                  break;
                case 7:
                  /* Always true.  */
                  return omit_one_operand_loc (loc, type, integer_one_node, arg0);
                }

              if (save_p)
                {
                  tem = save_expr (build2 (code, type, cval1, cval2));
                  SET_EXPR_LOCATION (tem, loc);
                  return tem;
                }
              return fold_build2_loc (loc, code, type, cval1, cval2);
            }
        }
    }

  /* We can fold X/C1 op C2 where C1 and C2 are integer constants
     into a single range test.  */
  if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
       || TREE_CODE (arg0) == EXACT_DIV_EXPR)
      && TREE_CODE (arg1) == INTEGER_CST
      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
      && !integer_zerop (TREE_OPERAND (arg0, 1))
      && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
      && !TREE_OVERFLOW (arg1))
    {
      tem = fold_div_compare (loc, code, type, arg0, arg1);
      if (tem != NULL_TREE)
        return tem;
    }

  /* Fold ~X op ~Y as Y op X.  */
  if (TREE_CODE (arg0) == BIT_NOT_EXPR
      && TREE_CODE (arg1) == BIT_NOT_EXPR)
    {
      tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
      return fold_build2_loc (loc, code, type,
                              fold_convert_loc (loc, cmp_type,
                                                TREE_OPERAND (arg1, 0)),
                              TREE_OPERAND (arg0, 0));
    }

  /* Fold ~X op C as X op' ~C, where op' is the swapped comparison.  */
  if (TREE_CODE (arg0) == BIT_NOT_EXPR
      && TREE_CODE (arg1) == INTEGER_CST)
    {
      tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
      return fold_build2_loc (loc, swap_tree_comparison (code), type,
                              TREE_OPERAND (arg0, 0),
                              fold_build1_loc (loc, BIT_NOT_EXPR, cmp_type,
                                               fold_convert_loc (loc, cmp_type, arg1)));
    }

  return NULL_TREE;
}

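/* Tracing the 3-bit mask logic above on (a > b) == 0: substituting
   (max, min), (max, max) and (min, max) for (a, b) gives high_result
   = 0, equal_result = 1 and low_result = 1, i.e. mask 0*4 + 1*2 + 1
   = 3, which selects LE_EXPR, so the whole expression folds to
   a <= b.  */
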
/* Subroutine of fold_binary.  Optimize complex multiplications of the
   form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2).  The
   argument EXPR represents the expression "z" of type TYPE.  */

static tree
fold_mult_zconjz (location_t loc, tree type, tree expr)
{
  tree itype = TREE_TYPE (type);
  tree rpart, ipart, tem;

  if (TREE_CODE (expr) == COMPLEX_EXPR)
    {
      rpart = TREE_OPERAND (expr, 0);
      ipart = TREE_OPERAND (expr, 1);
    }
  else if (TREE_CODE (expr) == COMPLEX_CST)
    {
      rpart = TREE_REALPART (expr);
      ipart = TREE_IMAGPART (expr);
    }
  else
    {
      expr = save_expr (expr);
      rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
      ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
    }

  rpart = save_expr (rpart);
  ipart = save_expr (ipart);
  tem = fold_build2_loc (loc, PLUS_EXPR, itype,
                         fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
                         fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
  return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
                          build_zero_cst (itype));
}

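/* The identity behind fold_mult_zconjz: for z = a + b*i,
   z * conj(z) = (a + b*i) * (a - b*i) = a*a + b*b, with a zero
   imaginary part, which is exactly the COMPLEX_EXPR built above.  */
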
/* Subroutine of fold_binary.  If P is the value of EXPR, computes
   power-of-two M and (arbitrary) N such that M divides (P-N).  This condition
   guarantees that P and N have the same least significant log2(M) bits.
   N is not otherwise constrained.  In particular, N is not normalized to
   0 <= N < M as is common.  In general, the precise value of P is unknown.
   M is chosen as large as possible such that constant N can be determined.

   Returns M and sets *RESIDUE to N.

   If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
   account.  This is not always possible due to PR 35705.  */

static unsigned HOST_WIDE_INT
get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue,
                                 bool allow_func_align)
{
  enum tree_code code;

  *residue = 0;

  code = TREE_CODE (expr);
  if (code == ADDR_EXPR)
    {
      unsigned int bitalign;
      bitalign = get_object_alignment_1 (TREE_OPERAND (expr, 0), residue);
      *residue /= BITS_PER_UNIT;
      return bitalign / BITS_PER_UNIT;
    }
  else if (code == POINTER_PLUS_EXPR)
    {
      tree op0, op1;
      unsigned HOST_WIDE_INT modulus;
      enum tree_code inner_code;

      op0 = TREE_OPERAND (expr, 0);
      STRIP_NOPS (op0);
      modulus = get_pointer_modulus_and_residue (op0, residue,
                                                 allow_func_align);

      op1 = TREE_OPERAND (expr, 1);
      STRIP_NOPS (op1);
      inner_code = TREE_CODE (op1);
      if (inner_code == INTEGER_CST)
        {
          *residue += TREE_INT_CST_LOW (op1);
          return modulus;
        }
      else if (inner_code == MULT_EXPR)
        {
          op1 = TREE_OPERAND (op1, 1);
          if (TREE_CODE (op1) == INTEGER_CST)
            {
              unsigned HOST_WIDE_INT align;

              /* Compute the greatest power-of-2 divisor of op1.  */
              align = TREE_INT_CST_LOW (op1);
              align &= -align;

              /* If align is non-zero and less than *modulus, replace
                 *modulus with align.  If align is 0, then either op1 is 0
                 or the greatest power-of-2 divisor of op1 doesn't fit in an
                 unsigned HOST_WIDE_INT.  In either case, no additional
                 constraint is imposed.  */
              if (align)
                modulus = MIN (modulus, align);

              return modulus;
            }
        }
    }

  /* If we get here, we were unable to determine anything useful about the
     expression.  */
  return 1;
}

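/* Example: if q is the address of a 16-byte-aligned object, then for
   the expression q + i*8 + 4 the recursion yields modulus 16 and
   residue 0 for q, the MULT_EXPR by 8 lowers the modulus to
   MIN (16, 8) = 8, and the trailing INTEGER_CST adds 4 to the residue,
   so the pointer value is known to be congruent to 4 modulo 8.  */
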
/* Helper function for fold_vec_perm.  Store elements of VECTOR_CST or
   CONSTRUCTOR ARG into array ELTS and return true if successful.  */

static bool
vec_cst_ctor_to_array (tree arg, tree *elts)
{
  unsigned int nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)), i;

  if (TREE_CODE (arg) == VECTOR_CST)
    {
      tree t;

      for (i = 0, t = TREE_VECTOR_CST_ELTS (arg);
           i < nelts && t; i++, t = TREE_CHAIN (t))
        elts[i] = TREE_VALUE (t);
      if (t)
        return false;
    }
  else if (TREE_CODE (arg) == CONSTRUCTOR)
    {
      constructor_elt *elt;

      FOR_EACH_VEC_ELT (constructor_elt, CONSTRUCTOR_ELTS (arg), i, elt)
        if (i >= nelts)
          return false;
        else
          elts[i] = elt->value;
    }
  else
    return false;
  for (; i < nelts; i++)
    elts[i]
      = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
  return true;
}

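/* Note the helper accepts short inputs: converting, say, a 4-element
   vector written as the CONSTRUCTOR { 1, 2 } fills elts[0..1] from the
   constructor and zero-pads elts[2..3], while an input with more than
   NELTS elements makes it return false.  */
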
/* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
|
9630 |
|
|
selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
|
9631 |
|
|
NULL_TREE otherwise. */
|
9632 |
|
|
|
9633 |
|
|
static tree
|
9634 |
|
|
fold_vec_perm (tree type, tree arg0, tree arg1, const unsigned char *sel)
|
9635 |
|
|
{
|
9636 |
|
|
unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
|
9637 |
|
|
tree *elts;
|
9638 |
|
|
bool need_ctor = false;
|
9639 |
|
|
|
9640 |
|
|
gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
|
9641 |
|
|
&& TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts);
|
9642 |
|
|
if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
|
9643 |
|
|
|| TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
|
9644 |
|
|
return NULL_TREE;
|
9645 |
|
|
|
9646 |
|
|
elts = XALLOCAVEC (tree, nelts * 3);
|
9647 |
|
|
if (!vec_cst_ctor_to_array (arg0, elts)
|
9648 |
|
|
|| !vec_cst_ctor_to_array (arg1, elts + nelts))
|
9649 |
|
|
return NULL_TREE;
|
9650 |
|
|
|
9651 |
|
|
for (i = 0; i < nelts; i++)
|
9652 |
|
|
{
|
9653 |
|
|
if (!CONSTANT_CLASS_P (elts[sel[i]]))
|
9654 |
|
|
need_ctor = true;
|
9655 |
|
|
elts[i + 2 * nelts] = unshare_expr (elts[sel[i]]);
|
9656 |
|
|
}
|
9657 |
|
|
|
9658 |
|
|
if (need_ctor)
|
9659 |
|
|
{
|
9660 |
|
|
VEC(constructor_elt,gc) *v = VEC_alloc (constructor_elt, gc, nelts);
|
9661 |
|
|
for (i = 0; i < nelts; i++)
|
9662 |
|
|
CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[2 * nelts + i]);
|
9663 |
|
|
return build_constructor (type, v);
|
9664 |
|
|
}
|
9665 |
|
|
else
|
9666 |
|
|
{
|
9667 |
|
|
tree vals = NULL_TREE;
|
9668 |
|
|
for (i = 0; i < nelts; i++)
|
9669 |
|
|
vals = tree_cons (NULL_TREE, elts[3 * nelts - i - 1], vals);
|
9670 |
|
|
return build_vector (type, vals);
|
9671 |
|
|
}
|
9672 |
|
|
}
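
/* Illustrative sketch: ELTS holds the concatenation ARG0 | ARG1 in its
   first 2 * nelts slots, so SEL indexes into that combined array.
   With nelts == 4, ARG0 = { a0, a1, a2, a3 }, ARG1 = { b0, b1, b2, b3 }
   and SEL = { 0, 4, 1, 5 }, the result is the interleave
   { a0, b0, a1, b1 }.  */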

/* Try to fold a pointer difference of type TYPE of two address expressions
   of array references AREF0 and AREF1 using location LOC.  Return a
   simplified expression for the difference or NULL_TREE.  */

static tree
fold_addr_of_array_ref_difference (location_t loc, tree type,
                                   tree aref0, tree aref1)
{
  tree base0 = TREE_OPERAND (aref0, 0);
  tree base1 = TREE_OPERAND (aref1, 0);
  tree base_offset = build_int_cst (type, 0);

  /* If the bases are array references as well, recurse.  If the bases
     are pointer indirections compute the difference of the pointers.
     If the bases are equal, we are set.  */
  if ((TREE_CODE (base0) == ARRAY_REF
       && TREE_CODE (base1) == ARRAY_REF
       && (base_offset
           = fold_addr_of_array_ref_difference (loc, type, base0, base1)))
      || (INDIRECT_REF_P (base0)
          && INDIRECT_REF_P (base1)
          && (base_offset = fold_binary_loc (loc, MINUS_EXPR, type,
                                             TREE_OPERAND (base0, 0),
                                             TREE_OPERAND (base1, 0))))
      || operand_equal_p (base0, base1, 0))
    {
      tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
      tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
      tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
      tree diff = build2 (MINUS_EXPR, type, op0, op1);
      return fold_build2_loc (loc, PLUS_EXPR, type,
                              base_offset,
                              fold_build2_loc (loc, MULT_EXPR, type,
                                               diff, esz));
    }
  return NULL_TREE;
}
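
/* Illustrative sketch: for "int a[8];", the byte difference
   &a[i] - &a[j] folds here to (i - j) * sizeof (int).  For nested
   ARRAY_REFs such as &a[i][2] - &a[j][1], the recursion first computes
   the outer row difference as BASE_OFFSET and then adds the scaled
   inner index difference.  */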

/* Fold a binary expression of code CODE and type TYPE with operands
   OP0 and OP1.  LOC is the location of the resulting expression.
   Return the folded expression if folding is successful.  Otherwise,
   return NULL_TREE.  */

tree
fold_binary_loc (location_t loc,
                 enum tree_code code, tree type, tree op0, tree op1)
{
  enum tree_code_class kind = TREE_CODE_CLASS (code);
  tree arg0, arg1, tem;
  tree t1 = NULL_TREE;
  bool strict_overflow_p;

  gcc_assert (IS_EXPR_CODE_CLASS (kind)
              && TREE_CODE_LENGTH (code) == 2
              && op0 != NULL_TREE
              && op1 != NULL_TREE);

  arg0 = op0;
  arg1 = op1;

  /* Strip any conversions that don't change the mode.  This is
     safe for every expression, except for a comparison expression
     because its signedness is derived from its operands.  So, in
     the latter case, only strip conversions that don't change the
     signedness.  MIN_EXPR/MAX_EXPR also need the signedness of their
     arguments preserved.

     Note that this is done as an internal manipulation within the
     constant folder, in order to find the simplest representation
     of the arguments so that their form can be studied.  In any
     case, the appropriate type conversions should be put back in
     the tree that will get out of the constant folder.  */

  if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
    {
      STRIP_SIGN_NOPS (arg0);
      STRIP_SIGN_NOPS (arg1);
    }
  else
    {
      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);
    }

  /* Note that TREE_CONSTANT isn't enough: static var addresses are
     constant but we can't do arithmetic on them.  */
  if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
      || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
      || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
      || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
      || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
      || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
    {
      if (kind == tcc_binary)
        {
          /* Make sure type and arg0 have the same saturating flag.  */
          gcc_assert (TYPE_SATURATING (type)
                      == TYPE_SATURATING (TREE_TYPE (arg0)));
          tem = const_binop (code, arg0, arg1);
        }
      else if (kind == tcc_comparison)
        tem = fold_relational_const (code, type, arg0, arg1);
      else
        tem = NULL_TREE;

      if (tem != NULL_TREE)
        {
          if (TREE_TYPE (tem) != type)
            tem = fold_convert_loc (loc, type, tem);
          return tem;
        }
    }
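
  /* Illustrative sketch: this is the path that collapses fully constant
     operands into a single node, e.g. 2 + 3 becomes the INTEGER_CST 5
     via const_binop, and 2 < 3 becomes boolean true via
     fold_relational_const.  */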

  /* If this is a commutative operation, and ARG0 is a constant, move it
     to ARG1 to reduce the number of tests below.  */
  if (commutative_tree_code (code)
      && tree_swap_operands_p (arg0, arg1, true))
    return fold_build2_loc (loc, code, type, op1, op0);

  /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.

     First check for cases where an arithmetic operation is applied to a
     compound, conditional, or comparison operation.  Push the arithmetic
     operation inside the compound or conditional to see if any folding
     can then be done.  Convert comparison to conditional for this purpose.
     This also optimizes non-constant cases that used to be done in
     expand_expr.

     Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR
     where one of the operands is a comparison and the other is a
     comparison, a BIT_AND_EXPR with the constant 1, or a truth value.
     In that case, the code below would make the expression more complex.
     Change it to a TRUTH_{AND,OR}_EXPR.  Likewise, convert a similar
     NE_EXPR to TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a
     TRUTH_XOR_EXPR.  */

  if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
       || code == EQ_EXPR || code == NE_EXPR)
      && ((truth_value_p (TREE_CODE (arg0))
           && (truth_value_p (TREE_CODE (arg1))
               || (TREE_CODE (arg1) == BIT_AND_EXPR
                   && integer_onep (TREE_OPERAND (arg1, 1)))))
          || (truth_value_p (TREE_CODE (arg1))
              && (truth_value_p (TREE_CODE (arg0))
                  || (TREE_CODE (arg0) == BIT_AND_EXPR
                      && integer_onep (TREE_OPERAND (arg0, 1)))))))
    {
      tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
                             : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
                             : TRUTH_XOR_EXPR,
                             boolean_type_node,
                             fold_convert_loc (loc, boolean_type_node, arg0),
                             fold_convert_loc (loc, boolean_type_node, arg1));

      if (code == EQ_EXPR)
        tem = invert_truthvalue_loc (loc, tem);

      return fold_convert_loc (loc, type, tem);
    }
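
  /* Illustrative sketch: for truth-valued operands, (a < b) & (c < d)
     becomes (a < b) TRUTH_AND (c < d), and (a < b) == (c < d) becomes
     the inversion of (a < b) TRUTH_XOR (c < d), since two truth values
     are equal exactly when their exclusive or is false.  */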

  if (TREE_CODE_CLASS (code) == tcc_binary
      || TREE_CODE_CLASS (code) == tcc_comparison)
    {
      if (TREE_CODE (arg0) == COMPOUND_EXPR)
        {
          tem = fold_build2_loc (loc, code, type,
                                 fold_convert_loc (loc, TREE_TYPE (op0),
                                                   TREE_OPERAND (arg0, 1)), op1);
          return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
                             tem);
        }
      if (TREE_CODE (arg1) == COMPOUND_EXPR
          && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
        {
          tem = fold_build2_loc (loc, code, type, op0,
                                 fold_convert_loc (loc, TREE_TYPE (op1),
                                                   TREE_OPERAND (arg1, 1)));
          return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
                             tem);
        }

      if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
        {
          tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
                                                     arg0, arg1,
                                                     /*cond_first_p=*/1);
          if (tem != NULL_TREE)
            return tem;
        }

      if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
        {
          tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
                                                     arg1, arg0,
                                                     /*cond_first_p=*/0);
          if (tem != NULL_TREE)
            return tem;
        }
    }
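
  /* Illustrative sketch: pushing the operation inside a compound
     rewrites (a, b) + c as (a, b + c), and pushing it into a
     conditional rewrites (x ? b : c) + d as x ? b + d : c + d, which
     may let the two arms fold further.  */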

  switch (code)
    {
    case MEM_REF:
      /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2].  */
      if (TREE_CODE (arg0) == ADDR_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
        {
          tree iref = TREE_OPERAND (arg0, 0);
          return fold_build2 (MEM_REF, type,
                              TREE_OPERAND (iref, 0),
                              int_const_binop (PLUS_EXPR, arg1,
                                               TREE_OPERAND (iref, 1)));
        }

      /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2].  */
      if (TREE_CODE (arg0) == ADDR_EXPR
          && handled_component_p (TREE_OPERAND (arg0, 0)))
        {
          tree base;
          HOST_WIDE_INT coffset;
          base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
                                                &coffset);
          if (!base)
            return NULL_TREE;
          return fold_build2 (MEM_REF, type,
                              build_fold_addr_expr (base),
                              int_const_binop (PLUS_EXPR, arg1,
                                               size_int (coffset)));
        }

      return NULL_TREE;
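
      /* Illustrative sketch: both rewrites accumulate constant offsets
         into a single MEM_REF.  For a hypothetical struct whose member
         sits at byte offset 8, MEM[&s.member, 4] becomes MEM[&s, 12]:
         one base pointer plus one constant displacement.  */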

    case POINTER_PLUS_EXPR:
      /* 0 +p index -> (type)index */
      if (integer_zerop (arg0))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));

      /* PTR +p 0 -> PTR */
      if (integer_zerop (arg1))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* INT +p INT -> (PTR)(INT + INT).  Stripping types allows for this.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
          && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
        return fold_convert_loc (loc, type,
                                 fold_build2_loc (loc, PLUS_EXPR, sizetype,
                                                  fold_convert_loc (loc, sizetype,
                                                                    arg1),
                                                  fold_convert_loc (loc, sizetype,
                                                                    arg0)));

      /* (PTR +p B) +p A -> PTR +p (B + A) */
      if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
        {
          tree inner;
          tree arg01 = fold_convert_loc (loc, sizetype, TREE_OPERAND (arg0, 1));
          tree arg00 = TREE_OPERAND (arg0, 0);
          inner = fold_build2_loc (loc, PLUS_EXPR, sizetype,
                                   arg01, fold_convert_loc (loc, sizetype, arg1));
          return fold_convert_loc (loc, type,
                                   fold_build_pointer_plus_loc (loc,
                                                                arg00, inner));
        }

      /* PTR_CST +p CST -> CST1 */
      if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
        return fold_build2_loc (loc, PLUS_EXPR, type, arg0,
                                fold_convert_loc (loc, type, arg1));

      /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is the step
         of the array.  The loop optimizer sometimes produces this type of
         expression.  */
      if (TREE_CODE (arg0) == ADDR_EXPR)
        {
          tem = try_move_mult_to_index (loc, arg0,
                                        fold_convert_loc (loc, sizetype, arg1));
          if (tem)
            return fold_convert_loc (loc, type, tem);
        }

      return NULL_TREE;
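
      /* Illustrative sketch: the association rule folds (p +p 4) +p 8
         into p +p 12, and for "int a[16];" the address &a[2] +p 4 * i
         (4 being the element size) can become &a[2 + i] via
         try_move_mult_to_index.  */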

    case PLUS_EXPR:
      /* A + (-B) -> A - B */
      if (TREE_CODE (arg1) == NEGATE_EXPR)
        return fold_build2_loc (loc, MINUS_EXPR, type,
                                fold_convert_loc (loc, type, arg0),
                                fold_convert_loc (loc, type,
                                                  TREE_OPERAND (arg1, 0)));
      /* (-A) + B -> B - A */
      if (TREE_CODE (arg0) == NEGATE_EXPR
          && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
        return fold_build2_loc (loc, MINUS_EXPR, type,
                                fold_convert_loc (loc, type, arg1),
                                fold_convert_loc (loc, type,
                                                  TREE_OPERAND (arg0, 0)));

      if (INTEGRAL_TYPE_P (type))
        {
          /* Convert ~A + 1 to -A.  */
          if (TREE_CODE (arg0) == BIT_NOT_EXPR
              && integer_onep (arg1))
            return fold_build1_loc (loc, NEGATE_EXPR, type,
                                    fold_convert_loc (loc, type,
                                                      TREE_OPERAND (arg0, 0)));

          /* ~X + X is -1.  */
          if (TREE_CODE (arg0) == BIT_NOT_EXPR
              && !TYPE_OVERFLOW_TRAPS (type))
            {
              tree tem = TREE_OPERAND (arg0, 0);

              STRIP_NOPS (tem);
              if (operand_equal_p (tem, arg1, 0))
                {
                  t1 = build_int_cst_type (type, -1);
                  return omit_one_operand_loc (loc, type, t1, arg1);
                }
            }

          /* X + ~X is -1.  */
          if (TREE_CODE (arg1) == BIT_NOT_EXPR
              && !TYPE_OVERFLOW_TRAPS (type))
            {
              tree tem = TREE_OPERAND (arg1, 0);

              STRIP_NOPS (tem);
              if (operand_equal_p (arg0, tem, 0))
                {
                  t1 = build_int_cst_type (type, -1);
                  return omit_one_operand_loc (loc, type, t1, arg0);
                }
            }

          /* X + (X / CST) * -CST is X % CST.  */
          if (TREE_CODE (arg1) == MULT_EXPR
              && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
              && operand_equal_p (arg0,
                                  TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
            {
              tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
              tree cst1 = TREE_OPERAND (arg1, 1);
              tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
                                          cst1, cst0);
              if (sum && integer_zerop (sum))
                return fold_convert_loc (loc, type,
                                         fold_build2_loc (loc, TRUNC_MOD_EXPR,
                                                          TREE_TYPE (arg0), arg0,
                                                          cst0));
            }
        }
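
      /* Illustrative sketch: the last pattern fires for expressions
         like X + (X / 16) * -16, which is exactly X % 16 under
         truncating division, since X == (X / 16) * 16 + X % 16.  */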

      /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the
         same or one.  Make sure type is not saturating.
         fold_plusminus_mult_expr will re-associate.  */
      if ((TREE_CODE (arg0) == MULT_EXPR
           || TREE_CODE (arg1) == MULT_EXPR)
          && !TYPE_SATURATING (type)
          && (!FLOAT_TYPE_P (type) || flag_associative_math))
        {
          tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
          if (tem)
            return tem;
        }

      if (! FLOAT_TYPE_P (type))
        {
          if (integer_zerop (arg1))
            return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

          /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
             with a constant, and the two constants have no bits in common,
             we should treat this as a BIT_IOR_EXPR since this may produce more
             simplifications.  */
          if (TREE_CODE (arg0) == BIT_AND_EXPR
              && TREE_CODE (arg1) == BIT_AND_EXPR
              && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
              && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
              && integer_zerop (const_binop (BIT_AND_EXPR,
                                             TREE_OPERAND (arg0, 1),
                                             TREE_OPERAND (arg1, 1))))
            {
              code = BIT_IOR_EXPR;
              goto bit_ior;
            }

          /* Reassociate (plus (plus (mult) (foo)) (mult)) as
             (plus (plus (mult) (mult)) (foo)) so that we can
             take advantage of the factoring cases below.  */
          if (TYPE_OVERFLOW_WRAPS (type)
              && (((TREE_CODE (arg0) == PLUS_EXPR
                    || TREE_CODE (arg0) == MINUS_EXPR)
                   && TREE_CODE (arg1) == MULT_EXPR)
                  || ((TREE_CODE (arg1) == PLUS_EXPR
                       || TREE_CODE (arg1) == MINUS_EXPR)
                      && TREE_CODE (arg0) == MULT_EXPR)))
            {
              tree parg0, parg1, parg, marg;
              enum tree_code pcode;

              if (TREE_CODE (arg1) == MULT_EXPR)
                parg = arg0, marg = arg1;
              else
                parg = arg1, marg = arg0;
              pcode = TREE_CODE (parg);
              parg0 = TREE_OPERAND (parg, 0);
              parg1 = TREE_OPERAND (parg, 1);
              STRIP_NOPS (parg0);
              STRIP_NOPS (parg1);

              if (TREE_CODE (parg0) == MULT_EXPR
                  && TREE_CODE (parg1) != MULT_EXPR)
                return fold_build2_loc (loc, pcode, type,
                                        fold_build2_loc (loc, PLUS_EXPR, type,
                                                         fold_convert_loc (loc, type,
                                                                           parg0),
                                                         fold_convert_loc (loc, type,
                                                                           marg)),
                                        fold_convert_loc (loc, type, parg1));
              if (TREE_CODE (parg0) != MULT_EXPR
                  && TREE_CODE (parg1) == MULT_EXPR)
                return
                  fold_build2_loc (loc, PLUS_EXPR, type,
                                   fold_convert_loc (loc, type, parg0),
                                   fold_build2_loc (loc, pcode, type,
                                                    fold_convert_loc (loc, type, marg),
                                                    fold_convert_loc (loc, type,
                                                                      parg1)));
            }
        }
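
      /* Illustrative sketch: the BIT_AND case turns
         (x & 0xF0) + (y & 0x0F) into (x & 0xF0) | (y & 0x0F); the
         masks share no bits, so no carries can occur and addition
         agrees with inclusive or.  */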

      else
        {
          /* See if ARG1 is zero and X + ARG1 reduces to X.  */
          if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
            return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

          /* Likewise if the operands are reversed.  */
          if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
            return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));

          /* Convert X + -C into X - C.  */
          if (TREE_CODE (arg1) == REAL_CST
              && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
            {
              tem = fold_negate_const (arg1, type);
              if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
                return fold_build2_loc (loc, MINUS_EXPR, type,
                                        fold_convert_loc (loc, type, arg0),
                                        fold_convert_loc (loc, type, tem));
            }

          /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
             to __complex__ ( x, y ).  This is not the same for SNaNs or
             if signed zeros are involved.  */
          if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
              && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
              && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
            {
              tree rtype = TREE_TYPE (TREE_TYPE (arg0));
              tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
              tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
              bool arg0rz = false, arg0iz = false;
              if ((arg0r && (arg0rz = real_zerop (arg0r)))
                  || (arg0i && (arg0iz = real_zerop (arg0i))))
                {
                  tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
                  tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
                  if (arg0rz && arg1i && real_zerop (arg1i))
                    {
                      tree rp = arg1r ? arg1r
                        : build1 (REALPART_EXPR, rtype, arg1);
                      tree ip = arg0i ? arg0i
                        : build1 (IMAGPART_EXPR, rtype, arg0);
                      return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
                    }
                  else if (arg0iz && arg1r && real_zerop (arg1r))
                    {
                      tree rp = arg0r ? arg0r
                        : build1 (REALPART_EXPR, rtype, arg0);
                      tree ip = arg1i ? arg1i
                        : build1 (IMAGPART_EXPR, rtype, arg1);
                      return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
                    }
                }
            }

          if (flag_unsafe_math_optimizations
              && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
              && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
              && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
            return tem;

          /* Convert x+x into x*2.0.  */
          if (operand_equal_p (arg0, arg1, 0)
              && SCALAR_FLOAT_TYPE_P (type))
            return fold_build2_loc (loc, MULT_EXPR, type, arg0,
                                    build_real (type, dconst2));

          /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
             We associate floats only if the user has specified
             -fassociative-math.  */
          if (flag_associative_math
              && TREE_CODE (arg1) == PLUS_EXPR
              && TREE_CODE (arg0) != MULT_EXPR)
            {
              tree tree10 = TREE_OPERAND (arg1, 0);
              tree tree11 = TREE_OPERAND (arg1, 1);
              if (TREE_CODE (tree11) == MULT_EXPR
                  && TREE_CODE (tree10) == MULT_EXPR)
                {
                  tree tree0;
                  tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
                  return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
                }
            }
          /* Convert (b*c + d*e) + a into b*c + (d*e + a).
             We associate floats only if the user has specified
             -fassociative-math.  */
          if (flag_associative_math
              && TREE_CODE (arg0) == PLUS_EXPR
              && TREE_CODE (arg1) != MULT_EXPR)
            {
              tree tree00 = TREE_OPERAND (arg0, 0);
              tree tree01 = TREE_OPERAND (arg0, 1);
              if (TREE_CODE (tree01) == MULT_EXPR
                  && TREE_CODE (tree00) == MULT_EXPR)
                {
                  tree tree0;
                  tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
                  return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
                }
            }
        }
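
      /* Illustrative sketch: x + 0.0 only reduces to x when -0.0 need
         not be honored: with signed zeros, (-0.0) + 0.0 is +0.0, not
         -0.0, which is why fold_real_zero_addition_p gates the
         simplification.  */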

    bit_rotate:
      /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
         is a rotate of A by C1 bits.  */
      /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
         is a rotate of A by B bits.  */
      {
        enum tree_code code0, code1;
        tree rtype;
        code0 = TREE_CODE (arg0);
        code1 = TREE_CODE (arg1);
        if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
             || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
            && operand_equal_p (TREE_OPERAND (arg0, 0),
                                TREE_OPERAND (arg1, 0), 0)
            && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
                TYPE_UNSIGNED (rtype))
            /* Only create rotates in complete modes.  Other cases are not
               expanded properly.  */
            && TYPE_PRECISION (rtype) == GET_MODE_PRECISION (TYPE_MODE (rtype)))
          {
            tree tree01, tree11;
            enum tree_code code01, code11;

            tree01 = TREE_OPERAND (arg0, 1);
            tree11 = TREE_OPERAND (arg1, 1);
            STRIP_NOPS (tree01);
            STRIP_NOPS (tree11);
            code01 = TREE_CODE (tree01);
            code11 = TREE_CODE (tree11);
            if (code01 == INTEGER_CST
                && code11 == INTEGER_CST
                && TREE_INT_CST_HIGH (tree01) == 0
                && TREE_INT_CST_HIGH (tree11) == 0
                && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
                    == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
              {
                tem = build2_loc (loc, LROTATE_EXPR,
                                  TREE_TYPE (TREE_OPERAND (arg0, 0)),
                                  TREE_OPERAND (arg0, 0),
                                  code0 == LSHIFT_EXPR ? tree01 : tree11);
                return fold_convert_loc (loc, type, tem);
              }
            else if (code11 == MINUS_EXPR)
              {
                tree tree110, tree111;
                tree110 = TREE_OPERAND (tree11, 0);
                tree111 = TREE_OPERAND (tree11, 1);
                STRIP_NOPS (tree110);
                STRIP_NOPS (tree111);
                if (TREE_CODE (tree110) == INTEGER_CST
                    && 0 == compare_tree_int (tree110,
                                              TYPE_PRECISION
                                              (TREE_TYPE (TREE_OPERAND
                                                          (arg0, 0))))
                    && operand_equal_p (tree01, tree111, 0))
                  return
                    fold_convert_loc (loc, type,
                                      build2 ((code0 == LSHIFT_EXPR
                                               ? LROTATE_EXPR
                                               : RROTATE_EXPR),
                                              TREE_TYPE (TREE_OPERAND (arg0, 0)),
                                              TREE_OPERAND (arg0, 0), tree01));
              }
            else if (code01 == MINUS_EXPR)
              {
                tree tree010, tree011;
                tree010 = TREE_OPERAND (tree01, 0);
                tree011 = TREE_OPERAND (tree01, 1);
                STRIP_NOPS (tree010);
                STRIP_NOPS (tree011);
                if (TREE_CODE (tree010) == INTEGER_CST
                    && 0 == compare_tree_int (tree010,
                                              TYPE_PRECISION
                                              (TREE_TYPE (TREE_OPERAND
                                                          (arg0, 0))))
                    && operand_equal_p (tree11, tree011, 0))
                  return fold_convert_loc
                    (loc, type,
                     build2 ((code0 != LSHIFT_EXPR
                              ? LROTATE_EXPR
                              : RROTATE_EXPR),
                             TREE_TYPE (TREE_OPERAND (arg0, 0)),
                             TREE_OPERAND (arg0, 0), tree11));
              }
          }
      }
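
      /* Illustrative sketch: for a 32-bit unsigned A,
         (A << 3) + (A >> 29) matches the constant pattern
         (3 + 29 == 32) and becomes A left-rotated by 3, while
         (A << b) + (A >> (32 - b)) matches the MINUS_EXPR form.  The
         addition acts like an inclusive or here because the two
         shifted fields occupy disjoint bits.  */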

    associate:
      /* In most languages, we can't associate operations on floats through
         parentheses.  Rather than remember where the parentheses were, we
         don't associate floats at all, unless the user has specified
         -fassociative-math.
         And, we need to make sure type is not saturating.  */

      if ((! FLOAT_TYPE_P (type) || flag_associative_math)
          && !TYPE_SATURATING (type))
        {
          tree var0, con0, lit0, minus_lit0;
          tree var1, con1, lit1, minus_lit1;
          bool ok = true;

          /* Split both trees into variables, constants, and literals.  Then
             associate each group together, the constants with literals,
             then the result with variables.  This increases the chances of
             literals being recombined later and of generating relocatable
             expressions for the sum of a constant and literal.  */
          var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
          var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
                             code == MINUS_EXPR);

          /* Recombine MINUS_EXPR operands by using PLUS_EXPR.  */
          if (code == MINUS_EXPR)
            code = PLUS_EXPR;

          /* With undefined overflow we can only associate constants with one
             variable, and constants whose association doesn't overflow.  */
          if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
              || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
            {
              if (var0 && var1)
                {
                  tree tmp0 = var0;
                  tree tmp1 = var1;

                  if (TREE_CODE (tmp0) == NEGATE_EXPR)
                    tmp0 = TREE_OPERAND (tmp0, 0);
                  if (TREE_CODE (tmp1) == NEGATE_EXPR)
                    tmp1 = TREE_OPERAND (tmp1, 0);
                  /* The only case we can still associate with two variables
                     is if they are the same, modulo negation.  */
                  if (!operand_equal_p (tmp0, tmp1, 0))
                    ok = false;
                }

              if (ok && lit0 && lit1)
                {
                  tree tmp0 = fold_convert (type, lit0);
                  tree tmp1 = fold_convert (type, lit1);

                  if (!TREE_OVERFLOW (tmp0) && !TREE_OVERFLOW (tmp1)
                      && TREE_OVERFLOW (fold_build2 (code, type, tmp0, tmp1)))
                    ok = false;
                }
            }

          /* Only do something if we found more than two objects.  Otherwise,
             nothing has changed and we risk infinite recursion.  */
          if (ok
              && (2 < ((var0 != 0) + (var1 != 0)
                       + (con0 != 0) + (con1 != 0)
                       + (lit0 != 0) + (lit1 != 0)
                       + (minus_lit0 != 0) + (minus_lit1 != 0))))
            {
              var0 = associate_trees (loc, var0, var1, code, type);
              con0 = associate_trees (loc, con0, con1, code, type);
              lit0 = associate_trees (loc, lit0, lit1, code, type);
              minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1, code, type);

              /* Preserve the MINUS_EXPR if the negative part of the literal is
                 greater than the positive part.  Otherwise, the multiplicative
                 folding code (i.e. extract_muldiv) may be fooled in case
                 unsigned constants are subtracted, like in the following
                 example: ((X*2 + 4) - 8U)/2.  */
              if (minus_lit0 && lit0)
                {
                  if (TREE_CODE (lit0) == INTEGER_CST
                      && TREE_CODE (minus_lit0) == INTEGER_CST
                      && tree_int_cst_lt (lit0, minus_lit0))
                    {
                      minus_lit0 = associate_trees (loc, minus_lit0, lit0,
                                                    MINUS_EXPR, type);
                      lit0 = 0;
                    }
                  else
                    {
                      lit0 = associate_trees (loc, lit0, minus_lit0,
                                              MINUS_EXPR, type);
                      minus_lit0 = 0;
                    }
                }
              if (minus_lit0)
                {
                  if (con0 == 0)
                    return
                      fold_convert_loc (loc, type,
                                        associate_trees (loc, var0, minus_lit0,
                                                         MINUS_EXPR, type));
                  else
                    {
                      con0 = associate_trees (loc, con0, minus_lit0,
                                              MINUS_EXPR, type);
                      return
                        fold_convert_loc (loc, type,
                                          associate_trees (loc, var0, con0,
                                                           PLUS_EXPR, type));
                    }
                }

              con0 = associate_trees (loc, con0, lit0, code, type);
              return
                fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
                                                              code, type));
            }
        }

      return NULL_TREE;
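
      /* Illustrative sketch: with wrapping arithmetic, (x + 4) + (y + 3)
         splits into variables { x, y } and literals { 4, 3 };
         associating each group yields (x + y) + 7, folding the two
         literals into one constant.  */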

    case MINUS_EXPR:
      /* Pointer simplifications for subtraction, simple reassociations.  */
      if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
        {
          /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
          if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
              && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
            {
              tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
              tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
              tree arg10 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
              tree arg11 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
              return fold_build2_loc (loc, PLUS_EXPR, type,
                                      fold_build2_loc (loc, MINUS_EXPR, type,
                                                       arg00, arg10),
                                      fold_build2_loc (loc, MINUS_EXPR, type,
                                                       arg01, arg11));
            }
          /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1
             simplifies.  */
          else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
            {
              tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
              tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
              tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg00,
                                          fold_convert_loc (loc, type, arg1));
              if (tmp)
                return fold_build2_loc (loc, PLUS_EXPR, type, tmp, arg01);
            }
        }
      /* A - (-B) -> A + B */
      if (TREE_CODE (arg1) == NEGATE_EXPR)
        return fold_build2_loc (loc, PLUS_EXPR, type, op0,
                                fold_convert_loc (loc, type,
                                                  TREE_OPERAND (arg1, 0)));
      /* (-A) - B -> (-B) - A  where B is easily negated and we can swap.  */
      if (TREE_CODE (arg0) == NEGATE_EXPR
          && (FLOAT_TYPE_P (type)
              || INTEGRAL_TYPE_P (type))
          && negate_expr_p (arg1)
          && reorder_operands_p (arg0, arg1))
        return fold_build2_loc (loc, MINUS_EXPR, type,
                                fold_convert_loc (loc, type,
                                                  negate_expr (arg1)),
                                fold_convert_loc (loc, type,
                                                  TREE_OPERAND (arg0, 0)));
      /* Convert -A - 1 to ~A.  */
      if (INTEGRAL_TYPE_P (type)
          && TREE_CODE (arg0) == NEGATE_EXPR
          && integer_onep (arg1)
          && !TYPE_OVERFLOW_TRAPS (type))
        return fold_build1_loc (loc, BIT_NOT_EXPR, type,
                                fold_convert_loc (loc, type,
                                                  TREE_OPERAND (arg0, 0)));

      /* Convert -1 - A to ~A.  */
      if (INTEGRAL_TYPE_P (type)
          && integer_all_onesp (arg0))
        return fold_build1_loc (loc, BIT_NOT_EXPR, type, op1);


      /* X - (X / CST) * CST is X % CST.  */
      if (INTEGRAL_TYPE_P (type)
          && TREE_CODE (arg1) == MULT_EXPR
          && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
          && operand_equal_p (arg0,
                              TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
          && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
                              TREE_OPERAND (arg1, 1), 0))
        return
          fold_convert_loc (loc, type,
                            fold_build2_loc (loc, TRUNC_MOD_EXPR, TREE_TYPE (arg0),
                                             arg0, TREE_OPERAND (arg1, 1)));

      if (! FLOAT_TYPE_P (type))
        {
          if (integer_zerop (arg0))
            return negate_expr (fold_convert_loc (loc, type, arg1));
          if (integer_zerop (arg1))
            return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

          /* Fold A - (A & B) into ~B & A.  */
          if (!TREE_SIDE_EFFECTS (arg0)
              && TREE_CODE (arg1) == BIT_AND_EXPR)
            {
              if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
                {
                  tree arg10 = fold_convert_loc (loc, type,
                                                 TREE_OPERAND (arg1, 0));
                  return fold_build2_loc (loc, BIT_AND_EXPR, type,
                                          fold_build1_loc (loc, BIT_NOT_EXPR,
                                                           type, arg10),
                                          fold_convert_loc (loc, type, arg0));
                }
              if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
                {
                  tree arg11 = fold_convert_loc (loc,
                                                 type, TREE_OPERAND (arg1, 1));
                  return fold_build2_loc (loc, BIT_AND_EXPR, type,
                                          fold_build1_loc (loc, BIT_NOT_EXPR,
                                                           type, arg11),
                                          fold_convert_loc (loc, type, arg0));
                }
            }

          /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
             any power of 2 minus 1.  */
          if (TREE_CODE (arg0) == BIT_AND_EXPR
              && TREE_CODE (arg1) == BIT_AND_EXPR
              && operand_equal_p (TREE_OPERAND (arg0, 0),
                                  TREE_OPERAND (arg1, 0), 0))
            {
              tree mask0 = TREE_OPERAND (arg0, 1);
              tree mask1 = TREE_OPERAND (arg1, 1);
              tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0);

              if (operand_equal_p (tem, mask1, 0))
                {
                  tem = fold_build2_loc (loc, BIT_XOR_EXPR, type,
                                         TREE_OPERAND (arg0, 0), mask1);
                  return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1);
                }
            }
        }
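
      /* Illustrative sketch: A - (A & B) keeps exactly the bits of A
         that B clears, hence ~B & A; e.g. x - (x & 0x0F) is
         x & ~0x0F, i.e. x rounded down to a multiple of 16.  */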

      /* See if ARG1 is zero and X - ARG1 reduces to X.  */
      else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0).  So check whether
         ARG0 is zero and X + ARG0 reduces to X, since that would mean
         (-ARG1 + ARG0) reduces to -ARG1.  */
      else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
        return negate_expr (fold_convert_loc (loc, type, arg1));

      /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
         __complex__ ( x, -y ).  This is not the same for SNaNs or if
         signed zeros are involved.  */
      if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
          && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
        {
          tree rtype = TREE_TYPE (TREE_TYPE (arg0));
          tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
          tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
          bool arg0rz = false, arg0iz = false;
          if ((arg0r && (arg0rz = real_zerop (arg0r)))
              || (arg0i && (arg0iz = real_zerop (arg0i))))
            {
              tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
              tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
              if (arg0rz && arg1i && real_zerop (arg1i))
                {
                  tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
                                             arg1r ? arg1r
                                             : build1 (REALPART_EXPR, rtype, arg1));
                  tree ip = arg0i ? arg0i
                    : build1 (IMAGPART_EXPR, rtype, arg0);
                  return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
                }
              else if (arg0iz && arg1r && real_zerop (arg1r))
                {
                  tree rp = arg0r ? arg0r
                    : build1 (REALPART_EXPR, rtype, arg0);
                  tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
                                             arg1i ? arg1i
                                             : build1 (IMAGPART_EXPR, rtype, arg1));
                  return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
                }
            }
        }

      /* Fold &x - &x.  This can happen from &x.foo - &x.
         This is unsafe for certain floats even in non-IEEE formats.
         In IEEE, it is unsafe because it does wrong for NaNs.
         Also note that operand_equal_p is always false if an operand
         is volatile.  */

      if ((!FLOAT_TYPE_P (type) || !HONOR_NANS (TYPE_MODE (type)))
          && operand_equal_p (arg0, arg1, 0))
        return build_zero_cst (type);

      /* A - B -> A + (-B) if B is easily negatable.  */
      if (negate_expr_p (arg1)
          && ((FLOAT_TYPE_P (type)
               /* Avoid this transformation if B is a positive REAL_CST.  */
               && (TREE_CODE (arg1) != REAL_CST
                   || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
              || INTEGRAL_TYPE_P (type)))
        return fold_build2_loc (loc, PLUS_EXPR, type,
                                fold_convert_loc (loc, type, arg0),
                                fold_convert_loc (loc, type,
                                                  negate_expr (arg1)));

      /* Try folding difference of addresses.  */
      {
        HOST_WIDE_INT diff;

        if ((TREE_CODE (arg0) == ADDR_EXPR
             || TREE_CODE (arg1) == ADDR_EXPR)
            && ptr_difference_const (arg0, arg1, &diff))
          return build_int_cst_type (type, diff);
      }

      /* Fold &a[i] - &a[j] to i-j.  */
      if (TREE_CODE (arg0) == ADDR_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
          && TREE_CODE (arg1) == ADDR_EXPR
          && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
        {
          tree tem = fold_addr_of_array_ref_difference (loc, type,
                                                        TREE_OPERAND (arg0, 0),
                                                        TREE_OPERAND (arg1, 0));
          if (tem)
            return tem;
        }

      if (FLOAT_TYPE_P (type)
          && flag_unsafe_math_optimizations
          && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
          && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
          && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
        return tem;

      /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the
         same or one.  Make sure type is not saturating.
         fold_plusminus_mult_expr will re-associate.  */
      if ((TREE_CODE (arg0) == MULT_EXPR
           || TREE_CODE (arg1) == MULT_EXPR)
          && !TYPE_SATURATING (type)
          && (!FLOAT_TYPE_P (type) || flag_associative_math))
        {
          tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
          if (tem)
            return tem;
        }

      goto associate;
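
      /* Illustrative sketch: the operand_equal_p fold above turns X - X
         into 0 only when NaNs need not be honored, because if X is a
         NaN then X - X is NaN, not 0.  The address helpers then cover
         cases like &a[i] - &a[j], which reduce to (i - j) scaled by the
         element size.  */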

    case MULT_EXPR:
      /* (-A) * (-B) -> A * B  */
      if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
        return fold_build2_loc (loc, MULT_EXPR, type,
                                fold_convert_loc (loc, type,
                                                  TREE_OPERAND (arg0, 0)),
                                fold_convert_loc (loc, type,
                                                  negate_expr (arg1)));
      if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
        return fold_build2_loc (loc, MULT_EXPR, type,
                                fold_convert_loc (loc, type,
                                                  negate_expr (arg0)),
                                fold_convert_loc (loc, type,
                                                  TREE_OPERAND (arg1, 0)));

      if (! FLOAT_TYPE_P (type))
        {
          if (integer_zerop (arg1))
            return omit_one_operand_loc (loc, type, arg1, arg0);
          if (integer_onep (arg1))
            return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
          /* Transform x * -1 into -x.  Make sure to do the negation
             on the original operand with conversions not stripped
             because we can only strip non-sign-changing conversions.  */
          if (integer_all_onesp (arg1))
            return fold_convert_loc (loc, type, negate_expr (op0));
          /* Transform x * -C into -x * C if x is easily negatable.  */
          if (TREE_CODE (arg1) == INTEGER_CST
              && tree_int_cst_sgn (arg1) == -1
              && negate_expr_p (arg0)
              && (tem = negate_expr (arg1)) != arg1
              && !TREE_OVERFLOW (tem))
            return fold_build2_loc (loc, MULT_EXPR, type,
                                    fold_convert_loc (loc, type,
                                                      negate_expr (arg0)),
                                    tem);

          /* (a * (1 << b)) is (a << b)  */
          if (TREE_CODE (arg1) == LSHIFT_EXPR
              && integer_onep (TREE_OPERAND (arg1, 0)))
            return fold_build2_loc (loc, LSHIFT_EXPR, type, op0,
                                    TREE_OPERAND (arg1, 1));
          if (TREE_CODE (arg0) == LSHIFT_EXPR
              && integer_onep (TREE_OPERAND (arg0, 0)))
            return fold_build2_loc (loc, LSHIFT_EXPR, type, op1,
                                    TREE_OPERAND (arg0, 1));

          /* (A + A) * C -> A * 2 * C  */
          if (TREE_CODE (arg0) == PLUS_EXPR
              && TREE_CODE (arg1) == INTEGER_CST
              && operand_equal_p (TREE_OPERAND (arg0, 0),
                                  TREE_OPERAND (arg0, 1), 0))
            return fold_build2_loc (loc, MULT_EXPR, type,
                                    omit_one_operand_loc (loc, type,
                                                          TREE_OPERAND (arg0, 0),
                                                          TREE_OPERAND (arg0, 1)),
                                    fold_build2_loc (loc, MULT_EXPR, type,
                                                     build_int_cst (type, 2), arg1));

          strict_overflow_p = false;
          if (TREE_CODE (arg1) == INTEGER_CST
              && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
                                             &strict_overflow_p)))
            {
              if (strict_overflow_p)
                fold_overflow_warning (("assuming signed overflow does not "
                                        "occur when simplifying "
                                        "multiplication"),
                                       WARN_STRICT_OVERFLOW_MISC);
              return fold_convert_loc (loc, type, tem);
            }

          /* Optimize z * conj(z) for integer complex numbers.  */
          if (TREE_CODE (arg0) == CONJ_EXPR
              && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
            return fold_mult_zconjz (loc, type, arg1);
          if (TREE_CODE (arg1) == CONJ_EXPR
              && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
            return fold_mult_zconjz (loc, type, arg0);
        }
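
      /* Illustrative sketch: for z = a + b*i, z * conj(z) expands to
         (a*a + b*b) + 0*i, a purely real product, which is the form
         fold_mult_zconjz builds.  Likewise a * (1 << b) becomes a << b,
         since multiplying by a power of two is a left shift.  */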

      else
        {
          /* Maybe fold x * 0 to 0.  The expressions aren't the same
             when x is NaN, since x * 0 is also NaN.  Nor are they the
             same in modes with signed zeros, since multiplying a
             negative value by 0 gives -0, not +0.  */
          if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
              && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
              && real_zerop (arg1))
            return omit_one_operand_loc (loc, type, arg1, arg0);
          /* In IEEE floating point, x*1 is not equivalent to x for snans.
             Likewise for complex arithmetic with signed zeros.  */
          if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
              && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
                  || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
              && real_onep (arg1))
            return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

          /* Transform x * -1.0 into -x.  */
          if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
              && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
                  || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
              && real_minus_onep (arg1))
            return fold_convert_loc (loc, type, negate_expr (arg0));

          /* Convert (C1/X)*C2 into (C1*C2)/X.  This transformation may change
             the result for floating point types due to rounding so it is applied
             only if -fassociative-math was specified.  */
          if (flag_associative_math
              && TREE_CODE (arg0) == RDIV_EXPR
              && TREE_CODE (arg1) == REAL_CST
              && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
            {
              tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
                                      arg1);
              if (tem)
                return fold_build2_loc (loc, RDIV_EXPR, type, tem,
                                        TREE_OPERAND (arg0, 1));
            }

          /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y.  */
          if (operand_equal_p (arg0, arg1, 0))
            {
              tree tem = fold_strip_sign_ops (arg0);
              if (tem != NULL_TREE)
                {
                  tem = fold_convert_loc (loc, type, tem);
                  return fold_build2_loc (loc, MULT_EXPR, type, tem, tem);
                }
            }

          /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
             This is not the same for NaNs or if signed zeros are
             involved.  */
          if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
              && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
              && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
              && TREE_CODE (arg1) == COMPLEX_CST
              && real_zerop (TREE_REALPART (arg1)))
            {
              tree rtype = TREE_TYPE (TREE_TYPE (arg0));
              if (real_onep (TREE_IMAGPART (arg1)))
                return
                  fold_build2_loc (loc, COMPLEX_EXPR, type,
                                   negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
                                                                 rtype, arg0)),
                                   fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
              else if (real_minus_onep (TREE_IMAGPART (arg1)))
                return
                  fold_build2_loc (loc, COMPLEX_EXPR, type,
                                   fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
                                   negate_expr (fold_build1_loc (loc, REALPART_EXPR,
                                                                 rtype, arg0)));
            }

          /* Optimize z * conj(z) for floating point complex numbers.
             Guarded by flag_unsafe_math_optimizations as non-finite
             imaginary components don't produce scalar results.  */
          if (flag_unsafe_math_optimizations
              && TREE_CODE (arg0) == CONJ_EXPR
              && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
            return fold_mult_zconjz (loc, type, arg1);
          if (flag_unsafe_math_optimizations
              && TREE_CODE (arg1) == CONJ_EXPR
              && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
            return fold_mult_zconjz (loc, type, arg0);
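
          /* Illustrative sketch: multiplying by the imaginary unit
             rotates a complex number by 90 degrees:
             (a + b*i) * i == -b + a*i, which is what the z * +-I fold
             above builds as __complex__ (-__imag z, __real z).  */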
|
10818 |
|
|
|
10819 |
|
|
if (flag_unsafe_math_optimizations)
|
10820 |
|
|
{
|
10821 |
|
|
enum built_in_function fcode0 = builtin_mathfn_code (arg0);
|
10822 |
|
|
enum built_in_function fcode1 = builtin_mathfn_code (arg1);
|
10823 |
|
|
|
10824 |
|
|
/* Optimizations of root(...)*root(...). */
|
10825 |
|
|
if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
|
10826 |
|
|
{
|
10827 |
|
|
tree rootfn, arg;
|
10828 |
|
|
tree arg00 = CALL_EXPR_ARG (arg0, 0);
|
10829 |
|
|
tree arg10 = CALL_EXPR_ARG (arg1, 0);
|
10830 |
|
|
|
10831 |
|
|
/* Optimize sqrt(x)*sqrt(x) as x. */
|
10832 |
|
|
if (BUILTIN_SQRT_P (fcode0)
|
10833 |
|
|
&& operand_equal_p (arg00, arg10, 0)
|
10834 |
|
|
&& ! HONOR_SNANS (TYPE_MODE (type)))
|
10835 |
|
|
return arg00;
|
10836 |
|
|
|
10837 |
|
|
/* Optimize root(x)*root(y) as root(x*y). */
|
10838 |
|
|
rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
|
10839 |
|
|
arg = fold_build2_loc (loc, MULT_EXPR, type, arg00, arg10);
|
10840 |
|
|
return build_call_expr_loc (loc, rootfn, 1, arg);
|
10841 |
|
|
}
|
10842 |
|
|
|
10843 |
|
|
/* Optimize expN(x)*expN(y) as expN(x+y). */
|
10844 |
|
|
if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
|
10845 |
|
|
{
|
10846 |
|
|
tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
|
10847 |
|
|
tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
|
10848 |
|
|
CALL_EXPR_ARG (arg0, 0),
|
10849 |
|
|
CALL_EXPR_ARG (arg1, 0));
|
10850 |
|
|
return build_call_expr_loc (loc, expfn, 1, arg);
|
10851 |
|
|
}
|
10852 |
|
|
|
10853 |
|
|
/* Optimizations of pow(...)*pow(...). */
|
10854 |
|
|
if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
|
10855 |
|
|
|| (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
|
10856 |
|
|
|| (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
|
10857 |
|
|
{
|
10858 |
|
|
tree arg00 = CALL_EXPR_ARG (arg0, 0);
|
10859 |
|
|
tree arg01 = CALL_EXPR_ARG (arg0, 1);
|
10860 |
|
|
tree arg10 = CALL_EXPR_ARG (arg1, 0);
|
10861 |
|
|
tree arg11 = CALL_EXPR_ARG (arg1, 1);
|
10862 |
|
|
|
10863 |
|
|
/* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
|
10864 |
|
|
if (operand_equal_p (arg01, arg11, 0))
|
10865 |
|
|
{
|
10866 |
|
|
tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
|
10867 |
|
|
tree arg = fold_build2_loc (loc, MULT_EXPR, type,
|
10868 |
|
|
arg00, arg10);
|
10869 |
|
|
return build_call_expr_loc (loc, powfn, 2, arg, arg01);
|
10870 |
|
|
}
|
10871 |
|
|
|
10872 |
|
|
/* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
|
10873 |
|
|
if (operand_equal_p (arg00, arg10, 0))
|
10874 |
|
|
{
|
10875 |
|
|
tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
|
10876 |
|
|
tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
|
10877 |
|
|
arg01, arg11);
|
10878 |
|
|
return build_call_expr_loc (loc, powfn, 2, arg00, arg);
|
10879 |
|
|
}
|
10880 |
|
|
}

          /* Optimize tan(x)*cos(x) as sin(x).  */
          if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
               || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
               || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
               || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
               || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
               || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
              && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
                                  CALL_EXPR_ARG (arg1, 0), 0))
            {
              tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);

              if (sinfn != NULL_TREE)
                return build_call_expr_loc (loc, sinfn, 1,
                                            CALL_EXPR_ARG (arg0, 0));
            }

          /* Optimize x*pow(x,c) as pow(x,c+1).  */
          if (fcode1 == BUILT_IN_POW
              || fcode1 == BUILT_IN_POWF
              || fcode1 == BUILT_IN_POWL)
            {
              tree arg10 = CALL_EXPR_ARG (arg1, 0);
              tree arg11 = CALL_EXPR_ARG (arg1, 1);
              if (TREE_CODE (arg11) == REAL_CST
                  && !TREE_OVERFLOW (arg11)
                  && operand_equal_p (arg0, arg10, 0))
                {
                  tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
                  REAL_VALUE_TYPE c;
                  tree arg;

                  c = TREE_REAL_CST (arg11);
                  real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
                  arg = build_real (type, c);
                  return build_call_expr_loc (loc, powfn, 2, arg0, arg);
                }
            }

          /* Optimize pow(x,c)*x as pow(x,c+1).  */
          if (fcode0 == BUILT_IN_POW
              || fcode0 == BUILT_IN_POWF
              || fcode0 == BUILT_IN_POWL)
            {
              tree arg00 = CALL_EXPR_ARG (arg0, 0);
              tree arg01 = CALL_EXPR_ARG (arg0, 1);
              if (TREE_CODE (arg01) == REAL_CST
                  && !TREE_OVERFLOW (arg01)
                  && operand_equal_p (arg1, arg00, 0))
                {
                  tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
                  REAL_VALUE_TYPE c;
                  tree arg;

                  c = TREE_REAL_CST (arg01);
                  real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
                  arg = build_real (type, c);
                  return build_call_expr_loc (loc, powfn, 2, arg1, arg);
                }
            }
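
          /* e.g. x * pow (x, 3.0) and pow (x, 3.0) * x both become
             pow (x, 4.0): the REAL_CST exponent is incremented by one
             before the call is rebuilt.  */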

          /* Canonicalize x*x as pow(x,2.0), which is expanded as x*x.  */
          if (!in_gimple_form
              && optimize
              && operand_equal_p (arg0, arg1, 0))
            {
              tree powfn = mathfn_built_in (type, BUILT_IN_POW);

              if (powfn)
                {
                  tree arg = build_real (type, dconst2);
                  return build_call_expr_loc (loc, powfn, 2, arg0, arg);
                }
            }
        }
      }
      goto associate;

    case BIT_IOR_EXPR:
    bit_ior:
      if (integer_all_onesp (arg1))
        return omit_one_operand_loc (loc, type, arg1, arg0);
      if (integer_zerop (arg1))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      if (operand_equal_p (arg0, arg1, 0))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* ~X | X is -1.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
        {
          t1 = build_zero_cst (type);
          t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
          return omit_one_operand_loc (loc, type, t1, arg1);
        }

      /* X | ~X is -1.  */
      if (TREE_CODE (arg1) == BIT_NOT_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
        {
          t1 = build_zero_cst (type);
          t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
          return omit_one_operand_loc (loc, type, t1, arg0);
        }

      /* Canonicalize (X & C1) | C2.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        {
          double_int c1, c2, c3, msk;
          int width = TYPE_PRECISION (type), w;
          c1 = tree_to_double_int (TREE_OPERAND (arg0, 1));
          c2 = tree_to_double_int (arg1);

          /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2).  */
          if (double_int_equal_p (double_int_and (c1, c2), c1))
            return omit_one_operand_loc (loc, type, arg1,
                                         TREE_OPERAND (arg0, 0));

          msk = double_int_mask (width);

          /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2.  */
          if (double_int_zero_p (double_int_and_not (msk,
                                                     double_int_ior (c1, c2))))
            return fold_build2_loc (loc, BIT_IOR_EXPR, type,
                                    TREE_OPERAND (arg0, 0), arg1);

          /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
             unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
             mode which allows further optimizations.  */
          c1 = double_int_and (c1, msk);
          c2 = double_int_and (c2, msk);
          c3 = double_int_and_not (c1, c2);
          for (w = BITS_PER_UNIT;
               w <= width && w <= HOST_BITS_PER_WIDE_INT;
               w <<= 1)
            {
              unsigned HOST_WIDE_INT mask
                = (unsigned HOST_WIDE_INT) -1 >> (HOST_BITS_PER_WIDE_INT - w);
              if (((c1.low | c2.low) & mask) == mask
                  && (c1.low & ~mask) == 0 && c1.high == 0)
                {
                  c3 = uhwi_to_double_int (mask);
                  break;
                }
            }
          if (!double_int_equal_p (c3, c1))
            return fold_build2_loc (loc, BIT_IOR_EXPR, type,
                                    fold_build2_loc (loc, BIT_AND_EXPR, type,
                                                     TREE_OPERAND (arg0, 0),
                                                     double_int_to_tree (type,
                                                                         c3)),
                                    arg1);
        }
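
      /* Worked example: in (x & 0x3c) | 0x0f, C1 & ~C2 == 0x30, so the
         AND is narrowed to (x & 0x30) | 0x0f.  In (x & 0xf0) | 0x0f,
         C1 | C2 covers the low byte and C1 fits in it, so C3 is widened
         to the byte mask 0xff and we emit (x & 0xff) | 0x0f, which later
         folds can remove entirely for a zero-extended QImode value.  */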

      /* (X & Y) | Y is (X, Y).  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
        return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
      /* (X & Y) | X is (Y, X).  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
          && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
        return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
      /* X | (X & Y) is (Y, X).  */
      if (TREE_CODE (arg1) == BIT_AND_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
          && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
        return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
      /* X | (Y & X) is (Y, X).  */
      if (TREE_CODE (arg1) == BIT_AND_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
          && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
        return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));

      /* (X & ~Y) | (~X & Y) is X ^ Y.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && TREE_CODE (arg1) == BIT_AND_EXPR)
        {
          tree a0, a1, l0, l1, n0, n1;

          a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
          a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));

          l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
          l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));

          n0 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l0);
          n1 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l1);

          if ((operand_equal_p (n0, a0, 0)
               && operand_equal_p (n1, a1, 0))
              || (operand_equal_p (n0, a1, 0)
                  && operand_equal_p (n1, a0, 0)))
            return fold_build2_loc (loc, BIT_XOR_EXPR, type, l0, n1);
        }

      t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
      if (t1 != NULL_TREE)
        return t1;

      /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).

         This results in more efficient code for machines without a NAND
         instruction.  Combine will canonicalize to the first form
         which will allow use of NAND instructions provided by the
         backend if they exist.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
          && TREE_CODE (arg1) == BIT_NOT_EXPR)
        {
          return
            fold_build1_loc (loc, BIT_NOT_EXPR, type,
                             build2 (BIT_AND_EXPR, type,
                                     fold_convert_loc (loc, type,
                                                       TREE_OPERAND (arg0, 0)),
                                     fold_convert_loc (loc, type,
                                                       TREE_OPERAND (arg1, 0))));
        }

      /* See if this can be simplified into a rotate first.  If that
         is unsuccessful continue in the association code.  */
      goto bit_rotate;

    case BIT_XOR_EXPR:
      if (integer_zerop (arg1))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      if (integer_all_onesp (arg1))
        return fold_build1_loc (loc, BIT_NOT_EXPR, type, op0);
      if (operand_equal_p (arg0, arg1, 0))
        return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

      /* ~X ^ X is -1.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
        {
          t1 = build_zero_cst (type);
          t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
          return omit_one_operand_loc (loc, type, t1, arg1);
        }

      /* X ^ ~X is -1.  */
      if (TREE_CODE (arg1) == BIT_NOT_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
        {
          t1 = build_zero_cst (type);
          t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
          return omit_one_operand_loc (loc, type, t1, arg0);
        }

      /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
         with a constant, and the two constants have no bits in common,
         we should treat this as a BIT_IOR_EXPR since this may produce more
         simplifications.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && TREE_CODE (arg1) == BIT_AND_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
          && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
          && integer_zerop (const_binop (BIT_AND_EXPR,
                                         TREE_OPERAND (arg0, 1),
                                         TREE_OPERAND (arg1, 1))))
        {
          code = BIT_IOR_EXPR;
          goto bit_ior;
        }

      /* (X | Y) ^ X -> Y & ~X.  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
        {
          tree t2 = TREE_OPERAND (arg0, 1);
          t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
                                arg1);
          t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
                                fold_convert_loc (loc, type, t2),
                                fold_convert_loc (loc, type, t1));
          return t1;
        }

      /* (Y | X) ^ X -> Y & ~X.  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
        {
          tree t2 = TREE_OPERAND (arg0, 0);
          t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
                                arg1);
          t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
                                fold_convert_loc (loc, type, t2),
                                fold_convert_loc (loc, type, t1));
          return t1;
        }

      /* X ^ (X | Y) -> Y & ~X.  */
      if (TREE_CODE (arg1) == BIT_IOR_EXPR
          && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
        {
          tree t2 = TREE_OPERAND (arg1, 1);
          t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
                                arg0);
          t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
                                fold_convert_loc (loc, type, t2),
                                fold_convert_loc (loc, type, t1));
          return t1;
        }

      /* X ^ (Y | X) -> Y & ~X.  */
      if (TREE_CODE (arg1) == BIT_IOR_EXPR
          && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
        {
          tree t2 = TREE_OPERAND (arg1, 0);
          t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
                                arg0);
          t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
                                fold_convert_loc (loc, type, t2),
                                fold_convert_loc (loc, type, t1));
          return t1;
        }
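
      /* All four shapes above fold the same way; e.g. (x | y) ^ x
         becomes y & ~x, since the bits of x cancel in the XOR and the
         bits of y survive only where x is clear.  */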

      /* Convert ~X ^ ~Y to X ^ Y.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
          && TREE_CODE (arg1) == BIT_NOT_EXPR)
        return fold_build2_loc (loc, code, type,
                                fold_convert_loc (loc, type,
                                                  TREE_OPERAND (arg0, 0)),
                                fold_convert_loc (loc, type,
                                                  TREE_OPERAND (arg1, 0)));

      /* Convert ~X ^ C to X ^ ~C.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
          && TREE_CODE (arg1) == INTEGER_CST)
        return fold_build2_loc (loc, code, type,
                                fold_convert_loc (loc, type,
                                                  TREE_OPERAND (arg0, 0)),
                                fold_build1_loc (loc, BIT_NOT_EXPR, type, arg1));

      /* Fold (X & 1) ^ 1 as (X & 1) == 0.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && integer_onep (TREE_OPERAND (arg0, 1))
          && integer_onep (arg1))
        return fold_build2_loc (loc, EQ_EXPR, type, arg0,
                                build_int_cst (TREE_TYPE (arg0), 0));

      /* Fold (X & Y) ^ Y as ~X & Y.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
        {
          tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
          return fold_build2_loc (loc, BIT_AND_EXPR, type,
                                  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
                                  fold_convert_loc (loc, type, arg1));
        }
      /* Fold (X & Y) ^ X as ~Y & X.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
          && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
        {
          tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
          return fold_build2_loc (loc, BIT_AND_EXPR, type,
                                  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
                                  fold_convert_loc (loc, type, arg1));
        }
      /* Fold X ^ (X & Y) as X & ~Y.  */
      if (TREE_CODE (arg1) == BIT_AND_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
        {
          tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
          return fold_build2_loc (loc, BIT_AND_EXPR, type,
                                  fold_convert_loc (loc, type, arg0),
                                  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
        }
      /* Fold X ^ (Y & X) as ~Y & X.  */
      if (TREE_CODE (arg1) == BIT_AND_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
          && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
        {
          tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
          return fold_build2_loc (loc, BIT_AND_EXPR, type,
                                  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
                                  fold_convert_loc (loc, type, arg0));
        }

      /* See if this can be simplified into a rotate first.  If that
         is unsuccessful continue in the association code.  */
      goto bit_rotate;

    case BIT_AND_EXPR:
      if (integer_all_onesp (arg1))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      if (integer_zerop (arg1))
        return omit_one_operand_loc (loc, type, arg1, arg0);
      if (operand_equal_p (arg0, arg1, 0))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* ~X & X, (X == 0) & X, and !X & X are always zero.  */
      if ((TREE_CODE (arg0) == BIT_NOT_EXPR
           || TREE_CODE (arg0) == TRUTH_NOT_EXPR
           || (TREE_CODE (arg0) == EQ_EXPR
               && integer_zerop (TREE_OPERAND (arg0, 1))))
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
        return omit_one_operand_loc (loc, type, integer_zero_node, arg1);

      /* X & ~X, X & (X == 0), and X & !X are always zero.  */
      if ((TREE_CODE (arg1) == BIT_NOT_EXPR
           || TREE_CODE (arg1) == TRUTH_NOT_EXPR
           || (TREE_CODE (arg1) == EQ_EXPR
               && integer_zerop (TREE_OPERAND (arg1, 1))))
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
        return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

      /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2).  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
          && TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        {
          tree tmp1 = fold_convert_loc (loc, type, arg1);
          tree tmp2 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
          tree tmp3 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
          tmp2 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp2, tmp1);
          tmp3 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp3, tmp1);
          return
            fold_convert_loc (loc, type,
                              fold_build2_loc (loc, BIT_IOR_EXPR,
                                               type, tmp2, tmp3));
        }
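
      /* e.g. (x | 0x30) & 0xf3 distributes to (x & 0xf3) | 0x30,
         because 0x30 & 0xf3 == 0x30.  */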

      /* (X | Y) & Y is (X, Y).  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
        return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
      /* (X | Y) & X is (Y, X).  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
          && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
        return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
      /* X & (X | Y) is (Y, X).  */
      if (TREE_CODE (arg1) == BIT_IOR_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
          && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
        return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
      /* X & (Y | X) is (Y, X).  */
      if (TREE_CODE (arg1) == BIT_IOR_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
          && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
        return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));

      /* Fold (X ^ 1) & 1 as (X & 1) == 0.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
          && integer_onep (TREE_OPERAND (arg0, 1))
          && integer_onep (arg1))
        {
          tem = TREE_OPERAND (arg0, 0);
          return fold_build2_loc (loc, EQ_EXPR, type,
                                  fold_build2_loc (loc, BIT_AND_EXPR,
                                                   TREE_TYPE (tem), tem,
                                                   build_int_cst (TREE_TYPE (tem), 1)),
                                  build_int_cst (TREE_TYPE (tem), 0));
        }
      /* Fold ~X & 1 as (X & 1) == 0.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
          && integer_onep (arg1))
        {
          tem = TREE_OPERAND (arg0, 0);
          return fold_build2_loc (loc, EQ_EXPR, type,
                                  fold_build2_loc (loc, BIT_AND_EXPR,
                                                   TREE_TYPE (tem), tem,
                                                   build_int_cst (TREE_TYPE (tem), 1)),
                                  build_int_cst (TREE_TYPE (tem), 0));
        }
      /* Fold !X & 1 as X == 0.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
          && integer_onep (arg1))
        {
          tem = TREE_OPERAND (arg0, 0);
          return fold_build2_loc (loc, EQ_EXPR, type, tem,
                                  build_int_cst (TREE_TYPE (tem), 0));
        }

      /* Fold (X ^ Y) & Y as ~X & Y.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
        {
          tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
          return fold_build2_loc (loc, BIT_AND_EXPR, type,
                                  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
                                  fold_convert_loc (loc, type, arg1));
        }
      /* Fold (X ^ Y) & X as ~Y & X.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
          && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
        {
          tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
          return fold_build2_loc (loc, BIT_AND_EXPR, type,
                                  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
                                  fold_convert_loc (loc, type, arg1));
        }
      /* Fold X & (X ^ Y) as X & ~Y.  */
      if (TREE_CODE (arg1) == BIT_XOR_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
        {
          tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
          return fold_build2_loc (loc, BIT_AND_EXPR, type,
                                  fold_convert_loc (loc, type, arg0),
                                  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
        }
      /* Fold X & (Y ^ X) as ~Y & X.  */
      if (TREE_CODE (arg1) == BIT_XOR_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
          && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
        {
          tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
          return fold_build2_loc (loc, BIT_AND_EXPR, type,
                                  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
                                  fold_convert_loc (loc, type, arg0));
        }

      /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
         ((A & N) + B) & M -> (A + B) & M
         Similarly if (N & M) == 0,
         ((A | N) + B) & M -> (A + B) & M
         and for - instead of + (or unary - instead of +)
         and/or ^ instead of |.
         If B is constant and (B & M) == 0, fold into A & M.  */
      if (host_integerp (arg1, 1))
        {
          unsigned HOST_WIDE_INT cst1 = tree_low_cst (arg1, 1);
          if (~cst1 && (cst1 & (cst1 + 1)) == 0
              && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
              && (TREE_CODE (arg0) == PLUS_EXPR
                  || TREE_CODE (arg0) == MINUS_EXPR
                  || TREE_CODE (arg0) == NEGATE_EXPR)
              && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
                  || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
            {
              tree pmop[2];
              int which = 0;
              unsigned HOST_WIDE_INT cst0;

              /* Now we know that arg0 is (C + D) or (C - D) or
                 -C and arg1 (M) is == (1LL << cst) - 1.
                 Store C into PMOP[0] and D into PMOP[1].  */
              pmop[0] = TREE_OPERAND (arg0, 0);
              pmop[1] = NULL;
              if (TREE_CODE (arg0) != NEGATE_EXPR)
                {
                  pmop[1] = TREE_OPERAND (arg0, 1);
                  which = 1;
                }

              if (!host_integerp (TYPE_MAX_VALUE (TREE_TYPE (arg0)), 1)
                  || (tree_low_cst (TYPE_MAX_VALUE (TREE_TYPE (arg0)), 1)
                      & cst1) != cst1)
                which = -1;

              for (; which >= 0; which--)
                switch (TREE_CODE (pmop[which]))
                  {
                  case BIT_AND_EXPR:
                  case BIT_IOR_EXPR:
                  case BIT_XOR_EXPR:
                    if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
                        != INTEGER_CST)
                      break;
                    /* tree_low_cst not used, because we don't care about
                       the upper bits.  */
                    cst0 = TREE_INT_CST_LOW (TREE_OPERAND (pmop[which], 1));
                    cst0 &= cst1;
                    if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
                      {
                        if (cst0 != cst1)
                          break;
                      }
                    else if (cst0 != 0)
                      break;
                    /* If C or D is of the form (A & N) where
                       (N & M) == M, or of the form (A | N) or
                       (A ^ N) where (N & M) == 0, replace it with A.  */
                    pmop[which] = TREE_OPERAND (pmop[which], 0);
                    break;
                  case INTEGER_CST:
                    /* If C or D is a N where (N & M) == 0, it can be
                       omitted (assumed 0).  */
                    if ((TREE_CODE (arg0) == PLUS_EXPR
                         || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
                        && (TREE_INT_CST_LOW (pmop[which]) & cst1) == 0)
                      pmop[which] = NULL;
                    break;
                  default:
                    break;
                  }

              /* Only build anything new if we optimized one or both arguments
                 above.  */
              if (pmop[0] != TREE_OPERAND (arg0, 0)
                  || (TREE_CODE (arg0) != NEGATE_EXPR
                      && pmop[1] != TREE_OPERAND (arg0, 1)))
                {
                  tree utype = TREE_TYPE (arg0);
                  if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
                    {
                      /* Perform the operations in a type that has defined
                         overflow behavior.  */
                      utype = unsigned_type_for (TREE_TYPE (arg0));
                      if (pmop[0] != NULL)
                        pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
                      if (pmop[1] != NULL)
                        pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
                    }

                  if (TREE_CODE (arg0) == NEGATE_EXPR)
                    tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
                  else if (TREE_CODE (arg0) == PLUS_EXPR)
                    {
                      if (pmop[0] != NULL && pmop[1] != NULL)
                        tem = fold_build2_loc (loc, PLUS_EXPR, utype,
                                               pmop[0], pmop[1]);
                      else if (pmop[0] != NULL)
                        tem = pmop[0];
                      else if (pmop[1] != NULL)
                        tem = pmop[1];
                      else
                        return build_int_cst (type, 0);
                    }
                  else if (pmop[0] == NULL)
                    tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
                  else
                    tem = fold_build2_loc (loc, MINUS_EXPR, utype,
                                           pmop[0], pmop[1]);
                  /* TEM is now the new binary +, - or unary - replacement.  */
                  tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
                                         fold_convert_loc (loc, utype, arg1));
                  return fold_convert_loc (loc, type, tem);
                }
            }
        }
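
      /* Worked example with M == 0xff: ((a & 0x1ff) + b) & 0xff becomes
         (a + b) & 0xff since N & M == M, and ((a | 0x100) + b) & 0xff
         becomes (a + b) & 0xff since N & M == 0; either way the bits of
         N outside M cannot affect the low eight bits of the sum.  */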

      t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
      if (t1 != NULL_TREE)
        return t1;
      /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char.  */
      if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
          && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
        {
          unsigned int prec
            = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));

          if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
              && (~TREE_INT_CST_LOW (arg1)
                  & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
            return
              fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
        }

      /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).

         This results in more efficient code for machines without a NOR
         instruction.  Combine will canonicalize to the first form
         which will allow use of NOR instructions provided by the
         backend if they exist.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
          && TREE_CODE (arg1) == BIT_NOT_EXPR)
        {
          return fold_build1_loc (loc, BIT_NOT_EXPR, type,
                                  build2 (BIT_IOR_EXPR, type,
                                          fold_convert_loc (loc, type,
                                                            TREE_OPERAND (arg0, 0)),
                                          fold_convert_loc (loc, type,
                                                            TREE_OPERAND (arg1, 0))));
        }

      /* If arg0 is derived from the address of an object or function, we may
         be able to fold this expression using the object or function's
         alignment.  */
      if (POINTER_TYPE_P (TREE_TYPE (arg0)) && host_integerp (arg1, 1))
        {
          unsigned HOST_WIDE_INT modulus, residue;
          unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (arg1);

          modulus = get_pointer_modulus_and_residue (arg0, &residue,
                                                     integer_onep (arg1));

          /* This works because modulus is a power of 2.  If this weren't the
             case, we'd have to replace it by its greatest power-of-2
             divisor: modulus & -modulus.  */
          if (low < modulus)
            return build_int_cst (type, residue & low);
        }

      /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
         (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
         if the new mask might be further optimized.  */
      if ((TREE_CODE (arg0) == LSHIFT_EXPR
           || TREE_CODE (arg0) == RSHIFT_EXPR)
          && host_integerp (TREE_OPERAND (arg0, 1), 1)
          && host_integerp (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)))
          && tree_low_cst (TREE_OPERAND (arg0, 1), 1)
             < TYPE_PRECISION (TREE_TYPE (arg0))
          && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
          && tree_low_cst (TREE_OPERAND (arg0, 1), 1) > 0)
        {
          unsigned int shiftc = tree_low_cst (TREE_OPERAND (arg0, 1), 1);
          unsigned HOST_WIDE_INT mask
            = tree_low_cst (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)));
          unsigned HOST_WIDE_INT newmask, zerobits = 0;
          tree shift_type = TREE_TYPE (arg0);

          if (TREE_CODE (arg0) == LSHIFT_EXPR)
            zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
          else if (TREE_CODE (arg0) == RSHIFT_EXPR
                   && TYPE_PRECISION (TREE_TYPE (arg0))
                      == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg0))))
            {
              unsigned int prec = TYPE_PRECISION (TREE_TYPE (arg0));
              tree arg00 = TREE_OPERAND (arg0, 0);
              /* See if more bits can be proven as zero because of
                 zero extension.  */
              if (TREE_CODE (arg00) == NOP_EXPR
                  && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
                {
                  tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
                  if (TYPE_PRECISION (inner_type)
                      == GET_MODE_BITSIZE (TYPE_MODE (inner_type))
                      && TYPE_PRECISION (inner_type) < prec)
                    {
                      prec = TYPE_PRECISION (inner_type);
                      /* See if we can shorten the right shift.  */
                      if (shiftc < prec)
                        shift_type = inner_type;
                    }
                }
              zerobits = ~(unsigned HOST_WIDE_INT) 0;
              zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
              zerobits <<= prec - shiftc;
              /* For arithmetic shift if sign bit could be set, zerobits
                 can contain actually sign bits, so no transformation is
                 possible, unless MASK masks them all away.  In that
                 case the shift needs to be converted into logical shift.  */
              if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
                  && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
                {
                  if ((mask & zerobits) == 0)
                    shift_type = unsigned_type_for (TREE_TYPE (arg0));
                  else
                    zerobits = 0;
                }
            }

          /* ((X << 16) & 0xff00) is (X, 0).  */
          if ((mask & zerobits) == mask)
            return omit_one_operand_loc (loc, type,
                                         build_int_cst (type, 0), arg0);

          newmask = mask | zerobits;
          if (newmask != mask && (newmask & (newmask + 1)) == 0)
            {
              unsigned int prec;

              /* Only do the transformation if NEWMASK is some integer
                 mode's mask.  */
              for (prec = BITS_PER_UNIT;
                   prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
                if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
                  break;
              if (prec < HOST_BITS_PER_WIDE_INT
                  || newmask == ~(unsigned HOST_WIDE_INT) 0)
                {
                  tree newmaskt;

                  if (shift_type != TREE_TYPE (arg0))
                    {
                      tem = fold_build2_loc (loc, TREE_CODE (arg0), shift_type,
                                             fold_convert_loc (loc, shift_type,
                                                               TREE_OPERAND (arg0, 0)),
                                             TREE_OPERAND (arg0, 1));
                      tem = fold_convert_loc (loc, type, tem);
                    }
                  else
                    tem = op0;
                  newmaskt = build_int_cst_type (TREE_TYPE (op1), newmask);
                  if (!tree_int_cst_equal (newmaskt, arg1))
                    return fold_build2_loc (loc, BIT_AND_EXPR, type, tem, newmaskt);
                }
            }
        }
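
      /* e.g. for an 8-bit unsigned X, (X >> 4) & 0x0f: the shift already
         clears bits 4-7, so the mask is widened to the full mode mask
         0xff, and the resulting BIT_AND can then be removed by the
         all-ones fold above.  */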

      goto associate;

    case RDIV_EXPR:
      /* Don't touch a floating-point divide by zero unless the mode
         of the constant can represent infinity.  */
      if (TREE_CODE (arg1) == REAL_CST
          && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
          && real_zerop (arg1))
        return NULL_TREE;

      /* Optimize A / A to 1.0 if we don't care about
         NaNs or Infinities.  Skip the transformation
         for non-real operands.  */
      if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
          && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
          && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
          && operand_equal_p (arg0, arg1, 0))
        {
          tree r = build_real (TREE_TYPE (arg0), dconst1);

          return omit_two_operands_loc (loc, type, r, arg0, arg1);
        }

      /* The complex version of the above A / A optimization.  */
      if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
          && operand_equal_p (arg0, arg1, 0))
        {
          tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
          if (! HONOR_NANS (TYPE_MODE (elem_type))
              && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
            {
              tree r = build_real (elem_type, dconst1);
              /* omit_two_operands will call fold_convert for us.  */
              return omit_two_operands_loc (loc, type, r, arg0, arg1);
            }
        }

      /* (-A) / (-B) -> A / B  */
      if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
        return fold_build2_loc (loc, RDIV_EXPR, type,
                                TREE_OPERAND (arg0, 0),
                                negate_expr (arg1));
      if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
        return fold_build2_loc (loc, RDIV_EXPR, type,
                                negate_expr (arg0),
                                TREE_OPERAND (arg1, 0));

      /* In IEEE floating point, x/1 is not equivalent to x for snans.  */
      if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
          && real_onep (arg1))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* In IEEE floating point, x/-1 is not equivalent to -x for snans.  */
      if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
          && real_minus_onep (arg1))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type,
                                                      negate_expr (arg0)));

      /* If ARG1 is a constant, we can convert this to a multiply by the
         reciprocal.  This does not have the same rounding properties,
         so only do this if -freciprocal-math.  We can actually
         always safely do it if ARG1 is a power of two, but it's hard to
         tell if it is or not in a portable manner.  */
      if (TREE_CODE (arg1) == REAL_CST)
        {
          if (flag_reciprocal_math
              && 0 != (tem = const_binop (code, build_real (type, dconst1),
                                          arg1)))
            return fold_build2_loc (loc, MULT_EXPR, type, arg0, tem);
          /* Find the reciprocal if optimizing and the result is exact.  */
          if (optimize)
            {
              REAL_VALUE_TYPE r;
              r = TREE_REAL_CST (arg1);
              if (exact_real_inverse (TYPE_MODE (TREE_TYPE (arg0)), &r))
                {
                  tem = build_real (type, r);
                  return fold_build2_loc (loc, MULT_EXPR, type,
                                          fold_convert_loc (loc, type, arg0),
                                          tem);
                }
            }
        }
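
      /* e.g. x / 5.0 becomes x * 0.2 only under -freciprocal-math,
         whereas x / 4.0 becomes x * 0.25 whenever optimizing, since the
         reciprocal of 4.0 is exact and rounding cannot change.  */
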
      /* Convert A/B/C to A/(B*C).  */
      if (flag_reciprocal_math
          && TREE_CODE (arg0) == RDIV_EXPR)
        return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
                                fold_build2_loc (loc, MULT_EXPR, type,
                                                 TREE_OPERAND (arg0, 1), arg1));

      /* Convert A/(B/C) to (A/B)*C.  */
      if (flag_reciprocal_math
          && TREE_CODE (arg1) == RDIV_EXPR)
        return fold_build2_loc (loc, MULT_EXPR, type,
                                fold_build2_loc (loc, RDIV_EXPR, type, arg0,
                                                 TREE_OPERAND (arg1, 0)),
                                TREE_OPERAND (arg1, 1));

      /* Convert C1/(X*C2) into (C1/C2)/X.  */
      if (flag_reciprocal_math
          && TREE_CODE (arg1) == MULT_EXPR
          && TREE_CODE (arg0) == REAL_CST
          && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
        {
          tree tem = const_binop (RDIV_EXPR, arg0,
                                  TREE_OPERAND (arg1, 1));
          if (tem)
            return fold_build2_loc (loc, RDIV_EXPR, type, tem,
                                    TREE_OPERAND (arg1, 0));
        }

      if (flag_unsafe_math_optimizations)
        {
          enum built_in_function fcode0 = builtin_mathfn_code (arg0);
          enum built_in_function fcode1 = builtin_mathfn_code (arg1);

          /* Optimize sin(x)/cos(x) as tan(x).  */
          if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
               || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
               || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
              && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
                                  CALL_EXPR_ARG (arg1, 0), 0))
            {
              tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);

              if (tanfn != NULL_TREE)
                return build_call_expr_loc (loc, tanfn, 1,
                                            CALL_EXPR_ARG (arg0, 0));
            }

          /* Optimize cos(x)/sin(x) as 1.0/tan(x).  */
          if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
               || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
               || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
              && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
                                  CALL_EXPR_ARG (arg1, 0), 0))
            {
              tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);

              if (tanfn != NULL_TREE)
                {
                  tree tmp = build_call_expr_loc (loc, tanfn, 1,
                                                  CALL_EXPR_ARG (arg0, 0));
                  return fold_build2_loc (loc, RDIV_EXPR, type,
                                          build_real (type, dconst1), tmp);
                }
            }

          /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
             NaNs or Infinities.  */
          if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
               || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
               || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
            {
              tree arg00 = CALL_EXPR_ARG (arg0, 0);
              tree arg01 = CALL_EXPR_ARG (arg1, 0);

              if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
                  && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
                  && operand_equal_p (arg00, arg01, 0))
                {
                  tree cosfn = mathfn_built_in (type, BUILT_IN_COS);

                  if (cosfn != NULL_TREE)
                    return build_call_expr_loc (loc, cosfn, 1, arg00);
                }
            }

          /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
             NaNs or Infinities.  */
          if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
               || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
               || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
            {
              tree arg00 = CALL_EXPR_ARG (arg0, 0);
              tree arg01 = CALL_EXPR_ARG (arg1, 0);

              if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
                  && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
                  && operand_equal_p (arg00, arg01, 0))
                {
                  tree cosfn = mathfn_built_in (type, BUILT_IN_COS);

                  if (cosfn != NULL_TREE)
                    {
                      tree tmp = build_call_expr_loc (loc, cosfn, 1, arg00);
                      return fold_build2_loc (loc, RDIV_EXPR, type,
                                              build_real (type, dconst1),
                                              tmp);
                    }
                }
            }

          /* Optimize pow(x,c)/x as pow(x,c-1).  */
          if (fcode0 == BUILT_IN_POW
              || fcode0 == BUILT_IN_POWF
              || fcode0 == BUILT_IN_POWL)
            {
              tree arg00 = CALL_EXPR_ARG (arg0, 0);
              tree arg01 = CALL_EXPR_ARG (arg0, 1);
              if (TREE_CODE (arg01) == REAL_CST
                  && !TREE_OVERFLOW (arg01)
                  && operand_equal_p (arg1, arg00, 0))
                {
                  tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
                  REAL_VALUE_TYPE c;
                  tree arg;

                  c = TREE_REAL_CST (arg01);
                  real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
                  arg = build_real (type, c);
                  return build_call_expr_loc (loc, powfn, 2, arg1, arg);
                }
            }
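
          /* e.g. pow (x, 3.5) / x becomes pow (x, 2.5); the REAL_CST
             exponent is simply decremented by one.  */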

          /* Optimize a/root(b/c) into a*root(c/b).  */
          if (BUILTIN_ROOT_P (fcode1))
            {
              tree rootarg = CALL_EXPR_ARG (arg1, 0);

              if (TREE_CODE (rootarg) == RDIV_EXPR)
                {
                  tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
                  tree b = TREE_OPERAND (rootarg, 0);
                  tree c = TREE_OPERAND (rootarg, 1);

                  tree tmp = fold_build2_loc (loc, RDIV_EXPR, type, c, b);

                  tmp = build_call_expr_loc (loc, rootfn, 1, tmp);
                  return fold_build2_loc (loc, MULT_EXPR, type, arg0, tmp);
                }
            }

          /* Optimize x/expN(y) into x*expN(-y).  */
          if (BUILTIN_EXPONENT_P (fcode1))
            {
              tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
              tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
              arg1 = build_call_expr_loc (loc,
                                          expfn, 1,
                                          fold_convert_loc (loc, type, arg));
              return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
            }
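
          /* e.g. x / exp (y) becomes x * exp (-y), trading the division
             for a negation and a multiply.  */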

          /* Optimize x/pow(y,z) into x*pow(y,-z).  */
          if (fcode1 == BUILT_IN_POW
              || fcode1 == BUILT_IN_POWF
              || fcode1 == BUILT_IN_POWL)
            {
              tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
              tree arg10 = CALL_EXPR_ARG (arg1, 0);
              tree arg11 = CALL_EXPR_ARG (arg1, 1);
              tree neg11 = fold_convert_loc (loc, type,
                                             negate_expr (arg11));
              arg1 = build_call_expr_loc (loc, powfn, 2, arg10, neg11);
              return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
            }
        }
      return NULL_TREE;

    case TRUNC_DIV_EXPR:
      /* Optimize (X & (-A)) / A where A is a power of 2,
         to X >> log2(A) */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && !TYPE_UNSIGNED (type) && TREE_CODE (arg1) == INTEGER_CST
          && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) > 0)
        {
          tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (arg1),
                                      arg1, TREE_OPERAND (arg0, 1));
          if (sum && integer_zerop (sum))
            {
              unsigned long pow2;

              if (TREE_INT_CST_LOW (arg1))
                pow2 = exact_log2 (TREE_INT_CST_LOW (arg1));
              else
                pow2 = exact_log2 (TREE_INT_CST_HIGH (arg1))
                       + HOST_BITS_PER_WIDE_INT;

              return fold_build2_loc (loc, RSHIFT_EXPR, type,
                                      TREE_OPERAND (arg0, 0),
                                      build_int_cst (integer_type_node, pow2));
            }
        }
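
      /* e.g. (x & -8) / 8 becomes x >> 3: the BIT_AND guarantees the low
         log2(A) bits are zero, so the truncating division is exact and
         an arithmetic shift computes it.  */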

      /* Fall thru */

    case FLOOR_DIV_EXPR:
      /* Simplify A / (B << N) where A and B are positive and B is
         a power of 2, to A >> (N + log2(B)).  */
      strict_overflow_p = false;
      if (TREE_CODE (arg1) == LSHIFT_EXPR
          && (TYPE_UNSIGNED (type)
              || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
        {
          tree sval = TREE_OPERAND (arg1, 0);
          if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
            {
              tree sh_cnt = TREE_OPERAND (arg1, 1);
              unsigned long pow2;

              if (TREE_INT_CST_LOW (sval))
                pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
              else
                pow2 = exact_log2 (TREE_INT_CST_HIGH (sval))
                       + HOST_BITS_PER_WIDE_INT;

              if (strict_overflow_p)
                fold_overflow_warning (("assuming signed overflow does not "
                                        "occur when simplifying A / (B << N)"),
                                       WARN_STRICT_OVERFLOW_MISC);

              sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
                                        sh_cnt,
                                        build_int_cst (TREE_TYPE (sh_cnt),
                                                       pow2));
              return fold_build2_loc (loc, RSHIFT_EXPR, type,
                                      fold_convert_loc (loc, type, arg0),
                                      sh_cnt);
            }
        }
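
      /* e.g. a / (2 << n), with a known to be nonnegative, becomes
         a >> (n + 1), folding log2(B) into the shift count.  */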

      /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
         TRUNC_DIV_EXPR.  Rewrite into the latter in this case.  */
      if (INTEGRAL_TYPE_P (type)
          && TYPE_UNSIGNED (type)
          && code == FLOOR_DIV_EXPR)
        return fold_build2_loc (loc, TRUNC_DIV_EXPR, type, op0, op1);

      /* Fall thru */

    case ROUND_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (integer_onep (arg1))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      if (integer_zerop (arg1))
        return NULL_TREE;
      /* X / -1 is -X.  */
      if (!TYPE_UNSIGNED (type)
          && TREE_CODE (arg1) == INTEGER_CST
          && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
          && TREE_INT_CST_HIGH (arg1) == -1)
        return fold_convert_loc (loc, type, negate_expr (arg0));

      /* Convert -A / -B to A / B when the type is signed and overflow is
         undefined.  */
      if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
          && TREE_CODE (arg0) == NEGATE_EXPR
          && negate_expr_p (arg1))
        {
          if (INTEGRAL_TYPE_P (type))
            fold_overflow_warning (("assuming signed overflow does not occur "
                                    "when distributing negation across "
                                    "division"),
                                   WARN_STRICT_OVERFLOW_MISC);
          return fold_build2_loc (loc, code, type,
                                  fold_convert_loc (loc, type,
                                                    TREE_OPERAND (arg0, 0)),
                                  fold_convert_loc (loc, type,
                                                    negate_expr (arg1)));
        }
      if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
          && TREE_CODE (arg1) == NEGATE_EXPR
          && negate_expr_p (arg0))
        {
          if (INTEGRAL_TYPE_P (type))
            fold_overflow_warning (("assuming signed overflow does not occur "
                                    "when distributing negation across "
                                    "division"),
                                   WARN_STRICT_OVERFLOW_MISC);
          return fold_build2_loc (loc, code, type,
                                  fold_convert_loc (loc, type,
                                                    negate_expr (arg0)),
                                  fold_convert_loc (loc, type,
                                                    TREE_OPERAND (arg1, 0)));
        }

      /* If arg0 is a multiple of arg1, then rewrite to the fastest div
         operation, EXACT_DIV_EXPR.

         Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
         At one time others generated faster code, it's not clear if they do
         after the last round of changes to the DIV code in expmed.c.  */
      if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
          && multiple_of_p (type, arg0, arg1))
        return fold_build2_loc (loc, EXACT_DIV_EXPR, type, arg0, arg1);

      strict_overflow_p = false;
      if (TREE_CODE (arg1) == INTEGER_CST
          && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
                                         &strict_overflow_p)))
        {
          if (strict_overflow_p)
            fold_overflow_warning (("assuming signed overflow does not occur "
                                    "when simplifying division"),
                                   WARN_STRICT_OVERFLOW_MISC);
          return fold_convert_loc (loc, type, tem);
        }

      return NULL_TREE;

    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case TRUNC_MOD_EXPR:
      /* X % 1 is always zero, but be sure to preserve any side
         effects in X.  */
      if (integer_onep (arg1))
        return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

      /* X % 0, return X % 0 unchanged so that we can get the
         proper warnings and errors.  */
      if (integer_zerop (arg1))
        return NULL_TREE;

      /* 0 % X is always zero, but be sure to preserve any side
         effects in X.  Place this after checking for X == 0.  */
      if (integer_zerop (arg0))
        return omit_one_operand_loc (loc, type, integer_zero_node, arg1);

      /* X % -1 is zero.  */
      if (!TYPE_UNSIGNED (type)
          && TREE_CODE (arg1) == INTEGER_CST
          && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
          && TREE_INT_CST_HIGH (arg1) == -1)
        return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

      /* X % -C is the same as X % C.  */
      if (code == TRUNC_MOD_EXPR
          && !TYPE_UNSIGNED (type)
          && TREE_CODE (arg1) == INTEGER_CST
          && !TREE_OVERFLOW (arg1)
          && TREE_INT_CST_HIGH (arg1) < 0
          && !TYPE_OVERFLOW_TRAPS (type)
          /* Avoid this transformation if C is INT_MIN, i.e. C == -C.  */
          && !sign_bit_p (arg1, arg1))
        return fold_build2_loc (loc, code, type,
                                fold_convert_loc (loc, type, arg0),
                                fold_convert_loc (loc, type,
                                                  negate_expr (arg1)));

      /* X % -Y is the same as X % Y.  */
      if (code == TRUNC_MOD_EXPR
          && !TYPE_UNSIGNED (type)
          && TREE_CODE (arg1) == NEGATE_EXPR
          && !TYPE_OVERFLOW_TRAPS (type))
        return fold_build2_loc (loc, code, type,
                                fold_convert_loc (loc, type, arg0),
                                fold_convert_loc (loc, type,
                                                  TREE_OPERAND (arg1, 0)));

      strict_overflow_p = false;
      if (TREE_CODE (arg1) == INTEGER_CST
          && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
                                         &strict_overflow_p)))
        {
          if (strict_overflow_p)
            fold_overflow_warning (("assuming signed overflow does not occur "
                                    "when simplifying modulus"),
                                   WARN_STRICT_OVERFLOW_MISC);
          return fold_convert_loc (loc, type, tem);
        }

      /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
         i.e. "X % C" into "X & (C - 1)", if X and C are positive.  */
      if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
          && (TYPE_UNSIGNED (type)
              || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
        {
          tree c = arg1;
          /* Also optimize A % (C << N) where C is a power of 2,
             to A & ((C << N) - 1).  */
          if (TREE_CODE (arg1) == LSHIFT_EXPR)
            c = TREE_OPERAND (arg1, 0);

          if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
            {
              tree mask
                = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (arg1), arg1,
                                   build_int_cst (TREE_TYPE (arg1), 1));
              if (strict_overflow_p)
                fold_overflow_warning (("assuming signed overflow does not "
                                        "occur when simplifying "
                                        "X % (power of two)"),
                                       WARN_STRICT_OVERFLOW_MISC);
              return fold_build2_loc (loc, BIT_AND_EXPR, type,
                                      fold_convert_loc (loc, type, arg0),
                                      fold_convert_loc (loc, type, mask));
            }
        }
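
      /* e.g. an unsigned x % 8 becomes x & 7, and x % (4 << n) becomes
         x & ((4 << n) - 1).  */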

      return NULL_TREE;

    case LROTATE_EXPR:
    case RROTATE_EXPR:
      if (integer_all_onesp (arg0))
        return omit_one_operand_loc (loc, type, arg0, arg1);
      goto shift;

    case RSHIFT_EXPR:
      /* Optimize -1 >> x for arithmetic right shifts.  */
      if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type)
          && tree_expr_nonnegative_p (arg1))
        return omit_one_operand_loc (loc, type, arg0, arg1);
      /* ... fall through ...  */

    case LSHIFT_EXPR:
    shift:
      if (integer_zerop (arg1))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      if (integer_zerop (arg0))
        return omit_one_operand_loc (loc, type, arg0, arg1);

      /* Since negative shift count is not well-defined,
         don't try to compute it in the compiler.  */
      if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
        return NULL_TREE;

      /* Turn (a OP c1) OP c2 into a OP (c1+c2).  */
      if (TREE_CODE (op0) == code && host_integerp (arg1, false)
          && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
          && host_integerp (TREE_OPERAND (arg0, 1), false)
          && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
        {
          HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
                               + TREE_INT_CST_LOW (arg1));

          /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
             being well defined.  */
          if (low >= TYPE_PRECISION (type))
            {
              if (code == LROTATE_EXPR || code == RROTATE_EXPR)
                low = low % TYPE_PRECISION (type);
              else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
                return omit_one_operand_loc (loc, type,
                                             build_int_cst (type, 0),
                                             TREE_OPERAND (arg0, 0));
              else
                low = TYPE_PRECISION (type) - 1;
            }

          return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
                                  build_int_cst (type, low));
        }
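
      /* e.g. (x >> 2) >> 3 becomes x >> 5.  If the combined count reaches
         the precision, the result is 0 for logical shifts, the count
         saturates at precision - 1 for arithmetic right shifts, and
         rotate counts are reduced modulo the precision.  */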

      /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
         into x & ((unsigned)-1 >> c) for unsigned types.  */
      if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
           || (TYPE_UNSIGNED (type)
               && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
          && host_integerp (arg1, false)
          && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
          && host_integerp (TREE_OPERAND (arg0, 1), false)
          && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
        {
          HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
          HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
          tree lshift;
          tree arg00;

          if (low0 == low1)
            {
              arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));

              lshift = build_int_cst (type, -1);
              lshift = int_const_binop (code, lshift, arg1);

              return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift);
            }
        }

      /* Rewrite an LROTATE_EXPR by a constant into an
         RROTATE_EXPR by a new constant.  */
      if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
        {
          tree tem = build_int_cst (TREE_TYPE (arg1),
                                    TYPE_PRECISION (type));
          tem = const_binop (MINUS_EXPR, tem, arg1);
          return fold_build2_loc (loc, RROTATE_EXPR, type, op0, tem);
        }
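
      /* For a 32-bit type this turns, for example, a left rotate by 3
         into a right rotate by 29.  */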

      /* If we have a rotate of a bit operation with the rotate count and
         the second operand of the bit operation both constant,
         permute the two operations.  */
      if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
          && (TREE_CODE (arg0) == BIT_AND_EXPR
              || TREE_CODE (arg0) == BIT_IOR_EXPR
              || TREE_CODE (arg0) == BIT_XOR_EXPR)
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        return fold_build2_loc (loc, TREE_CODE (arg0), type,
                                fold_build2_loc (loc, code, type,
                                                 TREE_OPERAND (arg0, 0), arg1),
                                fold_build2_loc (loc, code, type,
                                                 TREE_OPERAND (arg0, 1), arg1));

      /* Two consecutive rotates adding up to the precision of the
         type can be ignored.  */
      if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (arg0) == RROTATE_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
          && TREE_INT_CST_HIGH (arg1) == 0
          && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
          && ((TREE_INT_CST_LOW (arg1)
               + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
              == (unsigned int) TYPE_PRECISION (type)))
        return TREE_OPERAND (arg0, 0);
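
      /* For example, on a 32-bit type a right rotate by 8 of a right
         rotate by 24 yields the original operand, since 8 + 24 == 32.  */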

      /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
              (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
         if the latter can be further optimized.  */
      if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
          && TREE_CODE (arg0) == BIT_AND_EXPR
          && TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        {
          tree mask = fold_build2_loc (loc, code, type,
                                       fold_convert_loc (loc, type,
                                                         TREE_OPERAND (arg0, 1)),
                                       arg1);
          tree shift = fold_build2_loc (loc, code, type,
                                        fold_convert_loc (loc, type,
                                                          TREE_OPERAND (arg0, 0)),
                                        arg1);
          tem = fold_binary_loc (loc, BIT_AND_EXPR, type, shift, mask);
          if (tem)
            return tem;
        }

      return NULL_TREE;

    case MIN_EXPR:
      if (operand_equal_p (arg0, arg1, 0))
        return omit_one_operand_loc (loc, type, arg0, arg1);
      if (INTEGRAL_TYPE_P (type)
          && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
        return omit_one_operand_loc (loc, type, arg1, arg0);
      tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1);
      if (tem)
        return tem;
      goto associate;

    case MAX_EXPR:
      if (operand_equal_p (arg0, arg1, 0))
        return omit_one_operand_loc (loc, type, arg0, arg1);
      if (INTEGRAL_TYPE_P (type)
          && TYPE_MAX_VALUE (type)
          && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
        return omit_one_operand_loc (loc, type, arg1, arg0);
      tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1);
      if (tem)
        return tem;
      goto associate;

    case TRUTH_ANDIF_EXPR:
      /* Note that the operands of this must be ints
         and their values must be 0 or 1.
         ("true" is a fixed value perhaps depending on the language.)  */
      /* If first arg is constant zero, return it.  */
      if (integer_zerop (arg0))
        return fold_convert_loc (loc, type, arg0);
    case TRUTH_AND_EXPR:
      /* If either arg is constant true, drop it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
      if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
          /* Preserve sequence points.  */
          && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      /* If second arg is constant zero, result is zero, but first arg
         must be evaluated.  */
      if (integer_zerop (arg1))
        return omit_one_operand_loc (loc, type, arg1, arg0);
      /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
         case will be handled here.  */
      if (integer_zerop (arg0))
        return omit_one_operand_loc (loc, type, arg0, arg1);

      /* !X && X is always false.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
        return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
      /* X && !X is always false.  */
      if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
        return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
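
      /* So "x && !x" and "!x && x" both fold to 0 no matter what value
         x has, provided the operands are free of side effects.  */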

      /* A < X && A + 1 > Y ==> A < X && A >= Y.  Normally A + 1 > Y
         means A >= Y && A != MAX, but in this case we know that
         A < X <= MAX.  */

      if (!TREE_SIDE_EFFECTS (arg0)
          && !TREE_SIDE_EFFECTS (arg1))
        {
          tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
          if (tem && !operand_equal_p (tem, arg0, 0))
            return fold_build2_loc (loc, code, type, tem, arg1);

          tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
          if (tem && !operand_equal_p (tem, arg1, 0))
            return fold_build2_loc (loc, code, type, arg0, tem);
        }

      if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
          != NULL_TREE)
        return tem;

      return NULL_TREE;

    case TRUTH_ORIF_EXPR:
      /* Note that the operands of this must be ints
         and their values must be 0 or true.
         ("true" is a fixed value perhaps depending on the language.)  */
      /* If first arg is constant true, return it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
        return fold_convert_loc (loc, type, arg0);
    case TRUTH_OR_EXPR:
      /* If either arg is constant zero, drop it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
      if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
          /* Preserve sequence points.  */
          && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      /* If second arg is constant true, result is true, but we must
         evaluate first arg.  */
      if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
        return omit_one_operand_loc (loc, type, arg1, arg0);
      /* Likewise for first arg, but note this only occurs here for
         TRUTH_OR_EXPR.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
        return omit_one_operand_loc (loc, type, arg0, arg1);

      /* !X || X is always true.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
        return omit_one_operand_loc (loc, type, integer_one_node, arg1);
      /* X || !X is always true.  */
      if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
        return omit_one_operand_loc (loc, type, integer_one_node, arg0);

      /* (X && !Y) || (!X && Y) is X ^ Y */
      if (TREE_CODE (arg0) == TRUTH_AND_EXPR
          && TREE_CODE (arg1) == TRUTH_AND_EXPR)
        {
          tree a0, a1, l0, l1, n0, n1;

          a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
          a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));

          l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
          l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));

          n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
          n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);

          if ((operand_equal_p (n0, a0, 0)
               && operand_equal_p (n1, a1, 0))
              || (operand_equal_p (n0, a1, 0)
                  && operand_equal_p (n1, a0, 0)))
            return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
        }

      if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
          != NULL_TREE)
        return tem;

      return NULL_TREE;

    case TRUTH_XOR_EXPR:
      /* If the second arg is constant zero, drop it.  */
      if (integer_zerop (arg1))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      /* If the second arg is constant true, this is a logical inversion.  */
      if (integer_onep (arg1))
        {
          /* Only call invert_truthvalue if operand is a truth value.  */
          if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
            tem = fold_build1_loc (loc, TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
          else
            tem = invert_truthvalue_loc (loc, arg0);
          return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
        }
      /* Identical arguments cancel to zero.  */
      if (operand_equal_p (arg0, arg1, 0))
        return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

      /* !X ^ X is always true.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
        return omit_one_operand_loc (loc, type, integer_one_node, arg1);

      /* X ^ !X is always true.  */
      if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
        return omit_one_operand_loc (loc, type, integer_one_node, arg0);

      return NULL_TREE;

    case EQ_EXPR:
    case NE_EXPR:
      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      tem = fold_comparison (loc, code, type, op0, op1);
      if (tem != NULL_TREE)
        return tem;

      /* bool_var != 0 becomes bool_var.  */
      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
          && code == NE_EXPR)
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* bool_var == 1 becomes bool_var.  */
      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
          && code == EQ_EXPR)
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* bool_var != 1 becomes !bool_var.  */
      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
          && code == NE_EXPR)
        return fold_convert_loc (loc, type,
                                 fold_build1_loc (loc, TRUTH_NOT_EXPR,
                                                  TREE_TYPE (arg0), arg0));

      /* bool_var == 0 becomes !bool_var.  */
      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
          && code == EQ_EXPR)
        return fold_convert_loc (loc, type,
                                 fold_build1_loc (loc, TRUTH_NOT_EXPR,
                                                  TREE_TYPE (arg0), arg0));

      /* !exp != 0 becomes !exp */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
          && code == NE_EXPR)
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* If this is an equality comparison of the address of two non-weak,
         unaliased symbols neither of which are extern (since we do not
         have access to attributes for externs), then we know the result.  */
      if (TREE_CODE (arg0) == ADDR_EXPR
          && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
          && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
          && ! lookup_attribute ("alias",
                                 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
          && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
          && TREE_CODE (arg1) == ADDR_EXPR
          && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
          && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
          && ! lookup_attribute ("alias",
                                 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
          && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
        {
          /* We know that we're looking at the address of two
             non-weak, unaliased, static _DECL nodes.

             It is both wasteful and incorrect to call operand_equal_p
             to compare the two ADDR_EXPR nodes.  It is wasteful in that
             all we need to do is test pointer equality for the arguments
             to the two ADDR_EXPR nodes.  It is incorrect to use
             operand_equal_p as that function is NOT equivalent to a
             C equality test.  It can in fact return false for two
             objects which would test as equal using the C equality
             operator.  */
          bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
          return constant_boolean_node (equal
                                        ? code == EQ_EXPR : code != EQ_EXPR,
                                        type);
        }
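
      /* Given "static int a, b;", this folds "&a == &b" to 0 and
         "&a != &b" to 1, while "&a == &a" folds to 1.  */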

      /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
         a MINUS_EXPR of a constant, we can convert it into a comparison with
         a revised constant as long as no overflow occurs.  */
      if (TREE_CODE (arg1) == INTEGER_CST
          && (TREE_CODE (arg0) == PLUS_EXPR
              || TREE_CODE (arg0) == MINUS_EXPR)
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
          && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
                                      ? MINUS_EXPR : PLUS_EXPR,
                                      fold_convert_loc (loc, TREE_TYPE (arg0),
                                                        arg1),
                                      TREE_OPERAND (arg0, 1)))
          && !TREE_OVERFLOW (tem))
        return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);

      /* Similarly for a NEGATE_EXPR.  */
      if (TREE_CODE (arg0) == NEGATE_EXPR
          && TREE_CODE (arg1) == INTEGER_CST
          && 0 != (tem = negate_expr (fold_convert_loc (loc, TREE_TYPE (arg0),
                                                        arg1)))
          && TREE_CODE (tem) == INTEGER_CST
          && !TREE_OVERFLOW (tem))
        return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);

      /* Similarly for a BIT_XOR_EXPR;  X ^ C1 == C2 is X == (C1 ^ C2).  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
          && TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
                                fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg0),
                                                 fold_convert_loc (loc,
                                                                   TREE_TYPE (arg0),
                                                                   arg1),
                                                 TREE_OPERAND (arg0, 1)));
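
      /* For example, "(x ^ 5) == 3" becomes "x == 6", folding the two
         constants at compile time (5 ^ 3 == 6).  */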

      /* Transform comparisons of the form X +- Y CMP X to Y CMP 0.  */
      if ((TREE_CODE (arg0) == PLUS_EXPR
           || TREE_CODE (arg0) == POINTER_PLUS_EXPR
           || TREE_CODE (arg0) == MINUS_EXPR)
          && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
                                                                        0)),
                              arg1, 0)
          && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
              || POINTER_TYPE_P (TREE_TYPE (arg0))))
        {
          tree val = TREE_OPERAND (arg0, 1);
          return omit_two_operands_loc (loc, type,
                                        fold_build2_loc (loc, code, type,
                                                         val,
                                                         build_int_cst (TREE_TYPE (val),
                                                                        0)),
                                        TREE_OPERAND (arg0, 0), arg1);
        }

      /* Transform comparisons of the form C - X CMP X if C % 2 == 1.  */
      if (TREE_CODE (arg0) == MINUS_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
          && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
                                                                        1)),
                              arg1, 0)
          && (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 0)) & 1) == 1)
        {
          return omit_two_operands_loc (loc, type,
                                        code == NE_EXPR
                                        ? boolean_true_node : boolean_false_node,
                                        TREE_OPERAND (arg0, 1), arg1);
        }

      /* If we have X - Y == 0, we can convert that to X == Y and similarly
         for !=.  Don't do this for ordered comparisons due to overflow.  */
      if (TREE_CODE (arg0) == MINUS_EXPR
          && integer_zerop (arg1))
        return fold_build2_loc (loc, code, type,
                                TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));

      /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0.  */
      if (TREE_CODE (arg0) == ABS_EXPR
          && (integer_zerop (arg1) || real_zerop (arg1)))
        return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), arg1);

      /* If this is an EQ or NE comparison with zero and ARG0 is
         (1 << foo) & bar, convert it to (bar >> foo) & 1.  Both require
         two operations, but the latter can be done in one less insn
         on machines that have only two-operand insns or on which a
         constant cannot be the first operand.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && integer_zerop (arg1))
        {
          tree arg00 = TREE_OPERAND (arg0, 0);
          tree arg01 = TREE_OPERAND (arg0, 1);
          if (TREE_CODE (arg00) == LSHIFT_EXPR
              && integer_onep (TREE_OPERAND (arg00, 0)))
            {
              tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
                                          arg01, TREE_OPERAND (arg00, 1));
              tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
                                     build_int_cst (TREE_TYPE (arg0), 1));
              return fold_build2_loc (loc, code, type,
                                      fold_convert_loc (loc, TREE_TYPE (arg1), tem),
                                      arg1);
            }
          else if (TREE_CODE (arg01) == LSHIFT_EXPR
                   && integer_onep (TREE_OPERAND (arg01, 0)))
            {
              tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
                                          arg00, TREE_OPERAND (arg01, 1));
              tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
                                     build_int_cst (TREE_TYPE (arg0), 1));
              return fold_build2_loc (loc, code, type,
                                      fold_convert_loc (loc, TREE_TYPE (arg1), tem),
                                      arg1);
            }
        }

      /* If this is an NE or EQ comparison of zero against the result of a
         signed MOD operation whose second operand is a power of 2, make
         the MOD operation unsigned since it is simpler and equivalent.  */
      if (integer_zerop (arg1)
          && !TYPE_UNSIGNED (TREE_TYPE (arg0))
          && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
              || TREE_CODE (arg0) == CEIL_MOD_EXPR
              || TREE_CODE (arg0) == FLOOR_MOD_EXPR
              || TREE_CODE (arg0) == ROUND_MOD_EXPR)
          && integer_pow2p (TREE_OPERAND (arg0, 1)))
        {
          tree newtype = unsigned_type_for (TREE_TYPE (arg0));
          tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
                                         fold_convert_loc (loc, newtype,
                                                           TREE_OPERAND (arg0, 0)),
                                         fold_convert_loc (loc, newtype,
                                                           TREE_OPERAND (arg0, 1)));

          return fold_build2_loc (loc, code, type, newmod,
                                  fold_convert_loc (loc, newtype, arg1));
        }
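
      /* For example, "x % 4 == 0" with signed x becomes
         "(unsigned) x % 4 == 0" here, which the power-of-two fold above
         then reduces to "((unsigned) x & 3) == 0".  */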

      /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
         C1 is a valid shift constant, and C2 is a power of two, i.e.
         a single bit.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
          && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
             == INTEGER_CST
          && integer_pow2p (TREE_OPERAND (arg0, 1))
          && integer_zerop (arg1))
        {
          tree itype = TREE_TYPE (arg0);
          unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
          tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);

          /* Check for a valid shift count.  */
          if (TREE_INT_CST_HIGH (arg001) == 0
              && TREE_INT_CST_LOW (arg001) < prec)
            {
              tree arg01 = TREE_OPERAND (arg0, 1);
              tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
              unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
              /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
                 can be rewritten as (X & (C2 << C1)) != 0.  */
              if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
                {
                  tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
                  tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
                  return fold_build2_loc (loc, code, type, tem,
                                          fold_convert_loc (loc, itype, arg1));
                }
              /* Otherwise, for signed (arithmetic) shifts,
                 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
                 ((X >> C1) & C2) == 0 is rewritten as X >= 0.  */
              else if (!TYPE_UNSIGNED (itype))
                return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
                                        arg000, build_int_cst (itype, 0));
              /* Otherwise, for unsigned (logical) shifts,
                 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
                 ((X >> C1) & C2) == 0 is rewritten as (X,true).  */
              else
                return omit_one_operand_loc (loc, type,
                                             code == EQ_EXPR ? integer_one_node
                                                             : integer_zero_node,
                                             arg000);
            }
        }

      /* If we have (A & C) == C where C is a power of 2, convert this into
         (A & C) != 0.  Similarly for NE_EXPR.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && integer_pow2p (TREE_OPERAND (arg0, 1))
          && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
        return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
                                arg0, fold_convert_loc (loc, TREE_TYPE (arg0),
                                                        integer_zero_node));
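
      /* For example, "(x & 4) == 4" becomes "(x & 4) != 0", a plain
         single-bit test.  */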

      /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
         bit, then fold the expression into A < 0 or A >= 0.  */
      tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, type);
      if (tem)
        return tem;

      /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
         Similarly for NE_EXPR.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        {
          tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
                                       TREE_TYPE (TREE_OPERAND (arg0, 1)),
                                       TREE_OPERAND (arg0, 1));
          tree dandnotc
            = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
                               fold_convert_loc (loc, TREE_TYPE (arg0), arg1),
                               notc);
          tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
          if (integer_nonzerop (dandnotc))
            return omit_one_operand_loc (loc, type, rslt, arg0);
        }

      /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
         Similarly for NE_EXPR.  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
          && TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        {
          tree notd = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
          tree candnotd
            = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
                               TREE_OPERAND (arg0, 1),
                               fold_convert_loc (loc, TREE_TYPE (arg0), notd));
          tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
          if (integer_nonzerop (candnotd))
            return omit_one_operand_loc (loc, type, rslt, arg0);
        }

      /* If this is a comparison of a field, we may be able to simplify it.  */
      if ((TREE_CODE (arg0) == COMPONENT_REF
           || TREE_CODE (arg0) == BIT_FIELD_REF)
          /* Handle the constant case even without -O
             to make sure the warnings are given.  */
          && (optimize || TREE_CODE (arg1) == INTEGER_CST))
        {
          t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
          if (t1)
            return t1;
        }

      /* Optimize comparisons of strlen vs zero to a compare of the
         first character of the string vs zero.  To wit,
                strlen(ptr) == 0   =>  *ptr == 0
                strlen(ptr) != 0   =>  *ptr != 0
         Other cases should reduce to one of these two (or a constant)
         due to the return value of strlen being unsigned.  */
      if (TREE_CODE (arg0) == CALL_EXPR
          && integer_zerop (arg1))
        {
          tree fndecl = get_callee_fndecl (arg0);

          if (fndecl
              && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
              && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
              && call_expr_nargs (arg0) == 1
              && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
            {
              tree iref = build_fold_indirect_ref_loc (loc,
                                                       CALL_EXPR_ARG (arg0, 0));
              return fold_build2_loc (loc, code, type, iref,
                                      build_int_cst (TREE_TYPE (iref), 0));
            }
        }

      /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
         of X.  Similarly fold (X >> C) == 0 into X >= 0.  */
      if (TREE_CODE (arg0) == RSHIFT_EXPR
          && integer_zerop (arg1)
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        {
          tree arg00 = TREE_OPERAND (arg0, 0);
          tree arg01 = TREE_OPERAND (arg0, 1);
          tree itype = TREE_TYPE (arg00);
          if (TREE_INT_CST_HIGH (arg01) == 0
              && TREE_INT_CST_LOW (arg01)
                 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
            {
              if (TYPE_UNSIGNED (itype))
                {
                  itype = signed_type_for (itype);
                  arg00 = fold_convert_loc (loc, itype, arg00);
                }
              return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
                                      type, arg00, build_int_cst (itype, 0));
            }
        }
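
      /* On a 32-bit type this folds, for example, "(x >> 31) != 0" to
         "x < 0" and "(x >> 31) == 0" to "x >= 0", converting unsigned
         operands to the corresponding signed type first.  */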

      /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y.  */
      if (integer_zerop (arg1)
          && TREE_CODE (arg0) == BIT_XOR_EXPR)
        return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
                                TREE_OPERAND (arg0, 1));

      /* (X ^ Y) == Y becomes X == 0.  We know that Y has no side-effects.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
        return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
                                build_int_cst (TREE_TYPE (arg0), 0));
      /* Likewise (X ^ Y) == X becomes Y == 0.  X has no side-effects.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
          && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
        return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 1),
                                build_int_cst (TREE_TYPE (arg0), 0));

      /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2).  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
          && TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
                                fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg1),
                                                 TREE_OPERAND (arg0, 1), arg1));

      /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
         (X & C) == 0 when C is a single bit.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
          && integer_zerop (arg1)
          && integer_pow2p (TREE_OPERAND (arg0, 1)))
        {
          tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
                                 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
                                 TREE_OPERAND (arg0, 1));
          return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
                                  type, tem,
                                  fold_convert_loc (loc, TREE_TYPE (arg0),
                                                    arg1));
        }

      /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
         constant C is a power of two, i.e. a single bit.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
          && integer_zerop (arg1)
          && integer_pow2p (TREE_OPERAND (arg0, 1))
          && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
                              TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
        {
          tree arg00 = TREE_OPERAND (arg0, 0);
          return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
                                  arg00, build_int_cst (TREE_TYPE (arg00), 0));
        }

      /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
         when C is a power of two, i.e. a single bit.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
          && integer_zerop (arg1)
          && integer_pow2p (TREE_OPERAND (arg0, 1))
          && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
                              TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
        {
          tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
          tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
                                 arg000, TREE_OPERAND (arg0, 1));
          return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
                                  tem, build_int_cst (TREE_TYPE (tem), 0));
        }

      if (integer_zerop (arg1)
          && tree_expr_nonzero_p (arg0))
        {
          tree res = constant_boolean_node (code == NE_EXPR, type);
          return omit_one_operand_loc (loc, type, res, arg0);
        }

      /* Fold -X op -Y as X op Y, where op is eq/ne.  */
      if (TREE_CODE (arg0) == NEGATE_EXPR
          && TREE_CODE (arg1) == NEGATE_EXPR)
        return fold_build2_loc (loc, code, type,
                                TREE_OPERAND (arg0, 0),
                                fold_convert_loc (loc, TREE_TYPE (arg0),
                                                  TREE_OPERAND (arg1, 0)));

      /* Fold (X & C) op (Y & C) as (X ^ Y) & C op 0, and symmetries.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && TREE_CODE (arg1) == BIT_AND_EXPR)
        {
          tree arg00 = TREE_OPERAND (arg0, 0);
          tree arg01 = TREE_OPERAND (arg0, 1);
          tree arg10 = TREE_OPERAND (arg1, 0);
          tree arg11 = TREE_OPERAND (arg1, 1);
          tree itype = TREE_TYPE (arg0);

          if (operand_equal_p (arg01, arg11, 0))
            return fold_build2_loc (loc, code, type,
                                    fold_build2_loc (loc, BIT_AND_EXPR, itype,
                                                     fold_build2_loc (loc,
                                                                      BIT_XOR_EXPR, itype,
                                                                      arg00, arg10),
                                                     arg01),
                                    build_int_cst (itype, 0));

          if (operand_equal_p (arg01, arg10, 0))
            return fold_build2_loc (loc, code, type,
                                    fold_build2_loc (loc, BIT_AND_EXPR, itype,
                                                     fold_build2_loc (loc,
                                                                      BIT_XOR_EXPR, itype,
                                                                      arg00, arg11),
                                                     arg01),
                                    build_int_cst (itype, 0));

          if (operand_equal_p (arg00, arg11, 0))
            return fold_build2_loc (loc, code, type,
                                    fold_build2_loc (loc, BIT_AND_EXPR, itype,
                                                     fold_build2_loc (loc,
                                                                      BIT_XOR_EXPR, itype,
                                                                      arg01, arg10),
                                                     arg00),
                                    build_int_cst (itype, 0));

          if (operand_equal_p (arg00, arg10, 0))
            return fold_build2_loc (loc, code, type,
                                    fold_build2_loc (loc, BIT_AND_EXPR, itype,
                                                     fold_build2_loc (loc,
                                                                      BIT_XOR_EXPR, itype,
                                                                      arg01, arg11),
                                                     arg00),
                                    build_int_cst (itype, 0));
        }

      if (TREE_CODE (arg0) == BIT_XOR_EXPR
          && TREE_CODE (arg1) == BIT_XOR_EXPR)
        {
          tree arg00 = TREE_OPERAND (arg0, 0);
          tree arg01 = TREE_OPERAND (arg0, 1);
          tree arg10 = TREE_OPERAND (arg1, 0);
          tree arg11 = TREE_OPERAND (arg1, 1);
          tree itype = TREE_TYPE (arg0);

          /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
             operand_equal_p guarantees no side-effects so we don't need
             to use omit_one_operand on Z.  */
          if (operand_equal_p (arg01, arg11, 0))
            return fold_build2_loc (loc, code, type, arg00,
                                    fold_convert_loc (loc, TREE_TYPE (arg00),
                                                      arg10));
          if (operand_equal_p (arg01, arg10, 0))
            return fold_build2_loc (loc, code, type, arg00,
                                    fold_convert_loc (loc, TREE_TYPE (arg00),
                                                      arg11));
          if (operand_equal_p (arg00, arg11, 0))
            return fold_build2_loc (loc, code, type, arg01,
                                    fold_convert_loc (loc, TREE_TYPE (arg01),
                                                      arg10));
          if (operand_equal_p (arg00, arg10, 0))
            return fold_build2_loc (loc, code, type, arg01,
                                    fold_convert_loc (loc, TREE_TYPE (arg01),
                                                      arg11));

          /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y.  */
          if (TREE_CODE (arg01) == INTEGER_CST
              && TREE_CODE (arg11) == INTEGER_CST)
            {
              tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
                                     fold_convert_loc (loc, itype, arg11));
              tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
              return fold_build2_loc (loc, code, type, tem,
                                      fold_convert_loc (loc, itype, arg10));
            }
        }

      /* Attempt to simplify equality/inequality comparisons of complex
         values.  Only lower the comparison if the result is known or
         can be simplified to a single scalar comparison.  */
      if ((TREE_CODE (arg0) == COMPLEX_EXPR
           || TREE_CODE (arg0) == COMPLEX_CST)
          && (TREE_CODE (arg1) == COMPLEX_EXPR
              || TREE_CODE (arg1) == COMPLEX_CST))
        {
          tree real0, imag0, real1, imag1;
          tree rcond, icond;

          if (TREE_CODE (arg0) == COMPLEX_EXPR)
            {
              real0 = TREE_OPERAND (arg0, 0);
              imag0 = TREE_OPERAND (arg0, 1);
            }
          else
            {
              real0 = TREE_REALPART (arg0);
              imag0 = TREE_IMAGPART (arg0);
            }

          if (TREE_CODE (arg1) == COMPLEX_EXPR)
            {
              real1 = TREE_OPERAND (arg1, 0);
              imag1 = TREE_OPERAND (arg1, 1);
            }
          else
            {
              real1 = TREE_REALPART (arg1);
              imag1 = TREE_IMAGPART (arg1);
            }

          rcond = fold_binary_loc (loc, code, type, real0, real1);
          if (rcond && TREE_CODE (rcond) == INTEGER_CST)
            {
              if (integer_zerop (rcond))
                {
                  if (code == EQ_EXPR)
                    return omit_two_operands_loc (loc, type, boolean_false_node,
                                                  imag0, imag1);
                  return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
                }
              else
                {
                  if (code == NE_EXPR)
                    return omit_two_operands_loc (loc, type, boolean_true_node,
                                                  imag0, imag1);
                  return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
                }
            }

          icond = fold_binary_loc (loc, code, type, imag0, imag1);
          if (icond && TREE_CODE (icond) == INTEGER_CST)
            {
              if (integer_zerop (icond))
                {
                  if (code == EQ_EXPR)
                    return omit_two_operands_loc (loc, type, boolean_false_node,
                                                  real0, real1);
                  return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
                }
              else
                {
                  if (code == NE_EXPR)
                    return omit_two_operands_loc (loc, type, boolean_true_node,
                                                  real0, real1);
                  return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
                }
            }
        }

      return NULL_TREE;

    case LT_EXPR:
    case GT_EXPR:
    case LE_EXPR:
    case GE_EXPR:
      tem = fold_comparison (loc, code, type, op0, op1);
      if (tem != NULL_TREE)
        return tem;

      /* Transform comparisons of the form X +- C CMP X.  */
      if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
          && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
               && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
              || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
                  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
        {
          tree arg01 = TREE_OPERAND (arg0, 1);
          enum tree_code code0 = TREE_CODE (arg0);
          int is_positive;

          if (TREE_CODE (arg01) == REAL_CST)
            is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
          else
            is_positive = tree_int_cst_sgn (arg01);

          /* (X - c) > X becomes false.  */
          if (code == GT_EXPR
              && ((code0 == MINUS_EXPR && is_positive >= 0)
                  || (code0 == PLUS_EXPR && is_positive <= 0)))
            {
              if (TREE_CODE (arg01) == INTEGER_CST
                  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
                fold_overflow_warning (("assuming signed overflow does not "
                                        "occur when assuming that (X - c) > X "
                                        "is always false"),
                                       WARN_STRICT_OVERFLOW_ALL);
              return constant_boolean_node (0, type);
            }

          /* Likewise (X + c) < X becomes false.  */
          if (code == LT_EXPR
              && ((code0 == PLUS_EXPR && is_positive >= 0)
                  || (code0 == MINUS_EXPR && is_positive <= 0)))
            {
              if (TREE_CODE (arg01) == INTEGER_CST
                  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
                fold_overflow_warning (("assuming signed overflow does not "
                                        "occur when assuming that "
                                        "(X + c) < X is always false"),
                                       WARN_STRICT_OVERFLOW_ALL);
              return constant_boolean_node (0, type);
            }

          /* Convert (X - c) <= X to true.  */
          if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
              && code == LE_EXPR
              && ((code0 == MINUS_EXPR && is_positive >= 0)
                  || (code0 == PLUS_EXPR && is_positive <= 0)))
            {
              if (TREE_CODE (arg01) == INTEGER_CST
                  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
                fold_overflow_warning (("assuming signed overflow does not "
                                        "occur when assuming that "
                                        "(X - c) <= X is always true"),
                                       WARN_STRICT_OVERFLOW_ALL);
              return constant_boolean_node (1, type);
            }

          /* Convert (X + c) >= X to true.  */
          if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
              && code == GE_EXPR
              && ((code0 == PLUS_EXPR && is_positive >= 0)
                  || (code0 == MINUS_EXPR && is_positive <= 0)))
            {
              if (TREE_CODE (arg01) == INTEGER_CST
                  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
                fold_overflow_warning (("assuming signed overflow does not "
                                        "occur when assuming that "
                                        "(X + c) >= X is always true"),
                                       WARN_STRICT_OVERFLOW_ALL);
              return constant_boolean_node (1, type);
            }

          if (TREE_CODE (arg01) == INTEGER_CST)
            {
              /* Convert X + c > X and X - c < X to true for integers.  */
              if (code == GT_EXPR
                  && ((code0 == PLUS_EXPR && is_positive > 0)
                      || (code0 == MINUS_EXPR && is_positive < 0)))
                {
                  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
                    fold_overflow_warning (("assuming signed overflow does "
                                            "not occur when assuming that "
                                            "(X + c) > X is always true"),
                                           WARN_STRICT_OVERFLOW_ALL);
                  return constant_boolean_node (1, type);
                }

              if (code == LT_EXPR
                  && ((code0 == MINUS_EXPR && is_positive > 0)
                      || (code0 == PLUS_EXPR && is_positive < 0)))
                {
                  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
                    fold_overflow_warning (("assuming signed overflow does "
                                            "not occur when assuming that "
                                            "(X - c) < X is always true"),
                                           WARN_STRICT_OVERFLOW_ALL);
                  return constant_boolean_node (1, type);
                }

              /* Convert X + c <= X and X - c >= X to false for integers.  */
              if (code == LE_EXPR
                  && ((code0 == PLUS_EXPR && is_positive > 0)
                      || (code0 == MINUS_EXPR && is_positive < 0)))
                {
                  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
                    fold_overflow_warning (("assuming signed overflow does "
                                            "not occur when assuming that "
                                            "(X + c) <= X is always false"),
                                           WARN_STRICT_OVERFLOW_ALL);
                  return constant_boolean_node (0, type);
                }

              if (code == GE_EXPR
                  && ((code0 == MINUS_EXPR && is_positive > 0)
                      || (code0 == PLUS_EXPR && is_positive < 0)))
                {
                  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
                    fold_overflow_warning (("assuming signed overflow does "
                                            "not occur when assuming that "
                                            "(X - c) >= X is always false"),
                                           WARN_STRICT_OVERFLOW_ALL);
                  return constant_boolean_node (0, type);
                }
            }
        }
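
      /* When signed overflow is undefined, the folds above reduce, for
         example, "x + 1 > x" to 1 and "x + 1 <= x" to 0, possibly
         emitting a -Wstrict-overflow warning.  */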

      /* Comparisons with the highest or lowest possible integer of
         the specified precision will have known values.  */
      {
        tree arg1_type = TREE_TYPE (arg1);
        unsigned int width = TYPE_PRECISION (arg1_type);

        if (TREE_CODE (arg1) == INTEGER_CST
            && width <= 2 * HOST_BITS_PER_WIDE_INT
            && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
          {
            HOST_WIDE_INT signed_max_hi;
            unsigned HOST_WIDE_INT signed_max_lo;
            unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;

            if (width <= HOST_BITS_PER_WIDE_INT)
              {
                signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
                                - 1;
                signed_max_hi = 0;
                max_hi = 0;

                if (TYPE_UNSIGNED (arg1_type))
                  {
                    max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
                    min_lo = 0;
                    min_hi = 0;
                  }
                else
                  {
                    max_lo = signed_max_lo;
                    min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
                    min_hi = -1;
                  }
              }
            else
              {
                width -= HOST_BITS_PER_WIDE_INT;
                signed_max_lo = -1;
                signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
                                - 1;
                max_lo = -1;
                min_lo = 0;

                if (TYPE_UNSIGNED (arg1_type))
                  {
                    max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
                    min_hi = 0;
                  }
                else
                  {
                    max_hi = signed_max_hi;
                    min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
                  }
              }

            if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
                && TREE_INT_CST_LOW (arg1) == max_lo)
              switch (code)
                {
                case GT_EXPR:
                  return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

                case GE_EXPR:
                  return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);

                case LE_EXPR:
                  return omit_one_operand_loc (loc, type, integer_one_node, arg0);

                case LT_EXPR:
                  return fold_build2_loc (loc, NE_EXPR, type, op0, op1);

                /* The GE_EXPR and LT_EXPR cases above are not normally
                   reached because of previous transformations.  */

                default:
                  break;
                }
            else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
                     == max_hi
                     && TREE_INT_CST_LOW (arg1) == max_lo - 1)
              switch (code)
                {
                case GT_EXPR:
                  arg1 = const_binop (PLUS_EXPR, arg1,
                                      build_int_cst (TREE_TYPE (arg1), 1));
                  return fold_build2_loc (loc, EQ_EXPR, type,
                                          fold_convert_loc (loc,
                                                            TREE_TYPE (arg1), arg0),
                                          arg1);
                case LE_EXPR:
                  arg1 = const_binop (PLUS_EXPR, arg1,
                                      build_int_cst (TREE_TYPE (arg1), 1));
                  return fold_build2_loc (loc, NE_EXPR, type,
                                          fold_convert_loc (loc, TREE_TYPE (arg1),
                                                            arg0),
                                          arg1);
                default:
                  break;
                }
            else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
                     == min_hi
                     && TREE_INT_CST_LOW (arg1) == min_lo)
              switch (code)
                {
                case LT_EXPR:
                  return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

                case LE_EXPR:
                  return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);

                case GE_EXPR:
                  return omit_one_operand_loc (loc, type, integer_one_node, arg0);

                case GT_EXPR:
                  return fold_build2_loc (loc, NE_EXPR, type, op0, op1);

                default:
                  break;
                }
            else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
                     == min_hi
                     && TREE_INT_CST_LOW (arg1) == min_lo + 1)
              switch (code)
                {
                case GE_EXPR:
                  arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
                  return fold_build2_loc (loc, NE_EXPR, type,
                                          fold_convert_loc (loc,
                                                            TREE_TYPE (arg1), arg0),
                                          arg1);
                case LT_EXPR:
                  arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
                  return fold_build2_loc (loc, EQ_EXPR, type,
                                          fold_convert_loc (loc, TREE_TYPE (arg1),
                                                            arg0),
                                          arg1);
                default:
                  break;
                }

            else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
                     && TREE_INT_CST_LOW (arg1) == signed_max_lo
                     && TYPE_UNSIGNED (arg1_type)
                     /* We will flip the signedness of the comparison operator
                        associated with the mode of arg1, so the sign bit is
                        specified by this mode.  Check that arg1 is the signed
                        max associated with this sign bit.  */
                     && width == GET_MODE_BITSIZE (TYPE_MODE (arg1_type))
                     /* signed_type does not work on pointer types.  */
                     && INTEGRAL_TYPE_P (arg1_type))
              {
                /* The following case also applies to X < signed_max+1
                   and X >= signed_max+1 because of previous transformations.  */
                if (code == LE_EXPR || code == GT_EXPR)
                  {
                    tree st;
                    st = signed_type_for (TREE_TYPE (arg1));
                    return fold_build2_loc (loc,
                                            code == LE_EXPR ? GE_EXPR : LT_EXPR,
                                            type, fold_convert_loc (loc, st, arg0),
                                            build_int_cst (st, 0));
                  }
              }
          }
      }

      /* If we are comparing an ABS_EXPR with a constant, we can
         convert all the cases into explicit comparisons, but they may
         well not be faster than doing the ABS and one comparison.
         But ABS (X) <= C is a range comparison, which becomes a subtraction
         and a comparison, and is probably faster.  */
      if (code == LE_EXPR
          && TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (arg0) == ABS_EXPR
          && ! TREE_SIDE_EFFECTS (arg0)
          && (0 != (tem = negate_expr (arg1)))
          && TREE_CODE (tem) == INTEGER_CST
          && !TREE_OVERFLOW (tem))
        return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
                                build2 (GE_EXPR, type,
                                        TREE_OPERAND (arg0, 0), tem),
                                build2 (LE_EXPR, type,
                                        TREE_OPERAND (arg0, 0), arg1));

      /* Convert ABS_EXPR<x> >= 0 to true.  */
      strict_overflow_p = false;
      if (code == GE_EXPR
          && (integer_zerop (arg1)
              || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
                  && real_zerop (arg1)))
          && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
        {
          if (strict_overflow_p)
            fold_overflow_warning (("assuming signed overflow does not occur "
                                    "when simplifying comparison of "
                                    "absolute value and zero"),
                                   WARN_STRICT_OVERFLOW_CONDITIONAL);
          return omit_one_operand_loc (loc, type, integer_one_node, arg0);
        }

      /* Convert ABS_EXPR<x> < 0 to false.  */
      strict_overflow_p = false;
      if (code == LT_EXPR
          && (integer_zerop (arg1) || real_zerop (arg1))
          && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
        {
          if (strict_overflow_p)
            fold_overflow_warning (("assuming signed overflow does not occur "
                                    "when simplifying comparison of "
                                    "absolute value and zero"),
                                   WARN_STRICT_OVERFLOW_CONDITIONAL);
          return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
        }

      /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
         and similarly for >= into !=.  */
      if ((code == LT_EXPR || code == GE_EXPR)
          && TYPE_UNSIGNED (TREE_TYPE (arg0))
          && TREE_CODE (arg1) == LSHIFT_EXPR
          && integer_onep (TREE_OPERAND (arg1, 0)))
        return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
                           build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
                                   TREE_OPERAND (arg1, 1)),
                           build_int_cst (TREE_TYPE (arg0), 0));

      if ((code == LT_EXPR || code == GE_EXPR)
          && TYPE_UNSIGNED (TREE_TYPE (arg0))
          && CONVERT_EXPR_P (arg1)
          && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
          && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
        {
          tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
                        TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
          return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
                             fold_convert_loc (loc, TREE_TYPE (arg0), tem),
                             build_int_cst (TREE_TYPE (arg0), 0));
        }
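
      /* For unsigned x these fold, for example, "x < (1U << y)" to
         "(x >> y) == 0" and "x >= (1U << y)" to "(x >> y) != 0".  */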
|
13444 |
|
|
|
13445 |
|
|
return NULL_TREE;
|
13446 |
|
|
|
13447 |
|
|
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
    case LTGT_EXPR:
      if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
	{
	  t1 = fold_relational_const (code, type, arg0, arg1);
	  if (t1 != NULL_TREE)
	    return t1;
	}

      /* If the first operand is NaN, the result is constant.  */
      if (TREE_CODE (arg0) == REAL_CST
	  && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
	  && (code != LTGT_EXPR || ! flag_trapping_math))
	{
	  t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
	       ? integer_zero_node
	       : integer_one_node;
	  return omit_one_operand_loc (loc, type, t1, arg1);
	}

      /* If the second operand is NaN, the result is constant.  */
      if (TREE_CODE (arg1) == REAL_CST
	  && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
	  && (code != LTGT_EXPR || ! flag_trapping_math))
	{
	  t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
	       ? integer_zero_node
	       : integer_one_node;
	  return omit_one_operand_loc (loc, type, t1, arg0);
	}

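      /* For example, if ARG0 is a NaN constant, UNLT_EXPR and the other
	 UN* codes fold to 1 here, while ORDERED_EXPR and LTGT_EXPR fold
	 to 0.  */
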
      /* Simplify unordered comparison of something with itself.  */
      if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
	  && operand_equal_p (arg0, arg1, 0))
	return constant_boolean_node (1, type);

      if (code == LTGT_EXPR
	  && !flag_trapping_math
	  && operand_equal_p (arg0, arg1, 0))
	return constant_boolean_node (0, type);

      /* Fold (double)float1 CMP (double)float2 into float1 CMP float2.  */
      {
	tree targ0 = strip_float_extensions (arg0);
	tree targ1 = strip_float_extensions (arg1);
	tree newtype = TREE_TYPE (targ0);

	if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
	  newtype = TREE_TYPE (targ1);

	if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
	  return fold_build2_loc (loc, code, type,
				  fold_convert_loc (loc, newtype, targ0),
				  fold_convert_loc (loc, newtype, targ1));
      }

      return NULL_TREE;

    case COMPOUND_EXPR:
      /* When pedantic, a compound expression can be neither an lvalue
	 nor an integer constant expression.  */
      if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
	return NULL_TREE;
      /* Don't let (0, 0) be a null pointer constant.  */
      tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
				 : fold_convert_loc (loc, type, arg1);
      return pedantic_non_lvalue_loc (loc, tem);

    case COMPLEX_EXPR:
      if ((TREE_CODE (arg0) == REAL_CST
	   && TREE_CODE (arg1) == REAL_CST)
	  || (TREE_CODE (arg0) == INTEGER_CST
	      && TREE_CODE (arg1) == INTEGER_CST))
	return build_complex (type, arg0, arg1);
      if (TREE_CODE (arg0) == REALPART_EXPR
	  && TREE_CODE (arg1) == IMAGPART_EXPR
	  && TREE_TYPE (TREE_OPERAND (arg0, 0)) == type
	  && operand_equal_p (TREE_OPERAND (arg0, 0),
			      TREE_OPERAND (arg1, 0), 0))
	return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
				     TREE_OPERAND (arg1, 0));
      return NULL_TREE;

    case ASSERT_EXPR:
      /* An ASSERT_EXPR should never be passed to fold_binary.  */
      gcc_unreachable ();

    case VEC_PACK_TRUNC_EXPR:
    case VEC_PACK_FIX_TRUNC_EXPR:
      {
	unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
	tree *elts, vals = NULL_TREE;

	gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts / 2
		    && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts / 2);
	if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
	  return NULL_TREE;

	elts = XALLOCAVEC (tree, nelts);
	if (!vec_cst_ctor_to_array (arg0, elts)
	    || !vec_cst_ctor_to_array (arg1, elts + nelts / 2))
	  return NULL_TREE;

	for (i = 0; i < nelts; i++)
	  {
	    elts[i] = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
					  ? NOP_EXPR : FIX_TRUNC_EXPR,
					  TREE_TYPE (type), elts[i]);
	    if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
	      return NULL_TREE;
	  }

	for (i = 0; i < nelts; i++)
	  vals = tree_cons (NULL_TREE, elts[nelts - i - 1], vals);
	return build_vector (type, vals);
      }

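      /* For example, packing the constant vectors { a, b } and { c, d }
	 builds the narrower constant { a', b', c', d' }, where each
	 element has been truncated (or, for VEC_PACK_FIX_TRUNC_EXPR,
	 converted from floating point) to the element type of TYPE.  */
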
    case VEC_WIDEN_MULT_LO_EXPR:
    case VEC_WIDEN_MULT_HI_EXPR:
      {
	unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
	tree *elts, vals = NULL_TREE;

	gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2
		    && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts * 2);
	if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
	  return NULL_TREE;

	elts = XALLOCAVEC (tree, nelts * 4);
	if (!vec_cst_ctor_to_array (arg0, elts)
	    || !vec_cst_ctor_to_array (arg1, elts + nelts * 2))
	  return NULL_TREE;

	if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_WIDEN_MULT_LO_EXPR))
	  elts += nelts;

	for (i = 0; i < nelts; i++)
	  {
	    elts[i] = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[i]);
	    elts[i + nelts * 2]
	      = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
				    elts[i + nelts * 2]);
	    if (elts[i] == NULL_TREE || elts[i + nelts * 2] == NULL_TREE)
	      return NULL_TREE;
	    elts[i] = const_binop (MULT_EXPR, elts[i], elts[i + nelts * 2]);
	    if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
	      return NULL_TREE;
	  }

	for (i = 0; i < nelts; i++)
	  vals = tree_cons (NULL_TREE, elts[nelts - i - 1], vals);
	return build_vector (type, vals);
      }

    default:
      return NULL_TREE;
    } /* switch (code) */
}

/* Callback for walk_tree, looking for LABEL_EXPR.  Return *TP if it is
   a LABEL_EXPR; otherwise return NULL_TREE.  Do not check the subtrees
   of GOTO_EXPR.  */

static tree
contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  switch (TREE_CODE (*tp))
    {
    case LABEL_EXPR:
      return *tp;

    case GOTO_EXPR:
      *walk_subtrees = 0;

      /* ... fall through ...  */

    default:
      return NULL_TREE;
    }
}

/* Return whether the sub-tree ST contains a label which is accessible from
   outside the sub-tree.  */

static bool
contains_label_p (tree st)
{
  return
   (walk_tree_without_duplicates (&st, contains_label_1, NULL) != NULL_TREE);
}

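/* contains_label_p is used below when folding COND_EXPRs with constant
   conditions: the dead branch may only be discarded when no label inside
   it is reachable from outside that branch.  */
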
/* Fold a ternary expression of code CODE and type TYPE with operands
   OP0, OP1, and OP2.  Return the folded expression if folding is
   successful.  Otherwise, return NULL_TREE.  */

tree
fold_ternary_loc (location_t loc, enum tree_code code, tree type,
		  tree op0, tree op1, tree op2)
{
  tree tem;
  tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
  enum tree_code_class kind = TREE_CODE_CLASS (code);

  gcc_assert (IS_EXPR_CODE_CLASS (kind)
	      && TREE_CODE_LENGTH (code) == 3);

  /* Strip any conversions that don't change the mode.  This is safe
     for every expression, except for a comparison expression because
     its signedness is derived from its operands.  So, in the latter
     case, only strip conversions that don't change the signedness.

     Note that this is done as an internal manipulation within the
     constant folder, in order to find the simplest representation of
     the arguments so that their form can be studied.  In any case,
     the appropriate type conversions should be put back in the tree
     that will get out of the constant folder.  */
  if (op0)
    {
      arg0 = op0;
      STRIP_NOPS (arg0);
    }

  if (op1)
    {
      arg1 = op1;
      STRIP_NOPS (arg1);
    }

  if (op2)
    {
      arg2 = op2;
      STRIP_NOPS (arg2);
    }

  switch (code)
    {
    case COMPONENT_REF:
      if (TREE_CODE (arg0) == CONSTRUCTOR
	  && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
	{
	  unsigned HOST_WIDE_INT idx;
	  tree field, value;
	  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
	    if (field == arg1)
	      return value;
	}
      return NULL_TREE;

    case COND_EXPR:
      /* Pedantic ANSI C says that a conditional expression is never an lvalue,
	 so all simple results must be passed through pedantic_non_lvalue.  */
      if (TREE_CODE (arg0) == INTEGER_CST)
	{
	  tree unused_op = integer_zerop (arg0) ? op1 : op2;
	  tem = integer_zerop (arg0) ? op2 : op1;
	  /* Only optimize constant conditions when the selected branch
	     has the same type as the COND_EXPR.  This avoids optimizing
	     away "c ? x : throw", where the throw has a void type.
	     Avoid throwing away an operand that contains a label.  */
	  if ((!TREE_SIDE_EFFECTS (unused_op)
	       || !contains_label_p (unused_op))
	      && (! VOID_TYPE_P (TREE_TYPE (tem))
		  || VOID_TYPE_P (type)))
	    return pedantic_non_lvalue_loc (loc, tem);
	  return NULL_TREE;
	}
      if (operand_equal_p (arg1, op2, 0))
	return pedantic_omit_one_operand_loc (loc, type, arg1, arg0);

      /* If we have A op B ? A : C, we may be able to convert this to a
	 simpler expression, depending on the operation and the values
	 of B and C.  Signed zeros prevent all of these transformations,
	 for reasons given above each one.

	 Also try swapping the arguments and inverting the conditional.  */
      if (COMPARISON_CLASS_P (arg0)
	  && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
					     arg1, TREE_OPERAND (arg0, 1))
	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
	{
	  tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
	  if (tem)
	    return tem;
	}

      if (COMPARISON_CLASS_P (arg0)
	  && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
					     op2,
					     TREE_OPERAND (arg0, 1))
	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
	{
	  location_t loc0 = expr_location_or (arg0, loc);
	  tem = fold_truth_not_expr (loc0, arg0);
	  if (tem && COMPARISON_CLASS_P (tem))
	    {
	      tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
	      if (tem)
		return tem;
	    }
	}

      /* If the second operand is simpler than the third, swap them
	 since that produces better jump optimization results.  */
      if (truth_value_p (TREE_CODE (arg0))
	  && tree_swap_operands_p (op1, op2, false))
	{
	  location_t loc0 = expr_location_or (arg0, loc);
	  /* See if this can be inverted.  If it can't, possibly because
	     it was a floating-point inequality comparison, don't do
	     anything.  */
	  tem = fold_truth_not_expr (loc0, arg0);
	  if (tem)
	    return fold_build3_loc (loc, code, type, tem, op2, op1);
	}

      /* Convert A ? 1 : 0 to simply A.  */
      if (integer_onep (op1)
	  && integer_zerop (op2)
	  /* If we try to convert OP0 to our type, the
	     call to fold will try to move the conversion inside
	     a COND, which will recurse.  In that case, the COND_EXPR
	     is probably the best choice, so leave it alone.  */
	  && type == TREE_TYPE (arg0))
	return pedantic_non_lvalue_loc (loc, arg0);

      /* Convert A ? 0 : 1 to !A.  This prefers the use of NOT_EXPR
	 over COND_EXPR in cases such as floating point comparisons.  */
      if (integer_zerop (op1)
	  && integer_onep (op2)
	  && truth_value_p (TREE_CODE (arg0)))
	return pedantic_non_lvalue_loc (loc,
					fold_convert_loc (loc, type,
						invert_truthvalue_loc (loc,
								       arg0)));

      /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>).  */
      if (TREE_CODE (arg0) == LT_EXPR
	  && integer_zerop (TREE_OPERAND (arg0, 1))
	  && integer_zerop (op2)
	  && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
	{
	  /* sign_bit_p only checks ARG1 bits within A's precision.
	     If <sign bit of A> has wider type than A, bits outside
	     of A's precision in <sign bit of A> need to be checked.
	     If they are all 0, this optimization needs to be done
	     in unsigned A's type, if they are all 1 in signed A's type,
	     otherwise this can't be done.  */
	  if (TYPE_PRECISION (TREE_TYPE (tem))
	      < TYPE_PRECISION (TREE_TYPE (arg1))
	      && TYPE_PRECISION (TREE_TYPE (tem))
		 < TYPE_PRECISION (type))
	    {
	      unsigned HOST_WIDE_INT mask_lo;
	      HOST_WIDE_INT mask_hi;
	      int inner_width, outer_width;
	      tree tem_type;

	      inner_width = TYPE_PRECISION (TREE_TYPE (tem));
	      outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
	      if (outer_width > TYPE_PRECISION (type))
		outer_width = TYPE_PRECISION (type);

	      if (outer_width > HOST_BITS_PER_WIDE_INT)
		{
		  mask_hi = ((unsigned HOST_WIDE_INT) -1
			     >> (2 * HOST_BITS_PER_WIDE_INT - outer_width));
		  mask_lo = -1;
		}
	      else
		{
		  mask_hi = 0;
		  mask_lo = ((unsigned HOST_WIDE_INT) -1
			     >> (HOST_BITS_PER_WIDE_INT - outer_width));
		}
	      if (inner_width > HOST_BITS_PER_WIDE_INT)
		{
		  mask_hi &= ~((unsigned HOST_WIDE_INT) -1
			       >> (HOST_BITS_PER_WIDE_INT - inner_width));
		  mask_lo = 0;
		}
	      else
		mask_lo &= ~((unsigned HOST_WIDE_INT) -1
			     >> (HOST_BITS_PER_WIDE_INT - inner_width));

	      if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
		  && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
		{
		  tem_type = signed_type_for (TREE_TYPE (tem));
		  tem = fold_convert_loc (loc, tem_type, tem);
		}
	      else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
		       && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
		{
		  tem_type = unsigned_type_for (TREE_TYPE (tem));
		  tem = fold_convert_loc (loc, tem_type, tem);
		}
	      else
		tem = NULL;
	    }

	  if (tem)
	    return
	      fold_convert_loc (loc, type,
				fold_build2_loc (loc, BIT_AND_EXPR,
						 TREE_TYPE (tem), tem,
						 fold_convert_loc (loc,
								   TREE_TYPE (tem),
								   arg1)));
	}

      /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N).  A & 1 was
	 already handled above.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && integer_onep (TREE_OPERAND (arg0, 1))
	  && integer_zerop (op2)
	  && integer_pow2p (arg1))
	{
	  tree tem = TREE_OPERAND (arg0, 0);
	  STRIP_NOPS (tem);
	  if (TREE_CODE (tem) == RSHIFT_EXPR
	      && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
	      && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
		 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
	    return fold_build2_loc (loc, BIT_AND_EXPR, type,
				    TREE_OPERAND (tem, 0), arg1);
	}

      /* A & N ? N : 0 is simply A & N if N is a power of two.  This
	 is probably obsolete because the first operand should be a
	 truth value (that's why we have the two cases above), but let's
	 leave it in until we can confirm this for all front-ends.  */
      if (integer_zerop (op2)
	  && TREE_CODE (arg0) == NE_EXPR
	  && integer_zerop (TREE_OPERAND (arg0, 1))
	  && integer_pow2p (arg1)
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
			      arg1, OEP_ONLY_CONST))
	return pedantic_non_lvalue_loc (loc,
					fold_convert_loc (loc, type,
						TREE_OPERAND (arg0, 0)));

      /* Convert A ? B : 0 into A && B if A and B are truth values.  */
      if (integer_zerop (op2)
	  && truth_value_p (TREE_CODE (arg0))
	  && truth_value_p (TREE_CODE (arg1)))
	return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
				fold_convert_loc (loc, type, arg0),
				arg1);

      /* Convert A ? B : 1 into !A || B if A and B are truth values.  */
      if (integer_onep (op2)
	  && truth_value_p (TREE_CODE (arg0))
	  && truth_value_p (TREE_CODE (arg1)))
	{
	  location_t loc0 = expr_location_or (arg0, loc);
	  /* Only perform transformation if ARG0 is easily inverted.  */
	  tem = fold_truth_not_expr (loc0, arg0);
	  if (tem)
	    return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
				    fold_convert_loc (loc, type, tem),
				    arg1);
	}

      /* Convert A ? 0 : B into !A && B if A and B are truth values.  */
      if (integer_zerop (arg1)
	  && truth_value_p (TREE_CODE (arg0))
	  && truth_value_p (TREE_CODE (op2)))
	{
	  location_t loc0 = expr_location_or (arg0, loc);
	  /* Only perform transformation if ARG0 is easily inverted.  */
	  tem = fold_truth_not_expr (loc0, arg0);
	  if (tem)
	    return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
				    fold_convert_loc (loc, type, tem),
				    op2);
	}

      /* Convert A ? 1 : B into A || B if A and B are truth values.  */
      if (integer_onep (arg1)
	  && truth_value_p (TREE_CODE (arg0))
	  && truth_value_p (TREE_CODE (op2)))
	return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
				fold_convert_loc (loc, type, arg0),
				op2);

      return NULL_TREE;

    case CALL_EXPR:
      /* CALL_EXPRs used to be ternary exprs.  Catch any mistaken uses
	 of fold_ternary on them.  */
      gcc_unreachable ();

    case BIT_FIELD_REF:
      if ((TREE_CODE (arg0) == VECTOR_CST
	   || TREE_CODE (arg0) == CONSTRUCTOR)
	  && type == TREE_TYPE (TREE_TYPE (arg0)))
	{
	  unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
	  unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);

	  if (width != 0
	      && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
	      && (idx % width) == 0
	      && (idx = idx / width)
		 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
	    {
	      if (TREE_CODE (arg0) == VECTOR_CST)
		{
		  tree elements = TREE_VECTOR_CST_ELTS (arg0);
		  while (idx-- > 0 && elements)
		    elements = TREE_CHAIN (elements);
		  if (elements)
		    return TREE_VALUE (elements);
		}
	      else if (idx < CONSTRUCTOR_NELTS (arg0))
		return CONSTRUCTOR_ELT (arg0, idx)->value;
	      return build_zero_cst (type);
	    }
	}

      /* A bit-field-ref that referenced the full argument can be stripped.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
	  && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_low_cst (arg1, 1)
	  && integer_zerop (op2))
	return fold_convert_loc (loc, type, arg0);

      return NULL_TREE;

    case FMA_EXPR:
      /* For integers we can decompose the FMA if possible.  */
      if (TREE_CODE (arg0) == INTEGER_CST
	  && TREE_CODE (arg1) == INTEGER_CST)
	return fold_build2_loc (loc, PLUS_EXPR, type,
				const_binop (MULT_EXPR, arg0, arg1), arg2);
      if (integer_zerop (arg2))
	return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);

      return fold_fma (loc, type, arg0, arg1, arg2);

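      /* For example, FMA_EXPR <2, 3, c> with integer constants becomes
	 PLUS_EXPR <6, c>, and FMA_EXPR <a, b, 0> degenerates into
	 MULT_EXPR <a, b>.  */
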
    case VEC_PERM_EXPR:
      if (TREE_CODE (arg2) == VECTOR_CST)
	{
	  unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
	  unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
	  tree t;
	  bool need_mask_canon = false;

	  gcc_assert (nelts == TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2)));
	  for (i = 0, t = TREE_VECTOR_CST_ELTS (arg2);
	       i < nelts && t; i++, t = TREE_CHAIN (t))
	    {
	      if (TREE_CODE (TREE_VALUE (t)) != INTEGER_CST)
		return NULL_TREE;

	      sel[i] = TREE_INT_CST_LOW (TREE_VALUE (t)) & (2 * nelts - 1);
	      if (TREE_INT_CST_HIGH (TREE_VALUE (t))
		  || ((unsigned HOST_WIDE_INT)
		      TREE_INT_CST_LOW (TREE_VALUE (t)) != sel[i]))
		need_mask_canon = true;
	    }
	  if (t)
	    return NULL_TREE;
	  for (; i < nelts; i++)
	    sel[i] = 0;

	  if ((TREE_CODE (arg0) == VECTOR_CST
	       || TREE_CODE (arg0) == CONSTRUCTOR)
	      && (TREE_CODE (arg1) == VECTOR_CST
		  || TREE_CODE (arg1) == CONSTRUCTOR))
	    {
	      t = fold_vec_perm (type, arg0, arg1, sel);
	      if (t != NULL_TREE)
		return t;
	    }

	  if (need_mask_canon && arg2 == op2)
	    {
	      tree list = NULL_TREE, eltype = TREE_TYPE (TREE_TYPE (arg2));
	      for (i = 0; i < nelts; i++)
		list = tree_cons (NULL_TREE,
				  build_int_cst (eltype, sel[nelts - i - 1]),
				  list);
	      t = build_vector (TREE_TYPE (arg2), list);
	      return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, t);
	    }
	}
      return NULL_TREE;

    default:
      return NULL_TREE;
    } /* switch (code) */
}

/* Perform constant folding and related simplification of EXPR.
   The related simplifications include x*1 => x, x*0 => 0, etc.,
   and application of the associative law.
   NOP_EXPR conversions may be removed freely (as long as we
   are careful not to change the type of the overall expression).
   We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
   but we can constant-fold them if they have constant operands.  */

#ifdef ENABLE_FOLD_CHECKING
# define fold(x) fold_1 (x)
static tree fold_1 (tree);
static
#endif
tree
fold (tree expr)
{
  const tree t = expr;
  enum tree_code code = TREE_CODE (t);
  enum tree_code_class kind = TREE_CODE_CLASS (code);
  tree tem;
  location_t loc = EXPR_LOCATION (expr);

  /* Return right away if a constant.  */
  if (kind == tcc_constant)
    return t;

  /* CALL_EXPR-like objects with variable numbers of operands are
     treated specially.  */
  if (kind == tcc_vl_exp)
    {
      if (code == CALL_EXPR)
	{
	  tem = fold_call_expr (loc, expr, false);
	  return tem ? tem : expr;
	}
      return expr;
    }

  if (IS_EXPR_CODE_CLASS (kind))
    {
      tree type = TREE_TYPE (t);
      tree op0, op1, op2;

      switch (TREE_CODE_LENGTH (code))
	{
	case 1:
	  op0 = TREE_OPERAND (t, 0);
	  tem = fold_unary_loc (loc, code, type, op0);
	  return tem ? tem : expr;
	case 2:
	  op0 = TREE_OPERAND (t, 0);
	  op1 = TREE_OPERAND (t, 1);
	  tem = fold_binary_loc (loc, code, type, op0, op1);
	  return tem ? tem : expr;
	case 3:
	  op0 = TREE_OPERAND (t, 0);
	  op1 = TREE_OPERAND (t, 1);
	  op2 = TREE_OPERAND (t, 2);
	  tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
	  return tem ? tem : expr;
	default:
	  break;
	}
    }

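  /* Expressions with a fixed number of operands were dispatched above on
     TREE_CODE_LENGTH to fold_unary_loc, fold_binary_loc or
     fold_ternary_loc, returning EXPR itself when no simplification
     applies.  Only a few codes need handling here.  */
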
  switch (code)
    {
    case ARRAY_REF:
      {
	tree op0 = TREE_OPERAND (t, 0);
	tree op1 = TREE_OPERAND (t, 1);

	if (TREE_CODE (op1) == INTEGER_CST
	    && TREE_CODE (op0) == CONSTRUCTOR
	    && ! type_contains_placeholder_p (TREE_TYPE (op0)))
	  {
	    VEC(constructor_elt,gc) *elts = CONSTRUCTOR_ELTS (op0);
	    unsigned HOST_WIDE_INT end = VEC_length (constructor_elt, elts);
	    unsigned HOST_WIDE_INT begin = 0;

	    /* Find a matching index by means of a binary search.  */
	    while (begin != end)
	      {
		unsigned HOST_WIDE_INT middle = (begin + end) / 2;
		tree index = VEC_index (constructor_elt, elts, middle)->index;

		if (TREE_CODE (index) == INTEGER_CST
		    && tree_int_cst_lt (index, op1))
		  begin = middle + 1;
		else if (TREE_CODE (index) == INTEGER_CST
			 && tree_int_cst_lt (op1, index))
		  end = middle;
		else if (TREE_CODE (index) == RANGE_EXPR
			 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
		  begin = middle + 1;
		else if (TREE_CODE (index) == RANGE_EXPR
			 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
		  end = middle;
		else
		  return VEC_index (constructor_elt, elts, middle)->value;
	      }
	  }

	return t;
      }

    case CONST_DECL:
      return fold (DECL_INITIAL (t));

    default:
      return t;
    } /* switch (code) */
}

#ifdef ENABLE_FOLD_CHECKING
#undef fold

static void fold_checksum_tree (const_tree, struct md5_ctx *, htab_t);
static void fold_check_failed (const_tree, const_tree);
void print_fold_checksum (const_tree);

/* When --enable-checking=fold, compute a digest of EXPR before and
   after the actual fold call, to verify that fold did not accidentally
   change the original expression.  */

tree
fold (tree expr)
{
  tree ret;
  struct md5_ctx ctx;
  unsigned char checksum_before[16], checksum_after[16];
  htab_t ht;

  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
  md5_init_ctx (&ctx);
  fold_checksum_tree (expr, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before);
  htab_empty (ht);

  ret = fold_1 (expr);

  md5_init_ctx (&ctx);
  fold_checksum_tree (expr, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after);
  htab_delete (ht);

  if (memcmp (checksum_before, checksum_after, 16))
    fold_check_failed (expr, ret);

  return ret;
}

void
print_fold_checksum (const_tree expr)
{
  struct md5_ctx ctx;
  unsigned char checksum[16], cnt;
  htab_t ht;

  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
  md5_init_ctx (&ctx);
  fold_checksum_tree (expr, &ctx, ht);
  md5_finish_ctx (&ctx, checksum);
  htab_delete (ht);
  for (cnt = 0; cnt < 16; ++cnt)
    fprintf (stderr, "%02x", checksum[cnt]);
  putc ('\n', stderr);
}

static void
fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
{
  internal_error ("fold check: original tree changed by fold");
}

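/* The digest must be insensitive to the fields fold is allowed to touch;
   fold_checksum_tree therefore hashes a scratch copy of declarations and
   types in which DECL_ASSEMBLER_NAME, cached type values, variant chains
   and pointer-to/reference-to links have been cleared.  */
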
static void
fold_checksum_tree (const_tree expr, struct md5_ctx *ctx, htab_t ht)
{
  void **slot;
  enum tree_code code;
  union tree_node buf;
  int i, len;

 recursive_label:
  if (expr == NULL)
    return;
  slot = (void **) htab_find_slot (ht, expr, INSERT);
  if (*slot != NULL)
    return;
  *slot = CONST_CAST_TREE (expr);
  code = TREE_CODE (expr);
  if (TREE_CODE_CLASS (code) == tcc_declaration
      && DECL_ASSEMBLER_NAME_SET_P (expr))
    {
      /* Allow DECL_ASSEMBLER_NAME to be modified.  */
      memcpy ((char *) &buf, expr, tree_size (expr));
      SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
      expr = (tree) &buf;
    }
  else if (TREE_CODE_CLASS (code) == tcc_type
	   && (TYPE_POINTER_TO (expr)
	       || TYPE_REFERENCE_TO (expr)
	       || TYPE_CACHED_VALUES_P (expr)
	       || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
	       || TYPE_NEXT_VARIANT (expr)))
    {
      /* Allow these fields to be modified.  */
      tree tmp;
      memcpy ((char *) &buf, expr, tree_size (expr));
      expr = tmp = (tree) &buf;
      TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
      TYPE_POINTER_TO (tmp) = NULL;
      TYPE_REFERENCE_TO (tmp) = NULL;
      TYPE_NEXT_VARIANT (tmp) = NULL;
      if (TYPE_CACHED_VALUES_P (tmp))
	{
	  TYPE_CACHED_VALUES_P (tmp) = 0;
	  TYPE_CACHED_VALUES (tmp) = NULL;
	}
    }
  md5_process_bytes (expr, tree_size (expr), ctx);
  if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
    fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
  if (TREE_CODE_CLASS (code) != tcc_type
      && TREE_CODE_CLASS (code) != tcc_declaration
      && code != TREE_LIST
      && code != SSA_NAME
      && CODE_CONTAINS_STRUCT (code, TS_COMMON))
    fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_constant:
      switch (code)
	{
	case STRING_CST:
	  md5_process_bytes (TREE_STRING_POINTER (expr),
			     TREE_STRING_LENGTH (expr), ctx);
	  break;
	case COMPLEX_CST:
	  fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
	  fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
	  break;
	case VECTOR_CST:
	  fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
	  break;
	default:
	  break;
	}
      break;
    case tcc_exceptional:
      switch (code)
	{
	case TREE_LIST:
	  fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
	  fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
	  expr = TREE_CHAIN (expr);
	  goto recursive_label;
	  break;
	case TREE_VEC:
	  for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
	    fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
	  break;
	default:
	  break;
	}
      break;
    case tcc_expression:
    case tcc_reference:
    case tcc_comparison:
    case tcc_unary:
    case tcc_binary:
    case tcc_statement:
    case tcc_vl_exp:
      len = TREE_OPERAND_LENGTH (expr);
      for (i = 0; i < len; ++i)
	fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
      break;
    case tcc_declaration:
      fold_checksum_tree (DECL_NAME (expr), ctx, ht);
      fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
      if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
	{
	  fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
	  fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
	  fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
	  fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
	  fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
	}
      if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
	fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);

      if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
	{
	  fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
	  fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
	  fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
	}
      break;
    case tcc_type:
      if (TREE_CODE (expr) == ENUMERAL_TYPE)
	fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
      fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
      fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
      fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
      fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
      if (INTEGRAL_TYPE_P (expr)
	  || SCALAR_FLOAT_TYPE_P (expr))
	{
	  fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
	  fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
	}
      fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
      if (TREE_CODE (expr) == RECORD_TYPE
	  || TREE_CODE (expr) == UNION_TYPE
	  || TREE_CODE (expr) == QUAL_UNION_TYPE)
	fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
      fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
      break;
    default:
      break;
    }
}

/* Helper function for outputting the checksum of a tree T.  When
   debugging with gdb, you can "define mynext" to be "next" followed
   by "call debug_fold_checksum (op0)", then just trace down till the
   outputs differ.  */

DEBUG_FUNCTION void
debug_fold_checksum (const_tree t)
{
  int i;
  unsigned char checksum[16];
  struct md5_ctx ctx;
  htab_t ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);

  md5_init_ctx (&ctx);
  fold_checksum_tree (t, &ctx, ht);
  md5_finish_ctx (&ctx, checksum);
  htab_empty (ht);

  for (i = 0; i < 16; i++)
    fprintf (stderr, "%d ", checksum[i]);

  fprintf (stderr, "\n");
}

#endif

/* Fold a unary tree expression with code CODE of type TYPE with an
   operand OP0.  LOC is the location of the resulting expression.
   Return a folded expression if successful.  Otherwise, return a tree
   expression with code CODE of type TYPE with an operand OP0.  */

tree
fold_build1_stat_loc (location_t loc,
		      enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before[16], checksum_after[16];
  struct md5_ctx ctx;
  htab_t ht;

  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before);
  htab_empty (ht);
#endif

  tem = fold_unary_loc (loc, code, type, op0);
  if (!tem)
    tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after);
  htab_delete (ht);

  if (memcmp (checksum_before, checksum_after, 16))
    fold_check_failed (op0, tem);
#endif
  return tem;
}

/* Fold a binary tree expression with code CODE of type TYPE with
   operands OP0 and OP1.  LOC is the location of the resulting
   expression.  Return a folded expression if successful.  Otherwise,
   return a tree expression with code CODE of type TYPE with operands
   OP0 and OP1.  */

tree
fold_build2_stat_loc (location_t loc,
		      enum tree_code code, tree type, tree op0, tree op1
		      MEM_STAT_DECL)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before_op0[16],
		checksum_before_op1[16],
		checksum_after_op0[16],
		checksum_after_op1[16];
  struct md5_ctx ctx;
  htab_t ht;

  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op0);
  htab_empty (ht);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op1);
  htab_empty (ht);
#endif

  tem = fold_binary_loc (loc, code, type, op0, op1);
  if (!tem)
    tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op0);
  htab_empty (ht);

  if (memcmp (checksum_before_op0, checksum_after_op0, 16))
    fold_check_failed (op0, tem);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op1);
  htab_delete (ht);

  if (memcmp (checksum_before_op1, checksum_after_op1, 16))
    fold_check_failed (op1, tem);
#endif
  return tem;
}

/* Fold a ternary tree expression with code CODE of type TYPE with
   operands OP0, OP1, and OP2.  Return a folded expression if
   successful.  Otherwise, return a tree expression with code CODE of
   type TYPE with operands OP0, OP1, and OP2.  */

tree
fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
		      tree op0, tree op1, tree op2 MEM_STAT_DECL)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before_op0[16],
		checksum_before_op1[16],
		checksum_before_op2[16],
		checksum_after_op0[16],
		checksum_after_op1[16],
		checksum_after_op2[16];
  struct md5_ctx ctx;
  htab_t ht;

  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op0);
  htab_empty (ht);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op1);
  htab_empty (ht);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op2, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op2);
  htab_empty (ht);
#endif

  gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
  tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
  if (!tem)
    tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op0);
  htab_empty (ht);

  if (memcmp (checksum_before_op0, checksum_after_op0, 16))
    fold_check_failed (op0, tem);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op1);
  htab_empty (ht);

  if (memcmp (checksum_before_op1, checksum_after_op1, 16))
    fold_check_failed (op1, tem);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op2, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op2);
  htab_delete (ht);

  if (memcmp (checksum_before_op2, checksum_after_op2, 16))
    fold_check_failed (op2, tem);
#endif
  return tem;
}

/* Fold a CALL_EXPR expression of type TYPE, calling FN with the NARGS
   arguments in ARGARRAY and a null static chain.
   Return a folded expression if successful.  Otherwise, return a CALL_EXPR
   of type TYPE from the given operands as constructed by build_call_array.  */

tree
fold_build_call_array_loc (location_t loc, tree type, tree fn,
			   int nargs, tree *argarray)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before_fn[16],
		checksum_before_arglist[16],
		checksum_after_fn[16],
		checksum_after_arglist[16];
  struct md5_ctx ctx;
  htab_t ht;
  int i;

  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
  md5_init_ctx (&ctx);
  fold_checksum_tree (fn, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_fn);
  htab_empty (ht);

  md5_init_ctx (&ctx);
  for (i = 0; i < nargs; i++)
    fold_checksum_tree (argarray[i], &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_arglist);
  htab_empty (ht);
#endif

  tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (fn, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_fn);
  htab_empty (ht);

  if (memcmp (checksum_before_fn, checksum_after_fn, 16))
    fold_check_failed (fn, tem);

  md5_init_ctx (&ctx);
  for (i = 0; i < nargs; i++)
    fold_checksum_tree (argarray[i], &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_arglist);
  htab_delete (ht);

  if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
    fold_check_failed (NULL_TREE, tem);
#endif
  return tem;
}

/* Perform constant folding and related simplification of initializer
   expression EXPR.  These behave identically to "fold_buildN" but ignore
   potential run-time traps and exceptions that fold must preserve.  */

#define START_FOLD_INIT \
  int saved_signaling_nans = flag_signaling_nans;\
  int saved_trapping_math = flag_trapping_math;\
  int saved_rounding_math = flag_rounding_math;\
  int saved_trapv = flag_trapv;\
  int saved_folding_initializer = folding_initializer;\
  flag_signaling_nans = 0;\
  flag_trapping_math = 0;\
  flag_rounding_math = 0;\
  flag_trapv = 0;\
  folding_initializer = 1;

#define END_FOLD_INIT \
  flag_signaling_nans = saved_signaling_nans;\
  flag_trapping_math = saved_trapping_math;\
  flag_rounding_math = saved_rounding_math;\
  flag_trapv = saved_trapv;\
  folding_initializer = saved_folding_initializer;

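/* Folding under these macros is deliberately more aggressive: for
   instance, an expression that might trap at run time can still be
   folded when it appears in a static initializer, because the
   initializer is evaluated exactly once at translation time.  */
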
tree
fold_build1_initializer_loc (location_t loc, enum tree_code code,
			     tree type, tree op)
{
  tree result;
  START_FOLD_INIT;

  result = fold_build1_loc (loc, code, type, op);

  END_FOLD_INIT;
  return result;
}

tree
fold_build2_initializer_loc (location_t loc, enum tree_code code,
			     tree type, tree op0, tree op1)
{
  tree result;
  START_FOLD_INIT;

  result = fold_build2_loc (loc, code, type, op0, op1);

  END_FOLD_INIT;
  return result;
}

tree
fold_build3_initializer_loc (location_t loc, enum tree_code code,
			     tree type, tree op0, tree op1, tree op2)
{
  tree result;
  START_FOLD_INIT;

  result = fold_build3_loc (loc, code, type, op0, op1, op2);

  END_FOLD_INIT;
  return result;
}

tree
fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
				       int nargs, tree *argarray)
{
  tree result;
  START_FOLD_INIT;

  result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);

  END_FOLD_INIT;
  return result;
}

#undef START_FOLD_INIT
#undef END_FOLD_INIT

/* Determine if the first argument is a multiple of the second argument.
   Return 0 if it is not, or if we cannot easily determine it to be.

   An example of the sort of thing we care about (at this point; this routine
   could surely be made more general, and expanded to do what the *_DIV_EXPR's
   fold cases do now) is discovering that

     SAVE_EXPR (I) * SAVE_EXPR (J * 8)

   is a multiple of

     SAVE_EXPR (J * 8)

   when we know that the two SAVE_EXPR (J * 8) nodes are the same node.

   This code also handles discovering that

     SAVE_EXPR (I) * SAVE_EXPR (J * 8)

   is a multiple of 8 so we don't have to worry about dealing with a
   possible remainder.

   Note that we *look* inside a SAVE_EXPR only to determine how it was
   calculated; it is not safe for fold to do much of anything else with the
   internals of a SAVE_EXPR, since it cannot know when it will be evaluated
   at run time.  For example, the latter example above *cannot* be implemented
   as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
   evaluation time of the original SAVE_EXPR is not necessarily the same at
   the time the new expression is evaluated.  The only optimization of this
   sort that would be valid is changing

     SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)

   divided by 8 to

     SAVE_EXPR (I) * SAVE_EXPR (J)

   (where the same SAVE_EXPR (J) is used in the original and the
   transformed version).  */

int
multiple_of_p (tree type, const_tree top, const_tree bottom)
{
  if (operand_equal_p (top, bottom, 0))
    return 1;

  if (TREE_CODE (type) != INTEGER_TYPE)
    return 0;

  switch (TREE_CODE (top))
    {
    case BIT_AND_EXPR:
      /* Bitwise and provides a power of two multiple.  If the mask is
	 a multiple of BOTTOM then TOP is a multiple of BOTTOM.  */
      if (!integer_pow2p (bottom))
	return 0;
      /* FALLTHRU */

    case MULT_EXPR:
      return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
	      || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));

    case PLUS_EXPR:
    case MINUS_EXPR:
      return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
	      && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));

    case LSHIFT_EXPR:
      if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
	{
	  tree op1, t1;

	  op1 = TREE_OPERAND (top, 1);
	  /* const_binop may not detect overflow correctly,
	     so check for it explicitly here.  */
	  if (TYPE_PRECISION (TREE_TYPE (size_one_node))
	      > TREE_INT_CST_LOW (op1)
	      && TREE_INT_CST_HIGH (op1) == 0
	      && 0 != (t1 = fold_convert (type,
					  const_binop (LSHIFT_EXPR,
						       size_one_node,
						       op1)))
	      && !TREE_OVERFLOW (t1))
	    return multiple_of_p (type, t1, bottom);
	}
      return 0;

    case NOP_EXPR:
      /* Can't handle conversions from non-integral or wider integral type.  */
      if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
	  || (TYPE_PRECISION (type)
	      < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
	return 0;

      /* .. fall through ...  */

    case SAVE_EXPR:
      return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);

    case COND_EXPR:
      return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
	      && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));

    case INTEGER_CST:
      if (TREE_CODE (bottom) != INTEGER_CST
	  || integer_zerop (bottom)
	  || (TYPE_UNSIGNED (type)
	      && (tree_int_cst_sgn (top) < 0
		  || tree_int_cst_sgn (bottom) < 0)))
	return 0;
      return integer_zerop (int_const_binop (TRUNC_MOD_EXPR,
					     top, bottom));

    default:
      return 0;
    }
}

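/* For example, multiple_of_p (sizetype, (i * 4) + 8, 4) returns 1: the
   PLUS_EXPR case requires both operands to be multiples, the MULT_EXPR
   case accepts i * 4 because of its constant operand, and the
   INTEGER_CST case accepts 8 via 8 % 4 == 0.  */
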
/* Return true if CODE or TYPE is known to be non-negative.  */

static bool
tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
{
  if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
      && truth_value_p (code))
    /* Truth values evaluate to 0 or 1, which is nonnegative unless we
       have a signed:1 type (where the value is -1 and 0).  */
    return true;
  return false;
}

/* Return true if (CODE OP0) is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
                                bool *strict_overflow_p)
{
  if (TYPE_UNSIGNED (type))
    return true;

  switch (code)
    {
    case ABS_EXPR:
      /* We can't return 1 if flag_wrapv is set because
         ABS_EXPR<INT_MIN> = INT_MIN.  */
      if (!INTEGRAL_TYPE_P (type))
        return true;
      if (TYPE_OVERFLOW_UNDEFINED (type))
        {
          *strict_overflow_p = true;
          return true;
        }
      break;

    case NON_LVALUE_EXPR:
    case FLOAT_EXPR:
    case FIX_TRUNC_EXPR:
      return tree_expr_nonnegative_warnv_p (op0,
                                            strict_overflow_p);

    case NOP_EXPR:
      {
        tree inner_type = TREE_TYPE (op0);
        tree outer_type = type;

        if (TREE_CODE (outer_type) == REAL_TYPE)
          {
            if (TREE_CODE (inner_type) == REAL_TYPE)
              return tree_expr_nonnegative_warnv_p (op0,
                                                    strict_overflow_p);
            if (TREE_CODE (inner_type) == INTEGER_TYPE)
              {
                if (TYPE_UNSIGNED (inner_type))
                  return true;
                return tree_expr_nonnegative_warnv_p (op0,
                                                      strict_overflow_p);
              }
          }
        else if (TREE_CODE (outer_type) == INTEGER_TYPE)
          {
            if (TREE_CODE (inner_type) == REAL_TYPE)
              return tree_expr_nonnegative_warnv_p (op0,
                                                    strict_overflow_p);
            if (TREE_CODE (inner_type) == INTEGER_TYPE)
              return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
                     && TYPE_UNSIGNED (inner_type);
          }
      }
      break;

    default:
      return tree_simple_nonnegative_warnv_p (code, type);
    }

  /* We don't know sign of `t', so be conservative and return false.  */
  return false;
}

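/* Editor's note (illustration, not part of the original source): the
   ABS_EXPR case above is deliberately conservative.  For 32-bit
   signed int, ABS_EXPR<INT_MIN> is INT_MIN under -fwrapv, so the
   result is only known non-negative when signed overflow is
   undefined, and *STRICT_OVERFLOW_P records that this assumption
   was used.  */
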
/* Return true if (CODE OP0 OP1) is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
                                 tree op1, bool *strict_overflow_p)
{
  if (TYPE_UNSIGNED (type))
    return true;

  switch (code)
    {
    case POINTER_PLUS_EXPR:
    case PLUS_EXPR:
      if (FLOAT_TYPE_P (type))
        return (tree_expr_nonnegative_warnv_p (op0,
                                               strict_overflow_p)
                && tree_expr_nonnegative_warnv_p (op1,
                                                  strict_overflow_p));

      /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
         both unsigned and at least 2 bits shorter than the result.  */
      if (TREE_CODE (type) == INTEGER_TYPE
          && TREE_CODE (op0) == NOP_EXPR
          && TREE_CODE (op1) == NOP_EXPR)
        {
          tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
          tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
          if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
              && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
            {
              unsigned int prec = MAX (TYPE_PRECISION (inner1),
                                       TYPE_PRECISION (inner2)) + 1;
              return prec < TYPE_PRECISION (type);
            }
        }
      break;

    case MULT_EXPR:
      if (FLOAT_TYPE_P (type))
        {
          /* x * x for floating point x is always non-negative.  */
          if (operand_equal_p (op0, op1, 0))
            return true;
          return (tree_expr_nonnegative_warnv_p (op0,
                                                 strict_overflow_p)
                  && tree_expr_nonnegative_warnv_p (op1,
                                                    strict_overflow_p));
        }

      /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
         both unsigned and their total bits is shorter than the result.  */
      if (TREE_CODE (type) == INTEGER_TYPE
          && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
          && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
        {
          tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
            ? TREE_TYPE (TREE_OPERAND (op0, 0))
            : TREE_TYPE (op0);
          tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
            ? TREE_TYPE (TREE_OPERAND (op1, 0))
            : TREE_TYPE (op1);

          bool unsigned0 = TYPE_UNSIGNED (inner0);
          bool unsigned1 = TYPE_UNSIGNED (inner1);

          if (TREE_CODE (op0) == INTEGER_CST)
            unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;

          if (TREE_CODE (op1) == INTEGER_CST)
            unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;

          if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
              && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
            {
              unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
                ? tree_int_cst_min_precision (op0, /*unsignedp=*/true)
                : TYPE_PRECISION (inner0);

              unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
                ? tree_int_cst_min_precision (op1, /*unsignedp=*/true)
                : TYPE_PRECISION (inner1);

              return precision0 + precision1 < TYPE_PRECISION (type);
            }
        }
      return false;

    case BIT_AND_EXPR:
    case MAX_EXPR:
      return (tree_expr_nonnegative_warnv_p (op0,
                                             strict_overflow_p)
              || tree_expr_nonnegative_warnv_p (op1,
                                                strict_overflow_p));

    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case MIN_EXPR:
    case RDIV_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
      return (tree_expr_nonnegative_warnv_p (op0,
                                             strict_overflow_p)
              && tree_expr_nonnegative_warnv_p (op1,
                                                strict_overflow_p));

    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
      return tree_expr_nonnegative_warnv_p (op0,
                                            strict_overflow_p);
    default:
      return tree_simple_nonnegative_warnv_p (code, type);
    }

  /* We don't know sign of `t', so be conservative and return false.  */
  return false;
}

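/* Worked example (editor's illustration, not part of the original
   source): for (int) a + (int) b with A and B of type unsigned char,
   both inner precisions are 8, so prec = MAX (8, 8) + 1 = 9 < 32.
   The largest possible sum, 255 + 255 = 510, fits in 9 bits, so the
   PLUS_EXPR is known non-negative without any overflow assumption.  */
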
/* Return true if T is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
{
  if (TYPE_UNSIGNED (TREE_TYPE (t)))
    return true;

  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      return tree_int_cst_sgn (t) >= 0;

    case REAL_CST:
      return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case FIXED_CST:
      return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));

    case COND_EXPR:
      return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
                                             strict_overflow_p)
              && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
                                                strict_overflow_p));
    default:
      return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
                                              TREE_TYPE (t));
    }
  /* We don't know sign of `t', so be conservative and return false.  */
  return false;
}

/* Return true if T is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_call_nonnegative_warnv_p (tree type, tree fndecl,
                               tree arg0, tree arg1, bool *strict_overflow_p)
{
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (fndecl))
      {
        CASE_FLT_FN (BUILT_IN_ACOS):
        CASE_FLT_FN (BUILT_IN_ACOSH):
        CASE_FLT_FN (BUILT_IN_CABS):
        CASE_FLT_FN (BUILT_IN_COSH):
        CASE_FLT_FN (BUILT_IN_ERFC):
        CASE_FLT_FN (BUILT_IN_EXP):
        CASE_FLT_FN (BUILT_IN_EXP10):
        CASE_FLT_FN (BUILT_IN_EXP2):
        CASE_FLT_FN (BUILT_IN_FABS):
        CASE_FLT_FN (BUILT_IN_FDIM):
        CASE_FLT_FN (BUILT_IN_HYPOT):
        CASE_FLT_FN (BUILT_IN_POW10):
        CASE_INT_FN (BUILT_IN_FFS):
        CASE_INT_FN (BUILT_IN_PARITY):
        CASE_INT_FN (BUILT_IN_POPCOUNT):
      case BUILT_IN_BSWAP32:
      case BUILT_IN_BSWAP64:
        /* Always true.  */
        return true;

        CASE_FLT_FN (BUILT_IN_SQRT):
        /* sqrt(-0.0) is -0.0.  */
        if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
          return true;
        return tree_expr_nonnegative_warnv_p (arg0,
                                              strict_overflow_p);

        CASE_FLT_FN (BUILT_IN_ASINH):
        CASE_FLT_FN (BUILT_IN_ATAN):
        CASE_FLT_FN (BUILT_IN_ATANH):
        CASE_FLT_FN (BUILT_IN_CBRT):
        CASE_FLT_FN (BUILT_IN_CEIL):
        CASE_FLT_FN (BUILT_IN_ERF):
        CASE_FLT_FN (BUILT_IN_EXPM1):
        CASE_FLT_FN (BUILT_IN_FLOOR):
        CASE_FLT_FN (BUILT_IN_FMOD):
        CASE_FLT_FN (BUILT_IN_FREXP):
        CASE_FLT_FN (BUILT_IN_ICEIL):
        CASE_FLT_FN (BUILT_IN_IFLOOR):
        CASE_FLT_FN (BUILT_IN_IRINT):
        CASE_FLT_FN (BUILT_IN_IROUND):
        CASE_FLT_FN (BUILT_IN_LCEIL):
        CASE_FLT_FN (BUILT_IN_LDEXP):
        CASE_FLT_FN (BUILT_IN_LFLOOR):
        CASE_FLT_FN (BUILT_IN_LLCEIL):
        CASE_FLT_FN (BUILT_IN_LLFLOOR):
        CASE_FLT_FN (BUILT_IN_LLRINT):
        CASE_FLT_FN (BUILT_IN_LLROUND):
        CASE_FLT_FN (BUILT_IN_LRINT):
        CASE_FLT_FN (BUILT_IN_LROUND):
        CASE_FLT_FN (BUILT_IN_MODF):
        CASE_FLT_FN (BUILT_IN_NEARBYINT):
        CASE_FLT_FN (BUILT_IN_RINT):
        CASE_FLT_FN (BUILT_IN_ROUND):
        CASE_FLT_FN (BUILT_IN_SCALB):
        CASE_FLT_FN (BUILT_IN_SCALBLN):
        CASE_FLT_FN (BUILT_IN_SCALBN):
        CASE_FLT_FN (BUILT_IN_SIGNBIT):
        CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
        CASE_FLT_FN (BUILT_IN_SINH):
        CASE_FLT_FN (BUILT_IN_TANH):
        CASE_FLT_FN (BUILT_IN_TRUNC):
        /* True if the 1st argument is nonnegative.  */
        return tree_expr_nonnegative_warnv_p (arg0,
                                              strict_overflow_p);

        CASE_FLT_FN (BUILT_IN_FMAX):
        /* True if the 1st OR 2nd arguments are nonnegative.  */
        return (tree_expr_nonnegative_warnv_p (arg0,
                                               strict_overflow_p)
                || (tree_expr_nonnegative_warnv_p (arg1,
                                                   strict_overflow_p)));

        CASE_FLT_FN (BUILT_IN_FMIN):
        /* True if the 1st AND 2nd arguments are nonnegative.  */
        return (tree_expr_nonnegative_warnv_p (arg0,
                                               strict_overflow_p)
                && (tree_expr_nonnegative_warnv_p (arg1,
                                                   strict_overflow_p)));

        CASE_FLT_FN (BUILT_IN_COPYSIGN):
        /* True if the 2nd argument is nonnegative.  */
        return tree_expr_nonnegative_warnv_p (arg1,
                                              strict_overflow_p);

        CASE_FLT_FN (BUILT_IN_POWI):
        /* True if the 1st argument is nonnegative or the second
           argument is an even integer.  */
        if (TREE_CODE (arg1) == INTEGER_CST
            && (TREE_INT_CST_LOW (arg1) & 1) == 0)
          return true;
        return tree_expr_nonnegative_warnv_p (arg0,
                                              strict_overflow_p);

        CASE_FLT_FN (BUILT_IN_POW):
        /* True if the 1st argument is nonnegative or the second
           argument is an even integer valued real.  */
        if (TREE_CODE (arg1) == REAL_CST)
          {
            REAL_VALUE_TYPE c;
            HOST_WIDE_INT n;

            c = TREE_REAL_CST (arg1);
            n = real_to_integer (&c);
            if ((n & 1) == 0)
              {
                REAL_VALUE_TYPE cint;
                real_from_integer (&cint, VOIDmode, n,
                                   n < 0 ? -1 : 0, 0);
                if (real_identical (&c, &cint))
                  return true;
              }
          }
        return tree_expr_nonnegative_warnv_p (arg0,
                                              strict_overflow_p);

      default:
        break;
      }
  return tree_simple_nonnegative_warnv_p (CALL_EXPR,
                                          type);
}

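/* Worked example (editor's illustration, not part of the original
   source): for pow (x, 2.0), ARG1 is the REAL_CST 2.0;
   real_to_integer yields 2, which is even and round-trips through
   real_from_integer unchanged, so the call is known non-negative for
   any X.  For pow (x, 3.0) the code falls back to asking whether X
   itself is non-negative.  */
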
/* Return true if T is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
{
  enum tree_code code = TREE_CODE (t);
  if (TYPE_UNSIGNED (TREE_TYPE (t)))
    return true;

  switch (code)
    {
    case TARGET_EXPR:
      {
        tree temp = TARGET_EXPR_SLOT (t);
        t = TARGET_EXPR_INITIAL (t);

        /* If the initializer is non-void, then it's a normal expression
           that will be assigned to the slot.  */
        if (!VOID_TYPE_P (t))
          return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);

        /* Otherwise, the initializer sets the slot in some way.  One common
           way is an assignment statement at the end of the initializer.  */
        while (1)
          {
            if (TREE_CODE (t) == BIND_EXPR)
              t = expr_last (BIND_EXPR_BODY (t));
            else if (TREE_CODE (t) == TRY_FINALLY_EXPR
                     || TREE_CODE (t) == TRY_CATCH_EXPR)
              t = expr_last (TREE_OPERAND (t, 0));
            else if (TREE_CODE (t) == STATEMENT_LIST)
              t = expr_last (t);
            else
              break;
          }
        if (TREE_CODE (t) == MODIFY_EXPR
            && TREE_OPERAND (t, 0) == temp)
          return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
                                                strict_overflow_p);

        return false;
      }

    case CALL_EXPR:
      {
        tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
        tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;

        return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
                                              get_callee_fndecl (t),
                                              arg0,
                                              arg1,
                                              strict_overflow_p);
      }
    case COMPOUND_EXPR:
    case MODIFY_EXPR:
      return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
                                            strict_overflow_p);
    case BIND_EXPR:
      return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
                                            strict_overflow_p);
    case SAVE_EXPR:
      return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
                                            strict_overflow_p);

    default:
      return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
                                              TREE_TYPE (t));
    }

  /* We don't know sign of `t', so be conservative and return false.  */
  return false;
}

/* Return true if T is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
{
  enum tree_code code;
  if (t == error_mark_node)
    return false;

  code = TREE_CODE (t);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_binary:
    case tcc_comparison:
      return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
                                              TREE_TYPE (t),
                                              TREE_OPERAND (t, 0),
                                              TREE_OPERAND (t, 1),
                                              strict_overflow_p);

    case tcc_unary:
      return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
                                             TREE_TYPE (t),
                                             TREE_OPERAND (t, 0),
                                             strict_overflow_p);

    case tcc_constant:
    case tcc_declaration:
    case tcc_reference:
      return tree_single_nonnegative_warnv_p (t, strict_overflow_p);

    default:
      break;
    }

  switch (code)
    {
    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
      return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
                                              TREE_TYPE (t),
                                              TREE_OPERAND (t, 0),
                                              TREE_OPERAND (t, 1),
                                              strict_overflow_p);
    case TRUTH_NOT_EXPR:
      return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
                                             TREE_TYPE (t),
                                             TREE_OPERAND (t, 0),
                                             strict_overflow_p);

    case COND_EXPR:
    case CONSTRUCTOR:
    case OBJ_TYPE_REF:
    case ASSERT_EXPR:
    case ADDR_EXPR:
    case WITH_SIZE_EXPR:
    case SSA_NAME:
      return tree_single_nonnegative_warnv_p (t, strict_overflow_p);

    default:
      return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
    }
}

/* Return true if `t' is known to be non-negative.  Handle warnings
   about undefined signed overflow.  */

bool
tree_expr_nonnegative_p (tree t)
{
  bool ret, strict_overflow_p;

  strict_overflow_p = false;
  ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
  if (strict_overflow_p)
    fold_overflow_warning (("assuming signed overflow does not occur when "
                            "determining that expression is always "
                            "non-negative"),
                           WARN_STRICT_OVERFLOW_MISC);
  return ret;
}

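/* Usage sketch (editor's illustration, not part of the original
   source): callers that do not care about the overflow caveat use
   this wrapper, e.g.

     if (tree_expr_nonnegative_p (arg))
       ... simplify ...

   and any -Wstrict-overflow diagnostic is emitted here via
   fold_overflow_warning rather than at each call site.  */
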
/* Return true when (CODE OP0) is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

bool
tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
                            bool *strict_overflow_p)
{
  switch (code)
    {
    case ABS_EXPR:
      return tree_expr_nonzero_warnv_p (op0,
                                        strict_overflow_p);

    case NOP_EXPR:
      {
        tree inner_type = TREE_TYPE (op0);
        tree outer_type = type;

        return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
                && tree_expr_nonzero_warnv_p (op0,
                                              strict_overflow_p));
      }
      break;

    case NON_LVALUE_EXPR:
      return tree_expr_nonzero_warnv_p (op0,
                                        strict_overflow_p);

    default:
      break;
    }

  return false;
}

/* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

bool
tree_binary_nonzero_warnv_p (enum tree_code code,
                             tree type,
                             tree op0,
                             tree op1, bool *strict_overflow_p)
{
  bool sub_strict_overflow_p;
  switch (code)
    {
    case POINTER_PLUS_EXPR:
    case PLUS_EXPR:
      if (TYPE_OVERFLOW_UNDEFINED (type))
        {
          /* With the presence of negative values it is hard
             to say something.  */
          sub_strict_overflow_p = false;
          if (!tree_expr_nonnegative_warnv_p (op0,
                                              &sub_strict_overflow_p)
              || !tree_expr_nonnegative_warnv_p (op1,
                                                 &sub_strict_overflow_p))
            return false;
          /* One of operands must be positive and the other non-negative.  */
          /* We don't set *STRICT_OVERFLOW_P here: even if this value
             overflows, on a twos-complement machine the sum of two
             nonnegative numbers can never be zero.  */
          return (tree_expr_nonzero_warnv_p (op0,
                                             strict_overflow_p)
                  || tree_expr_nonzero_warnv_p (op1,
                                                strict_overflow_p));
        }
      break;

    case MULT_EXPR:
      if (TYPE_OVERFLOW_UNDEFINED (type))
        {
          if (tree_expr_nonzero_warnv_p (op0,
                                         strict_overflow_p)
              && tree_expr_nonzero_warnv_p (op1,
                                            strict_overflow_p))
            {
              *strict_overflow_p = true;
              return true;
            }
        }
      break;

    case MIN_EXPR:
      sub_strict_overflow_p = false;
      if (tree_expr_nonzero_warnv_p (op0,
                                     &sub_strict_overflow_p)
          && tree_expr_nonzero_warnv_p (op1,
                                        &sub_strict_overflow_p))
        {
          if (sub_strict_overflow_p)
            *strict_overflow_p = true;
        }
      break;

    case MAX_EXPR:
      sub_strict_overflow_p = false;
      if (tree_expr_nonzero_warnv_p (op0,
                                     &sub_strict_overflow_p))
        {
          if (sub_strict_overflow_p)
            *strict_overflow_p = true;

          /* When both operands are nonzero, then MAX must be too.  */
          if (tree_expr_nonzero_warnv_p (op1,
                                         strict_overflow_p))
            return true;

          /* MAX where operand 0 is positive is positive.  */
          return tree_expr_nonnegative_warnv_p (op0,
                                                strict_overflow_p);
        }
      /* MAX where operand 1 is positive is positive.  */
      else if (tree_expr_nonzero_warnv_p (op1,
                                          &sub_strict_overflow_p)
               && tree_expr_nonnegative_warnv_p (op1,
                                                 &sub_strict_overflow_p))
        {
          if (sub_strict_overflow_p)
            *strict_overflow_p = true;
          return true;
        }
      break;

    case BIT_IOR_EXPR:
      return (tree_expr_nonzero_warnv_p (op1,
                                         strict_overflow_p)
              || tree_expr_nonzero_warnv_p (op0,
                                            strict_overflow_p));

    default:
      break;
    }

  return false;
}

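/* Worked example (editor's illustration, not part of the original
   source): for signed X with undefined overflow, X * Y is known
   nonzero when both X and Y are, and *STRICT_OVERFLOW_P is set
   because the conclusion would be wrong under wrapping arithmetic
   (e.g. 65536 * 65536 wraps to 0 in 32 bits).  The PLUS_EXPR case
   needs no such flag: two non-negative values, at least one of them
   nonzero, cannot sum to zero on a twos-complement machine.  */
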
/* Return true when T is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

bool
tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
{
  bool sub_strict_overflow_p;
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      return !integer_zerop (t);

    case ADDR_EXPR:
      {
        tree base = TREE_OPERAND (t, 0);
        if (!DECL_P (base))
          base = get_base_address (base);

        if (!base)
          return false;

        /* Weak declarations may link to NULL.  Other things may also be NULL
           so protect with -fdelete-null-pointer-checks; but not variables
           allocated on the stack.  */
        if (DECL_P (base)
            && (flag_delete_null_pointer_checks
                || (DECL_CONTEXT (base)
                    && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
                    && auto_var_in_fn_p (base, DECL_CONTEXT (base)))))
          return !VAR_OR_FUNCTION_DECL_P (base) || !DECL_WEAK (base);

        /* Constants are never weak.  */
        if (CONSTANT_CLASS_P (base))
          return true;

        return false;
      }

    case COND_EXPR:
      sub_strict_overflow_p = false;
      if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
                                     &sub_strict_overflow_p)
          && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
                                        &sub_strict_overflow_p))
        {
          if (sub_strict_overflow_p)
            *strict_overflow_p = true;
          return true;
        }
      break;

    default:
      break;
    }
  return false;
}

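/* Editor's note (illustration, not part of the original source):
   for ADDR_EXPR, the address of a local auto variable is always
   nonzero, while &some_weak_symbol is not, since a weak undefined
   symbol may resolve to address zero; hence the DECL_WEAK check
   above.  */
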
/* Return true when T is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

bool
tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
{
  tree type = TREE_TYPE (t);
  enum tree_code code;

  /* Doing something useful for floating point would need more work.  */
  if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
    return false;

  code = TREE_CODE (t);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_unary:
      return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
                                         strict_overflow_p);
    case tcc_binary:
    case tcc_comparison:
      return tree_binary_nonzero_warnv_p (code, type,
                                          TREE_OPERAND (t, 0),
                                          TREE_OPERAND (t, 1),
                                          strict_overflow_p);
    case tcc_constant:
    case tcc_declaration:
    case tcc_reference:
      return tree_single_nonzero_warnv_p (t, strict_overflow_p);

    default:
      break;
    }

  switch (code)
    {
    case TRUTH_NOT_EXPR:
      return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
                                         strict_overflow_p);

    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
      return tree_binary_nonzero_warnv_p (code, type,
                                          TREE_OPERAND (t, 0),
                                          TREE_OPERAND (t, 1),
                                          strict_overflow_p);

    case COND_EXPR:
    case CONSTRUCTOR:
    case OBJ_TYPE_REF:
    case ASSERT_EXPR:
    case ADDR_EXPR:
    case WITH_SIZE_EXPR:
    case SSA_NAME:
      return tree_single_nonzero_warnv_p (t, strict_overflow_p);

    case COMPOUND_EXPR:
    case MODIFY_EXPR:
    case BIND_EXPR:
      return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
                                        strict_overflow_p);

    case SAVE_EXPR:
      return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
                                        strict_overflow_p);

    case CALL_EXPR:
      return alloca_call_p (t);

    default:
      break;
    }
  return false;
}

/* Return true when T is an address and is known to be nonzero.
   Handle warnings about undefined signed overflow.  */

bool
tree_expr_nonzero_p (tree t)
{
  bool ret, strict_overflow_p;

  strict_overflow_p = false;
  ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
  if (strict_overflow_p)
    fold_overflow_warning (("assuming signed overflow does not occur when "
                            "determining that expression is always "
                            "non-zero"),
                           WARN_STRICT_OVERFLOW_MISC);
  return ret;
}

/* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
   attempt to fold the expression to a constant without modifying TYPE,
   OP0 or OP1.

   If the expression could be simplified to a constant, then return
   the constant.  If the expression would not be simplified to a
   constant, then return NULL_TREE.  */

tree
fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
{
  tree tem = fold_binary (code, type, op0, op1);
  return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
}

/* Given the components of a unary expression CODE, TYPE and OP0,
   attempt to fold the expression to a constant without modifying
   TYPE or OP0.

   If the expression could be simplified to a constant, then return
   the constant.  If the expression would not be simplified to a
   constant, then return NULL_TREE.  */

tree
fold_unary_to_constant (enum tree_code code, tree type, tree op0)
{
  tree tem = fold_unary (code, type, op0);
  return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
}

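/* Usage sketch (editor's illustration, not part of the original
   source):

     tree two = build_int_cst (integer_type_node, 2);
     tree three = build_int_cst (integer_type_node, 3);
     tree five = fold_binary_to_constant (PLUS_EXPR, integer_type_node,
                                          two, three);

   FIVE is the INTEGER_CST 5; with a non-constant operand the result
   would be NULL_TREE instead.  */
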
/* If EXP represents referencing an element in a constant string
   (either via pointer arithmetic or array indexing), return the
   tree representing the value accessed, otherwise return NULL.  */

tree
fold_read_from_constant_string (tree exp)
{
  if ((TREE_CODE (exp) == INDIRECT_REF
       || TREE_CODE (exp) == ARRAY_REF)
      && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
    {
      tree exp1 = TREE_OPERAND (exp, 0);
      tree index;
      tree string;
      location_t loc = EXPR_LOCATION (exp);

      if (TREE_CODE (exp) == INDIRECT_REF)
        string = string_constant (exp1, &index);
      else
        {
          tree low_bound = array_ref_low_bound (exp);
          index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));

          /* Optimize the special-case of a zero lower bound.

             We convert the low_bound to sizetype to avoid some problems
             with constant folding.  (E.g. suppose the lower bound is 1,
             and its mode is QI.  Without the conversion, (ARRAY
             +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
             +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)  */
          if (! integer_zerop (low_bound))
            index = size_diffop_loc (loc, index,
                                     fold_convert_loc (loc, sizetype, low_bound));

          string = exp1;
        }

      if (string
          && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
          && TREE_CODE (string) == STRING_CST
          && TREE_CODE (index) == INTEGER_CST
          && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
          && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
              == MODE_INT)
          && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
        return build_int_cst_type (TREE_TYPE (exp),
                                   (TREE_STRING_POINTER (string)
                                    [TREE_INT_CST_LOW (index)]));
    }
  return NULL;
}

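/* Worked example (editor's illustration, not part of the original
   source): for EXP representing "abc"[1], STRING is the STRING_CST
   "abc", INDEX is 1, and 1 < TREE_STRING_LENGTH ("abc") == 4, so the
   access folds to the character constant 'b'.  */
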
/* Return the tree for neg (ARG0) when ARG0 is known to be either
   an integer constant, real, or fixed-point constant.

   TYPE is the type of the result.  */

static tree
fold_negate_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  switch (TREE_CODE (arg0))
    {
    case INTEGER_CST:
      {
        double_int val = tree_to_double_int (arg0);
        int overflow = neg_double (val.low, val.high, &val.low, &val.high);

        t = force_fit_type_double (type, val, 1,
                                   (overflow | TREE_OVERFLOW (arg0))
                                   && !TYPE_UNSIGNED (type));
        break;
      }

    case REAL_CST:
      t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
      break;

    case FIXED_CST:
      {
        FIXED_VALUE_TYPE f;
        bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
                                            &(TREE_FIXED_CST (arg0)), NULL,
                                            TYPE_SATURATING (type));
        t = build_fixed (type, f);
        /* Propagate overflow flags.  */
        if (overflow_p | TREE_OVERFLOW (arg0))
          TREE_OVERFLOW (t) = 1;
        break;
      }

    default:
      gcc_unreachable ();
    }

  return t;
}

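/* Editor's note (illustration, not part of the original source):
   negating the 32-bit INTEGER_CST -2147483648 overflows, so
   neg_double reports overflow and force_fit_type_double marks the
   resulting constant with TREE_OVERFLOW; for unsigned types the
   overflow indication is suppressed because unsigned negation is
   well defined modulo 2^N.  */
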
/* Return the tree for abs (ARG0) when ARG0 is known to be either
   an integer constant or real constant.

   TYPE is the type of the result.  */

tree
fold_abs_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  switch (TREE_CODE (arg0))
    {
    case INTEGER_CST:
      {
        double_int val = tree_to_double_int (arg0);

        /* If the value is unsigned or non-negative, then the absolute value
           is the same as the ordinary value.  */
        if (TYPE_UNSIGNED (type)
            || !double_int_negative_p (val))
          t = arg0;

        /* If the value is negative, then the absolute value is
           its negation.  */
        else
          {
            int overflow;

            overflow = neg_double (val.low, val.high, &val.low, &val.high);
            t = force_fit_type_double (type, val, -1,
                                       overflow | TREE_OVERFLOW (arg0));
          }
      }
      break;

    case REAL_CST:
      if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
        t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
      else
        t = arg0;
      break;

    default:
      gcc_unreachable ();
    }

  return t;
}

/* Return the tree for not (ARG0) when ARG0 is known to be an integer
   constant.  TYPE is the type of the result.  */

static tree
fold_not_const (const_tree arg0, tree type)
{
  double_int val;

  gcc_assert (TREE_CODE (arg0) == INTEGER_CST);

  val = double_int_not (tree_to_double_int (arg0));
  return force_fit_type_double (type, val, 0, TREE_OVERFLOW (arg0));
}

/* Given CODE, a relational operator, the target type, TYPE and two
   constant operands OP0 and OP1, return the result of the
   relational operation.  If the result is not a compile time
   constant, then return NULL_TREE.  */

static tree
fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
{
  int result, invert;

  /* From here on, the only cases we handle are when the result is
     known to be a constant.  */

  if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
    {
      const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
      const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);

      /* Handle the cases where either operand is a NaN.  */
      if (real_isnan (c0) || real_isnan (c1))
        {
          switch (code)
            {
            case EQ_EXPR:
            case ORDERED_EXPR:
              result = 0;
              break;

            case NE_EXPR:
            case UNORDERED_EXPR:
            case UNLT_EXPR:
            case UNLE_EXPR:
            case UNGT_EXPR:
            case UNGE_EXPR:
            case UNEQ_EXPR:
              result = 1;
              break;

            case LT_EXPR:
            case LE_EXPR:
            case GT_EXPR:
            case GE_EXPR:
            case LTGT_EXPR:
              if (flag_trapping_math)
                return NULL_TREE;
              result = 0;
              break;

            default:
              gcc_unreachable ();
            }

          return constant_boolean_node (result, type);
        }

      return constant_boolean_node (real_compare (code, c0, c1), type);
    }

  if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
    {
      const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
      const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
      return constant_boolean_node (fixed_compare (code, c0, c1), type);
    }

  /* Handle equality/inequality of complex constants.  */
  if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
    {
      tree rcond = fold_relational_const (code, type,
                                          TREE_REALPART (op0),
                                          TREE_REALPART (op1));
      tree icond = fold_relational_const (code, type,
                                          TREE_IMAGPART (op0),
                                          TREE_IMAGPART (op1));
      if (code == EQ_EXPR)
        return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
      else if (code == NE_EXPR)
        return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
      else
        return NULL_TREE;
    }

  /* From here on we only handle LT, LE, GT, GE, EQ and NE.

     To compute GT, swap the arguments and do LT.
     To compute GE, do LT and invert the result.
     To compute LE, swap the arguments, do LT and invert the result.
     To compute NE, do EQ and invert the result.

     Therefore, the code below must handle only EQ and LT.  */

  if (code == LE_EXPR || code == GT_EXPR)
    {
      tree tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_tree_comparison (code);
    }

  /* Note that it is safe to invert for real values here because we
     have already handled the one case that it matters.  */

  invert = 0;
  if (code == NE_EXPR || code == GE_EXPR)
    {
      invert = 1;
      code = invert_tree_comparison (code, false);
    }

  /* Compute a result for LT or EQ if args permit;
     Otherwise return T.  */
  if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
    {
      if (code == EQ_EXPR)
        result = tree_int_cst_equal (op0, op1);
      else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
        result = INT_CST_LT_UNSIGNED (op0, op1);
      else
        result = INT_CST_LT (op0, op1);
    }
  else
    return NULL_TREE;

  if (invert)
    result ^= 1;
  return constant_boolean_node (result, type);
}

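/* Worked example (editor's illustration, not part of the original
   source): with C0 a NaN, "c0 != c1" folds to true and "c0 == c1"
   to false, but an ordered comparison such as "c0 < c1" is left
   unfolded (NULL_TREE) when flag_trapping_math is set, since
   evaluating it at run time would raise the invalid-operation
   exception.  */
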
/* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
   indicated TYPE.  If no CLEANUP_POINT_EXPR is necessary, return EXPR
   itself.  */

tree
fold_build_cleanup_point_expr (tree type, tree expr)
{
  /* If the expression does not have side effects then we don't have to wrap
     it with a cleanup point expression.  */
  if (!TREE_SIDE_EFFECTS (expr))
    return expr;

  /* If the expression is a return, check to see if the expression inside the
     return has no side effects or the right hand side of the modify expression
     inside the return.  If either don't have side effects set we don't need to
     wrap the expression in a cleanup point expression.  Note we don't check the
     left hand side of the modify because it should always be a return decl.  */
  if (TREE_CODE (expr) == RETURN_EXPR)
    {
      tree op = TREE_OPERAND (expr, 0);
      if (!op || !TREE_SIDE_EFFECTS (op))
        return expr;
      op = TREE_OPERAND (op, 1);
      if (!TREE_SIDE_EFFECTS (op))
        return expr;
    }

  return build1 (CLEANUP_POINT_EXPR, type, expr);
}

/* Given a pointer value OP0 and a type TYPE, return a simplified version
   of an indirection through OP0, or NULL_TREE if no simplification is
   possible.  */

tree
fold_indirect_ref_1 (location_t loc, tree type, tree op0)
{
  tree sub = op0;
  tree subtype;

  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  if (!POINTER_TYPE_P (subtype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&CONST_DECL -> to the value of the const decl.  */
      if (TREE_CODE (op) == CONST_DECL)
        return DECL_INITIAL (op);
      /* *&p => p;  make sure to handle *&"str"[cst] here.  */
      if (type == optype)
        {
          tree fop = fold_read_from_constant_string (op);
          if (fop)
            return fop;
          else
            return op;
        }
      /* *(foo *)&fooarray => fooarray[0] */
      else if (TREE_CODE (optype) == ARRAY_TYPE
               && type == TREE_TYPE (optype)
               && (!in_gimple_form
                   || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
        {
          tree type_domain = TYPE_DOMAIN (optype);
          tree min_val = size_zero_node;
          if (type_domain && TYPE_MIN_VALUE (type_domain))
            min_val = TYPE_MIN_VALUE (type_domain);
          if (in_gimple_form
              && TREE_CODE (min_val) != INTEGER_CST)
            return NULL_TREE;
          return build4_loc (loc, ARRAY_REF, type, op, min_val,
                             NULL_TREE, NULL_TREE);
        }
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
               && type == TREE_TYPE (optype))
        return fold_build1_loc (loc, REALPART_EXPR, type, op);
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
      else if (TREE_CODE (optype) == VECTOR_TYPE
               && type == TREE_TYPE (optype))
        {
          tree part_width = TYPE_SIZE (type);
          tree index = bitsize_int (0);
          return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width, index);
        }
    }

  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree op00 = TREE_OPERAND (sub, 0);
      tree op01 = TREE_OPERAND (sub, 1);

      STRIP_NOPS (op00);
      if (TREE_CODE (op00) == ADDR_EXPR)
        {
          tree op00type;
          op00 = TREE_OPERAND (op00, 0);
          op00type = TREE_TYPE (op00);

          /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
          if (TREE_CODE (op00type) == VECTOR_TYPE
              && type == TREE_TYPE (op00type))
            {
              HOST_WIDE_INT offset = tree_low_cst (op01, 0);
              tree part_width = TYPE_SIZE (type);
              unsigned HOST_WIDE_INT part_widthi = tree_low_cst (part_width, 0)/BITS_PER_UNIT;
              unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
              tree index = bitsize_int (indexi);

              if (offset/part_widthi <= TYPE_VECTOR_SUBPARTS (op00type))
                return fold_build3_loc (loc,
                                        BIT_FIELD_REF, type, op00,
                                        part_width, index);

            }
          /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
          else if (TREE_CODE (op00type) == COMPLEX_TYPE
                   && type == TREE_TYPE (op00type))
            {
              tree size = TYPE_SIZE_UNIT (type);
              if (tree_int_cst_equal (size, op01))
                return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
            }
          /* ((foo *)&fooarray)[1] => fooarray[1] */
          else if (TREE_CODE (op00type) == ARRAY_TYPE
                   && type == TREE_TYPE (op00type))
            {
              tree type_domain = TYPE_DOMAIN (op00type);
              tree min_val = size_zero_node;
              if (type_domain && TYPE_MIN_VALUE (type_domain))
                min_val = TYPE_MIN_VALUE (type_domain);
              op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
                                     TYPE_SIZE_UNIT (type));
              op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
              return build4_loc (loc, ARRAY_REF, type, op00, op01,
                                 NULL_TREE, NULL_TREE);
            }
        }
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && type == TREE_TYPE (TREE_TYPE (subtype))
      && (!in_gimple_form
          || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      sub = build_fold_indirect_ref_loc (loc, sub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
        min_val = TYPE_MIN_VALUE (type_domain);
      if (in_gimple_form
          && TREE_CODE (min_val) != INTEGER_CST)
        return NULL_TREE;
      return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
                         NULL_TREE);
    }

  return NULL_TREE;
}

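/* Worked example (editor's illustration, not part of the original
   source): for Z of type _Complex double, the indirection
   *((double *) &z + 1) is a POINTER_PLUS_EXPR whose offset equals
   TYPE_SIZE_UNIT (double) == 8, so the COMPLEX_TYPE branch above
   folds it to __imag__ z; *(double *) &z with no offset folds to
   __real__ z in the ADDR_EXPR branch instead.  */
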
/* Builds an expression for an indirection through T, simplifying some
   cases.  */

tree
build_fold_indirect_ref_loc (location_t loc, tree t)
{
  tree type = TREE_TYPE (TREE_TYPE (t));
  tree sub = fold_indirect_ref_1 (loc, type, t);

  if (sub)
    return sub;

  return build1_loc (loc, INDIRECT_REF, type, t);
}

/* Given an INDIRECT_REF T, return either T or a simplified version.  */

tree
fold_indirect_ref_loc (location_t loc, tree t)
{
  tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));

  if (sub)
    return sub;
  else
    return t;
}

/* Strip non-trapping, non-side-effecting tree nodes from an expression
   whose result is ignored.  The type of the returned tree need not be
   the same as the original expression.  */

tree
fold_ignored_result (tree t)
{
  if (!TREE_SIDE_EFFECTS (t))
    return integer_zero_node;

  for (;;)
    switch (TREE_CODE_CLASS (TREE_CODE (t)))
      {
      case tcc_unary:
        t = TREE_OPERAND (t, 0);
        break;

      case tcc_binary:
      case tcc_comparison:
        if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
          t = TREE_OPERAND (t, 0);
        else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
          t = TREE_OPERAND (t, 1);
        else
          return t;
        break;

      case tcc_expression:
        switch (TREE_CODE (t))
          {
          case COMPOUND_EXPR:
            if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
              return t;
            t = TREE_OPERAND (t, 0);
            break;

          case COND_EXPR:
            if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
                || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
              return t;
            t = TREE_OPERAND (t, 0);
            break;

          default:
            return t;
          }
        break;

      default:
        return t;
      }
}

/* Return the value of VALUE, rounded up to a multiple of DIVISOR.
   This can only be applied to objects of a sizetype.  */

tree
round_up_loc (location_t loc, tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a const,
     because in that case, this check is more expensive than just
     doing it.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
        return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      if (TREE_CODE (value) == INTEGER_CST)
        {
          double_int val = tree_to_double_int (value);
          bool overflow_p;

          if ((val.low & (divisor - 1)) == 0)
            return value;

          overflow_p = TREE_OVERFLOW (value);
          val.low &= ~(divisor - 1);
          val.low += divisor;
          if (val.low == 0)
            {
              val.high++;
              if (val.high == 0)
                overflow_p = true;
            }

          return force_fit_type_double (TREE_TYPE (value), val,
                                        -1, overflow_p);
        }
      else
        {
          tree t;

          t = build_int_cst (TREE_TYPE (value), divisor - 1);
          value = size_binop_loc (loc, PLUS_EXPR, value, t);
          t = build_int_cst (TREE_TYPE (value), -divisor);
          value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
        }
    }
  else
    {
      if (!div)
        div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
      value = size_binop_loc (loc, MULT_EXPR, value, div);
    }

  return value;
}

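/* Worked example (editor's illustration, not part of the original
   source): rounding VALUE = 21 up to DIVISOR = 8 takes the
   power-of-two branch: (21 + 7) & -8 == 28 & ~7 == 24, the smallest
   multiple of 8 that is >= 21.  For a non-power-of-two divisor the
   same result comes from CEIL_DIV followed by MULT.  */
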
/* Likewise, but round down. */
|
16299 |
|
|
|
16300 |
|
|
tree
|
16301 |
|
|
round_down_loc (location_t loc, tree value, int divisor)
|
16302 |
|
|
{
|
16303 |
|
|
tree div = NULL_TREE;
|
16304 |
|
|
|
16305 |
|
|
gcc_assert (divisor > 0);
|
16306 |
|
|
if (divisor == 1)
|
16307 |
|
|
return value;
|
16308 |
|
|
|
16309 |
|
|
/* See if VALUE is already a multiple of DIVISOR. If so, we don't
|
16310 |
|
|
have to do anything. Only do this when we are not given a const,
|
16311 |
|
|
because in that case, this check is more expensive than just
|
16312 |
|
|
doing it. */
|
16313 |
|
|
if (TREE_CODE (value) != INTEGER_CST)
|
16314 |
|
|
{
|
16315 |
|
|
div = build_int_cst (TREE_TYPE (value), divisor);
|
16316 |
|
|
|
16317 |
|
|
if (multiple_of_p (TREE_TYPE (value), value, div))
|
16318 |
|
|
return value;
|
16319 |
|
|
}
|
16320 |
|
|
|
16321 |
|
|
/* If divisor is a power of two, simplify this to bit manipulation. */
|
16322 |
|
|
if (divisor == (divisor & -divisor))
|
16323 |
|
|
{
|
16324 |
|
|
tree t;
|
16325 |
|
|
|
16326 |
|
|
t = build_int_cst (TREE_TYPE (value), -divisor);
|
16327 |
|
|
value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
|
16328 |
|
|
}
|
16329 |
|
|
else
|
16330 |
|
|
{
|
16331 |
|
|
if (!div)
|
16332 |
|
|
div = build_int_cst (TREE_TYPE (value), divisor);
|
16333 |
|
|
value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
|
16334 |
|
|
value = size_binop_loc (loc, MULT_EXPR, value, div);
|
16335 |
|
|
}
|
16336 |
|
|
|
16337 |
|
|
return value;
|
16338 |
|
|
}
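
/* For example, rounding 13 down to a multiple of 8 is just the
   masking 13 & -8 == 8; a non-power-of-two divisor goes through
   FLOOR_DIV_EXPR and MULT_EXPR instead, e.g. 13 / 12 * 12 == 12.  */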

/* Returns the pointer to the base of the object addressed by EXP and
   extracts the information about the offset of the access, storing it
   to PBITPOS and POFFSET.  */

static tree
split_address_to_core_and_offset (tree exp,
                                  HOST_WIDE_INT *pbitpos, tree *poffset)
{
  tree core;
  enum machine_mode mode;
  int unsignedp, volatilep;
  HOST_WIDE_INT bitsize;
  location_t loc = EXPR_LOCATION (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    {
      core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
                                  poffset, &mode, &unsignedp, &volatilep,
                                  false);
      core = build_fold_addr_expr_loc (loc, core);
    }
  else
    {
      core = exp;
      *pbitpos = 0;
      *poffset = NULL_TREE;
    }

  return core;
}
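
/* For instance, assuming a struct whose field F sits at byte offset 4,
   splitting &s.f yields core &s with *PBITPOS == 32 and *POFFSET ==
   NULL_TREE, while splitting &a[i] for 4-byte elements yields core &a
   with *PBITPOS == 0 and *POFFSET holding the variable byte offset
   i * 4.  */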

/* Returns true if addresses of E1 and E2 differ by a constant, false
   otherwise.  If they do, E1 - E2 is stored in *DIFF.  */

bool
ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
{
  tree core1, core2;
  HOST_WIDE_INT bitpos1, bitpos2;
  tree toffset1, toffset2, tdiff, type;

  core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
  core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);

  if (bitpos1 % BITS_PER_UNIT != 0
      || bitpos2 % BITS_PER_UNIT != 0
      || !operand_equal_p (core1, core2, 0))
    return false;

  if (toffset1 && toffset2)
    {
      type = TREE_TYPE (toffset1);
      if (type != TREE_TYPE (toffset2))
        toffset2 = fold_convert (type, toffset2);

      tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
      if (!cst_and_fits_in_hwi (tdiff))
        return false;

      *diff = int_cst_value (tdiff);
    }
  else if (toffset1 || toffset2)
    {
      /* If only one of the offsets is non-constant, the difference cannot
         be a constant.  */
      return false;
    }
  else
    *diff = 0;

  *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
  return true;
}
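
/* For example, with 4-byte array elements, ptr_difference_const on
   &a[3] and &a[1] stores 8 in *DIFF and returns true.  Comparing
   &a[i] with &a[1] fails, because only one side has a variable
   offset, while &a[i] against &a[i] can still succeed with
   *DIFF == 0, since the difference of the two offsets folds to a
   constant.  */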

/* Simplify the floating point expression EXP when the sign of the
   result is not significant.  Return NULL_TREE if no simplification
   is possible.  */

tree
fold_strip_sign_ops (tree exp)
{
  tree arg0, arg1;
  location_t loc = EXPR_LOCATION (exp);

  switch (TREE_CODE (exp))
    {
    case ABS_EXPR:
    case NEGATE_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      return arg0 ? arg0 : TREE_OPERAND (exp, 0);
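
    /* For a product or quotient, a negation stripped from either
       operand flips only the sign of the result, which the caller has
       said is not significant; this is unsafe only when the rounding
       mode depends on the sign, hence the check below.  */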

    case MULT_EXPR:
    case RDIV_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
        return NULL_TREE;
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg0 != NULL_TREE || arg1 != NULL_TREE)
        return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp),
                                arg0 ? arg0 : TREE_OPERAND (exp, 0),
                                arg1 ? arg1 : TREE_OPERAND (exp, 1));
      break;

    case COMPOUND_EXPR:
      arg0 = TREE_OPERAND (exp, 0);
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg1)
        return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
      break;
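
    /* A conditional may have sign operations stripped from both of
       its arms independently; the condition operand itself is left
       alone.  */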

    case COND_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
      if (arg0 || arg1)
        return fold_build3_loc (loc,
                                COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
                                arg0 ? arg0 : TREE_OPERAND (exp, 1),
                                arg1 ? arg1 : TREE_OPERAND (exp, 2));
      break;
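
    /* Math builtins can be simplified too: copysign draws its
       magnitude from its first argument, and "odd" functions satisfy
       f(-x) == -f(x), so a sign operation on their argument only
       affects the sign of the result.  */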

    case CALL_EXPR:
      {
        const enum built_in_function fcode = builtin_mathfn_code (exp);
        switch (fcode)
          {
          CASE_FLT_FN (BUILT_IN_COPYSIGN):
            /* Strip copysign function call, return the 1st argument.  */
            arg0 = CALL_EXPR_ARG (exp, 0);
            arg1 = CALL_EXPR_ARG (exp, 1);
            return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1);

          default:
            /* Strip sign ops from the argument of "odd" math functions.  */
            if (negate_mathfn_p (fcode))
              {
                arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
                if (arg0)
                  return build_call_expr_loc (loc, get_callee_fndecl (exp), 1, arg0);
              }
            break;
          }
      }
      break;

    default:
      break;
    }
  return NULL_TREE;
}
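
/* For example, fold_strip_sign_ops turns -x * -y into x * y and
   ABS_EXPR <x> into x.  Callers in the builtin folders can use it
   when the sign of a subexpression cannot affect the final value,
   e.g. when the result is only raised to an even power.  */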