/* Language-independent node constructors for parse phase of GNU compiler.
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
   2011, 2012 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* This file contains the low level primitives for operating on tree nodes,
   including allocation, list operations, interning of identifiers,
   construction of data type nodes and statement nodes,
   and construction of type conversion nodes.  It also contains
   tables indexed by tree code that describe how to take apart
   nodes of that code.

   It is intended to be language-independent, but occasionally
   calls language-dependent routines defined (for C) in typecheck.c.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "flags.h"
#include "tree.h"
#include "tm_p.h"
#include "function.h"
#include "obstack.h"
#include "toplev.h"
#include "ggc.h"
#include "hashtab.h"
#include "filenames.h"
#include "output.h"
#include "target.h"
#include "common/common-target.h"
#include "langhooks.h"
#include "tree-inline.h"
#include "tree-iterator.h"
#include "basic-block.h"
#include "tree-flow.h"
#include "params.h"
#include "pointer-set.h"
#include "tree-pass.h"
#include "langhooks-def.h"
#include "diagnostic.h"
#include "tree-diagnostic.h"
#include "tree-pretty-print.h"
#include "cgraph.h"
#include "timevar.h"
#include "except.h"
#include "debug.h"
#include "intl.h"

/* Tree code classes.  */

#define DEFTREECODE(SYM, NAME, TYPE, LENGTH) TYPE,
#define END_OF_BASE_TREE_CODES tcc_exceptional,

const enum tree_code_class tree_code_type[] = {
#include "all-tree.def"
};

#undef DEFTREECODE
#undef END_OF_BASE_TREE_CODES

/* Table indexed by tree code giving number of expression
   operands beyond the fixed part of the node structure.
   Not used for types or decls.  */

#define DEFTREECODE(SYM, NAME, TYPE, LENGTH) LENGTH,
#define END_OF_BASE_TREE_CODES 0,

const unsigned char tree_code_length[] = {
#include "all-tree.def"
};

#undef DEFTREECODE
#undef END_OF_BASE_TREE_CODES

/* Names of tree components.
   Used for printing out the tree and error messages.  */
#define DEFTREECODE(SYM, NAME, TYPE, LEN) NAME,
#define END_OF_BASE_TREE_CODES "@dummy",

const char *const tree_code_name[] = {
#include "all-tree.def"
};

#undef DEFTREECODE
#undef END_OF_BASE_TREE_CODES

/* Each tree code class has an associated string representation.
   These must correspond to the tree_code_class entries.  */

const char *const tree_code_class_strings[] =
{
  "exceptional",
  "constant",
  "type",
  "declaration",
  "reference",
  "comparison",
  "unary",
  "binary",
  "statement",
  "vl_exp",
  "expression"
};

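/* Illustrative sketch of assumed usage (not exercised in this file):
   the tables above are normally reached through the accessor macros in
   tree.h rather than indexed directly.  For a binary node such as
   PLUS_EXPR one would expect

     TREE_CODE_CLASS (PLUS_EXPR) == tcc_binary     (from tree_code_type)
     TREE_CODE_LENGTH (PLUS_EXPR) == 2             (from tree_code_length)
     tree_code_name[(int) PLUS_EXPR]               (the string "plus_expr")

   to hold.  */
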
/* obstack.[ch] explicitly declined to prototype this.  */
extern int _obstack_allocated_p (struct obstack *h, void *obj);

#ifdef GATHER_STATISTICS
/* Statistics-gathering stuff.  */

static int tree_code_counts[MAX_TREE_CODES];
int tree_node_counts[(int) all_kinds];
int tree_node_sizes[(int) all_kinds];

/* Keep in sync with tree.h:enum tree_node_kind.  */
static const char * const tree_node_kind_names[] = {
  "decls",
  "types",
  "blocks",
  "stmts",
  "refs",
  "exprs",
  "constants",
  "identifiers",
  "vecs",
  "binfos",
  "ssa names",
  "constructors",
  "random kinds",
  "lang_decl kinds",
  "lang_type kinds",
  "omp clauses",
};
#endif /* GATHER_STATISTICS */

/* Unique id for next decl created.  */
static GTY(()) int next_decl_uid;
/* Unique id for next type created.  */
static GTY(()) int next_type_uid = 1;
/* Unique id for next debug decl created.  Use negative numbers,
   to catch erroneous uses.  */
static GTY(()) int next_debug_decl_uid;

/* Since we cannot rehash a type after it is in the table, we have to
   keep the hash code.  */

struct GTY(()) type_hash {
  unsigned long hash;
  tree type;
};

/* Initial size of the hash table (rounded to next prime).  */
#define TYPE_HASH_INITIAL_SIZE 1000

/* Now here is the hash table.  When recording a type, it is added to
   the slot whose index is the hash code.  Note that the hash table is
   used for several kinds of types (function types, array types and
   array index range types, for now).  While all these live in the
   same table, they are completely independent, and the hash code is
   computed differently for each of these.  */

static GTY ((if_marked ("type_hash_marked_p"), param_is (struct type_hash)))
     htab_t type_hash_table;

/* Hash table and temporary node for larger integer const values.  */
static GTY (()) tree int_cst_node;
static GTY ((if_marked ("ggc_marked_p"), param_is (union tree_node)))
     htab_t int_cst_hash_table;

/* Hash table for optimization flags and target option flags.  Use the same
   hash table for both sets of options.  Nodes for building the current
   optimization and target option nodes.  The assumption is most of the time
   the options created will already be in the hash table, so we avoid
   allocating and freeing up a node repeatedly.  */
static GTY (()) tree cl_optimization_node;
static GTY (()) tree cl_target_option_node;
static GTY ((if_marked ("ggc_marked_p"), param_is (union tree_node)))
     htab_t cl_option_hash_table;

/* General tree->tree mapping structure for use in hash tables.  */


static GTY ((if_marked ("tree_decl_map_marked_p"), param_is (struct tree_decl_map)))
     htab_t debug_expr_for_decl;

static GTY ((if_marked ("tree_decl_map_marked_p"), param_is (struct tree_decl_map)))
     htab_t value_expr_for_decl;

static GTY ((if_marked ("tree_vec_map_marked_p"), param_is (struct tree_vec_map)))
     htab_t debug_args_for_decl;

static GTY ((if_marked ("tree_priority_map_marked_p"),
             param_is (struct tree_priority_map)))
  htab_t init_priority_for_decl;

static void set_type_quals (tree, int);
static int type_hash_eq (const void *, const void *);
static hashval_t type_hash_hash (const void *);
static hashval_t int_cst_hash_hash (const void *);
static int int_cst_hash_eq (const void *, const void *);
static hashval_t cl_option_hash_hash (const void *);
static int cl_option_hash_eq (const void *, const void *);
static void print_type_hash_statistics (void);
static void print_debug_expr_statistics (void);
static void print_value_expr_statistics (void);
static int type_hash_marked_p (const void *);
static unsigned int type_hash_list (const_tree, hashval_t);
static unsigned int attribute_hash_list (const_tree, hashval_t);

tree global_trees[TI_MAX];
tree integer_types[itk_none];

unsigned char tree_contains_struct[MAX_TREE_CODES][64];

/* Number of operands for each OpenMP clause.  */
unsigned const char omp_clause_num_ops[] =
{
  0, /* OMP_CLAUSE_ERROR  */
  1, /* OMP_CLAUSE_PRIVATE  */
  1, /* OMP_CLAUSE_SHARED  */
  1, /* OMP_CLAUSE_FIRSTPRIVATE  */
  2, /* OMP_CLAUSE_LASTPRIVATE  */
  4, /* OMP_CLAUSE_REDUCTION  */
  1, /* OMP_CLAUSE_COPYIN  */
  1, /* OMP_CLAUSE_COPYPRIVATE  */
  1, /* OMP_CLAUSE_IF  */
  1, /* OMP_CLAUSE_NUM_THREADS  */
  1, /* OMP_CLAUSE_SCHEDULE  */
  0, /* OMP_CLAUSE_NOWAIT  */
  0, /* OMP_CLAUSE_ORDERED  */
  0, /* OMP_CLAUSE_DEFAULT  */
  3, /* OMP_CLAUSE_COLLAPSE  */
  0, /* OMP_CLAUSE_UNTIED  */
  1, /* OMP_CLAUSE_FINAL  */
  0  /* OMP_CLAUSE_MERGEABLE  */
};

const char * const omp_clause_code_name[] =
{
  "error_clause",
  "private",
  "shared",
  "firstprivate",
  "lastprivate",
  "reduction",
  "copyin",
  "copyprivate",
  "if",
  "num_threads",
  "schedule",
  "nowait",
  "ordered",
  "default",
  "collapse",
  "untied",
  "final",
  "mergeable"
};

/* Return the tree node structure used by tree code CODE.  */

static inline enum tree_node_structure_enum
tree_node_structure_for_code (enum tree_code code)
{
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_declaration:
      {
        switch (code)
          {
          case FIELD_DECL:
            return TS_FIELD_DECL;
          case PARM_DECL:
            return TS_PARM_DECL;
          case VAR_DECL:
            return TS_VAR_DECL;
          case LABEL_DECL:
            return TS_LABEL_DECL;
          case RESULT_DECL:
            return TS_RESULT_DECL;
          case DEBUG_EXPR_DECL:
            return TS_DECL_WRTL;
          case CONST_DECL:
            return TS_CONST_DECL;
          case TYPE_DECL:
            return TS_TYPE_DECL;
          case FUNCTION_DECL:
            return TS_FUNCTION_DECL;
          case TRANSLATION_UNIT_DECL:
            return TS_TRANSLATION_UNIT_DECL;
          default:
            return TS_DECL_NON_COMMON;
          }
      }
    case tcc_type:
      return TS_TYPE_NON_COMMON;
    case tcc_reference:
    case tcc_comparison:
    case tcc_unary:
    case tcc_binary:
    case tcc_expression:
    case tcc_statement:
    case tcc_vl_exp:
      return TS_EXP;
    default:  /* tcc_constant and tcc_exceptional */
      break;
    }
  switch (code)
    {
      /* tcc_constant cases.  */
    case INTEGER_CST:           return TS_INT_CST;
    case REAL_CST:              return TS_REAL_CST;
    case FIXED_CST:             return TS_FIXED_CST;
    case COMPLEX_CST:           return TS_COMPLEX;
    case VECTOR_CST:            return TS_VECTOR;
    case STRING_CST:            return TS_STRING;
      /* tcc_exceptional cases.  */
    case ERROR_MARK:            return TS_COMMON;
    case IDENTIFIER_NODE:       return TS_IDENTIFIER;
    case TREE_LIST:             return TS_LIST;
    case TREE_VEC:              return TS_VEC;
    case SSA_NAME:              return TS_SSA_NAME;
    case PLACEHOLDER_EXPR:      return TS_COMMON;
    case STATEMENT_LIST:        return TS_STATEMENT_LIST;
    case BLOCK:                 return TS_BLOCK;
    case CONSTRUCTOR:           return TS_CONSTRUCTOR;
    case TREE_BINFO:            return TS_BINFO;
    case OMP_CLAUSE:            return TS_OMP_CLAUSE;
    case OPTIMIZATION_NODE:     return TS_OPTIMIZATION;
    case TARGET_OPTION_NODE:    return TS_TARGET_OPTION;

    default:
      gcc_unreachable ();
    }
}


/* Initialize tree_contains_struct to describe the hierarchy of tree
   nodes.  */

static void
initialize_tree_contains_struct (void)
{
  unsigned i;

  for (i = ERROR_MARK; i < LAST_AND_UNUSED_TREE_CODE; i++)
    {
      enum tree_code code;
      enum tree_node_structure_enum ts_code;

      code = (enum tree_code) i;
      ts_code = tree_node_structure_for_code (code);

      /* Mark the TS structure itself.  */
      tree_contains_struct[code][ts_code] = 1;

      /* Mark all the structures that TS is derived from.  */
      switch (ts_code)
        {
        case TS_TYPED:
        case TS_BLOCK:
          MARK_TS_BASE (code);
          break;

        case TS_COMMON:
        case TS_INT_CST:
        case TS_REAL_CST:
        case TS_FIXED_CST:
        case TS_VECTOR:
        case TS_STRING:
        case TS_COMPLEX:
        case TS_SSA_NAME:
        case TS_CONSTRUCTOR:
        case TS_EXP:
        case TS_STATEMENT_LIST:
          MARK_TS_TYPED (code);
          break;

        case TS_IDENTIFIER:
        case TS_DECL_MINIMAL:
        case TS_TYPE_COMMON:
        case TS_LIST:
        case TS_VEC:
        case TS_BINFO:
        case TS_OMP_CLAUSE:
        case TS_OPTIMIZATION:
        case TS_TARGET_OPTION:
          MARK_TS_COMMON (code);
          break;

        case TS_TYPE_WITH_LANG_SPECIFIC:
          MARK_TS_TYPE_COMMON (code);
          break;

        case TS_TYPE_NON_COMMON:
          MARK_TS_TYPE_WITH_LANG_SPECIFIC (code);
          break;

        case TS_DECL_COMMON:
          MARK_TS_DECL_MINIMAL (code);
          break;

        case TS_DECL_WRTL:
        case TS_CONST_DECL:
          MARK_TS_DECL_COMMON (code);
          break;

        case TS_DECL_NON_COMMON:
          MARK_TS_DECL_WITH_VIS (code);
          break;

        case TS_DECL_WITH_VIS:
        case TS_PARM_DECL:
        case TS_LABEL_DECL:
        case TS_RESULT_DECL:
          MARK_TS_DECL_WRTL (code);
          break;

        case TS_FIELD_DECL:
          MARK_TS_DECL_COMMON (code);
          break;

        case TS_VAR_DECL:
          MARK_TS_DECL_WITH_VIS (code);
          break;

        case TS_TYPE_DECL:
        case TS_FUNCTION_DECL:
          MARK_TS_DECL_NON_COMMON (code);
          break;

        case TS_TRANSLATION_UNIT_DECL:
          MARK_TS_DECL_COMMON (code);
          break;

        default:
          gcc_unreachable ();
        }
    }

  /* Basic consistency checks for attributes used in fold.  */
  gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_NON_COMMON]);
  gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_NON_COMMON]);
  gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WRTL]);
  gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_WRTL]);
  gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_WRTL]);
  gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WRTL]);
  gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_WRTL]);
  gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WITH_VIS]);
  gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WITH_VIS]);
  gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_WITH_VIS]);
  gcc_assert (tree_contains_struct[VAR_DECL][TS_VAR_DECL]);
  gcc_assert (tree_contains_struct[FIELD_DECL][TS_FIELD_DECL]);
  gcc_assert (tree_contains_struct[PARM_DECL][TS_PARM_DECL]);
  gcc_assert (tree_contains_struct[LABEL_DECL][TS_LABEL_DECL]);
  gcc_assert (tree_contains_struct[RESULT_DECL][TS_RESULT_DECL]);
  gcc_assert (tree_contains_struct[CONST_DECL][TS_CONST_DECL]);
  gcc_assert (tree_contains_struct[TYPE_DECL][TS_TYPE_DECL]);
  gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_FUNCTION_DECL]);
  gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_COMMON]);
}


/* Init tree.c.  */

void
init_ttree (void)
{
  /* Initialize the hash table of types.  */
  type_hash_table = htab_create_ggc (TYPE_HASH_INITIAL_SIZE, type_hash_hash,
                                     type_hash_eq, 0);

  debug_expr_for_decl = htab_create_ggc (512, tree_decl_map_hash,
                                         tree_decl_map_eq, 0);

  value_expr_for_decl = htab_create_ggc (512, tree_decl_map_hash,
                                         tree_decl_map_eq, 0);
  init_priority_for_decl = htab_create_ggc (512, tree_priority_map_hash,
                                            tree_priority_map_eq, 0);

  int_cst_hash_table = htab_create_ggc (1024, int_cst_hash_hash,
                                        int_cst_hash_eq, NULL);

  int_cst_node = make_node (INTEGER_CST);

  cl_option_hash_table = htab_create_ggc (64, cl_option_hash_hash,
                                          cl_option_hash_eq, NULL);

  cl_optimization_node = make_node (OPTIMIZATION_NODE);
  cl_target_option_node = make_node (TARGET_OPTION_NODE);

  /* Initialize the tree_contains_struct array.  */
  initialize_tree_contains_struct ();
  lang_hooks.init_ts ();
}

/* The name of the object as the assembler will see it (but before any
   translations made by ASM_OUTPUT_LABELREF).  Often this is the same
   as DECL_NAME.  It is an IDENTIFIER_NODE.  */
tree
decl_assembler_name (tree decl)
{
  if (!DECL_ASSEMBLER_NAME_SET_P (decl))
    lang_hooks.set_decl_assembler_name (decl);
  return DECL_WITH_VIS_CHECK (decl)->decl_with_vis.assembler_name;
}

/* Compare ASMNAME with the DECL_ASSEMBLER_NAME of DECL.  */

bool
decl_assembler_name_equal (tree decl, const_tree asmname)
{
  tree decl_asmname = DECL_ASSEMBLER_NAME (decl);
  const char *decl_str;
  const char *asmname_str;
  bool test = false;

  if (decl_asmname == asmname)
    return true;

  decl_str = IDENTIFIER_POINTER (decl_asmname);
  asmname_str = IDENTIFIER_POINTER (asmname);


  /* If the target assembler name was set by the user, things are trickier.
     We have a leading '*' to begin with.  After that, it's arguable what
     is the correct thing to do with -fleading-underscore.  Arguably, we've
     historically been doing the wrong thing in assemble_alias by always
     printing the leading underscore.  Since we're not changing that, make
     sure user_label_prefix follows the '*' before matching.  */
  if (decl_str[0] == '*')
    {
      size_t ulp_len = strlen (user_label_prefix);

      decl_str ++;

      if (ulp_len == 0)
        test = true;
      else if (strncmp (decl_str, user_label_prefix, ulp_len) == 0)
        decl_str += ulp_len, test = true;
      else
        decl_str --;
    }
  if (asmname_str[0] == '*')
    {
      size_t ulp_len = strlen (user_label_prefix);

      asmname_str ++;

      if (ulp_len == 0)
        test = true;
      else if (strncmp (asmname_str, user_label_prefix, ulp_len) == 0)
        asmname_str += ulp_len, test = true;
      else
        asmname_str --;
    }

  if (!test)
    return false;
  return strcmp (decl_str, asmname_str) == 0;
}

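/* Illustrative sketch (assumed values, not exercised here): with
   user_label_prefix equal to "_", a decl whose assembler name was
   user-set to "*_foo" satisfies

     decl_assembler_name_equal (decl, get_identifier ("foo"))

   because the leading '*' and the user label prefix are stripped on
   either side before the final strcmp.  */
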
/* Hash asmnames ignoring the user specified marks.  */

hashval_t
decl_assembler_name_hash (const_tree asmname)
{
  if (IDENTIFIER_POINTER (asmname)[0] == '*')
    {
      const char *decl_str = IDENTIFIER_POINTER (asmname) + 1;
      size_t ulp_len = strlen (user_label_prefix);

      if (ulp_len == 0)
        ;
      else if (strncmp (decl_str, user_label_prefix, ulp_len) == 0)
        decl_str += ulp_len;

      return htab_hash_string (decl_str);
    }

  return htab_hash_string (IDENTIFIER_POINTER (asmname));
}

/* Compute the number of bytes occupied by a tree with code CODE.
   This function cannot be used for nodes that have variable sizes,
   including TREE_VEC, STRING_CST, and CALL_EXPR.  */
size_t
tree_code_size (enum tree_code code)
{
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_declaration:  /* A decl node */
      {
        switch (code)
          {
          case FIELD_DECL:
            return sizeof (struct tree_field_decl);
          case PARM_DECL:
            return sizeof (struct tree_parm_decl);
          case VAR_DECL:
            return sizeof (struct tree_var_decl);
          case LABEL_DECL:
            return sizeof (struct tree_label_decl);
          case RESULT_DECL:
            return sizeof (struct tree_result_decl);
          case CONST_DECL:
            return sizeof (struct tree_const_decl);
          case TYPE_DECL:
            return sizeof (struct tree_type_decl);
          case FUNCTION_DECL:
            return sizeof (struct tree_function_decl);
          case DEBUG_EXPR_DECL:
            return sizeof (struct tree_decl_with_rtl);
          default:
            return sizeof (struct tree_decl_non_common);
          }
      }

    case tcc_type:  /* a type node */
      return sizeof (struct tree_type_non_common);

    case tcc_reference:   /* a reference */
    case tcc_expression:  /* an expression */
    case tcc_statement:   /* an expression with side effects */
    case tcc_comparison:  /* a comparison expression */
    case tcc_unary:       /* a unary arithmetic expression */
    case tcc_binary:      /* a binary arithmetic expression */
      return (sizeof (struct tree_exp)
              + (TREE_CODE_LENGTH (code) - 1) * sizeof (tree));

    case tcc_constant:  /* a constant */
      switch (code)
        {
        case INTEGER_CST:       return sizeof (struct tree_int_cst);
        case REAL_CST:          return sizeof (struct tree_real_cst);
        case FIXED_CST:         return sizeof (struct tree_fixed_cst);
        case COMPLEX_CST:       return sizeof (struct tree_complex);
        case VECTOR_CST:        return sizeof (struct tree_vector);
        case STRING_CST:        gcc_unreachable ();
        default:
          return lang_hooks.tree_size (code);
        }

    case tcc_exceptional:  /* something random, like an identifier.  */
      switch (code)
        {
        case IDENTIFIER_NODE:   return lang_hooks.identifier_size;
        case TREE_LIST:         return sizeof (struct tree_list);

        case ERROR_MARK:
        case PLACEHOLDER_EXPR:  return sizeof (struct tree_common);

        case TREE_VEC:
        case OMP_CLAUSE:        gcc_unreachable ();

        case SSA_NAME:          return sizeof (struct tree_ssa_name);

        case STATEMENT_LIST:    return sizeof (struct tree_statement_list);
        case BLOCK:             return sizeof (struct tree_block);
        case CONSTRUCTOR:       return sizeof (struct tree_constructor);
        case OPTIMIZATION_NODE: return sizeof (struct tree_optimization_option);
        case TARGET_OPTION_NODE: return sizeof (struct tree_target_option);

        default:
          return lang_hooks.tree_size (code);
        }

    default:
      gcc_unreachable ();
    }
}

/* Compute the number of bytes occupied by NODE.  This routine only
   looks at TREE_CODE, except for those nodes that have variable sizes.  */
size_t
tree_size (const_tree node)
{
  const enum tree_code code = TREE_CODE (node);
  switch (code)
    {
    case TREE_BINFO:
      return (offsetof (struct tree_binfo, base_binfos)
              + VEC_embedded_size (tree, BINFO_N_BASE_BINFOS (node)));

    case TREE_VEC:
      return (sizeof (struct tree_vec)
              + (TREE_VEC_LENGTH (node) - 1) * sizeof (tree));

    case STRING_CST:
      return TREE_STRING_LENGTH (node) + offsetof (struct tree_string, str) + 1;

    case OMP_CLAUSE:
      return (sizeof (struct tree_omp_clause)
              + (omp_clause_num_ops[OMP_CLAUSE_CODE (node)] - 1)
                * sizeof (tree));

    default:
      if (TREE_CODE_CLASS (code) == tcc_vl_exp)
        return (sizeof (struct tree_exp)
                + (VL_EXP_OPERAND_LENGTH (node) - 1) * sizeof (tree));
      else
        return tree_code_size (code);
    }
}

/* Record interesting allocation statistics for a tree node with CODE
   and LENGTH.  */

static void
record_node_allocation_statistics (enum tree_code code ATTRIBUTE_UNUSED,
                                   size_t length ATTRIBUTE_UNUSED)
{
#ifdef GATHER_STATISTICS
  enum tree_code_class type = TREE_CODE_CLASS (code);
  tree_node_kind kind;

  switch (type)
    {
    case tcc_declaration:  /* A decl node */
      kind = d_kind;
      break;

    case tcc_type:  /* a type node */
      kind = t_kind;
      break;

    case tcc_statement:  /* an expression with side effects */
      kind = s_kind;
      break;

    case tcc_reference:  /* a reference */
      kind = r_kind;
      break;

    case tcc_expression:  /* an expression */
    case tcc_comparison:  /* a comparison expression */
    case tcc_unary:       /* a unary arithmetic expression */
    case tcc_binary:      /* a binary arithmetic expression */
      kind = e_kind;
      break;

    case tcc_constant:  /* a constant */
      kind = c_kind;
      break;

    case tcc_exceptional:  /* something random, like an identifier.  */
      switch (code)
        {
        case IDENTIFIER_NODE:
          kind = id_kind;
          break;

        case TREE_VEC:
          kind = vec_kind;
          break;

        case TREE_BINFO:
          kind = binfo_kind;
          break;

        case SSA_NAME:
          kind = ssa_name_kind;
          break;

        case BLOCK:
          kind = b_kind;
          break;

        case CONSTRUCTOR:
          kind = constr_kind;
          break;

        case OMP_CLAUSE:
          kind = omp_clause_kind;
          break;

        default:
          kind = x_kind;
          break;
        }
      break;

    case tcc_vl_exp:
      kind = e_kind;
      break;

    default:
      gcc_unreachable ();
    }

  tree_code_counts[(int) code]++;
  tree_node_counts[(int) kind]++;
  tree_node_sizes[(int) kind] += length;
#endif
}

/* Allocate and return a new UID from the DECL_UID namespace.  */

int
allocate_decl_uid (void)
{
  return next_decl_uid++;
}

/* Return a newly allocated node of code CODE.  For decl and type
   nodes, some other fields are initialized.  The rest of the node is
   initialized to zero.  This function cannot be used for TREE_VEC or
   OMP_CLAUSE nodes, which is enforced by asserts in tree_code_size.

   Achoo!  I got a code in the node.  */

tree
make_node_stat (enum tree_code code MEM_STAT_DECL)
{
  tree t;
  enum tree_code_class type = TREE_CODE_CLASS (code);
  size_t length = tree_code_size (code);

  record_node_allocation_statistics (code, length);

  t = ggc_alloc_zone_cleared_tree_node_stat (
    (code == IDENTIFIER_NODE) ? &tree_id_zone : &tree_zone,
    length PASS_MEM_STAT);
  TREE_SET_CODE (t, code);

  switch (type)
    {
    case tcc_statement:
      TREE_SIDE_EFFECTS (t) = 1;
      break;

    case tcc_declaration:
      if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
        {
          if (code == FUNCTION_DECL)
            {
              DECL_ALIGN (t) = FUNCTION_BOUNDARY;
              DECL_MODE (t) = FUNCTION_MODE;
            }
          else
            DECL_ALIGN (t) = 1;
        }
      DECL_SOURCE_LOCATION (t) = input_location;
      if (TREE_CODE (t) == DEBUG_EXPR_DECL)
        DECL_UID (t) = --next_debug_decl_uid;
      else
        {
          DECL_UID (t) = allocate_decl_uid ();
          SET_DECL_PT_UID (t, -1);
        }
      if (TREE_CODE (t) == LABEL_DECL)
        LABEL_DECL_UID (t) = -1;

      break;

    case tcc_type:
      TYPE_UID (t) = next_type_uid++;
      TYPE_ALIGN (t) = BITS_PER_UNIT;
      TYPE_USER_ALIGN (t) = 0;
      TYPE_MAIN_VARIANT (t) = t;
      TYPE_CANONICAL (t) = t;

      /* Default to no attributes for type, but let target change that.  */
      TYPE_ATTRIBUTES (t) = NULL_TREE;
      targetm.set_default_type_attributes (t);

      /* We have not yet computed the alias set for this type.  */
      TYPE_ALIAS_SET (t) = -1;
      break;

    case tcc_constant:
      TREE_CONSTANT (t) = 1;
      break;

    case tcc_expression:
      switch (code)
        {
        case INIT_EXPR:
        case MODIFY_EXPR:
        case VA_ARG_EXPR:
        case PREDECREMENT_EXPR:
        case PREINCREMENT_EXPR:
        case POSTDECREMENT_EXPR:
        case POSTINCREMENT_EXPR:
          /* All of these have side-effects, no matter what their
             operands are.  */
          TREE_SIDE_EFFECTS (t) = 1;
          break;

        default:
          break;
        }
      break;

    default:
      /* Other classes need no special treatment.  */
      break;
    }

  return t;
}

/* Return a new node with the same contents as NODE except that its
   TREE_CHAIN, if it has one, is zero and it has a fresh uid.  */

tree
copy_node_stat (tree node MEM_STAT_DECL)
{
  tree t;
  enum tree_code code = TREE_CODE (node);
  size_t length;

  gcc_assert (code != STATEMENT_LIST);

  length = tree_size (node);
  record_node_allocation_statistics (code, length);
  t = ggc_alloc_zone_tree_node_stat (&tree_zone, length PASS_MEM_STAT);
  memcpy (t, node, length);

  if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
    TREE_CHAIN (t) = 0;
  TREE_ASM_WRITTEN (t) = 0;
  TREE_VISITED (t) = 0;
  if (code == VAR_DECL || code == PARM_DECL || code == RESULT_DECL)
    *DECL_VAR_ANN_PTR (t) = 0;

  if (TREE_CODE_CLASS (code) == tcc_declaration)
    {
      if (code == DEBUG_EXPR_DECL)
        DECL_UID (t) = --next_debug_decl_uid;
      else
        {
          DECL_UID (t) = allocate_decl_uid ();
          if (DECL_PT_UID_SET_P (node))
            SET_DECL_PT_UID (t, DECL_PT_UID (node));
        }
      if ((TREE_CODE (node) == PARM_DECL || TREE_CODE (node) == VAR_DECL)
          && DECL_HAS_VALUE_EXPR_P (node))
        {
          SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (node));
          DECL_HAS_VALUE_EXPR_P (t) = 1;
        }
      if (TREE_CODE (node) == VAR_DECL && DECL_HAS_INIT_PRIORITY_P (node))
        {
          SET_DECL_INIT_PRIORITY (t, DECL_INIT_PRIORITY (node));
          DECL_HAS_INIT_PRIORITY_P (t) = 1;
        }
    }
  else if (TREE_CODE_CLASS (code) == tcc_type)
    {
      TYPE_UID (t) = next_type_uid++;
      /* The following is so that the debug code for
         the copy is different from the original type.
         The two statements usually duplicate each other
         (because they clear fields of the same union),
         but the optimizer should catch that.  */
      TYPE_SYMTAB_POINTER (t) = 0;
      TYPE_SYMTAB_ADDRESS (t) = 0;

      /* Do not copy the values cache.  */
      if (TYPE_CACHED_VALUES_P (t))
        {
          TYPE_CACHED_VALUES_P (t) = 0;
          TYPE_CACHED_VALUES (t) = NULL_TREE;
        }
    }

  return t;
}

/* Return a copy of a chain of nodes, chained through the TREE_CHAIN field.
   For example, this can copy a list made of TREE_LIST nodes.  */

tree
copy_list (tree list)
{
  tree head;
  tree prev, next;

  if (list == 0)
    return 0;

  head = prev = copy_node (list);
  next = TREE_CHAIN (list);
  while (next)
    {
      TREE_CHAIN (prev) = copy_node (next);
      prev = TREE_CHAIN (prev);
      next = TREE_CHAIN (next);
    }
  return head;
}


/* Create an INT_CST node with a LOW value sign extended to TYPE.  */

tree
build_int_cst (tree type, HOST_WIDE_INT low)
{
  /* Support legacy code.  */
  if (!type)
    type = integer_type_node;

  return double_int_to_tree (type, shwi_to_double_int (low));
}

/* Create an INT_CST node with a LOW value sign extended to TYPE.  */

tree
build_int_cst_type (tree type, HOST_WIDE_INT low)
{
  gcc_assert (type);

  return double_int_to_tree (type, shwi_to_double_int (low));
}

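/* Illustrative sketch of assumed usage of the two constructors above
   (not exercised here):

     tree forty_two = build_int_cst (integer_type_node, 42);
     tree zero_size = build_int_cst_type (size_type_node, 0);

   Passing a NULL type to build_int_cst falls back to integer_type_node,
   while build_int_cst_type insists on an explicit type.  */
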
/* Constructs tree in type TYPE with the value given by CST.  Signedness
   of CST is assumed to be the same as the signedness of TYPE.  */

tree
double_int_to_tree (tree type, double_int cst)
{
  /* Size types *are* sign extended.  */
  bool sign_extended_type = (!TYPE_UNSIGNED (type)
                             || (TREE_CODE (type) == INTEGER_TYPE
                                 && TYPE_IS_SIZETYPE (type)));

  cst = double_int_ext (cst, TYPE_PRECISION (type), !sign_extended_type);

  return build_int_cst_wide (type, cst.low, cst.high);
}

/* Returns true if CST fits into range of TYPE.  Signedness of CST is assumed
   to be the same as the signedness of TYPE.  */

bool
double_int_fits_to_tree_p (const_tree type, double_int cst)
{
  /* Size types *are* sign extended.  */
  bool sign_extended_type = (!TYPE_UNSIGNED (type)
                             || (TREE_CODE (type) == INTEGER_TYPE
                                 && TYPE_IS_SIZETYPE (type)));

  double_int ext
    = double_int_ext (cst, TYPE_PRECISION (type), !sign_extended_type);

  return double_int_equal_p (cst, ext);
}

/* We force the double_int CST to the range of the type TYPE by sign or
   zero extending it.  OVERFLOWABLE indicates if we are interested in
   overflow of the value, when >0 we are only interested in signed
   overflow, for <0 we are interested in any overflow.  OVERFLOWED
   indicates whether overflow has already occurred.  CONST_OVERFLOWED
   indicates whether constant overflow has already occurred.  We force
   T's value to be within range of T's type (by setting to 0 or 1 all
   the bits outside the type's range).  We set TREE_OVERFLOWED if,
        OVERFLOWED is nonzero,
        or OVERFLOWABLE is >0 and signed overflow occurs
        or OVERFLOWABLE is <0 and any overflow occurs
   We return a new tree node for the extended double_int.  The node
   is shared if no overflow flags are set.  */


tree
force_fit_type_double (tree type, double_int cst, int overflowable,
                       bool overflowed)
{
  bool sign_extended_type;

  /* Size types *are* sign extended.  */
  sign_extended_type = (!TYPE_UNSIGNED (type)
                        || (TREE_CODE (type) == INTEGER_TYPE
                            && TYPE_IS_SIZETYPE (type)));

  /* If we need to set overflow flags, return a new unshared node.  */
  if (overflowed || !double_int_fits_to_tree_p (type, cst))
    {
      if (overflowed
          || overflowable < 0
          || (overflowable > 0 && sign_extended_type))
        {
          tree t = make_node (INTEGER_CST);
          TREE_INT_CST (t) = double_int_ext (cst, TYPE_PRECISION (type),
                                             !sign_extended_type);
          TREE_TYPE (t) = type;
          TREE_OVERFLOW (t) = 1;
          return t;
        }
    }

  /* Else build a shared node.  */
  return double_int_to_tree (type, cst);
}

/* These are the hash table functions for the hash table of INTEGER_CST
   nodes of a sizetype.  */

/* Return the hash code of X, an INTEGER_CST.  */

static hashval_t
int_cst_hash_hash (const void *x)
{
  const_tree const t = (const_tree) x;

  return (TREE_INT_CST_HIGH (t) ^ TREE_INT_CST_LOW (t)
          ^ htab_hash_pointer (TREE_TYPE (t)));
}

/* Return nonzero if the value represented by *X (an INTEGER_CST tree node)
   is the same as that given by *Y, also an INTEGER_CST.  */

static int
int_cst_hash_eq (const void *x, const void *y)
{
  const_tree const xt = (const_tree) x;
  const_tree const yt = (const_tree) y;

  return (TREE_TYPE (xt) == TREE_TYPE (yt)
          && TREE_INT_CST_HIGH (xt) == TREE_INT_CST_HIGH (yt)
          && TREE_INT_CST_LOW (xt) == TREE_INT_CST_LOW (yt));
}

/* Create an INT_CST node of TYPE and value HI:LOW.
   The returned node is always shared.  For small integers we use a
   per-type vector cache, for larger ones we use a single hash table.  */

tree
build_int_cst_wide (tree type, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
{
  tree t;
  int ix = -1;
  int limit = 0;

  gcc_assert (type);

  switch (TREE_CODE (type))
    {
    case NULLPTR_TYPE:
      gcc_assert (hi == 0 && low == 0);
      /* Fallthru.  */

    case POINTER_TYPE:
    case REFERENCE_TYPE:
      /* Cache NULL pointer.  */
      if (!hi && !low)
        {
          limit = 1;
          ix = 0;
        }
      break;

    case BOOLEAN_TYPE:
      /* Cache false or true.  */
      limit = 2;
      if (!hi && low < 2)
        ix = low;
      break;

    case INTEGER_TYPE:
    case OFFSET_TYPE:
      if (TYPE_UNSIGNED (type))
        {
          /* Cache 0..N */
          limit = INTEGER_SHARE_LIMIT;
          if (!hi && low < (unsigned HOST_WIDE_INT) INTEGER_SHARE_LIMIT)
            ix = low;
        }
      else
        {
          /* Cache -1..N */
          limit = INTEGER_SHARE_LIMIT + 1;
          if (!hi && low < (unsigned HOST_WIDE_INT) INTEGER_SHARE_LIMIT)
            ix = low + 1;
          else if (hi == -1 && low == -(unsigned HOST_WIDE_INT) 1)
            ix = 0;
        }
      break;

    case ENUMERAL_TYPE:
      break;

    default:
      gcc_unreachable ();
    }

  if (ix >= 0)
    {
      /* Look for it in the type's vector of small shared ints.  */
      if (!TYPE_CACHED_VALUES_P (type))
        {
          TYPE_CACHED_VALUES_P (type) = 1;
          TYPE_CACHED_VALUES (type) = make_tree_vec (limit);
        }

      t = TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix);
      if (t)
        {
          /* Make sure no one is clobbering the shared constant.  */
          gcc_assert (TREE_TYPE (t) == type);
          gcc_assert (TREE_INT_CST_LOW (t) == low);
          gcc_assert (TREE_INT_CST_HIGH (t) == hi);
        }
      else
        {
          /* Create a new shared int.  */
          t = make_node (INTEGER_CST);

          TREE_INT_CST_LOW (t) = low;
          TREE_INT_CST_HIGH (t) = hi;
          TREE_TYPE (t) = type;

          TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
        }
    }
  else
    {
      /* Use the cache of larger shared ints.  */
      void **slot;

      TREE_INT_CST_LOW (int_cst_node) = low;
      TREE_INT_CST_HIGH (int_cst_node) = hi;
      TREE_TYPE (int_cst_node) = type;

      slot = htab_find_slot (int_cst_hash_table, int_cst_node, INSERT);
      t = (tree) *slot;
      if (!t)
        {
          /* Insert this one into the hash table.  */
          t = int_cst_node;
          *slot = t;
          /* Make a new node for next time round.  */
          int_cst_node = make_node (INTEGER_CST);
        }
    }

  return t;
}

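/* Sketch of the sharing guarantee above (illustrative, assuming the
   value is below INTEGER_SHARE_LIMIT):

     build_int_cst_wide (integer_type_node, 7, 0)
       == build_int_cst_wide (integer_type_node, 7, 0)

   holds as a pointer comparison, because the node comes from the
   per-type TYPE_CACHED_VALUES vector; larger values are unified
   through int_cst_hash_table instead.  */
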
/* Builds an integer constant in TYPE such that the lowest BITS bits are ones
   and the rest are zeros.  */

tree
build_low_bits_mask (tree type, unsigned bits)
{
  double_int mask;

  gcc_assert (bits <= TYPE_PRECISION (type));

  if (bits == TYPE_PRECISION (type)
      && !TYPE_UNSIGNED (type))
    /* Sign extended all-ones mask.  */
    mask = double_int_minus_one;
  else
    mask = double_int_mask (bits);

  return build_int_cst_wide (type, mask.low, mask.high);
}

/* Checks that X is an integer constant that can be expressed in (unsigned)
   HOST_WIDE_INT without loss of precision.  */

bool
cst_and_fits_in_hwi (const_tree x)
{
  if (TREE_CODE (x) != INTEGER_CST)
    return false;

  if (TYPE_PRECISION (TREE_TYPE (x)) > HOST_BITS_PER_WIDE_INT)
    return false;

  return (TREE_INT_CST_HIGH (x) == 0
          || TREE_INT_CST_HIGH (x) == -1);
}

/* Return a new VECTOR_CST node whose type is TYPE and whose values
   are in a list pointed to by VALS.  */

tree
build_vector (tree type, tree vals)
{
  tree v = make_node (VECTOR_CST);
  int over = 0;
  tree link;
  unsigned cnt = 0;

  TREE_VECTOR_CST_ELTS (v) = vals;
  TREE_TYPE (v) = type;

  /* Iterate through elements and check for overflow.  */
  for (link = vals; link; link = TREE_CHAIN (link))
    {
      tree value = TREE_VALUE (link);
      cnt++;

      /* Don't crash if we get an address constant.  */
      if (!CONSTANT_CLASS_P (value))
        continue;

      over |= TREE_OVERFLOW (value);
    }

  gcc_assert (cnt == TYPE_VECTOR_SUBPARTS (type));

  TREE_OVERFLOW (v) = over;
  return v;
}

/* Return a new VECTOR_CST node whose type is TYPE and whose values
   are extracted from V, a vector of CONSTRUCTOR_ELT.  */

tree
build_vector_from_ctor (tree type, VEC(constructor_elt,gc) *v)
{
  tree list = NULL_TREE;
  unsigned HOST_WIDE_INT idx;
  tree value;

  FOR_EACH_CONSTRUCTOR_VALUE (v, idx, value)
    list = tree_cons (NULL_TREE, value, list);
  for (; idx < TYPE_VECTOR_SUBPARTS (type); ++idx)
    list = tree_cons (NULL_TREE,
                      build_zero_cst (TREE_TYPE (type)), list);
  return build_vector (type, nreverse (list));
}

/* Build a vector of type VECTYPE where all the elements are SCs.  */
tree
build_vector_from_val (tree vectype, tree sc)
{
  int i, nunits = TYPE_VECTOR_SUBPARTS (vectype);
  VEC(constructor_elt, gc) *v = NULL;

  if (sc == error_mark_node)
    return sc;

  /* Verify that the vector type is suitable for SC.  Note that there
     is some inconsistency in the type-system with respect to restrict
     qualifications of pointers.  Vector types always have a main-variant
     element type and the qualification is applied to the vector-type.
     So TREE_TYPE (vector-type) does not return a properly qualified
     vector element-type.  */
  gcc_checking_assert (types_compatible_p (TYPE_MAIN_VARIANT (TREE_TYPE (sc)),
                                           TREE_TYPE (vectype)));

  v = VEC_alloc (constructor_elt, gc, nunits);
  for (i = 0; i < nunits; ++i)
    CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, sc);

  if (CONSTANT_CLASS_P (sc))
    return build_vector_from_ctor (vectype, v);
  else
    return build_constructor (vectype, v);
}

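/* Illustrative sketch (assumed type name, not exercised here): given a
   vector type v4si_type with four int elements,

     tree ones = build_vector_from_val (v4si_type,
                                        build_one_cst (integer_type_node));

   yields the constant vector {1, 1, 1, 1}; a non-constant SC would
   instead produce a CONSTRUCTOR via build_constructor.  */
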
/* Return a new CONSTRUCTOR node whose type is TYPE and whose values
   are in the VEC pointed to by VALS.  */
tree
build_constructor (tree type, VEC(constructor_elt,gc) *vals)
{
  tree c = make_node (CONSTRUCTOR);
  unsigned int i;
  constructor_elt *elt;
  bool constant_p = true;

  TREE_TYPE (c) = type;
  CONSTRUCTOR_ELTS (c) = vals;

  FOR_EACH_VEC_ELT (constructor_elt, vals, i, elt)
    if (!TREE_CONSTANT (elt->value))
      {
        constant_p = false;
        break;
      }

  TREE_CONSTANT (c) = constant_p;

  return c;
}

/* Build a CONSTRUCTOR node made of a single initializer, with the specified
   INDEX and VALUE.  */
tree
build_constructor_single (tree type, tree index, tree value)
{
  VEC(constructor_elt,gc) *v;
  constructor_elt *elt;

  v = VEC_alloc (constructor_elt, gc, 1);
  elt = VEC_quick_push (constructor_elt, v, NULL);
  elt->index = index;
  elt->value = value;

  return build_constructor (type, v);
}


/* Return a new CONSTRUCTOR node whose type is TYPE and whose values
   are in a list pointed to by VALS.  */
tree
build_constructor_from_list (tree type, tree vals)
{
  tree t;
  VEC(constructor_elt,gc) *v = NULL;

  if (vals)
    {
      v = VEC_alloc (constructor_elt, gc, list_length (vals));
      for (t = vals; t; t = TREE_CHAIN (t))
        CONSTRUCTOR_APPEND_ELT (v, TREE_PURPOSE (t), TREE_VALUE (t));
    }

  return build_constructor (type, v);
}

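/* Illustrative sketch of assumed usage (not exercised here): a
   single-element initializer such as { [0] = val } for an array type
   arr_type could be built as

     tree ctor = build_constructor_single (arr_type, size_zero_node, val);

   where the index/value pair becomes the lone constructor_elt.  */
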
/* Return a new FIXED_CST node whose type is TYPE and value is F. */
|
1455 |
|
|
|
1456 |
|
|
tree
|
1457 |
|
|
build_fixed (tree type, FIXED_VALUE_TYPE f)
|
1458 |
|
|
{
|
1459 |
|
|
tree v;
|
1460 |
|
|
FIXED_VALUE_TYPE *fp;
|
1461 |
|
|
|
1462 |
|
|
v = make_node (FIXED_CST);
|
1463 |
|
|
fp = ggc_alloc_fixed_value ();
|
1464 |
|
|
memcpy (fp, &f, sizeof (FIXED_VALUE_TYPE));
|
1465 |
|
|
|
1466 |
|
|
TREE_TYPE (v) = type;
|
1467 |
|
|
TREE_FIXED_CST_PTR (v) = fp;
|
1468 |
|
|
return v;
|
1469 |
|
|
}
|
1470 |
|
|
|
1471 |
|
|
/* Return a new REAL_CST node whose type is TYPE and value is D. */
|
1472 |
|
|
|
1473 |
|
|
tree
|
1474 |
|
|
build_real (tree type, REAL_VALUE_TYPE d)
|
1475 |
|
|
{
|
1476 |
|
|
tree v;
|
1477 |
|
|
REAL_VALUE_TYPE *dp;
|
1478 |
|
|
int overflow = 0;
|
1479 |
|
|
|
1480 |
|
|
/* ??? Used to check for overflow here via CHECK_FLOAT_TYPE.
|
1481 |
|
|
Consider doing it via real_convert now. */
|
1482 |
|
|
|
1483 |
|
|
v = make_node (REAL_CST);
|
1484 |
|
|
dp = ggc_alloc_real_value ();
|
1485 |
|
|
memcpy (dp, &d, sizeof (REAL_VALUE_TYPE));
|
1486 |
|
|
|
1487 |
|
|
TREE_TYPE (v) = type;
|
1488 |
|
|
TREE_REAL_CST_PTR (v) = dp;
|
1489 |
|
|
TREE_OVERFLOW (v) = overflow;
|
1490 |
|
|
return v;
|
1491 |
|
|
}
|
1492 |
|
|
|
1493 |
|
|
/* Return a new REAL_CST node whose type is TYPE
|
1494 |
|
|
and whose value is the integer value of the INTEGER_CST node I. */
|
1495 |
|
|
|
1496 |
|
|
REAL_VALUE_TYPE
|
1497 |
|
|
real_value_from_int_cst (const_tree type, const_tree i)
|
1498 |
|
|
{
|
1499 |
|
|
REAL_VALUE_TYPE d;
|
1500 |
|
|
|
1501 |
|
|
/* Clear all bits of the real value type so that we can later do
|
1502 |
|
|
bitwise comparisons to see if two values are the same. */
|
1503 |
|
|
memset (&d, 0, sizeof d);
|
1504 |
|
|
|
1505 |
|
|
real_from_integer (&d, type ? TYPE_MODE (type) : VOIDmode,
|
1506 |
|
|
TREE_INT_CST_LOW (i), TREE_INT_CST_HIGH (i),
|
1507 |
|
|
TYPE_UNSIGNED (TREE_TYPE (i)));
|
1508 |
|
|
return d;
|
1509 |
|
|
}
|
1510 |
|
|
|
1511 |
|
|
/* Given a tree representing an integer constant I, return a tree
|
1512 |
|
|
representing the same value as a floating-point constant of type TYPE. */
|
1513 |
|
|
|
1514 |
|
|
tree
|
1515 |
|
|
build_real_from_int_cst (tree type, const_tree i)
|
1516 |
|
|
{
|
1517 |
|
|
tree v;
|
1518 |
|
|
int overflow = TREE_OVERFLOW (i);
|
1519 |
|
|
|
1520 |
|
|
v = build_real (type, real_value_from_int_cst (type, i));
|
1521 |
|
|
|
1522 |
|
|
TREE_OVERFLOW (v) |= overflow;
|
1523 |
|
|
return v;
|
1524 |
|
|
}
|
1525 |
|
|
|
1526 |
|
|
/* Return a newly constructed STRING_CST node whose value is
|
1527 |
|
|
the LEN characters at STR.
|
1528 |
|
|
Note that for a C string literal, LEN should include the trailing NUL.
|
1529 |
|
|
The TREE_TYPE is not initialized. */
|
1530 |
|
|
|
1531 |
|
|
tree
|
1532 |
|
|
build_string (int len, const char *str)
|
1533 |
|
|
{
|
1534 |
|
|
tree s;
|
1535 |
|
|
size_t length;
|
1536 |
|
|
|
1537 |
|
|
/* Do not waste bytes provided by padding of struct tree_string. */
|
1538 |
|
|
length = len + offsetof (struct tree_string, str) + 1;
|
1539 |
|
|
|
1540 |
|
|
record_node_allocation_statistics (STRING_CST, length);
|
1541 |
|
|
|
1542 |
|
|
s = ggc_alloc_tree_node (length);
|
1543 |
|
|
|
1544 |
|
|
memset (s, 0, sizeof (struct tree_typed));
|
1545 |
|
|
TREE_SET_CODE (s, STRING_CST);
|
1546 |
|
|
TREE_CONSTANT (s) = 1;
|
1547 |
|
|
TREE_STRING_LENGTH (s) = len;
|
1548 |
|
|
memcpy (s->string.str, str, len);
|
1549 |
|
|
s->string.str[len] = '\0';
|
1550 |
|
|
|
1551 |
|
|
return s;
|
1552 |
|
|
}
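/* Usage sketch (illustrative only, not from the original sources):
   building a STRING_CST for a C string literal and giving it an array
   type afterwards, much as the C front end does; char_type_node is
   assumed to be set up and LEN counts the trailing NUL:

     const char *p = "hi";
     int len = strlen (p) + 1;
     tree s = build_string (len, p);
     TREE_TYPE (s)
       = build_array_type (char_type_node,
                           build_index_type (size_int (len - 1)));  */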
|
1553 |
|
|
|
1554 |
|
|
/* Return a newly constructed COMPLEX_CST node whose value is
|
1555 |
|
|
specified by the real and imaginary parts REAL and IMAG.
|
1556 |
|
|
Both REAL and IMAG should be constant nodes. TYPE, if specified,
|
1557 |
|
|
will be the type of the COMPLEX_CST; otherwise a new type will be made. */
|
1558 |
|
|
|
1559 |
|
|
tree
|
1560 |
|
|
build_complex (tree type, tree real, tree imag)
|
1561 |
|
|
{
|
1562 |
|
|
tree t = make_node (COMPLEX_CST);
|
1563 |
|
|
|
1564 |
|
|
TREE_REALPART (t) = real;
|
1565 |
|
|
TREE_IMAGPART (t) = imag;
|
1566 |
|
|
TREE_TYPE (t) = type ? type : build_complex_type (TREE_TYPE (real));
|
1567 |
|
|
TREE_OVERFLOW (t) = TREE_OVERFLOW (real) | TREE_OVERFLOW (imag);
|
1568 |
|
|
return t;
|
1569 |
|
|
}
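/* Usage sketch (illustrative only, not from the original sources):
   building the complex constant 3.0 + 4.0i, assuming
   complex_double_type_node and double_type_node are available:

     REAL_VALUE_TYPE re, im;
     real_from_string (&re, "3.0");
     real_from_string (&im, "4.0");
     tree c = build_complex (complex_double_type_node,
                             build_real (double_type_node, re),
                             build_real (double_type_node, im));

   Passing NULL_TREE for TYPE derives the complex type from the type of
   the real part instead.  */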
|
1570 |
|
|
|
1571 |
|
|
/* Return a constant of arithmetic type TYPE which is the
|
1572 |
|
|
multiplicative identity of the set TYPE. */
|
1573 |
|
|
|
1574 |
|
|
tree
|
1575 |
|
|
build_one_cst (tree type)
|
1576 |
|
|
{
|
1577 |
|
|
switch (TREE_CODE (type))
|
1578 |
|
|
{
|
1579 |
|
|
case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
|
1580 |
|
|
case POINTER_TYPE: case REFERENCE_TYPE:
|
1581 |
|
|
case OFFSET_TYPE:
|
1582 |
|
|
return build_int_cst (type, 1);
|
1583 |
|
|
|
1584 |
|
|
case REAL_TYPE:
|
1585 |
|
|
return build_real (type, dconst1);
|
1586 |
|
|
|
1587 |
|
|
case FIXED_POINT_TYPE:
|
1588 |
|
|
/* We can only generate 1 for accum types. */
|
1589 |
|
|
gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
|
1590 |
|
|
return build_fixed (type, FCONST1(TYPE_MODE (type)));
|
1591 |
|
|
|
1592 |
|
|
case VECTOR_TYPE:
|
1593 |
|
|
{
|
1594 |
|
|
tree scalar = build_one_cst (TREE_TYPE (type));
|
1595 |
|
|
|
1596 |
|
|
return build_vector_from_val (type, scalar);
|
1597 |
|
|
}
|
1598 |
|
|
|
1599 |
|
|
case COMPLEX_TYPE:
|
1600 |
|
|
return build_complex (type,
|
1601 |
|
|
build_one_cst (TREE_TYPE (type)),
|
1602 |
|
|
build_zero_cst (TREE_TYPE (type)));
|
1603 |
|
|
|
1604 |
|
|
default:
|
1605 |
|
|
gcc_unreachable ();
|
1606 |
|
|
}
|
1607 |
|
|
}
|
1608 |
|
|
|
1609 |
|
|
/* Build 0 constant of type TYPE. This is used by constructor folding
|
1610 |
|
|
and thus the constant should be represented in memory by
|
1611 |
|
|
zero(es). */
|
1612 |
|
|
|
1613 |
|
|
tree
|
1614 |
|
|
build_zero_cst (tree type)
|
1615 |
|
|
{
|
1616 |
|
|
switch (TREE_CODE (type))
|
1617 |
|
|
{
|
1618 |
|
|
case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
|
1619 |
|
|
case POINTER_TYPE: case REFERENCE_TYPE:
|
1620 |
|
|
case OFFSET_TYPE:
|
1621 |
|
|
return build_int_cst (type, 0);
|
1622 |
|
|
|
1623 |
|
|
case REAL_TYPE:
|
1624 |
|
|
return build_real (type, dconst0);
|
1625 |
|
|
|
1626 |
|
|
case FIXED_POINT_TYPE:
|
1627 |
|
|
return build_fixed (type, FCONST0 (TYPE_MODE (type)));
|
1628 |
|
|
|
1629 |
|
|
case VECTOR_TYPE:
|
1630 |
|
|
{
|
1631 |
|
|
tree scalar = build_zero_cst (TREE_TYPE (type));
|
1632 |
|
|
|
1633 |
|
|
return build_vector_from_val (type, scalar);
|
1634 |
|
|
}
|
1635 |
|
|
|
1636 |
|
|
case COMPLEX_TYPE:
|
1637 |
|
|
{
|
1638 |
|
|
tree zero = build_zero_cst (TREE_TYPE (type));
|
1639 |
|
|
|
1640 |
|
|
return build_complex (type, zero, zero);
|
1641 |
|
|
}
|
1642 |
|
|
|
1643 |
|
|
default:
|
1644 |
|
|
if (!AGGREGATE_TYPE_P (type))
|
1645 |
|
|
return fold_convert (type, integer_zero_node);
|
1646 |
|
|
return build_constructor (type, NULL);
|
1647 |
|
|
}
|
1648 |
|
|
}
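/* Usage sketch (illustrative only, not from the original sources):
   build_zero_cst and build_one_cst work uniformly across scalar,
   vector, complex and (for zero) aggregate types:

     tree zero = build_zero_cst (integer_type_node);    (INTEGER_CST 0)
     tree one  = build_one_cst (double_type_node);      (REAL_CST 1.0)

   For a VECTOR_TYPE the result is a vector of the per-element constant;
   for an aggregate type build_zero_cst returns an empty CONSTRUCTOR.  */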
|
1649 |
|
|
|
1650 |
|
|
|
1651 |
|
|
/* Build a BINFO with LEN language slots. */
|
1652 |
|
|
|
1653 |
|
|
tree
|
1654 |
|
|
make_tree_binfo_stat (unsigned base_binfos MEM_STAT_DECL)
|
1655 |
|
|
{
|
1656 |
|
|
tree t;
|
1657 |
|
|
size_t length = (offsetof (struct tree_binfo, base_binfos)
|
1658 |
|
|
+ VEC_embedded_size (tree, base_binfos));
|
1659 |
|
|
|
1660 |
|
|
record_node_allocation_statistics (TREE_BINFO, length);
|
1661 |
|
|
|
1662 |
|
|
t = ggc_alloc_zone_tree_node_stat (&tree_zone, length PASS_MEM_STAT);
|
1663 |
|
|
|
1664 |
|
|
memset (t, 0, offsetof (struct tree_binfo, base_binfos));
|
1665 |
|
|
|
1666 |
|
|
TREE_SET_CODE (t, TREE_BINFO);
|
1667 |
|
|
|
1668 |
|
|
VEC_embedded_init (tree, BINFO_BASE_BINFOS (t), base_binfos);
|
1669 |
|
|
|
1670 |
|
|
return t;
|
1671 |
|
|
}
|
1672 |
|
|
|
1673 |
|
|
/* Create a CASE_LABEL_EXPR tree node and return it. */
|
1674 |
|
|
|
1675 |
|
|
tree
|
1676 |
|
|
build_case_label (tree low_value, tree high_value, tree label_decl)
|
1677 |
|
|
{
|
1678 |
|
|
tree t = make_node (CASE_LABEL_EXPR);
|
1679 |
|
|
|
1680 |
|
|
TREE_TYPE (t) = void_type_node;
|
1681 |
|
|
SET_EXPR_LOCATION (t, DECL_SOURCE_LOCATION (label_decl));
|
1682 |
|
|
|
1683 |
|
|
CASE_LOW (t) = low_value;
|
1684 |
|
|
CASE_HIGH (t) = high_value;
|
1685 |
|
|
CASE_LABEL (t) = label_decl;
|
1686 |
|
|
CASE_CHAIN (t) = NULL_TREE;
|
1687 |
|
|
|
1688 |
|
|
return t;
|
1689 |
|
|
}
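/* Usage sketch (illustrative only, not from the original sources):
   building the tree for a "case 1:" label, where label_decl is assumed
   to be a LABEL_DECL created elsewhere by the front end:

     tree c1 = build_case_label (build_int_cst (integer_type_node, 1),
                                 NULL_TREE, label_decl);

   A NULL_TREE CASE_HIGH means a single value rather than a range, and a
   NULL_TREE CASE_LOW is used for the default label.  */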
|
1690 |
|
|
|
1691 |
|
|
/* Build a newly constructed TREE_VEC node of length LEN. */
|
1692 |
|
|
|
1693 |
|
|
tree
|
1694 |
|
|
make_tree_vec_stat (int len MEM_STAT_DECL)
|
1695 |
|
|
{
|
1696 |
|
|
tree t;
|
1697 |
|
|
int length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);
|
1698 |
|
|
|
1699 |
|
|
record_node_allocation_statistics (TREE_VEC, length);
|
1700 |
|
|
|
1701 |
|
|
t = ggc_alloc_zone_cleared_tree_node_stat (&tree_zone, length PASS_MEM_STAT);
|
1702 |
|
|
|
1703 |
|
|
TREE_SET_CODE (t, TREE_VEC);
|
1704 |
|
|
TREE_VEC_LENGTH (t) = len;
|
1705 |
|
|
|
1706 |
|
|
return t;
|
1707 |
|
|
}
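/* Usage sketch (illustrative only, not from the original sources):
   callers normally go through the make_tree_vec macro from tree.h,
   which supplies the MEM_STAT arguments, and then fill in the slots:

     tree v = make_tree_vec (2);
     TREE_VEC_ELT (v, 0) = integer_zero_node;
     TREE_VEC_ELT (v, 1) = integer_one_node;  */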
|
1708 |
|
|
|
1709 |
|
|
/* Return 1 if EXPR is the integer constant zero or a complex constant
|
1710 |
|
|
of zero. */
|
1711 |
|
|
|
1712 |
|
|
int
|
1713 |
|
|
integer_zerop (const_tree expr)
|
1714 |
|
|
{
|
1715 |
|
|
STRIP_NOPS (expr);
|
1716 |
|
|
|
1717 |
|
|
return ((TREE_CODE (expr) == INTEGER_CST
|
1718 |
|
|
&& TREE_INT_CST_LOW (expr) == 0
|
1719 |
|
|
&& TREE_INT_CST_HIGH (expr) == 0)
|
1720 |
|
|
|| (TREE_CODE (expr) == COMPLEX_CST
|
1721 |
|
|
&& integer_zerop (TREE_REALPART (expr))
|
1722 |
|
|
&& integer_zerop (TREE_IMAGPART (expr))));
|
1723 |
|
|
}
|
1724 |
|
|
|
1725 |
|
|
/* Return 1 if EXPR is the integer constant one or the corresponding
|
1726 |
|
|
complex constant. */
|
1727 |
|
|
|
1728 |
|
|
int
|
1729 |
|
|
integer_onep (const_tree expr)
|
1730 |
|
|
{
|
1731 |
|
|
STRIP_NOPS (expr);
|
1732 |
|
|
|
1733 |
|
|
return ((TREE_CODE (expr) == INTEGER_CST
|
1734 |
|
|
&& TREE_INT_CST_LOW (expr) == 1
|
1735 |
|
|
&& TREE_INT_CST_HIGH (expr) == 0)
|
1736 |
|
|
|| (TREE_CODE (expr) == COMPLEX_CST
|
1737 |
|
|
&& integer_onep (TREE_REALPART (expr))
|
1738 |
|
|
&& integer_zerop (TREE_IMAGPART (expr))));
|
1739 |
|
|
}
|
1740 |
|
|
|
1741 |
|
|
/* Return 1 if EXPR is an integer constant containing all 1's in as much precision as
|
1742 |
|
|
it contains. Likewise for the corresponding complex constant. */
|
1743 |
|
|
|
1744 |
|
|
int
|
1745 |
|
|
integer_all_onesp (const_tree expr)
|
1746 |
|
|
{
|
1747 |
|
|
int prec;
|
1748 |
|
|
int uns;
|
1749 |
|
|
|
1750 |
|
|
STRIP_NOPS (expr);
|
1751 |
|
|
|
1752 |
|
|
if (TREE_CODE (expr) == COMPLEX_CST
|
1753 |
|
|
&& integer_all_onesp (TREE_REALPART (expr))
|
1754 |
|
|
&& integer_zerop (TREE_IMAGPART (expr)))
|
1755 |
|
|
return 1;
|
1756 |
|
|
|
1757 |
|
|
else if (TREE_CODE (expr) != INTEGER_CST)
|
1758 |
|
|
return 0;
|
1759 |
|
|
|
1760 |
|
|
uns = TYPE_UNSIGNED (TREE_TYPE (expr));
|
1761 |
|
|
if (TREE_INT_CST_LOW (expr) == ~(unsigned HOST_WIDE_INT) 0
|
1762 |
|
|
&& TREE_INT_CST_HIGH (expr) == -1)
|
1763 |
|
|
return 1;
|
1764 |
|
|
if (!uns)
|
1765 |
|
|
return 0;
|
1766 |
|
|
|
1767 |
|
|
prec = TYPE_PRECISION (TREE_TYPE (expr));
|
1768 |
|
|
if (prec >= HOST_BITS_PER_WIDE_INT)
|
1769 |
|
|
{
|
1770 |
|
|
HOST_WIDE_INT high_value;
|
1771 |
|
|
int shift_amount;
|
1772 |
|
|
|
1773 |
|
|
shift_amount = prec - HOST_BITS_PER_WIDE_INT;
|
1774 |
|
|
|
1775 |
|
|
/* Cannot handle precisions greater than twice the host int size. */
|
1776 |
|
|
gcc_assert (shift_amount <= HOST_BITS_PER_WIDE_INT);
|
1777 |
|
|
if (shift_amount == HOST_BITS_PER_WIDE_INT)
|
1778 |
|
|
/* Shifting by the host word size is undefined according to the ANSI
|
1779 |
|
|
standard, so we must handle this as a special case. */
|
1780 |
|
|
high_value = -1;
|
1781 |
|
|
else
|
1782 |
|
|
high_value = ((HOST_WIDE_INT) 1 << shift_amount) - 1;
|
1783 |
|
|
|
1784 |
|
|
return (TREE_INT_CST_LOW (expr) == ~(unsigned HOST_WIDE_INT) 0
|
1785 |
|
|
&& TREE_INT_CST_HIGH (expr) == high_value);
|
1786 |
|
|
}
|
1787 |
|
|
else
|
1788 |
|
|
return TREE_INT_CST_LOW (expr) == ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
|
1789 |
|
|
}
|
1790 |
|
|
|
1791 |
|
|
/* Return 1 if EXPR is an integer constant that is a power of 2 (i.e., has only
|
1792 |
|
|
one bit on). */
|
1793 |
|
|
|
1794 |
|
|
int
|
1795 |
|
|
integer_pow2p (const_tree expr)
|
1796 |
|
|
{
|
1797 |
|
|
int prec;
|
1798 |
|
|
HOST_WIDE_INT high, low;
|
1799 |
|
|
|
1800 |
|
|
STRIP_NOPS (expr);
|
1801 |
|
|
|
1802 |
|
|
if (TREE_CODE (expr) == COMPLEX_CST
|
1803 |
|
|
&& integer_pow2p (TREE_REALPART (expr))
|
1804 |
|
|
&& integer_zerop (TREE_IMAGPART (expr)))
|
1805 |
|
|
return 1;
|
1806 |
|
|
|
1807 |
|
|
if (TREE_CODE (expr) != INTEGER_CST)
|
1808 |
|
|
return 0;
|
1809 |
|
|
|
1810 |
|
|
prec = TYPE_PRECISION (TREE_TYPE (expr));
|
1811 |
|
|
high = TREE_INT_CST_HIGH (expr);
|
1812 |
|
|
low = TREE_INT_CST_LOW (expr);
|
1813 |
|
|
|
1814 |
|
|
/* First clear all bits that are beyond the type's precision in case
|
1815 |
|
|
we've been sign extended. */
|
1816 |
|
|
|
1817 |
|
|
if (prec == 2 * HOST_BITS_PER_WIDE_INT)
|
1818 |
|
|
;
|
1819 |
|
|
else if (prec > HOST_BITS_PER_WIDE_INT)
|
1820 |
|
|
high &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
|
1821 |
|
|
else
|
1822 |
|
|
{
|
1823 |
|
|
high = 0;
|
1824 |
|
|
if (prec < HOST_BITS_PER_WIDE_INT)
|
1825 |
|
|
low &= ~((HOST_WIDE_INT) (-1) << prec);
|
1826 |
|
|
}
|
1827 |
|
|
|
1828 |
|
|
if (high == 0 && low == 0)
|
1829 |
|
|
return 0;
|
1830 |
|
|
|
1831 |
|
|
return ((high == 0 && (low & (low - 1)) == 0)
|
1832 |
|
|
|| (low == 0 && (high & (high - 1)) == 0));
|
1833 |
|
|
}
|
1834 |
|
|
|
1835 |
|
|
/* Return 1 if EXPR is an integer constant other than zero or a
|
1836 |
|
|
complex constant other than zero. */
|
1837 |
|
|
|
1838 |
|
|
int
|
1839 |
|
|
integer_nonzerop (const_tree expr)
|
1840 |
|
|
{
|
1841 |
|
|
STRIP_NOPS (expr);
|
1842 |
|
|
|
1843 |
|
|
return ((TREE_CODE (expr) == INTEGER_CST
|
1844 |
|
|
&& (TREE_INT_CST_LOW (expr) != 0
|
1845 |
|
|
|| TREE_INT_CST_HIGH (expr) != 0))
|
1846 |
|
|
|| (TREE_CODE (expr) == COMPLEX_CST
|
1847 |
|
|
&& (integer_nonzerop (TREE_REALPART (expr))
|
1848 |
|
|
|| integer_nonzerop (TREE_IMAGPART (expr)))));
|
1849 |
|
|
}
|
1850 |
|
|
|
1851 |
|
|
/* Return 1 if EXPR is the fixed-point constant zero. */
|
1852 |
|
|
|
1853 |
|
|
int
|
1854 |
|
|
fixed_zerop (const_tree expr)
|
1855 |
|
|
{
|
1856 |
|
|
return (TREE_CODE (expr) == FIXED_CST
|
1857 |
|
|
&& double_int_zero_p (TREE_FIXED_CST (expr).data));
|
1858 |
|
|
}
|
1859 |
|
|
|
1860 |
|
|
/* Return the power of two represented by a tree node known to be a
|
1861 |
|
|
power of two. */
|
1862 |
|
|
|
1863 |
|
|
int
|
1864 |
|
|
tree_log2 (const_tree expr)
|
1865 |
|
|
{
|
1866 |
|
|
int prec;
|
1867 |
|
|
HOST_WIDE_INT high, low;
|
1868 |
|
|
|
1869 |
|
|
STRIP_NOPS (expr);
|
1870 |
|
|
|
1871 |
|
|
if (TREE_CODE (expr) == COMPLEX_CST)
|
1872 |
|
|
return tree_log2 (TREE_REALPART (expr));
|
1873 |
|
|
|
1874 |
|
|
prec = TYPE_PRECISION (TREE_TYPE (expr));
|
1875 |
|
|
high = TREE_INT_CST_HIGH (expr);
|
1876 |
|
|
low = TREE_INT_CST_LOW (expr);
|
1877 |
|
|
|
1878 |
|
|
/* First clear all bits that are beyond the type's precision in case
|
1879 |
|
|
we've been sign extended. */
|
1880 |
|
|
|
1881 |
|
|
if (prec == 2 * HOST_BITS_PER_WIDE_INT)
|
1882 |
|
|
;
|
1883 |
|
|
else if (prec > HOST_BITS_PER_WIDE_INT)
|
1884 |
|
|
high &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
|
1885 |
|
|
else
|
1886 |
|
|
{
|
1887 |
|
|
high = 0;
|
1888 |
|
|
if (prec < HOST_BITS_PER_WIDE_INT)
|
1889 |
|
|
low &= ~((HOST_WIDE_INT) (-1) << prec);
|
1890 |
|
|
}
|
1891 |
|
|
|
1892 |
|
|
return (high != 0 ? HOST_BITS_PER_WIDE_INT + exact_log2 (high)
|
1893 |
|
|
: exact_log2 (low));
|
1894 |
|
|
}
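/* Usage sketch (illustrative only, not from the original sources):
   integer_pow2p and tree_log2 are typically used together, e.g. to
   turn a multiplication by a constant power of two into a shift count:

     tree cst = build_int_cst (integer_type_node, 8);
     if (integer_pow2p (cst))
       {
         int shift = tree_log2 (cst);    (here shift == 3)
       }
   */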
|
1895 |
|
|
|
1896 |
|
|
/* Similar, but return the largest integer Y such that 2 ** Y is less
|
1897 |
|
|
than or equal to EXPR. */
|
1898 |
|
|
|
1899 |
|
|
int
|
1900 |
|
|
tree_floor_log2 (const_tree expr)
|
1901 |
|
|
{
|
1902 |
|
|
int prec;
|
1903 |
|
|
HOST_WIDE_INT high, low;
|
1904 |
|
|
|
1905 |
|
|
STRIP_NOPS (expr);
|
1906 |
|
|
|
1907 |
|
|
if (TREE_CODE (expr) == COMPLEX_CST)
|
1908 |
|
|
return tree_log2 (TREE_REALPART (expr));
|
1909 |
|
|
|
1910 |
|
|
prec = TYPE_PRECISION (TREE_TYPE (expr));
|
1911 |
|
|
high = TREE_INT_CST_HIGH (expr);
|
1912 |
|
|
low = TREE_INT_CST_LOW (expr);
|
1913 |
|
|
|
1914 |
|
|
/* First clear all bits that are beyond the type's precision in case
|
1915 |
|
|
we've been sign extended. Ignore if type's precision hasn't been set
|
1916 |
|
|
since what we are doing is setting it. */
|
1917 |
|
|
|
1918 |
|
|
if (prec == 2 * HOST_BITS_PER_WIDE_INT || prec == 0)
|
1919 |
|
|
;
|
1920 |
|
|
else if (prec > HOST_BITS_PER_WIDE_INT)
|
1921 |
|
|
high &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
|
1922 |
|
|
else
|
1923 |
|
|
{
|
1924 |
|
|
high = 0;
|
1925 |
|
|
if (prec < HOST_BITS_PER_WIDE_INT)
|
1926 |
|
|
low &= ~((HOST_WIDE_INT) (-1) << prec);
|
1927 |
|
|
}
|
1928 |
|
|
|
1929 |
|
|
return (high != 0 ? HOST_BITS_PER_WIDE_INT + floor_log2 (high)
|
1930 |
|
|
: floor_log2 (low));
|
1931 |
|
|
}
|
1932 |
|
|
|
1933 |
|
|
/* Return 1 if EXPR is the real constant zero. Trailing zeroes matter for
|
1934 |
|
|
decimal float constants, so don't return 1 for them. */
|
1935 |
|
|
|
1936 |
|
|
int
|
1937 |
|
|
real_zerop (const_tree expr)
|
1938 |
|
|
{
|
1939 |
|
|
STRIP_NOPS (expr);
|
1940 |
|
|
|
1941 |
|
|
return ((TREE_CODE (expr) == REAL_CST
|
1942 |
|
|
&& REAL_VALUES_EQUAL (TREE_REAL_CST (expr), dconst0)
|
1943 |
|
|
&& !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr)))))
|
1944 |
|
|
|| (TREE_CODE (expr) == COMPLEX_CST
|
1945 |
|
|
&& real_zerop (TREE_REALPART (expr))
|
1946 |
|
|
&& real_zerop (TREE_IMAGPART (expr))));
|
1947 |
|
|
}
|
1948 |
|
|
|
1949 |
|
|
/* Return 1 if EXPR is the real constant one in real or complex form.
|
1950 |
|
|
Trailing zeroes matter for decimal float constants, so don't return
|
1951 |
|
|
1 for them. */
|
1952 |
|
|
|
1953 |
|
|
int
|
1954 |
|
|
real_onep (const_tree expr)
|
1955 |
|
|
{
|
1956 |
|
|
STRIP_NOPS (expr);
|
1957 |
|
|
|
1958 |
|
|
return ((TREE_CODE (expr) == REAL_CST
|
1959 |
|
|
&& REAL_VALUES_EQUAL (TREE_REAL_CST (expr), dconst1)
|
1960 |
|
|
&& !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr)))))
|
1961 |
|
|
|| (TREE_CODE (expr) == COMPLEX_CST
|
1962 |
|
|
&& real_onep (TREE_REALPART (expr))
|
1963 |
|
|
&& real_zerop (TREE_IMAGPART (expr))));
|
1964 |
|
|
}
|
1965 |
|
|
|
1966 |
|
|
/* Return 1 if EXPR is the real constant two. Trailing zeroes matter
|
1967 |
|
|
for decimal float constants, so don't return 1 for them. */
|
1968 |
|
|
|
1969 |
|
|
int
|
1970 |
|
|
real_twop (const_tree expr)
|
1971 |
|
|
{
|
1972 |
|
|
STRIP_NOPS (expr);
|
1973 |
|
|
|
1974 |
|
|
return ((TREE_CODE (expr) == REAL_CST
|
1975 |
|
|
&& REAL_VALUES_EQUAL (TREE_REAL_CST (expr), dconst2)
|
1976 |
|
|
&& !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr)))))
|
1977 |
|
|
|| (TREE_CODE (expr) == COMPLEX_CST
|
1978 |
|
|
&& real_twop (TREE_REALPART (expr))
|
1979 |
|
|
&& real_zerop (TREE_IMAGPART (expr))));
|
1980 |
|
|
}
|
1981 |
|
|
|
1982 |
|
|
/* Return 1 if EXPR is the real constant minus one. Trailing zeroes
|
1983 |
|
|
matter for decimal float constants, so don't return 1 for them. */
|
1984 |
|
|
|
1985 |
|
|
int
|
1986 |
|
|
real_minus_onep (const_tree expr)
|
1987 |
|
|
{
|
1988 |
|
|
STRIP_NOPS (expr);
|
1989 |
|
|
|
1990 |
|
|
return ((TREE_CODE (expr) == REAL_CST
|
1991 |
|
|
&& REAL_VALUES_EQUAL (TREE_REAL_CST (expr), dconstm1)
|
1992 |
|
|
&& !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr)))))
|
1993 |
|
|
|| (TREE_CODE (expr) == COMPLEX_CST
|
1994 |
|
|
&& real_minus_onep (TREE_REALPART (expr))
|
1995 |
|
|
&& real_zerop (TREE_IMAGPART (expr))));
|
1996 |
|
|
}
|
1997 |
|
|
|
1998 |
|
|
/* Nonzero if EXP is a constant or a cast of a constant. */
|
1999 |
|
|
|
2000 |
|
|
int
|
2001 |
|
|
really_constant_p (const_tree exp)
|
2002 |
|
|
{
|
2003 |
|
|
/* This is not quite the same as STRIP_NOPS. It does more. */
|
2004 |
|
|
while (CONVERT_EXPR_P (exp)
|
2005 |
|
|
|| TREE_CODE (exp) == NON_LVALUE_EXPR)
|
2006 |
|
|
exp = TREE_OPERAND (exp, 0);
|
2007 |
|
|
return TREE_CONSTANT (exp);
|
2008 |
|
|
}
|
2009 |
|
|
|
2010 |
|
|
/* Return first list element whose TREE_VALUE is ELEM.
|
2011 |
|
|
Return 0 if ELEM is not in LIST. */
|
2012 |
|
|
|
2013 |
|
|
tree
|
2014 |
|
|
value_member (tree elem, tree list)
|
2015 |
|
|
{
|
2016 |
|
|
while (list)
|
2017 |
|
|
{
|
2018 |
|
|
if (elem == TREE_VALUE (list))
|
2019 |
|
|
return list;
|
2020 |
|
|
list = TREE_CHAIN (list);
|
2021 |
|
|
}
|
2022 |
|
|
return NULL_TREE;
|
2023 |
|
|
}
|
2024 |
|
|
|
2025 |
|
|
/* Return first list element whose TREE_PURPOSE is ELEM.
|
2026 |
|
|
Return 0 if ELEM is not in LIST. */
|
2027 |
|
|
|
2028 |
|
|
tree
|
2029 |
|
|
purpose_member (const_tree elem, tree list)
|
2030 |
|
|
{
|
2031 |
|
|
while (list)
|
2032 |
|
|
{
|
2033 |
|
|
if (elem == TREE_PURPOSE (list))
|
2034 |
|
|
return list;
|
2035 |
|
|
list = TREE_CHAIN (list);
|
2036 |
|
|
}
|
2037 |
|
|
return NULL_TREE;
|
2038 |
|
|
}
|
2039 |
|
|
|
2040 |
|
|
/* Return true if ELEM is in V. */
|
2041 |
|
|
|
2042 |
|
|
bool
|
2043 |
|
|
vec_member (const_tree elem, VEC(tree,gc) *v)
|
2044 |
|
|
{
|
2045 |
|
|
unsigned ix;
|
2046 |
|
|
tree t;
|
2047 |
|
|
FOR_EACH_VEC_ELT (tree, v, ix, t)
|
2048 |
|
|
if (elem == t)
|
2049 |
|
|
return true;
|
2050 |
|
|
return false;
|
2051 |
|
|
}
|
2052 |
|
|
|
2053 |
|
|
/* Returns element number IDX (zero-origin) of chain CHAIN, or
|
2054 |
|
|
NULL_TREE. */
|
2055 |
|
|
|
2056 |
|
|
tree
|
2057 |
|
|
chain_index (int idx, tree chain)
|
2058 |
|
|
{
|
2059 |
|
|
for (; chain && idx > 0; --idx)
|
2060 |
|
|
chain = TREE_CHAIN (chain);
|
2061 |
|
|
return chain;
|
2062 |
|
|
}
|
2063 |
|
|
|
2064 |
|
|
/* Return nonzero if ELEM is part of the chain CHAIN. */
|
2065 |
|
|
|
2066 |
|
|
int
|
2067 |
|
|
chain_member (const_tree elem, const_tree chain)
|
2068 |
|
|
{
|
2069 |
|
|
while (chain)
|
2070 |
|
|
{
|
2071 |
|
|
if (elem == chain)
|
2072 |
|
|
return 1;
|
2073 |
|
|
chain = DECL_CHAIN (chain);
|
2074 |
|
|
}
|
2075 |
|
|
|
2076 |
|
|
return 0;
|
2077 |
|
|
}
|
2078 |
|
|
|
2079 |
|
|
/* Return the length of a chain of nodes chained through TREE_CHAIN.
|
2080 |
|
|
We expect a null pointer to mark the end of the chain.
|
2081 |
|
|
This is the Lisp primitive `length'. */
|
2082 |
|
|
|
2083 |
|
|
int
|
2084 |
|
|
list_length (const_tree t)
|
2085 |
|
|
{
|
2086 |
|
|
const_tree p = t;
|
2087 |
|
|
#ifdef ENABLE_TREE_CHECKING
|
2088 |
|
|
const_tree q = t;
|
2089 |
|
|
#endif
|
2090 |
|
|
int len = 0;
|
2091 |
|
|
|
2092 |
|
|
while (p)
|
2093 |
|
|
{
|
2094 |
|
|
p = TREE_CHAIN (p);
|
2095 |
|
|
#ifdef ENABLE_TREE_CHECKING
|
2096 |
|
|
if (len % 2)
|
2097 |
|
|
q = TREE_CHAIN (q);
|
2098 |
|
|
gcc_assert (p != q);
|
2099 |
|
|
#endif
|
2100 |
|
|
len++;
|
2101 |
|
|
}
|
2102 |
|
|
|
2103 |
|
|
return len;
|
2104 |
|
|
}
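/* Usage sketch (illustrative only, not from the original sources):
   building a two-element TREE_LIST with tree_cons and querying it:

     tree l = tree_cons (NULL_TREE, integer_one_node,
                         tree_cons (NULL_TREE, integer_zero_node,
                                    NULL_TREE));
     int n = list_length (l);                         (n is 2)
     tree hit = value_member (integer_zero_node, l);  (the second node)  */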
|
2105 |
|
|
|
2106 |
|
|
/* Returns the number of FIELD_DECLs in TYPE. */
|
2107 |
|
|
|
2108 |
|
|
int
|
2109 |
|
|
fields_length (const_tree type)
|
2110 |
|
|
{
|
2111 |
|
|
tree t = TYPE_FIELDS (type);
|
2112 |
|
|
int count = 0;
|
2113 |
|
|
|
2114 |
|
|
for (; t; t = DECL_CHAIN (t))
|
2115 |
|
|
if (TREE_CODE (t) == FIELD_DECL)
|
2116 |
|
|
++count;
|
2117 |
|
|
|
2118 |
|
|
return count;
|
2119 |
|
|
}
|
2120 |
|
|
|
2121 |
|
|
/* Returns the first FIELD_DECL in the TYPE_FIELDS of the RECORD_TYPE or
|
2122 |
|
|
UNION_TYPE TYPE, or NULL_TREE if none. */
|
2123 |
|
|
|
2124 |
|
|
tree
|
2125 |
|
|
first_field (const_tree type)
|
2126 |
|
|
{
|
2127 |
|
|
tree t = TYPE_FIELDS (type);
|
2128 |
|
|
while (t && TREE_CODE (t) != FIELD_DECL)
|
2129 |
|
|
t = TREE_CHAIN (t);
|
2130 |
|
|
return t;
|
2131 |
|
|
}
|
2132 |
|
|
|
2133 |
|
|
/* Concatenate two chains of nodes (chained through TREE_CHAIN)
|
2134 |
|
|
by modifying the last node in chain 1 to point to chain 2.
|
2135 |
|
|
This is the Lisp primitive `nconc'. */
|
2136 |
|
|
|
2137 |
|
|
tree
|
2138 |
|
|
chainon (tree op1, tree op2)
|
2139 |
|
|
{
|
2140 |
|
|
tree t1;
|
2141 |
|
|
|
2142 |
|
|
if (!op1)
|
2143 |
|
|
return op2;
|
2144 |
|
|
if (!op2)
|
2145 |
|
|
return op1;
|
2146 |
|
|
|
2147 |
|
|
for (t1 = op1; TREE_CHAIN (t1); t1 = TREE_CHAIN (t1))
|
2148 |
|
|
continue;
|
2149 |
|
|
TREE_CHAIN (t1) = op2;
|
2150 |
|
|
|
2151 |
|
|
#ifdef ENABLE_TREE_CHECKING
|
2152 |
|
|
{
|
2153 |
|
|
tree t2;
|
2154 |
|
|
for (t2 = op2; t2; t2 = TREE_CHAIN (t2))
|
2155 |
|
|
gcc_assert (t2 != t1);
|
2156 |
|
|
}
|
2157 |
|
|
#endif
|
2158 |
|
|
|
2159 |
|
|
return op1;
|
2160 |
|
|
}
|
2161 |
|
|
|
2162 |
|
|
/* Return the last node in a chain of nodes (chained through TREE_CHAIN). */
|
2163 |
|
|
|
2164 |
|
|
tree
|
2165 |
|
|
tree_last (tree chain)
|
2166 |
|
|
{
|
2167 |
|
|
tree next;
|
2168 |
|
|
if (chain)
|
2169 |
|
|
while ((next = TREE_CHAIN (chain)))
|
2170 |
|
|
chain = next;
|
2171 |
|
|
return chain;
|
2172 |
|
|
}
|
2173 |
|
|
|
2174 |
|
|
/* Reverse the order of elements in the chain T,
|
2175 |
|
|
and return the new head of the chain (old last element). */
|
2176 |
|
|
|
2177 |
|
|
tree
|
2178 |
|
|
nreverse (tree t)
|
2179 |
|
|
{
|
2180 |
|
|
tree prev = 0, decl, next;
|
2181 |
|
|
for (decl = t; decl; decl = next)
|
2182 |
|
|
{
|
2183 |
|
|
/* We shouldn't be using this function to reverse BLOCK chains; we
|
2184 |
|
|
have blocks_nreverse for that. */
|
2185 |
|
|
gcc_checking_assert (TREE_CODE (decl) != BLOCK);
|
2186 |
|
|
next = TREE_CHAIN (decl);
|
2187 |
|
|
TREE_CHAIN (decl) = prev;
|
2188 |
|
|
prev = decl;
|
2189 |
|
|
}
|
2190 |
|
|
return prev;
|
2191 |
|
|
}
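/* Usage sketch (illustrative only, not from the original sources):
   chains built by repeated prepending are usually put back into source
   order with nreverse, and two chains are joined with chainon; decls
   and globals are assumed to be existing TREE_CHAIN lists:

     decls = nreverse (decls);
     tree all = chainon (globals, decls);

   Both calls are destructive: nreverse rewrites the TREE_CHAIN links in
   place, and chainon modifies the last node of its first argument.  */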
|
2192 |
|
|
|
2193 |
|
|
/* Return a newly created TREE_LIST node whose
|
2194 |
|
|
purpose and value fields are PARM and VALUE. */
|
2195 |
|
|
|
2196 |
|
|
tree
|
2197 |
|
|
build_tree_list_stat (tree parm, tree value MEM_STAT_DECL)
|
2198 |
|
|
{
|
2199 |
|
|
tree t = make_node_stat (TREE_LIST PASS_MEM_STAT);
|
2200 |
|
|
TREE_PURPOSE (t) = parm;
|
2201 |
|
|
TREE_VALUE (t) = value;
|
2202 |
|
|
return t;
|
2203 |
|
|
}
|
2204 |
|
|
|
2205 |
|
|
/* Build a chain of TREE_LIST nodes from a vector. */
|
2206 |
|
|
|
2207 |
|
|
tree
|
2208 |
|
|
build_tree_list_vec_stat (const VEC(tree,gc) *vec MEM_STAT_DECL)
|
2209 |
|
|
{
|
2210 |
|
|
tree ret = NULL_TREE;
|
2211 |
|
|
tree *pp = &ret;
|
2212 |
|
|
unsigned int i;
|
2213 |
|
|
tree t;
|
2214 |
|
|
FOR_EACH_VEC_ELT (tree, vec, i, t)
|
2215 |
|
|
{
|
2216 |
|
|
*pp = build_tree_list_stat (NULL, t PASS_MEM_STAT);
|
2217 |
|
|
pp = &TREE_CHAIN (*pp);
|
2218 |
|
|
}
|
2219 |
|
|
return ret;
|
2220 |
|
|
}
|
2221 |
|
|
|
2222 |
|
|
/* Return a newly created TREE_LIST node whose
|
2223 |
|
|
purpose and value fields are PURPOSE and VALUE
|
2224 |
|
|
and whose TREE_CHAIN is CHAIN. */
|
2225 |
|
|
|
2226 |
|
|
tree
|
2227 |
|
|
tree_cons_stat (tree purpose, tree value, tree chain MEM_STAT_DECL)
|
2228 |
|
|
{
|
2229 |
|
|
tree node;
|
2230 |
|
|
|
2231 |
|
|
node = ggc_alloc_zone_tree_node_stat (&tree_zone, sizeof (struct tree_list)
|
2232 |
|
|
PASS_MEM_STAT);
|
2233 |
|
|
memset (node, 0, sizeof (struct tree_common));
|
2234 |
|
|
|
2235 |
|
|
record_node_allocation_statistics (TREE_LIST, sizeof (struct tree_list));
|
2236 |
|
|
|
2237 |
|
|
TREE_SET_CODE (node, TREE_LIST);
|
2238 |
|
|
TREE_CHAIN (node) = chain;
|
2239 |
|
|
TREE_PURPOSE (node) = purpose;
|
2240 |
|
|
TREE_VALUE (node) = value;
|
2241 |
|
|
return node;
|
2242 |
|
|
}
|
2243 |
|
|
|
2244 |
|
|
/* Return the values of the elements of a CONSTRUCTOR as a vector of
|
2245 |
|
|
trees. */
|
2246 |
|
|
|
2247 |
|
|
VEC(tree,gc) *
|
2248 |
|
|
ctor_to_vec (tree ctor)
|
2249 |
|
|
{
|
2250 |
|
|
VEC(tree, gc) *vec = VEC_alloc (tree, gc, CONSTRUCTOR_NELTS (ctor));
|
2251 |
|
|
unsigned int ix;
|
2252 |
|
|
tree val;
|
2253 |
|
|
|
2254 |
|
|
FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (ctor), ix, val)
|
2255 |
|
|
VEC_quick_push (tree, vec, val);
|
2256 |
|
|
|
2257 |
|
|
return vec;
|
2258 |
|
|
}
|
2259 |
|
|
|
2260 |
|
|
/* Return the size nominally occupied by an object of type TYPE
|
2261 |
|
|
when it resides in memory. The value is measured in units of bytes,
|
2262 |
|
|
and its data type is that normally used for type sizes
|
2263 |
|
|
(which is the first type created by make_signed_type or
|
2264 |
|
|
make_unsigned_type). */
|
2265 |
|
|
|
2266 |
|
|
tree
|
2267 |
|
|
size_in_bytes (const_tree type)
|
2268 |
|
|
{
|
2269 |
|
|
tree t;
|
2270 |
|
|
|
2271 |
|
|
if (type == error_mark_node)
|
2272 |
|
|
return integer_zero_node;
|
2273 |
|
|
|
2274 |
|
|
type = TYPE_MAIN_VARIANT (type);
|
2275 |
|
|
t = TYPE_SIZE_UNIT (type);
|
2276 |
|
|
|
2277 |
|
|
if (t == 0)
|
2278 |
|
|
{
|
2279 |
|
|
lang_hooks.types.incomplete_type_error (NULL_TREE, type);
|
2280 |
|
|
return size_zero_node;
|
2281 |
|
|
}
|
2282 |
|
|
|
2283 |
|
|
return t;
|
2284 |
|
|
}
|
2285 |
|
|
|
2286 |
|
|
/* Return the size of TYPE (in bytes) as a wide integer
|
2287 |
|
|
or return -1 if the size can vary or is larger than an integer. */
|
2288 |
|
|
|
2289 |
|
|
HOST_WIDE_INT
|
2290 |
|
|
int_size_in_bytes (const_tree type)
|
2291 |
|
|
{
|
2292 |
|
|
tree t;
|
2293 |
|
|
|
2294 |
|
|
if (type == error_mark_node)
|
2295 |
|
|
return 0;
|
2296 |
|
|
|
2297 |
|
|
type = TYPE_MAIN_VARIANT (type);
|
2298 |
|
|
t = TYPE_SIZE_UNIT (type);
|
2299 |
|
|
if (t == 0
|
2300 |
|
|
|| TREE_CODE (t) != INTEGER_CST
|
2301 |
|
|
|| TREE_INT_CST_HIGH (t) != 0
|
2302 |
|
|
/* If the result would appear negative, it's too big to represent. */
|
2303 |
|
|
|| (HOST_WIDE_INT) TREE_INT_CST_LOW (t) < 0)
|
2304 |
|
|
return -1;
|
2305 |
|
|
|
2306 |
|
|
return TREE_INT_CST_LOW (t);
|
2307 |
|
|
}
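/* Usage sketch (illustrative only, not from the original sources):
   size_in_bytes returns the size as a tree, while int_size_in_bytes
   folds it down to a HOST_WIDE_INT when it is a compile-time constant:

     HOST_WIDE_INT n = int_size_in_bytes (integer_type_node);
       (4 on a typical 32-bit-int target)

   For a type whose size is not a constant, such as a variable-length
   array, int_size_in_bytes returns -1 while size_in_bytes returns the
   non-constant size expression.  */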
|
2308 |
|
|
|
2309 |
|
|
/* Return the maximum size of TYPE (in bytes) as a wide integer
|
2310 |
|
|
or return -1 if the size can vary or is larger than an integer. */
|
2311 |
|
|
|
2312 |
|
|
HOST_WIDE_INT
|
2313 |
|
|
max_int_size_in_bytes (const_tree type)
|
2314 |
|
|
{
|
2315 |
|
|
HOST_WIDE_INT size = -1;
|
2316 |
|
|
tree size_tree;
|
2317 |
|
|
|
2318 |
|
|
/* If this is an array type, check for a possible MAX_SIZE attached. */
|
2319 |
|
|
|
2320 |
|
|
if (TREE_CODE (type) == ARRAY_TYPE)
|
2321 |
|
|
{
|
2322 |
|
|
size_tree = TYPE_ARRAY_MAX_SIZE (type);
|
2323 |
|
|
|
2324 |
|
|
if (size_tree && host_integerp (size_tree, 1))
|
2325 |
|
|
size = tree_low_cst (size_tree, 1);
|
2326 |
|
|
}
|
2327 |
|
|
|
2328 |
|
|
/* If we still haven't been able to get a size, see if the language
|
2329 |
|
|
can compute a maximum size. */
|
2330 |
|
|
|
2331 |
|
|
if (size == -1)
|
2332 |
|
|
{
|
2333 |
|
|
size_tree = lang_hooks.types.max_size (type);
|
2334 |
|
|
|
2335 |
|
|
if (size_tree && host_integerp (size_tree, 1))
|
2336 |
|
|
size = tree_low_cst (size_tree, 1);
|
2337 |
|
|
}
|
2338 |
|
|
|
2339 |
|
|
return size;
|
2340 |
|
|
}
|
2341 |
|
|
|
2342 |
|
|
/* Returns a tree for the size of EXP in bytes. */
|
2343 |
|
|
|
2344 |
|
|
tree
|
2345 |
|
|
tree_expr_size (const_tree exp)
|
2346 |
|
|
{
|
2347 |
|
|
if (DECL_P (exp)
|
2348 |
|
|
&& DECL_SIZE_UNIT (exp) != 0)
|
2349 |
|
|
return DECL_SIZE_UNIT (exp);
|
2350 |
|
|
else
|
2351 |
|
|
return size_in_bytes (TREE_TYPE (exp));
|
2352 |
|
|
}
|
2353 |
|
|
|
2354 |
|
|
/* Return the bit position of FIELD, in bits from the start of the record.
|
2355 |
|
|
This is a tree of type bitsizetype. */
|
2356 |
|
|
|
2357 |
|
|
tree
|
2358 |
|
|
bit_position (const_tree field)
|
2359 |
|
|
{
|
2360 |
|
|
return bit_from_pos (DECL_FIELD_OFFSET (field),
|
2361 |
|
|
DECL_FIELD_BIT_OFFSET (field));
|
2362 |
|
|
}
|
2363 |
|
|
|
2364 |
|
|
/* Likewise, but return as an integer. It must be representable in
|
2365 |
|
|
that way (since it could be a signed value, we don't have the
|
2366 |
|
|
option of returning -1 like int_size_in_bytes can).  */
|
2367 |
|
|
|
2368 |
|
|
HOST_WIDE_INT
|
2369 |
|
|
int_bit_position (const_tree field)
|
2370 |
|
|
{
|
2371 |
|
|
return tree_low_cst (bit_position (field), 0);
|
2372 |
|
|
}
|
2373 |
|
|
|
2374 |
|
|
/* Return the byte position of FIELD, in bytes from the start of the record.
|
2375 |
|
|
This is a tree of type sizetype. */
|
2376 |
|
|
|
2377 |
|
|
tree
|
2378 |
|
|
byte_position (const_tree field)
|
2379 |
|
|
{
|
2380 |
|
|
return byte_from_pos (DECL_FIELD_OFFSET (field),
|
2381 |
|
|
DECL_FIELD_BIT_OFFSET (field));
|
2382 |
|
|
}
|
2383 |
|
|
|
2384 |
|
|
/* Likewise, but return as an integer. It must be representable in
|
2385 |
|
|
that way (since it could be a signed value, we don't have the
|
2386 |
|
|
option of returning -1 like int_size_in_bytes can).  */
|
2387 |
|
|
|
2388 |
|
|
HOST_WIDE_INT
|
2389 |
|
|
int_byte_position (const_tree field)
|
2390 |
|
|
{
|
2391 |
|
|
return tree_low_cst (byte_position (field), 0);
|
2392 |
|
|
}
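/* Usage sketch (illustrative only, not from the original sources):
   walking the fields of a laid-out RECORD_TYPE (here called rec) and
   asking for their byte offsets:

     tree f;
     for (f = TYPE_FIELDS (rec); f; f = DECL_CHAIN (f))
       if (TREE_CODE (f) == FIELD_DECL)
         {
           HOST_WIDE_INT off = int_byte_position (f);
         }
   */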
|
2393 |
|
|
|
2394 |
|
|
/* Return the strictest alignment, in bits, that T is known to have. */
|
2395 |
|
|
|
2396 |
|
|
unsigned int
|
2397 |
|
|
expr_align (const_tree t)
|
2398 |
|
|
{
|
2399 |
|
|
unsigned int align0, align1;
|
2400 |
|
|
|
2401 |
|
|
switch (TREE_CODE (t))
|
2402 |
|
|
{
|
2403 |
|
|
CASE_CONVERT: case NON_LVALUE_EXPR:
|
2404 |
|
|
/* If we have conversions, we know that the alignment of the
|
2405 |
|
|
object must meet each of the alignments of the types. */
|
2406 |
|
|
align0 = expr_align (TREE_OPERAND (t, 0));
|
2407 |
|
|
align1 = TYPE_ALIGN (TREE_TYPE (t));
|
2408 |
|
|
return MAX (align0, align1);
|
2409 |
|
|
|
2410 |
|
|
case SAVE_EXPR: case COMPOUND_EXPR: case MODIFY_EXPR:
|
2411 |
|
|
case INIT_EXPR: case TARGET_EXPR: case WITH_CLEANUP_EXPR:
|
2412 |
|
|
case CLEANUP_POINT_EXPR:
|
2413 |
|
|
/* These don't change the alignment of an object. */
|
2414 |
|
|
return expr_align (TREE_OPERAND (t, 0));
|
2415 |
|
|
|
2416 |
|
|
case COND_EXPR:
|
2417 |
|
|
/* The best we can do is say that the alignment is the least aligned
|
2418 |
|
|
of the two arms. */
|
2419 |
|
|
align0 = expr_align (TREE_OPERAND (t, 1));
|
2420 |
|
|
align1 = expr_align (TREE_OPERAND (t, 2));
|
2421 |
|
|
return MIN (align0, align1);
|
2422 |
|
|
|
2423 |
|
|
/* FIXME: LABEL_DECL and CONST_DECL never have DECL_ALIGN set
|
2424 |
|
|
meaningfully, it's always 1. */
|
2425 |
|
|
case LABEL_DECL: case CONST_DECL:
|
2426 |
|
|
case VAR_DECL: case PARM_DECL: case RESULT_DECL:
|
2427 |
|
|
case FUNCTION_DECL:
|
2428 |
|
|
gcc_assert (DECL_ALIGN (t) != 0);
|
2429 |
|
|
return DECL_ALIGN (t);
|
2430 |
|
|
|
2431 |
|
|
default:
|
2432 |
|
|
break;
|
2433 |
|
|
}
|
2434 |
|
|
|
2435 |
|
|
/* Otherwise take the alignment from that of the type. */
|
2436 |
|
|
return TYPE_ALIGN (TREE_TYPE (t));
|
2437 |
|
|
}
|
2438 |
|
|
|
2439 |
|
|
/* Return, as a tree node, the number of elements for TYPE (which is an
|
2440 |
|
|
ARRAY_TYPE) minus one. This counts only elements of the top array. */
|
2441 |
|
|
|
2442 |
|
|
tree
|
2443 |
|
|
array_type_nelts (const_tree type)
|
2444 |
|
|
{
|
2445 |
|
|
tree index_type, min, max;
|
2446 |
|
|
|
2447 |
|
|
/* If they did it with unspecified bounds, then we should have already
|
2448 |
|
|
given an error about it before we got here. */
|
2449 |
|
|
if (! TYPE_DOMAIN (type))
|
2450 |
|
|
return error_mark_node;
|
2451 |
|
|
|
2452 |
|
|
index_type = TYPE_DOMAIN (type);
|
2453 |
|
|
min = TYPE_MIN_VALUE (index_type);
|
2454 |
|
|
max = TYPE_MAX_VALUE (index_type);
|
2455 |
|
|
|
2456 |
|
|
/* TYPE_MAX_VALUE may not be set if the array has unknown length. */
|
2457 |
|
|
if (!max)
|
2458 |
|
|
return error_mark_node;
|
2459 |
|
|
|
2460 |
|
|
return (integer_zerop (min)
|
2461 |
|
|
? max
|
2462 |
|
|
: fold_build2 (MINUS_EXPR, TREE_TYPE (max), max, min));
|
2463 |
|
|
}
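/* Usage sketch (illustrative only, not from the original sources): for
   an array type equivalent to int[10], built over a 0..9 index domain,
   array_type_nelts yields the INTEGER_CST 9:

     tree idx = build_index_type (size_int (9));
     tree arr = build_array_type (integer_type_node, idx);
     tree nm1 = array_type_nelts (arr);  */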
|
2464 |
|
|
|
2465 |
|
|
/* If arg is static -- a reference to an object in static storage -- then
|
2466 |
|
|
return the object. This is not the same as the C meaning of `static'.
|
2467 |
|
|
If arg isn't static, return NULL. */
|
2468 |
|
|
|
2469 |
|
|
tree
|
2470 |
|
|
staticp (tree arg)
|
2471 |
|
|
{
|
2472 |
|
|
switch (TREE_CODE (arg))
|
2473 |
|
|
{
|
2474 |
|
|
case FUNCTION_DECL:
|
2475 |
|
|
/* Nested functions are static, even though taking their address will
|
2476 |
|
|
involve a trampoline as we unnest the nested function and create
|
2477 |
|
|
the trampoline on the tree level. */
|
2478 |
|
|
return arg;
|
2479 |
|
|
|
2480 |
|
|
case VAR_DECL:
|
2481 |
|
|
return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
|
2482 |
|
|
&& ! DECL_THREAD_LOCAL_P (arg)
|
2483 |
|
|
&& ! DECL_DLLIMPORT_P (arg)
|
2484 |
|
|
? arg : NULL);
|
2485 |
|
|
|
2486 |
|
|
case CONST_DECL:
|
2487 |
|
|
return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
|
2488 |
|
|
? arg : NULL);
|
2489 |
|
|
|
2490 |
|
|
case CONSTRUCTOR:
|
2491 |
|
|
return TREE_STATIC (arg) ? arg : NULL;
|
2492 |
|
|
|
2493 |
|
|
case LABEL_DECL:
|
2494 |
|
|
case STRING_CST:
|
2495 |
|
|
return arg;
|
2496 |
|
|
|
2497 |
|
|
case COMPONENT_REF:
|
2498 |
|
|
/* If the thing being referenced is not a field, then it is
|
2499 |
|
|
something language specific. */
|
2500 |
|
|
gcc_assert (TREE_CODE (TREE_OPERAND (arg, 1)) == FIELD_DECL);
|
2501 |
|
|
|
2502 |
|
|
/* If we are referencing a bitfield, we can't evaluate an
|
2503 |
|
|
ADDR_EXPR at compile time and so it isn't a constant. */
|
2504 |
|
|
if (DECL_BIT_FIELD (TREE_OPERAND (arg, 1)))
|
2505 |
|
|
return NULL;
|
2506 |
|
|
|
2507 |
|
|
return staticp (TREE_OPERAND (arg, 0));
|
2508 |
|
|
|
2509 |
|
|
case BIT_FIELD_REF:
|
2510 |
|
|
return NULL;
|
2511 |
|
|
|
2512 |
|
|
case INDIRECT_REF:
|
2513 |
|
|
return TREE_CONSTANT (TREE_OPERAND (arg, 0)) ? arg : NULL;
|
2514 |
|
|
|
2515 |
|
|
case ARRAY_REF:
|
2516 |
|
|
case ARRAY_RANGE_REF:
|
2517 |
|
|
if (TREE_CODE (TYPE_SIZE (TREE_TYPE (arg))) == INTEGER_CST
|
2518 |
|
|
&& TREE_CODE (TREE_OPERAND (arg, 1)) == INTEGER_CST)
|
2519 |
|
|
return staticp (TREE_OPERAND (arg, 0));
|
2520 |
|
|
else
|
2521 |
|
|
return NULL;
|
2522 |
|
|
|
2523 |
|
|
case COMPOUND_LITERAL_EXPR:
|
2524 |
|
|
return TREE_STATIC (COMPOUND_LITERAL_EXPR_DECL (arg)) ? arg : NULL;
|
2525 |
|
|
|
2526 |
|
|
default:
|
2527 |
|
|
return NULL;
|
2528 |
|
|
}
|
2529 |
|
|
}
|
2530 |
|
|
|
2531 |
|
|
|
2532 |
|
|
|
2533 |
|
|
|
2534 |
|
|
/* Return whether OP is a DECL whose address is function-invariant. */
|
2535 |
|
|
|
2536 |
|
|
bool
|
2537 |
|
|
decl_address_invariant_p (const_tree op)
|
2538 |
|
|
{
|
2539 |
|
|
/* The conditions below are slightly less strict than the one in
|
2540 |
|
|
staticp. */
|
2541 |
|
|
|
2542 |
|
|
switch (TREE_CODE (op))
|
2543 |
|
|
{
|
2544 |
|
|
case PARM_DECL:
|
2545 |
|
|
case RESULT_DECL:
|
2546 |
|
|
case LABEL_DECL:
|
2547 |
|
|
case FUNCTION_DECL:
|
2548 |
|
|
return true;
|
2549 |
|
|
|
2550 |
|
|
case VAR_DECL:
|
2551 |
|
|
if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
|
2552 |
|
|
|| DECL_THREAD_LOCAL_P (op)
|
2553 |
|
|
|| DECL_CONTEXT (op) == current_function_decl
|
2554 |
|
|
|| decl_function_context (op) == current_function_decl)
|
2555 |
|
|
return true;
|
2556 |
|
|
break;
|
2557 |
|
|
|
2558 |
|
|
case CONST_DECL:
|
2559 |
|
|
if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
|
2560 |
|
|
|| decl_function_context (op) == current_function_decl)
|
2561 |
|
|
return true;
|
2562 |
|
|
break;
|
2563 |
|
|
|
2564 |
|
|
default:
|
2565 |
|
|
break;
|
2566 |
|
|
}
|
2567 |
|
|
|
2568 |
|
|
return false;
|
2569 |
|
|
}
|
2570 |
|
|
|
2571 |
|
|
/* Return whether OP is a DECL whose address is interprocedural-invariant. */
|
2572 |
|
|
|
2573 |
|
|
bool
|
2574 |
|
|
decl_address_ip_invariant_p (const_tree op)
|
2575 |
|
|
{
|
2576 |
|
|
/* The conditions below are slightly less strict than the one in
|
2577 |
|
|
staticp. */
|
2578 |
|
|
|
2579 |
|
|
switch (TREE_CODE (op))
|
2580 |
|
|
{
|
2581 |
|
|
case LABEL_DECL:
|
2582 |
|
|
case FUNCTION_DECL:
|
2583 |
|
|
case STRING_CST:
|
2584 |
|
|
return true;
|
2585 |
|
|
|
2586 |
|
|
case VAR_DECL:
|
2587 |
|
|
if (((TREE_STATIC (op) || DECL_EXTERNAL (op))
|
2588 |
|
|
&& !DECL_DLLIMPORT_P (op))
|
2589 |
|
|
|| DECL_THREAD_LOCAL_P (op))
|
2590 |
|
|
return true;
|
2591 |
|
|
break;
|
2592 |
|
|
|
2593 |
|
|
case CONST_DECL:
|
2594 |
|
|
if ((TREE_STATIC (op) || DECL_EXTERNAL (op)))
|
2595 |
|
|
return true;
|
2596 |
|
|
break;
|
2597 |
|
|
|
2598 |
|
|
default:
|
2599 |
|
|
break;
|
2600 |
|
|
}
|
2601 |
|
|
|
2602 |
|
|
return false;
|
2603 |
|
|
}
|
2604 |
|
|
|
2605 |
|
|
|
2606 |
|
|
/* Return true if T is function-invariant (internal function, does
|
2607 |
|
|
not handle arithmetic; that's handled in skip_simple_arithmetic and
|
2608 |
|
|
tree_invariant_p). */
|
2609 |
|
|
|
2610 |
|
|
static bool tree_invariant_p (tree t);
|
2611 |
|
|
|
2612 |
|
|
static bool
|
2613 |
|
|
tree_invariant_p_1 (tree t)
|
2614 |
|
|
{
|
2615 |
|
|
tree op;
|
2616 |
|
|
|
2617 |
|
|
if (TREE_CONSTANT (t)
|
2618 |
|
|
|| (TREE_READONLY (t) && !TREE_SIDE_EFFECTS (t)))
|
2619 |
|
|
return true;
|
2620 |
|
|
|
2621 |
|
|
switch (TREE_CODE (t))
|
2622 |
|
|
{
|
2623 |
|
|
case SAVE_EXPR:
|
2624 |
|
|
return true;
|
2625 |
|
|
|
2626 |
|
|
case ADDR_EXPR:
|
2627 |
|
|
op = TREE_OPERAND (t, 0);
|
2628 |
|
|
while (handled_component_p (op))
|
2629 |
|
|
{
|
2630 |
|
|
switch (TREE_CODE (op))
|
2631 |
|
|
{
|
2632 |
|
|
case ARRAY_REF:
|
2633 |
|
|
case ARRAY_RANGE_REF:
|
2634 |
|
|
if (!tree_invariant_p (TREE_OPERAND (op, 1))
|
2635 |
|
|
|| TREE_OPERAND (op, 2) != NULL_TREE
|
2636 |
|
|
|| TREE_OPERAND (op, 3) != NULL_TREE)
|
2637 |
|
|
return false;
|
2638 |
|
|
break;
|
2639 |
|
|
|
2640 |
|
|
case COMPONENT_REF:
|
2641 |
|
|
if (TREE_OPERAND (op, 2) != NULL_TREE)
|
2642 |
|
|
return false;
|
2643 |
|
|
break;
|
2644 |
|
|
|
2645 |
|
|
default:;
|
2646 |
|
|
}
|
2647 |
|
|
op = TREE_OPERAND (op, 0);
|
2648 |
|
|
}
|
2649 |
|
|
|
2650 |
|
|
return CONSTANT_CLASS_P (op) || decl_address_invariant_p (op);
|
2651 |
|
|
|
2652 |
|
|
default:
|
2653 |
|
|
break;
|
2654 |
|
|
}
|
2655 |
|
|
|
2656 |
|
|
return false;
|
2657 |
|
|
}
|
2658 |
|
|
|
2659 |
|
|
/* Return true if T is function-invariant. */
|
2660 |
|
|
|
2661 |
|
|
static bool
|
2662 |
|
|
tree_invariant_p (tree t)
|
2663 |
|
|
{
|
2664 |
|
|
tree inner = skip_simple_arithmetic (t);
|
2665 |
|
|
return tree_invariant_p_1 (inner);
|
2666 |
|
|
}
|
2667 |
|
|
|
2668 |
|
|
/* Wrap a SAVE_EXPR around EXPR, if appropriate.
|
2669 |
|
|
Do this to any expression which may be used in more than one place,
|
2670 |
|
|
but must be evaluated only once.
|
2671 |
|
|
|
2672 |
|
|
Normally, expand_expr would reevaluate the expression each time.
|
2673 |
|
|
Calling save_expr produces something that is evaluated and recorded
|
2674 |
|
|
the first time expand_expr is called on it. Subsequent calls to
|
2675 |
|
|
expand_expr just reuse the recorded value.
|
2676 |
|
|
|
2677 |
|
|
The call to expand_expr that generates code that actually computes
|
2678 |
|
|
the value is the first call *at compile time*. Subsequent calls
|
2679 |
|
|
*at compile time* generate code to use the saved value.
|
2680 |
|
|
This produces a correct result provided that *at run time* control
|
2681 |
|
|
always flows through the insns made by the first expand_expr
|
2682 |
|
|
before reaching the other places where the save_expr was evaluated.
|
2683 |
|
|
You, the caller of save_expr, must make sure this is so.
|
2684 |
|
|
|
2685 |
|
|
Constants, and certain read-only nodes, are returned with no
|
2686 |
|
|
SAVE_EXPR because that is safe. Expressions containing placeholders
|
2687 |
|
|
are not touched; see tree.def for an explanation of what these
|
2688 |
|
|
are used for. */
|
2689 |
|
|
|
2690 |
|
|
tree
|
2691 |
|
|
save_expr (tree expr)
|
2692 |
|
|
{
|
2693 |
|
|
tree t = fold (expr);
|
2694 |
|
|
tree inner;
|
2695 |
|
|
|
2696 |
|
|
/* If the tree evaluates to a constant, then we don't want to hide that
|
2697 |
|
|
fact (i.e. this allows further folding, and direct checks for constants).
|
2698 |
|
|
However, a read-only object that has side effects cannot be bypassed.
|
2699 |
|
|
Since it is no problem to reevaluate literals, we just return the
|
2700 |
|
|
literal node. */
|
2701 |
|
|
inner = skip_simple_arithmetic (t);
|
2702 |
|
|
if (TREE_CODE (inner) == ERROR_MARK)
|
2703 |
|
|
return inner;
|
2704 |
|
|
|
2705 |
|
|
if (tree_invariant_p_1 (inner))
|
2706 |
|
|
return t;
|
2707 |
|
|
|
2708 |
|
|
/* If INNER contains a PLACEHOLDER_EXPR, we must evaluate it each time, since
|
2709 |
|
|
it means that the size or offset of some field of an object depends on
|
2710 |
|
|
the value within another field.
|
2711 |
|
|
|
2712 |
|
|
Note that it must not be the case that T contains both a PLACEHOLDER_EXPR
|
2713 |
|
|
and some variable since it would then need to be both evaluated once and
|
2714 |
|
|
evaluated more than once.  Front-ends must ensure this case cannot
|
2715 |
|
|
happen by surrounding any such subexpressions in their own SAVE_EXPR
|
2716 |
|
|
and forcing evaluation at the proper time. */
|
2717 |
|
|
if (contains_placeholder_p (inner))
|
2718 |
|
|
return t;
|
2719 |
|
|
|
2720 |
|
|
t = build1 (SAVE_EXPR, TREE_TYPE (expr), t);
|
2721 |
|
|
SET_EXPR_LOCATION (t, EXPR_LOCATION (expr));
|
2722 |
|
|
|
2723 |
|
|
/* This expression might be placed ahead of a jump to ensure that the
|
2724 |
|
|
value was computed on both sides of the jump. So make sure it isn't
|
2725 |
|
|
eliminated as dead. */
|
2726 |
|
|
TREE_SIDE_EFFECTS (t) = 1;
|
2727 |
|
|
return t;
|
2728 |
|
|
}
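/* Usage sketch (illustrative only, not from the original sources): a
   front end that needs to use an operand twice, e.g. to expand x*x,
   wraps it once so that the side effects of expr (an assumed operand
   tree) are evaluated a single time:

     tree s  = save_expr (expr);
     tree sq = fold_build2 (MULT_EXPR, TREE_TYPE (s), s, s);

   If expr is already invariant (a constant, a SAVE_EXPR, an invariant
   address), save_expr simply returns it without adding a wrapper.  */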
|
2729 |
|
|
|
2730 |
|
|
/* Look inside EXPR and into any simple arithmetic operations. Return
|
2731 |
|
|
the innermost non-arithmetic node. */
|
2732 |
|
|
|
2733 |
|
|
tree
|
2734 |
|
|
skip_simple_arithmetic (tree expr)
|
2735 |
|
|
{
|
2736 |
|
|
tree inner;
|
2737 |
|
|
|
2738 |
|
|
/* We don't care about whether this can be used as an lvalue in this
|
2739 |
|
|
context. */
|
2740 |
|
|
while (TREE_CODE (expr) == NON_LVALUE_EXPR)
|
2741 |
|
|
expr = TREE_OPERAND (expr, 0);
|
2742 |
|
|
|
2743 |
|
|
/* If we have simple operations applied to a SAVE_EXPR or to a SAVE_EXPR and
|
2744 |
|
|
a constant, it will be more efficient to not make another SAVE_EXPR since
|
2745 |
|
|
it will allow better simplification and GCSE will be able to merge the
|
2746 |
|
|
computations if they actually occur. */
|
2747 |
|
|
inner = expr;
|
2748 |
|
|
while (1)
|
2749 |
|
|
{
|
2750 |
|
|
if (UNARY_CLASS_P (inner))
|
2751 |
|
|
inner = TREE_OPERAND (inner, 0);
|
2752 |
|
|
else if (BINARY_CLASS_P (inner))
|
2753 |
|
|
{
|
2754 |
|
|
if (tree_invariant_p (TREE_OPERAND (inner, 1)))
|
2755 |
|
|
inner = TREE_OPERAND (inner, 0);
|
2756 |
|
|
else if (tree_invariant_p (TREE_OPERAND (inner, 0)))
|
2757 |
|
|
inner = TREE_OPERAND (inner, 1);
|
2758 |
|
|
else
|
2759 |
|
|
break;
|
2760 |
|
|
}
|
2761 |
|
|
else
|
2762 |
|
|
break;
|
2763 |
|
|
}
|
2764 |
|
|
|
2765 |
|
|
return inner;
|
2766 |
|
|
}
|
2767 |
|
|
|
2768 |
|
|
|
2769 |
|
|
/* Return which tree structure is used by T. */
|
2770 |
|
|
|
2771 |
|
|
enum tree_node_structure_enum
|
2772 |
|
|
tree_node_structure (const_tree t)
|
2773 |
|
|
{
|
2774 |
|
|
const enum tree_code code = TREE_CODE (t);
|
2775 |
|
|
return tree_node_structure_for_code (code);
|
2776 |
|
|
}
|
2777 |
|
|
|
2778 |
|
|
/* Set various status flags when building a CALL_EXPR object T. */
|
2779 |
|
|
|
2780 |
|
|
static void
|
2781 |
|
|
process_call_operands (tree t)
|
2782 |
|
|
{
|
2783 |
|
|
bool side_effects = TREE_SIDE_EFFECTS (t);
|
2784 |
|
|
bool read_only = false;
|
2785 |
|
|
int i = call_expr_flags (t);
|
2786 |
|
|
|
2787 |
|
|
/* Calls have side-effects, except those to const or pure functions. */
|
2788 |
|
|
if ((i & ECF_LOOPING_CONST_OR_PURE) || !(i & (ECF_CONST | ECF_PURE)))
|
2789 |
|
|
side_effects = true;
|
2790 |
|
|
/* Propagate TREE_READONLY of arguments for const functions. */
|
2791 |
|
|
if (i & ECF_CONST)
|
2792 |
|
|
read_only = true;
|
2793 |
|
|
|
2794 |
|
|
if (!side_effects || read_only)
|
2795 |
|
|
for (i = 1; i < TREE_OPERAND_LENGTH (t); i++)
|
2796 |
|
|
{
|
2797 |
|
|
tree op = TREE_OPERAND (t, i);
|
2798 |
|
|
if (op && TREE_SIDE_EFFECTS (op))
|
2799 |
|
|
side_effects = true;
|
2800 |
|
|
if (op && !TREE_READONLY (op) && !CONSTANT_CLASS_P (op))
|
2801 |
|
|
read_only = false;
|
2802 |
|
|
}
|
2803 |
|
|
|
2804 |
|
|
TREE_SIDE_EFFECTS (t) = side_effects;
|
2805 |
|
|
TREE_READONLY (t) = read_only;
|
2806 |
|
|
}
|
2807 |
|
|
|
2808 |
|
|
/* Return true if EXP contains a PLACEHOLDER_EXPR, i.e. if it represents a
|
2809 |
|
|
size or offset that depends on a field within a record. */
|
2810 |
|
|
|
2811 |
|
|
bool
|
2812 |
|
|
contains_placeholder_p (const_tree exp)
|
2813 |
|
|
{
|
2814 |
|
|
enum tree_code code;
|
2815 |
|
|
|
2816 |
|
|
if (!exp)
|
2817 |
|
|
return 0;
|
2818 |
|
|
|
2819 |
|
|
code = TREE_CODE (exp);
|
2820 |
|
|
if (code == PLACEHOLDER_EXPR)
|
2821 |
|
|
return 1;
|
2822 |
|
|
|
2823 |
|
|
switch (TREE_CODE_CLASS (code))
|
2824 |
|
|
{
|
2825 |
|
|
case tcc_reference:
|
2826 |
|
|
/* Don't look at any PLACEHOLDER_EXPRs that might be in index or bit
|
2827 |
|
|
position computations since they will be converted into a
|
2828 |
|
|
WITH_RECORD_EXPR involving the reference, which we assume
|
2829 |
|
|
here will be valid. */
|
2830 |
|
|
return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
|
2831 |
|
|
|
2832 |
|
|
case tcc_exceptional:
|
2833 |
|
|
if (code == TREE_LIST)
|
2834 |
|
|
return (CONTAINS_PLACEHOLDER_P (TREE_VALUE (exp))
|
2835 |
|
|
|| CONTAINS_PLACEHOLDER_P (TREE_CHAIN (exp)));
|
2836 |
|
|
break;
|
2837 |
|
|
|
2838 |
|
|
case tcc_unary:
|
2839 |
|
|
case tcc_binary:
|
2840 |
|
|
case tcc_comparison:
|
2841 |
|
|
case tcc_expression:
|
2842 |
|
|
switch (code)
|
2843 |
|
|
{
|
2844 |
|
|
case COMPOUND_EXPR:
|
2845 |
|
|
/* Ignoring the first operand isn't quite right, but works best. */
|
2846 |
|
|
return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1));
|
2847 |
|
|
|
2848 |
|
|
case COND_EXPR:
|
2849 |
|
|
return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
|
2850 |
|
|
|| CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1))
|
2851 |
|
|
|| CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 2)));
|
2852 |
|
|
|
2853 |
|
|
case SAVE_EXPR:
|
2854 |
|
|
/* The save_expr function never wraps anything containing
|
2855 |
|
|
a PLACEHOLDER_EXPR. */
|
2856 |
|
|
return 0;
|
2857 |
|
|
|
2858 |
|
|
default:
|
2859 |
|
|
break;
|
2860 |
|
|
}
|
2861 |
|
|
|
2862 |
|
|
switch (TREE_CODE_LENGTH (code))
|
2863 |
|
|
{
|
2864 |
|
|
case 1:
|
2865 |
|
|
return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
|
2866 |
|
|
case 2:
|
2867 |
|
|
return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
|
2868 |
|
|
|| CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1)));
|
2869 |
|
|
default:
|
2870 |
|
|
return 0;
|
2871 |
|
|
}
|
2872 |
|
|
|
2873 |
|
|
case tcc_vl_exp:
|
2874 |
|
|
switch (code)
|
2875 |
|
|
{
|
2876 |
|
|
case CALL_EXPR:
|
2877 |
|
|
{
|
2878 |
|
|
const_tree arg;
|
2879 |
|
|
const_call_expr_arg_iterator iter;
|
2880 |
|
|
FOR_EACH_CONST_CALL_EXPR_ARG (arg, iter, exp)
|
2881 |
|
|
if (CONTAINS_PLACEHOLDER_P (arg))
|
2882 |
|
|
return 1;
|
2883 |
|
|
return 0;
|
2884 |
|
|
}
|
2885 |
|
|
default:
|
2886 |
|
|
return 0;
|
2887 |
|
|
}
|
2888 |
|
|
|
2889 |
|
|
default:
|
2890 |
|
|
return 0;
|
2891 |
|
|
}
|
2892 |
|
|
return 0;
|
2893 |
|
|
}
|
2894 |
|
|
|
2895 |
|
|
/* Return true if any part of the structure of TYPE involves a PLACEHOLDER_EXPR
|
2896 |
|
|
directly. This includes size, bounds, qualifiers (for QUAL_UNION_TYPE) and
|
2897 |
|
|
field positions. */
|
2898 |
|
|
|
2899 |
|
|
static bool
|
2900 |
|
|
type_contains_placeholder_1 (const_tree type)
|
2901 |
|
|
{
|
2902 |
|
|
/* If the size contains a placeholder or the parent type (component type in
|
2903 |
|
|
the case of arrays) involves a placeholder, this type does.  */
|
2904 |
|
|
if (CONTAINS_PLACEHOLDER_P (TYPE_SIZE (type))
|
2905 |
|
|
|| CONTAINS_PLACEHOLDER_P (TYPE_SIZE_UNIT (type))
|
2906 |
|
|
|| (!POINTER_TYPE_P (type)
|
2907 |
|
|
&& TREE_TYPE (type)
|
2908 |
|
|
&& type_contains_placeholder_p (TREE_TYPE (type))))
|
2909 |
|
|
return true;
|
2910 |
|
|
|
2911 |
|
|
/* Now do type-specific checks. Note that the last part of the check above
|
2912 |
|
|
greatly limits what we have to do below. */
|
2913 |
|
|
switch (TREE_CODE (type))
|
2914 |
|
|
{
|
2915 |
|
|
case VOID_TYPE:
|
2916 |
|
|
case COMPLEX_TYPE:
|
2917 |
|
|
case ENUMERAL_TYPE:
|
2918 |
|
|
case BOOLEAN_TYPE:
|
2919 |
|
|
case POINTER_TYPE:
|
2920 |
|
|
case OFFSET_TYPE:
|
2921 |
|
|
case REFERENCE_TYPE:
|
2922 |
|
|
case METHOD_TYPE:
|
2923 |
|
|
case FUNCTION_TYPE:
|
2924 |
|
|
case VECTOR_TYPE:
|
2925 |
|
|
return false;
|
2926 |
|
|
|
2927 |
|
|
case INTEGER_TYPE:
|
2928 |
|
|
case REAL_TYPE:
|
2929 |
|
|
case FIXED_POINT_TYPE:
|
2930 |
|
|
/* Here we just check the bounds. */
|
2931 |
|
|
return (CONTAINS_PLACEHOLDER_P (TYPE_MIN_VALUE (type))
|
2932 |
|
|
|| CONTAINS_PLACEHOLDER_P (TYPE_MAX_VALUE (type)));
|
2933 |
|
|
|
2934 |
|
|
case ARRAY_TYPE:
|
2935 |
|
|
/* We have already checked the component type above, so just check the
|
2936 |
|
|
domain type. */
|
2937 |
|
|
return type_contains_placeholder_p (TYPE_DOMAIN (type));
|
2938 |
|
|
|
2939 |
|
|
case RECORD_TYPE:
|
2940 |
|
|
case UNION_TYPE:
|
2941 |
|
|
case QUAL_UNION_TYPE:
|
2942 |
|
|
{
|
2943 |
|
|
tree field;
|
2944 |
|
|
|
2945 |
|
|
for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
|
2946 |
|
|
if (TREE_CODE (field) == FIELD_DECL
|
2947 |
|
|
&& (CONTAINS_PLACEHOLDER_P (DECL_FIELD_OFFSET (field))
|
2948 |
|
|
|| (TREE_CODE (type) == QUAL_UNION_TYPE
|
2949 |
|
|
&& CONTAINS_PLACEHOLDER_P (DECL_QUALIFIER (field)))
|
2950 |
|
|
|| type_contains_placeholder_p (TREE_TYPE (field))))
|
2951 |
|
|
return true;
|
2952 |
|
|
|
2953 |
|
|
return false;
|
2954 |
|
|
}
|
2955 |
|
|
|
2956 |
|
|
default:
|
2957 |
|
|
gcc_unreachable ();
|
2958 |
|
|
}
|
2959 |
|
|
}
|
2960 |
|
|
|
2961 |
|
|
/* Wrapper around above function used to cache its result. */
|
2962 |
|
|
|
2963 |
|
|
bool
|
2964 |
|
|
type_contains_placeholder_p (tree type)
|
2965 |
|
|
{
|
2966 |
|
|
bool result;
|
2967 |
|
|
|
2968 |
|
|
/* If the contains_placeholder_bits field has been initialized,
|
2969 |
|
|
then we know the answer. */
|
2970 |
|
|
if (TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) > 0)
|
2971 |
|
|
return TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) - 1;
|
2972 |
|
|
|
2973 |
|
|
/* Indicate that we've seen this type node, and the answer is false.
|
2974 |
|
|
This is what we want to return if we run into recursion via fields. */
|
2975 |
|
|
TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = 1;
|
2976 |
|
|
|
2977 |
|
|
/* Compute the real value. */
|
2978 |
|
|
result = type_contains_placeholder_1 (type);
|
2979 |
|
|
|
2980 |
|
|
/* Store the real value. */
|
2981 |
|
|
TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = result + 1;
|
2982 |
|
|
|
2983 |
|
|
return result;
|
2984 |
|
|
}
|
2985 |
|
|
|
2986 |
|
|
/* Push tree EXP onto vector QUEUE if it is not already present. */
|
2987 |
|
|
|
2988 |
|
|
static void
|
2989 |
|
|
push_without_duplicates (tree exp, VEC (tree, heap) **queue)
|
2990 |
|
|
{
|
2991 |
|
|
unsigned int i;
|
2992 |
|
|
tree iter;
|
2993 |
|
|
|
2994 |
|
|
FOR_EACH_VEC_ELT (tree, *queue, i, iter)
|
2995 |
|
|
if (simple_cst_equal (iter, exp) == 1)
|
2996 |
|
|
break;
|
2997 |
|
|
|
2998 |
|
|
if (!iter)
|
2999 |
|
|
VEC_safe_push (tree, heap, *queue, exp);
|
3000 |
|
|
}
|
3001 |
|
|
|
3002 |
|
|
/* Given a tree EXP, find all occurrences of references to fields
|
3003 |
|
|
in a PLACEHOLDER_EXPR and place them in vector REFS without
|
3004 |
|
|
duplicates. Also record VAR_DECLs and CONST_DECLs. Note that
|
3005 |
|
|
we assume here that EXP contains only arithmetic expressions
|
3006 |
|
|
or CALL_EXPRs with PLACEHOLDER_EXPRs occurring only in their
|
3007 |
|
|
argument list. */
|
3008 |
|
|
|
3009 |
|
|
void
|
3010 |
|
|
find_placeholder_in_expr (tree exp, VEC (tree, heap) **refs)
|
3011 |
|
|
{
|
3012 |
|
|
enum tree_code code = TREE_CODE (exp);
|
3013 |
|
|
tree inner;
|
3014 |
|
|
int i;
|
3015 |
|
|
|
3016 |
|
|
/* We handle TREE_LIST and COMPONENT_REF separately. */
|
3017 |
|
|
if (code == TREE_LIST)
|
3018 |
|
|
{
|
3019 |
|
|
FIND_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), refs);
|
3020 |
|
|
FIND_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), refs);
|
3021 |
|
|
}
|
3022 |
|
|
else if (code == COMPONENT_REF)
|
3023 |
|
|
{
|
3024 |
|
|
for (inner = TREE_OPERAND (exp, 0);
|
3025 |
|
|
REFERENCE_CLASS_P (inner);
|
3026 |
|
|
inner = TREE_OPERAND (inner, 0))
|
3027 |
|
|
;
|
3028 |
|
|
|
3029 |
|
|
if (TREE_CODE (inner) == PLACEHOLDER_EXPR)
|
3030 |
|
|
push_without_duplicates (exp, refs);
|
3031 |
|
|
else
|
3032 |
|
|
FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), refs);
|
3033 |
|
|
}
|
3034 |
|
|
else
|
3035 |
|
|
switch (TREE_CODE_CLASS (code))
|
3036 |
|
|
{
|
3037 |
|
|
case tcc_constant:
|
3038 |
|
|
break;
|
3039 |
|
|
|
3040 |
|
|
case tcc_declaration:
|
3041 |
|
|
/* Variables allocated to static storage can stay. */
|
3042 |
|
|
if (!TREE_STATIC (exp))
|
3043 |
|
|
push_without_duplicates (exp, refs);
|
3044 |
|
|
break;
|
3045 |
|
|
|
3046 |
|
|
case tcc_expression:
|
3047 |
|
|
/* This is the pattern built in ada/make_aligning_type. */
|
3048 |
|
|
if (code == ADDR_EXPR
|
3049 |
|
|
&& TREE_CODE (TREE_OPERAND (exp, 0)) == PLACEHOLDER_EXPR)
|
3050 |
|
|
{
|
3051 |
|
|
push_without_duplicates (exp, refs);
|
3052 |
|
|
break;
|
3053 |
|
|
}
|
3054 |
|
|
|
3055 |
|
|
/* Fall through... */
|
3056 |
|
|
|
3057 |
|
|
case tcc_exceptional:
|
3058 |
|
|
case tcc_unary:
|
3059 |
|
|
case tcc_binary:
|
3060 |
|
|
case tcc_comparison:
|
3061 |
|
|
case tcc_reference:
|
3062 |
|
|
for (i = 0; i < TREE_CODE_LENGTH (code); i++)
|
3063 |
|
|
FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
|
3064 |
|
|
break;
|
3065 |
|
|
|
3066 |
|
|
case tcc_vl_exp:
|
3067 |
|
|
for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
|
3068 |
|
|
FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
|
3069 |
|
|
break;
|
3070 |
|
|
|
3071 |
|
|
default:
|
3072 |
|
|
gcc_unreachable ();
|
3073 |
|
|
}
|
3074 |
|
|
}
|
3075 |
|
|
|
3076 |
|
|
/* Given a tree EXP, a FIELD_DECL F, and a replacement value R,
|
3077 |
|
|
return a tree with all occurrences of references to F in a
|
3078 |
|
|
PLACEHOLDER_EXPR replaced by R. Also handle VAR_DECLs and
|
3079 |
|
|
CONST_DECLs. Note that we assume here that EXP contains only
|
3080 |
|
|
arithmetic expressions or CALL_EXPRs with PLACEHOLDER_EXPRs
|
3081 |
|
|
occurring only in their argument list. */
|
3082 |
|
|
|
3083 |
|
|
tree
|
3084 |
|
|
substitute_in_expr (tree exp, tree f, tree r)
|
3085 |
|
|
{
|
3086 |
|
|
enum tree_code code = TREE_CODE (exp);
|
3087 |
|
|
tree op0, op1, op2, op3;
|
3088 |
|
|
tree new_tree;
|
3089 |
|
|
|
3090 |
|
|
/* We handle TREE_LIST and COMPONENT_REF separately. */
|
3091 |
|
|
if (code == TREE_LIST)
|
3092 |
|
|
{
|
3093 |
|
|
op0 = SUBSTITUTE_IN_EXPR (TREE_CHAIN (exp), f, r);
|
3094 |
|
|
op1 = SUBSTITUTE_IN_EXPR (TREE_VALUE (exp), f, r);
|
3095 |
|
|
if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
|
3096 |
|
|
return exp;
|
3097 |
|
|
|
3098 |
|
|
return tree_cons (TREE_PURPOSE (exp), op1, op0);
|
3099 |
|
|
}
|
3100 |
|
|
else if (code == COMPONENT_REF)
|
3101 |
|
|
{
|
3102 |
|
|
tree inner;
|
3103 |
|
|
|
3104 |
|
|
/* If this expression is getting a value from a PLACEHOLDER_EXPR
|
3105 |
|
|
and it is the right field, replace it with R. */
|
3106 |
|
|
for (inner = TREE_OPERAND (exp, 0);
|
3107 |
|
|
REFERENCE_CLASS_P (inner);
|
3108 |
|
|
inner = TREE_OPERAND (inner, 0))
|
3109 |
|
|
;
|
3110 |
|
|
|
3111 |
|
|
/* The field. */
|
3112 |
|
|
op1 = TREE_OPERAND (exp, 1);
|
3113 |
|
|
|
3114 |
|
|
if (TREE_CODE (inner) == PLACEHOLDER_EXPR && op1 == f)
|
3115 |
|
|
return r;
|
3116 |
|
|
|
3117 |
|
|
/* If this expression hasn't been completed yet, leave it alone. */
|
3118 |
|
|
if (TREE_CODE (inner) == PLACEHOLDER_EXPR && !TREE_TYPE (inner))
|
3119 |
|
|
return exp;
|
3120 |
|
|
|
3121 |
|
|
op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
|
3122 |
|
|
if (op0 == TREE_OPERAND (exp, 0))
|
3123 |
|
|
return exp;
|
3124 |
|
|
|
3125 |
|
|
new_tree
|
3126 |
|
|
= fold_build3 (COMPONENT_REF, TREE_TYPE (exp), op0, op1, NULL_TREE);
|
3127 |
|
|
}
|
3128 |
|
|
else
|
3129 |
|
|
switch (TREE_CODE_CLASS (code))
|
3130 |
|
|
{
|
3131 |
|
|
case tcc_constant:
|
3132 |
|
|
return exp;
|
3133 |
|
|
|
3134 |
|
|
case tcc_declaration:
|
3135 |
|
|
if (exp == f)
|
3136 |
|
|
return r;
|
3137 |
|
|
else
|
3138 |
|
|
return exp;
|
3139 |
|
|
|
3140 |
|
|
case tcc_expression:
|
3141 |
|
|
if (exp == f)
|
3142 |
|
|
return r;
|
3143 |
|
|
|
3144 |
|
|
/* Fall through... */
|
3145 |
|
|
|
3146 |
|
|
case tcc_exceptional:
|
3147 |
|
|
case tcc_unary:
|
3148 |
|
|
case tcc_binary:
|
3149 |
|
|
case tcc_comparison:
|
3150 |
|
|
case tcc_reference:
|
3151 |
|
|
switch (TREE_CODE_LENGTH (code))
|
3152 |
|
|
{
|
3153 |
|
|
case 0:
|
3154 |
|
|
return exp;
|
3155 |
|
|
|
3156 |
|
|
case 1:
|
3157 |
|
|
op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
|
3158 |
|
|
if (op0 == TREE_OPERAND (exp, 0))
|
3159 |
|
|
return exp;
|
3160 |
|
|
|
3161 |
|
|
new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
|
3162 |
|
|
break;
|
3163 |
|
|
|
3164 |
|
|
case 2:
|
3165 |
|
|
op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
|
3166 |
|
|
op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
|
3167 |
|
|
|
3168 |
|
|
if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
|
3169 |
|
|
return exp;
|
3170 |
|
|
|
3171 |
|
|
new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
|
3172 |
|
|
break;
|
3173 |
|
|
|
3174 |
|
|
case 3:
|
3175 |
|
|
op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
|
3176 |
|
|
op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
|
3177 |
|
|
op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
|
3178 |
|
|
|
3179 |
|
|
if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
|
3180 |
|
|
&& op2 == TREE_OPERAND (exp, 2))
|
3181 |
|
|
return exp;
|
3182 |
|
|
|
3183 |
|
|
new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
|
3184 |
|
|
break;
|
3185 |
|
|
|
3186 |
|
|
case 4:
|
3187 |
|
|
op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
|
3188 |
|
|
op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
|
3189 |
|
|
op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
|
3190 |
|
|
op3 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 3), f, r);
|
3191 |
|
|
|
3192 |
|
|
if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
|
3193 |
|
|
&& op2 == TREE_OPERAND (exp, 2)
|
3194 |
|
|
&& op3 == TREE_OPERAND (exp, 3))
|
3195 |
|
|
return exp;
|
3196 |
|
|
|
3197 |
|
|
new_tree
|
3198 |
|
|
= fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
|
3199 |
|
|
break;
|
3200 |
|
|
|
3201 |
|
|
default:
|
3202 |
|
|
gcc_unreachable ();
|
3203 |
|
|
}
|
3204 |
|
|
break;
|
3205 |
|
|
|
3206 |
|
|
case tcc_vl_exp:
|
3207 |
|
|
{
|
3208 |
|
|
int i;
|
3209 |
|
|
|
3210 |
|
|
new_tree = NULL_TREE;
|
3211 |
|
|
|
3212 |
|
|
/* If we are trying to replace F with a constant, inline back
|
3213 |
|
|
functions which do nothing else than computing a value from
|
3214 |
|
|
the arguments they are passed. This makes it possible to
|
3215 |
|
|
fold partially or entirely the replacement expression. */
|
3216 |
|
|
if (CONSTANT_CLASS_P (r) && code == CALL_EXPR)
|
3217 |
|
|
{
|
3218 |
|
|
tree t = maybe_inline_call_in_expr (exp);
|
3219 |
|
|
if (t)
|
3220 |
|
|
return SUBSTITUTE_IN_EXPR (t, f, r);
|
3221 |
|
|
}
|
3222 |
|
|
|
3223 |
|
|
for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
|
3224 |
|
|
{
|
3225 |
|
|
tree op = TREE_OPERAND (exp, i);
|
3226 |
|
|
tree new_op = SUBSTITUTE_IN_EXPR (op, f, r);
|
3227 |
|
|
if (new_op != op)
|
3228 |
|
|
{
|
3229 |
|
|
if (!new_tree)
|
3230 |
|
|
new_tree = copy_node (exp);
|
3231 |
|
|
TREE_OPERAND (new_tree, i) = new_op;
|
3232 |
|
|
}
|
3233 |
|
|
}
|
3234 |
|
|
|
3235 |
|
|
if (new_tree)
|
3236 |
|
|
{
|
3237 |
|
|
new_tree = fold (new_tree);
|
3238 |
|
|
if (TREE_CODE (new_tree) == CALL_EXPR)
|
3239 |
|
|
process_call_operands (new_tree);
|
3240 |
|
|
}
|
3241 |
|
|
else
|
3242 |
|
|
return exp;
|
3243 |
|
|
}
|
3244 |
|
|
break;
|
3245 |
|
|
|
3246 |
|
|
default:
|
3247 |
|
|
gcc_unreachable ();
|
3248 |
|
|
}
|
3249 |
|
|
|
3250 |
|
|
TREE_READONLY (new_tree) |= TREE_READONLY (exp);
|
3251 |
|
|
|
3252 |
|
|
if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
|
3253 |
|
|
TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);
|
3254 |
|
|
|
3255 |
|
|
return new_tree;
|
3256 |
|
|
}
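
/* Usage sketch (editorial illustration, not part of the original sources):
   Ada-style self-referential types store sizes that refer to their
   containing object through a PLACEHOLDER_EXPR.  Given such a size
   expression SIZE that reads field F of the placeholder, a caller that
   knows the concrete value VAL of that field could write

     tree concrete_size = substitute_in_expr (size, f, val);

   and get back SIZE with every <PLACEHOLDER_EXPR>.F replaced by VAL and
   folded where possible; SIZE is returned unchanged when nothing was
   substituted.  */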

/* Similar, but look for a PLACEHOLDER_EXPR in EXP and find a replacement
   for it within OBJ, a tree that is an object or a chain of references.  */

tree
substitute_placeholder_in_expr (tree exp, tree obj)
{
  enum tree_code code = TREE_CODE (exp);
  tree op0, op1, op2, op3;
  tree new_tree;

  /* If this is a PLACEHOLDER_EXPR, see if we find a corresponding type
     in the chain of OBJ.  */
  if (code == PLACEHOLDER_EXPR)
    {
      tree need_type = TYPE_MAIN_VARIANT (TREE_TYPE (exp));
      tree elt;

      for (elt = obj; elt != 0;
           elt = ((TREE_CODE (elt) == COMPOUND_EXPR
                   || TREE_CODE (elt) == COND_EXPR)
                  ? TREE_OPERAND (elt, 1)
                  : (REFERENCE_CLASS_P (elt)
                     || UNARY_CLASS_P (elt)
                     || BINARY_CLASS_P (elt)
                     || VL_EXP_CLASS_P (elt)
                     || EXPRESSION_CLASS_P (elt))
                  ? TREE_OPERAND (elt, 0) : 0))
        if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
          return elt;

      for (elt = obj; elt != 0;
           elt = ((TREE_CODE (elt) == COMPOUND_EXPR
                   || TREE_CODE (elt) == COND_EXPR)
                  ? TREE_OPERAND (elt, 1)
                  : (REFERENCE_CLASS_P (elt)
                     || UNARY_CLASS_P (elt)
                     || BINARY_CLASS_P (elt)
                     || VL_EXP_CLASS_P (elt)
                     || EXPRESSION_CLASS_P (elt))
                  ? TREE_OPERAND (elt, 0) : 0))
        if (POINTER_TYPE_P (TREE_TYPE (elt))
            && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
                == need_type))
          return fold_build1 (INDIRECT_REF, need_type, elt);

      /* If we didn't find it, return the original PLACEHOLDER_EXPR.  If it
         survives until RTL generation, there will be an error.  */
      return exp;
    }

  /* TREE_LIST is special because we need to look at TREE_VALUE
     and TREE_CHAIN, not TREE_OPERANDS.  */
  else if (code == TREE_LIST)
    {
      op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), obj);
      op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), obj);
      if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
        return exp;

      return tree_cons (TREE_PURPOSE (exp), op1, op0);
    }
  else
    switch (TREE_CODE_CLASS (code))
      {
      case tcc_constant:
      case tcc_declaration:
        return exp;

      case tcc_exceptional:
      case tcc_unary:
      case tcc_binary:
      case tcc_comparison:
      case tcc_expression:
      case tcc_reference:
      case tcc_statement:
        switch (TREE_CODE_LENGTH (code))
          {
          case 0:
            return exp;

          case 1:
            op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
            if (op0 == TREE_OPERAND (exp, 0))
              return exp;

            new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
            break;

          case 2:
            op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
            op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);

            if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
              return exp;

            new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
            break;

          case 3:
            op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
            op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
            op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);

            if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
                && op2 == TREE_OPERAND (exp, 2))
              return exp;

            new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
            break;

          case 4:
            op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
            op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
            op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);
            op3 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 3), obj);

            if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
                && op2 == TREE_OPERAND (exp, 2)
                && op3 == TREE_OPERAND (exp, 3))
              return exp;

            new_tree
              = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
            break;

          default:
            gcc_unreachable ();
          }
        break;

      case tcc_vl_exp:
        {
          int i;

          new_tree = NULL_TREE;

          for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
            {
              tree op = TREE_OPERAND (exp, i);
              tree new_op = SUBSTITUTE_PLACEHOLDER_IN_EXPR (op, obj);
              if (new_op != op)
                {
                  if (!new_tree)
                    new_tree = copy_node (exp);
                  TREE_OPERAND (new_tree, i) = new_op;
                }
            }

          if (new_tree)
            {
              new_tree = fold (new_tree);
              if (TREE_CODE (new_tree) == CALL_EXPR)
                process_call_operands (new_tree);
            }
          else
            return exp;
        }
        break;

      default:
        gcc_unreachable ();
      }

  TREE_READONLY (new_tree) |= TREE_READONLY (exp);

  if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
    TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);

  return new_tree;
}
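
/* Usage sketch (illustrative only): when a PLACEHOLDER_EXPR must be
   resolved against a concrete object OBJ, callers normally go through
   the SUBSTITUTE_PLACEHOLDER_IN_EXPR macro used above, which
   short-circuits the constant case before calling this function:

     tree sz = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE (type), obj);

   Each PLACEHOLDER_EXPR whose type matches OBJ, or something reachable
   from OBJ by the reference chain walked above, is replaced by that
   (possibly dereferenced) subobject.  */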

/* Stabilize a reference so that we can use it any number of times
   without causing its operands to be evaluated more than once.
   Returns the stabilized reference.  This works by means of save_expr,
   so see the caveats in the comments about save_expr.

   Also allows conversion expressions whose operands are references.
   Any other kind of expression is returned unchanged.  */

tree
stabilize_reference (tree ref)
{
  tree result;
  enum tree_code code = TREE_CODE (ref);

  switch (code)
    {
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      /* No action is needed in this case.  */
      return ref;

    CASE_CONVERT:
    case FLOAT_EXPR:
    case FIX_TRUNC_EXPR:
      result = build_nt (code, stabilize_reference (TREE_OPERAND (ref, 0)));
      break;

    case INDIRECT_REF:
      result = build_nt (INDIRECT_REF,
                         stabilize_reference_1 (TREE_OPERAND (ref, 0)));
      break;

    case COMPONENT_REF:
      result = build_nt (COMPONENT_REF,
                         stabilize_reference (TREE_OPERAND (ref, 0)),
                         TREE_OPERAND (ref, 1), NULL_TREE);
      break;

    case BIT_FIELD_REF:
      result = build_nt (BIT_FIELD_REF,
                         stabilize_reference (TREE_OPERAND (ref, 0)),
                         stabilize_reference_1 (TREE_OPERAND (ref, 1)),
                         stabilize_reference_1 (TREE_OPERAND (ref, 2)));
      break;

    case ARRAY_REF:
      result = build_nt (ARRAY_REF,
                         stabilize_reference (TREE_OPERAND (ref, 0)),
                         stabilize_reference_1 (TREE_OPERAND (ref, 1)),
                         TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
      break;

    case ARRAY_RANGE_REF:
      result = build_nt (ARRAY_RANGE_REF,
                         stabilize_reference (TREE_OPERAND (ref, 0)),
                         stabilize_reference_1 (TREE_OPERAND (ref, 1)),
                         TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
      break;

    case COMPOUND_EXPR:
      /* We cannot wrap the first expression in a SAVE_EXPR, as then
         it wouldn't be ignored.  This matters when dealing with
         volatiles.  */
      return stabilize_reference_1 (ref);

    /* If arg isn't a kind of lvalue we recognize, make no change.
       Caller should recognize the error for an invalid lvalue.  */
    default:
      return ref;

    case ERROR_MARK:
      return error_mark_node;
    }

  TREE_TYPE (result) = TREE_TYPE (ref);
  TREE_READONLY (result) = TREE_READONLY (ref);
  TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (ref);
  TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (ref);

  return result;
}
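
/* Usage sketch (illustrative): front ends call this when an lvalue has to
   be evaluated more than once, e.g. when expanding "ref op= value".  With
   REF being something like a[i++], a hypothetical caller would do

     tree stable = stabilize_reference (ref);

   and then use STABLE on both sides of the generated assignment; the
   side-effecting index ends up wrapped in a SAVE_EXPR by
   stabilize_reference_1 so it is evaluated only once.  */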

/* Subroutine of stabilize_reference; this is called for subtrees of
   references.  Any expression with side-effects must be put in a SAVE_EXPR
   to ensure that it is only evaluated once.

   We don't put SAVE_EXPR nodes around everything, because assigning very
   simple expressions to temporaries causes us to miss good opportunities
   for optimizations.  Among other things, the opportunity to fold in the
   addition of a constant into an addressing mode often gets lost, e.g.
   "y[i+1] += x;".  In general, we take the approach that we should not make
   an assignment unless we are forced into it - i.e., that any non-side effect
   operator should be allowed, and that cse should take care of coalescing
   multiple utterances of the same expression should that prove fruitful.  */

tree
stabilize_reference_1 (tree e)
{
  tree result;
  enum tree_code code = TREE_CODE (e);

  /* We cannot ignore const expressions because it might be a reference
     to a const array but whose index contains side-effects.  But we can
     ignore things that are actual constants or that already have been
     handled by this function.  */

  if (tree_invariant_p (e))
    return e;

  switch (TREE_CODE_CLASS (code))
    {
    case tcc_exceptional:
    case tcc_type:
    case tcc_declaration:
    case tcc_comparison:
    case tcc_statement:
    case tcc_expression:
    case tcc_reference:
    case tcc_vl_exp:
      /* If the expression has side-effects, then encase it in a SAVE_EXPR
         so that it will only be evaluated once.  */
      /* The reference (r) and comparison (<) classes could be handled as
         below, but it is generally faster to only evaluate them once.  */
      if (TREE_SIDE_EFFECTS (e))
        return save_expr (e);
      return e;

    case tcc_constant:
      /* Constants need no processing.  In fact, we should never reach
         here.  */
      return e;

    case tcc_binary:
      /* Division is slow and tends to be compiled with jumps,
         especially the division by powers of 2 that is often
         found inside of an array reference.  So do it just once.  */
      if (code == TRUNC_DIV_EXPR || code == TRUNC_MOD_EXPR
          || code == FLOOR_DIV_EXPR || code == FLOOR_MOD_EXPR
          || code == CEIL_DIV_EXPR || code == CEIL_MOD_EXPR
          || code == ROUND_DIV_EXPR || code == ROUND_MOD_EXPR)
        return save_expr (e);
      /* Recursively stabilize each operand.  */
      result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)),
                         stabilize_reference_1 (TREE_OPERAND (e, 1)));
      break;

    case tcc_unary:
      /* Recursively stabilize each operand.  */
      result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)));
      break;

    default:
      gcc_unreachable ();
    }

  TREE_TYPE (result) = TREE_TYPE (e);
  TREE_READONLY (result) = TREE_READONLY (e);
  TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (e);
  TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (e);

  return result;
}

/* Low-level constructors for expressions.  */

/* A helper function for build1 and constant folders.  Set TREE_CONSTANT,
   and TREE_SIDE_EFFECTS for an ADDR_EXPR.  */

void
recompute_tree_invariant_for_addr_expr (tree t)
{
  tree node;
  bool tc = true, se = false;

  /* We started out assuming this address is both invariant and constant, but
     does not have side effects.  Now go down any handled components and see if
     any of them involve offsets that are either non-constant or non-invariant.
     Also check for side-effects.

     ??? Note that this code makes no attempt to deal with the case where
     taking the address of something causes a copy due to misalignment.  */

#define UPDATE_FLAGS(NODE)  \
do { tree _node = (NODE); \
     if (_node && !TREE_CONSTANT (_node)) tc = false; \
     if (_node && TREE_SIDE_EFFECTS (_node)) se = true; } while (0)

  for (node = TREE_OPERAND (t, 0); handled_component_p (node);
       node = TREE_OPERAND (node, 0))
    {
      /* If the first operand doesn't have an ARRAY_TYPE, this is a bogus
         array reference (probably made temporarily by the G++ front end),
         so ignore all the operands.  */
      if ((TREE_CODE (node) == ARRAY_REF
           || TREE_CODE (node) == ARRAY_RANGE_REF)
          && TREE_CODE (TREE_TYPE (TREE_OPERAND (node, 0))) == ARRAY_TYPE)
        {
          UPDATE_FLAGS (TREE_OPERAND (node, 1));
          if (TREE_OPERAND (node, 2))
            UPDATE_FLAGS (TREE_OPERAND (node, 2));
          if (TREE_OPERAND (node, 3))
            UPDATE_FLAGS (TREE_OPERAND (node, 3));
        }
      /* Likewise, just because this is a COMPONENT_REF doesn't mean we have a
         FIELD_DECL, apparently.  The G++ front end can put something else
         there, at least temporarily.  */
      else if (TREE_CODE (node) == COMPONENT_REF
               && TREE_CODE (TREE_OPERAND (node, 1)) == FIELD_DECL)
        {
          if (TREE_OPERAND (node, 2))
            UPDATE_FLAGS (TREE_OPERAND (node, 2));
        }
      else if (TREE_CODE (node) == BIT_FIELD_REF)
        UPDATE_FLAGS (TREE_OPERAND (node, 2));
    }

  node = lang_hooks.expr_to_decl (node, &tc, &se);

  /* Now see what's inside.  If it's an INDIRECT_REF, copy our properties from
     the address, since &(*a)->b is a form of addition.  If it's a constant, the
     address is constant too.  If it's a decl, its address is constant if the
     decl is static.  Everything else is not constant and, furthermore,
     taking the address of a volatile variable is not volatile.  */
  if (TREE_CODE (node) == INDIRECT_REF
      || TREE_CODE (node) == MEM_REF)
    UPDATE_FLAGS (TREE_OPERAND (node, 0));
  else if (CONSTANT_CLASS_P (node))
    ;
  else if (DECL_P (node))
    tc &= (staticp (node) != NULL_TREE);
  else
    {
      tc = false;
      se |= TREE_SIDE_EFFECTS (node);
    }

  TREE_CONSTANT (t) = tc;
  TREE_SIDE_EFFECTS (t) = se;
#undef UPDATE_FLAGS
}

/* Build an expression of code CODE, data type TYPE, and operands as
   specified.  Expressions and reference nodes can be created this way.
   Constants, decls, types and misc nodes cannot be.

   We define 5 non-variadic functions, from 0 to 4 arguments.  This is
   enough for all extant tree codes.  */

tree
build0_stat (enum tree_code code, tree tt MEM_STAT_DECL)
{
  tree t;

  gcc_assert (TREE_CODE_LENGTH (code) == 0);

  t = make_node_stat (code PASS_MEM_STAT);
  TREE_TYPE (t) = tt;

  return t;
}

tree
build1_stat (enum tree_code code, tree type, tree node MEM_STAT_DECL)
{
  int length = sizeof (struct tree_exp);
  tree t;

  record_node_allocation_statistics (code, length);

  gcc_assert (TREE_CODE_LENGTH (code) == 1);

  t = ggc_alloc_zone_tree_node_stat (&tree_zone, length PASS_MEM_STAT);

  memset (t, 0, sizeof (struct tree_common));

  TREE_SET_CODE (t, code);

  TREE_TYPE (t) = type;
  SET_EXPR_LOCATION (t, UNKNOWN_LOCATION);
  TREE_OPERAND (t, 0) = node;
  TREE_BLOCK (t) = NULL_TREE;
  if (node && !TYPE_P (node))
    {
      TREE_SIDE_EFFECTS (t) = TREE_SIDE_EFFECTS (node);
      TREE_READONLY (t) = TREE_READONLY (node);
    }

  if (TREE_CODE_CLASS (code) == tcc_statement)
    TREE_SIDE_EFFECTS (t) = 1;
  else switch (code)
    {
    case VA_ARG_EXPR:
      /* All of these have side-effects, no matter what their
         operands are.  */
      TREE_SIDE_EFFECTS (t) = 1;
      TREE_READONLY (t) = 0;
      break;

    case INDIRECT_REF:
      /* Whether a dereference is readonly has nothing to do with whether
         its operand is readonly.  */
      TREE_READONLY (t) = 0;
      break;

    case ADDR_EXPR:
      if (node)
        recompute_tree_invariant_for_addr_expr (t);
      break;

    default:
      if ((TREE_CODE_CLASS (code) == tcc_unary || code == VIEW_CONVERT_EXPR)
          && node && !TYPE_P (node)
          && TREE_CONSTANT (node))
        TREE_CONSTANT (t) = 1;
      if (TREE_CODE_CLASS (code) == tcc_reference
          && node && TREE_THIS_VOLATILE (node))
        TREE_THIS_VOLATILE (t) = 1;
      break;
    }

  return t;
}
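
/* Usage sketch (illustrative, using only interfaces visible in this file):
   the buildN entry points are the normal way to create expression nodes,
   for example a conversion of OP to TYPE or the sum of two operands:

     tree cast = build1 (NOP_EXPR, type, op);
     tree sum  = build2 (PLUS_EXPR, TREE_TYPE (a), a, b);

   TREE_CONSTANT, TREE_READONLY and TREE_SIDE_EFFECTS of the result are
   derived from the operands as implemented above.  */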

#define PROCESS_ARG(N)  \
  do { \
    TREE_OPERAND (t, N) = arg##N; \
    if (arg##N && !TYPE_P (arg##N)) \
      { \
        if (TREE_SIDE_EFFECTS (arg##N)) \
          side_effects = 1; \
        if (!TREE_READONLY (arg##N) \
            && !CONSTANT_CLASS_P (arg##N)) \
          (void) (read_only = 0); \
        if (!TREE_CONSTANT (arg##N)) \
          (void) (constant = 0); \
      } \
  } while (0)

tree
build2_stat (enum tree_code code, tree tt, tree arg0, tree arg1 MEM_STAT_DECL)
{
  bool constant, read_only, side_effects;
  tree t;

  gcc_assert (TREE_CODE_LENGTH (code) == 2);

  if ((code == MINUS_EXPR || code == PLUS_EXPR || code == MULT_EXPR)
      && arg0 && arg1 && tt && POINTER_TYPE_P (tt)
      /* When sizetype precision doesn't match that of pointers
         we need to be able to build explicit extensions or truncations
         of the offset argument.  */
      && TYPE_PRECISION (sizetype) == TYPE_PRECISION (tt))
    gcc_assert (TREE_CODE (arg0) == INTEGER_CST
                && TREE_CODE (arg1) == INTEGER_CST);

  if (code == POINTER_PLUS_EXPR && arg0 && arg1 && tt)
    gcc_assert (POINTER_TYPE_P (tt) && POINTER_TYPE_P (TREE_TYPE (arg0))
                && ptrofftype_p (TREE_TYPE (arg1)));

  t = make_node_stat (code PASS_MEM_STAT);
  TREE_TYPE (t) = tt;

  /* Below, we automatically set TREE_SIDE_EFFECTS and TREE_READONLY for the
     result based on those same flags for the arguments.  But if the
     arguments aren't really even `tree' expressions, we shouldn't be trying
     to do this.  */

  /* Expressions without side effects may be constant if their
     arguments are as well.  */
  constant = (TREE_CODE_CLASS (code) == tcc_comparison
              || TREE_CODE_CLASS (code) == tcc_binary);
  read_only = 1;
  side_effects = TREE_SIDE_EFFECTS (t);

  PROCESS_ARG(0);
  PROCESS_ARG(1);

  TREE_READONLY (t) = read_only;
  TREE_CONSTANT (t) = constant;
  TREE_SIDE_EFFECTS (t) = side_effects;
  TREE_THIS_VOLATILE (t)
    = (TREE_CODE_CLASS (code) == tcc_reference
       && arg0 && TREE_THIS_VOLATILE (arg0));

  return t;
}
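
/* Usage sketch (illustrative): the assertion above spells out the
   POINTER_PLUS_EXPR contract: the result and the first operand are
   pointers and the second operand has pointer-offset type, so a
   conforming use adds a byte offset in sizetype rather than another
   pointer:

     tree p1 = build2 (POINTER_PLUS_EXPR, TREE_TYPE (ptr), ptr,
                       build_int_cst (sizetype, 4));

   i.e. an offset of 4 bytes expressed in sizetype.  */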

tree
build3_stat (enum tree_code code, tree tt, tree arg0, tree arg1,
             tree arg2 MEM_STAT_DECL)
{
  bool constant, read_only, side_effects;
  tree t;

  gcc_assert (TREE_CODE_LENGTH (code) == 3);
  gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);

  t = make_node_stat (code PASS_MEM_STAT);
  TREE_TYPE (t) = tt;

  read_only = 1;

  /* As a special exception, if COND_EXPR has NULL branches, we
     assume that it is a gimple statement and always consider
     it to have side effects.  */
  if (code == COND_EXPR
      && tt == void_type_node
      && arg1 == NULL_TREE
      && arg2 == NULL_TREE)
    side_effects = true;
  else
    side_effects = TREE_SIDE_EFFECTS (t);

  PROCESS_ARG(0);
  PROCESS_ARG(1);
  PROCESS_ARG(2);

  if (code == COND_EXPR)
    TREE_READONLY (t) = read_only;

  TREE_SIDE_EFFECTS (t) = side_effects;
  TREE_THIS_VOLATILE (t)
    = (TREE_CODE_CLASS (code) == tcc_reference
       && arg0 && TREE_THIS_VOLATILE (arg0));

  return t;
}

tree
build4_stat (enum tree_code code, tree tt, tree arg0, tree arg1,
             tree arg2, tree arg3 MEM_STAT_DECL)
{
  bool constant, read_only, side_effects;
  tree t;

  gcc_assert (TREE_CODE_LENGTH (code) == 4);

  t = make_node_stat (code PASS_MEM_STAT);
  TREE_TYPE (t) = tt;

  side_effects = TREE_SIDE_EFFECTS (t);

  PROCESS_ARG(0);
  PROCESS_ARG(1);
  PROCESS_ARG(2);
  PROCESS_ARG(3);

  TREE_SIDE_EFFECTS (t) = side_effects;
  TREE_THIS_VOLATILE (t)
    = (TREE_CODE_CLASS (code) == tcc_reference
       && arg0 && TREE_THIS_VOLATILE (arg0));

  return t;
}

tree
build5_stat (enum tree_code code, tree tt, tree arg0, tree arg1,
             tree arg2, tree arg3, tree arg4 MEM_STAT_DECL)
{
  bool constant, read_only, side_effects;
  tree t;

  gcc_assert (TREE_CODE_LENGTH (code) == 5);

  t = make_node_stat (code PASS_MEM_STAT);
  TREE_TYPE (t) = tt;

  side_effects = TREE_SIDE_EFFECTS (t);

  PROCESS_ARG(0);
  PROCESS_ARG(1);
  PROCESS_ARG(2);
  PROCESS_ARG(3);
  PROCESS_ARG(4);

  TREE_SIDE_EFFECTS (t) = side_effects;
  TREE_THIS_VOLATILE (t)
    = (TREE_CODE_CLASS (code) == tcc_reference
       && arg0 && TREE_THIS_VOLATILE (arg0));

  return t;
}

tree
build6_stat (enum tree_code code, tree tt, tree arg0, tree arg1,
             tree arg2, tree arg3, tree arg4, tree arg5 MEM_STAT_DECL)
{
  bool constant, read_only, side_effects;
  tree t;

  gcc_assert (code == TARGET_MEM_REF);

  t = make_node_stat (code PASS_MEM_STAT);
  TREE_TYPE (t) = tt;

  side_effects = TREE_SIDE_EFFECTS (t);

  PROCESS_ARG(0);
  PROCESS_ARG(1);
  PROCESS_ARG(2);
  PROCESS_ARG(3);
  PROCESS_ARG(4);
  if (code == TARGET_MEM_REF)
    side_effects = 0;
  PROCESS_ARG(5);

  TREE_SIDE_EFFECTS (t) = side_effects;
  TREE_THIS_VOLATILE (t)
    = (code == TARGET_MEM_REF
       && arg5 && TREE_THIS_VOLATILE (arg5));

  return t;
}

/* Build a simple MEM_REF tree with the semantics of a plain INDIRECT_REF
   on the pointer PTR.  */

tree
build_simple_mem_ref_loc (location_t loc, tree ptr)
{
  HOST_WIDE_INT offset = 0;
  tree ptype = TREE_TYPE (ptr);
  tree tem;
  /* For convenience allow addresses that collapse to a simple base
     and offset.  */
  if (TREE_CODE (ptr) == ADDR_EXPR
      && (handled_component_p (TREE_OPERAND (ptr, 0))
          || TREE_CODE (TREE_OPERAND (ptr, 0)) == MEM_REF))
    {
      ptr = get_addr_base_and_unit_offset (TREE_OPERAND (ptr, 0), &offset);
      gcc_assert (ptr);
      ptr = build_fold_addr_expr (ptr);
      gcc_assert (is_gimple_reg (ptr) || is_gimple_min_invariant (ptr));
    }
  tem = build2 (MEM_REF, TREE_TYPE (ptype),
                ptr, build_int_cst (ptype, offset));
  SET_EXPR_LOCATION (tem, loc);
  return tem;
}
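
/* Usage sketch (illustrative): tree.h also provides a build_simple_mem_ref
   macro that passes UNKNOWN_LOCATION to this function.  Dereferencing a
   pointer PTR (an SSA name or invariant address) is then simply

     tree deref = build_simple_mem_ref_loc (loc, ptr);

   which yields MEM_REF <ptr, 0> of the pointed-to type, with a constant
   base-plus-offset address folded into the offset operand as above.  */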

/* Return the constant offset of a MEM_REF or TARGET_MEM_REF tree T.  */

double_int
mem_ref_offset (const_tree t)
{
  tree toff = TREE_OPERAND (t, 1);
  return double_int_sext (tree_to_double_int (toff),
                          TYPE_PRECISION (TREE_TYPE (toff)));
}

/* Return the pointer-type relevant for TBAA purposes from the
   gimple memory reference tree T.  This is the type to be used for
   the offset operand of MEM_REF or TARGET_MEM_REF replacements of T.  */

tree
reference_alias_ptr_type (const_tree t)
{
  const_tree base = t;
  while (handled_component_p (base))
    base = TREE_OPERAND (base, 0);
  if (TREE_CODE (base) == MEM_REF)
    return TREE_TYPE (TREE_OPERAND (base, 1));
  else if (TREE_CODE (base) == TARGET_MEM_REF)
    return TREE_TYPE (TMR_OFFSET (base));
  else
    return build_pointer_type (TYPE_MAIN_VARIANT (TREE_TYPE (base)));
}

/* Return an invariant ADDR_EXPR of type TYPE taking the address of BASE
   offsetted by OFFSET units.  */

tree
build_invariant_address (tree type, tree base, HOST_WIDE_INT offset)
{
  tree ref = fold_build2 (MEM_REF, TREE_TYPE (type),
                          build_fold_addr_expr (base),
                          build_int_cst (ptr_type_node, offset));
  tree addr = build1 (ADDR_EXPR, type, ref);
  recompute_tree_invariant_for_addr_expr (addr);
  return addr;
}

/* Similar except don't specify the TREE_TYPE
   and leave the TREE_SIDE_EFFECTS as 0.
   It is permissible for arguments to be null,
   or even garbage if their values do not matter.  */

tree
build_nt (enum tree_code code, ...)
{
  tree t;
  int length;
  int i;
  va_list p;

  gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);

  va_start (p, code);

  t = make_node (code);
  length = TREE_CODE_LENGTH (code);

  for (i = 0; i < length; i++)
    TREE_OPERAND (t, i) = va_arg (p, tree);

  va_end (p);
  return t;
}

/* Similar to build_nt, but for creating a CALL_EXPR object with a
   tree VEC.  */

tree
build_nt_call_vec (tree fn, VEC(tree,gc) *args)
{
  tree ret, t;
  unsigned int ix;

  ret = build_vl_exp (CALL_EXPR, VEC_length (tree, args) + 3);
  CALL_EXPR_FN (ret) = fn;
  CALL_EXPR_STATIC_CHAIN (ret) = NULL_TREE;
  FOR_EACH_VEC_ELT (tree, args, ix, t)
    CALL_EXPR_ARG (ret, ix) = t;
  return ret;
}
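
/* Usage sketch (illustrative): build_nt and build_nt_call_vec create
   untyped "parse" nodes, so they are mainly for front ends.  A call with
   two already-parsed arguments could be assembled as

     VEC(tree,gc) *args = NULL;
     VEC_safe_push (tree, gc, args, arg0);
     VEC_safe_push (tree, gc, args, arg1);
     tree call = build_nt_call_vec (fn, args);

   The resulting CALL_EXPR has no type and no side-effect flags; those are
   filled in later when the call is actually typed.  */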

/* Create a DECL_... node of code CODE, name NAME and data type TYPE.
   We do NOT enter this node in any sort of symbol table.

   LOC is the location of the decl.

   layout_decl is used to set up the decl's storage layout.
   Other slots are initialized to 0 or null pointers.  */

tree
build_decl_stat (location_t loc, enum tree_code code, tree name,
                 tree type MEM_STAT_DECL)
{
  tree t;

  t = make_node_stat (code PASS_MEM_STAT);
  DECL_SOURCE_LOCATION (t) = loc;

  /*  if (type == error_mark_node)
      type = integer_type_node; */
  /* That is not done, deliberately, so that having error_mark_node
     as the type can suppress useless errors in the use of this variable.  */

  DECL_NAME (t) = name;
  TREE_TYPE (t) = type;

  if (code == VAR_DECL || code == PARM_DECL || code == RESULT_DECL)
    layout_decl (t, 0);

  return t;
}

/* Builds and returns a function declaration with NAME and TYPE.  */

tree
build_fn_decl (const char *name, tree type)
{
  tree id = get_identifier (name);
  tree decl = build_decl (input_location, FUNCTION_DECL, id, type);

  DECL_EXTERNAL (decl) = 1;
  TREE_PUBLIC (decl) = 1;
  DECL_ARTIFICIAL (decl) = 1;
  TREE_NOTHROW (decl) = 1;

  return decl;
}
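
/* Usage sketch (illustrative; the helper name is hypothetical): middle-end
   code uses build_fn_decl to fabricate an external helper declaration,
   e.g.

     tree fntype = build_function_type_list (void_type_node, NULL_TREE);
     tree decl = build_fn_decl ("__my_helper", fntype);

   The result is external, public, artificial and nothrow, exactly as set
   above.  */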

VEC(tree,gc) *all_translation_units;

/* Builds a new translation-unit decl with name NAME, queues it in the
   global list of translation-unit decls and returns it.  */

tree
build_translation_unit_decl (tree name)
{
  tree tu = build_decl (UNKNOWN_LOCATION, TRANSLATION_UNIT_DECL,
                        name, NULL_TREE);
  TRANSLATION_UNIT_LANGUAGE (tu) = lang_hooks.name;
  VEC_safe_push (tree, gc, all_translation_units, tu);
  return tu;
}

/* BLOCK nodes are used to represent the structure of binding contours
   and declarations, once those contours have been exited and their contents
   compiled.  This information is used for outputting debugging info.  */

tree
build_block (tree vars, tree subblocks, tree supercontext, tree chain)
{
  tree block = make_node (BLOCK);

  BLOCK_VARS (block) = vars;
  BLOCK_SUBBLOCKS (block) = subblocks;
  BLOCK_SUPERCONTEXT (block) = supercontext;
  BLOCK_CHAIN (block) = chain;
  return block;
}

/* Like SET_EXPR_LOCATION, but make sure the tree can have a location.

   LOC is the location to use in tree T.  */

void
protected_set_expr_location (tree t, location_t loc)
{
  if (t && CAN_HAVE_LOCATION_P (t))
    SET_EXPR_LOCATION (t, loc);
}

/* Return a declaration like DDECL except that its DECL_ATTRIBUTES
   is ATTRIBUTE.  */

tree
build_decl_attribute_variant (tree ddecl, tree attribute)
{
  DECL_ATTRIBUTES (ddecl) = attribute;
  return ddecl;
}

/* Borrowed from hashtab.c iterative_hash implementation.  */
#define mix(a,b,c) \
{ \
  a -= b; a -= c; a ^= (c>>13); \
  b -= c; b -= a; b ^= (a<< 8); \
  c -= a; c -= b; c ^= ((b&0xffffffff)>>13); \
  a -= b; a -= c; a ^= ((c&0xffffffff)>>12); \
  b -= c; b -= a; b = (b ^ (a<<16)) & 0xffffffff; \
  c -= a; c -= b; c = (c ^ (b>> 5)) & 0xffffffff; \
  a -= b; a -= c; a = (a ^ (c>> 3)) & 0xffffffff; \
  b -= c; b -= a; b = (b ^ (a<<10)) & 0xffffffff; \
  c -= a; c -= b; c = (c ^ (b>>15)) & 0xffffffff; \
}

/* Produce good hash value combining VAL and VAL2.  */
hashval_t
iterative_hash_hashval_t (hashval_t val, hashval_t val2)
{
  /* the golden ratio; an arbitrary value.  */
  hashval_t a = 0x9e3779b9;

  mix (a, val, val2);
  return val2;
}

/* Produce good hash value combining VAL and VAL2.  */
hashval_t
iterative_hash_host_wide_int (HOST_WIDE_INT val, hashval_t val2)
{
  if (sizeof (HOST_WIDE_INT) == sizeof (hashval_t))
    return iterative_hash_hashval_t (val, val2);
  else
    {
      hashval_t a = (hashval_t) val;
      /* Avoid warnings about shifting of more than the width of the type on
         hosts that won't execute this path.  */
      int zero = 0;
      hashval_t b = (hashval_t) (val >> (sizeof (hashval_t) * 8 + zero));
      mix (a, b, val2);
      if (sizeof (HOST_WIDE_INT) > 2 * sizeof (hashval_t))
        {
          hashval_t a = (hashval_t) (val >> (sizeof (hashval_t) * 16 + zero));
          hashval_t b = (hashval_t) (val >> (sizeof (hashval_t) * 24 + zero));
          mix (a, b, val2);
        }
      return val2;
    }
}
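
/* Usage sketch (illustrative): these helpers are chained, feeding the
   previous result back in as VAL2, to combine several values into one
   hash:

     hashval_t h = 0;
     h = iterative_hash_hashval_t (TYPE_HASH (type), h);
     h = iterative_hash_host_wide_int (TREE_INT_CST_LOW (cst), h);

   This mirrors how hash codes are accumulated by the type-hashing code
   elsewhere in this file.  */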

/* Return a type like TTYPE except that its TYPE_ATTRIBUTE
   is ATTRIBUTE and its qualifiers are QUALS.

   Record such modified types already made so we don't make duplicates.  */

tree
build_type_attribute_qual_variant (tree ttype, tree attribute, int quals)
{
  if (! attribute_list_equal (TYPE_ATTRIBUTES (ttype), attribute))
    {
      hashval_t hashcode = 0;
      tree ntype;
      enum tree_code code = TREE_CODE (ttype);

      /* Building a distinct copy of a tagged type is inappropriate; it
         causes breakage in code that expects there to be a one-to-one
         relationship between a struct and its fields.
         build_duplicate_type is another solution (as used in
         handle_transparent_union_attribute), but that doesn't play well
         with the stronger C++ type identity model.  */
      if (TREE_CODE (ttype) == RECORD_TYPE
          || TREE_CODE (ttype) == UNION_TYPE
          || TREE_CODE (ttype) == QUAL_UNION_TYPE
          || TREE_CODE (ttype) == ENUMERAL_TYPE)
        {
          warning (OPT_Wattributes,
                   "ignoring attributes applied to %qT after definition",
                   TYPE_MAIN_VARIANT (ttype));
          return build_qualified_type (ttype, quals);
        }

      ttype = build_qualified_type (ttype, TYPE_UNQUALIFIED);
      ntype = build_distinct_type_copy (ttype);

      TYPE_ATTRIBUTES (ntype) = attribute;

      hashcode = iterative_hash_object (code, hashcode);
      if (TREE_TYPE (ntype))
        hashcode = iterative_hash_object (TYPE_HASH (TREE_TYPE (ntype)),
                                          hashcode);
      hashcode = attribute_hash_list (attribute, hashcode);

      switch (TREE_CODE (ntype))
        {
        case FUNCTION_TYPE:
          hashcode = type_hash_list (TYPE_ARG_TYPES (ntype), hashcode);
          break;
        case ARRAY_TYPE:
          if (TYPE_DOMAIN (ntype))
            hashcode = iterative_hash_object (TYPE_HASH (TYPE_DOMAIN (ntype)),
                                              hashcode);
          break;
        case INTEGER_TYPE:
          hashcode = iterative_hash_object
            (TREE_INT_CST_LOW (TYPE_MAX_VALUE (ntype)), hashcode);
          hashcode = iterative_hash_object
            (TREE_INT_CST_HIGH (TYPE_MAX_VALUE (ntype)), hashcode);
          break;
        case REAL_TYPE:
        case FIXED_POINT_TYPE:
          {
            unsigned int precision = TYPE_PRECISION (ntype);
            hashcode = iterative_hash_object (precision, hashcode);
          }
          break;
        default:
          break;
        }

      ntype = type_hash_canon (hashcode, ntype);

      /* If the target-dependent attributes make NTYPE different from
         its canonical type, we will need to use structural equality
         checks for this type.  */
      if (TYPE_STRUCTURAL_EQUALITY_P (ttype)
          || !comp_type_attributes (ntype, ttype))
        SET_TYPE_STRUCTURAL_EQUALITY (ntype);
      else if (TYPE_CANONICAL (ntype) == ntype)
        TYPE_CANONICAL (ntype) = TYPE_CANONICAL (ttype);

      ttype = build_qualified_type (ntype, quals);
    }
  else if (TYPE_QUALS (ttype) != quals)
    ttype = build_qualified_type (ttype, quals);

  return ttype;
}

/* Compare two attributes for their value identity.  Return true if the
   attribute values are known to be equal; otherwise return false.  */

static bool
attribute_value_equal (const_tree attr1, const_tree attr2)
{
  if (TREE_VALUE (attr1) == TREE_VALUE (attr2))
    return true;

  if (TREE_VALUE (attr1) != NULL_TREE
      && TREE_CODE (TREE_VALUE (attr1)) == TREE_LIST
      && TREE_VALUE (attr2) != NULL
      && TREE_CODE (TREE_VALUE (attr2)) == TREE_LIST)
    return (simple_cst_list_equal (TREE_VALUE (attr1),
                                   TREE_VALUE (attr2)) == 1);

  return (simple_cst_equal (TREE_VALUE (attr1), TREE_VALUE (attr2)) == 1);
}

/* Return 0 if the attributes for two types are incompatible, 1 if they
   are compatible, and 2 if they are nearly compatible (which causes a
   warning to be generated).  */
int
comp_type_attributes (const_tree type1, const_tree type2)
{
  const_tree a1 = TYPE_ATTRIBUTES (type1);
  const_tree a2 = TYPE_ATTRIBUTES (type2);
  const_tree a;

  if (a1 == a2)
    return 1;
  for (a = a1; a != NULL_TREE; a = TREE_CHAIN (a))
    {
      const struct attribute_spec *as;
      const_tree attr;

      as = lookup_attribute_spec (TREE_PURPOSE (a));
      if (!as || as->affects_type_identity == false)
        continue;

      attr = lookup_attribute (as->name, CONST_CAST_TREE (a2));
      if (!attr || !attribute_value_equal (a, attr))
        break;
    }
  if (!a)
    {
      for (a = a2; a != NULL_TREE; a = TREE_CHAIN (a))
        {
          const struct attribute_spec *as;

          as = lookup_attribute_spec (TREE_PURPOSE (a));
          if (!as || as->affects_type_identity == false)
            continue;

          if (!lookup_attribute (as->name, CONST_CAST_TREE (a1)))
            break;
          /* We don't need to compare trees again, as we did this
             already in the first loop.  */
        }
      /* All attributes affecting type identity are equal, so
         there is no need to call the target hook for comparison.  */
      if (!a)
        return 1;
    }
  /* As some type combinations - like default calling-convention - might
     be compatible, we have to call the target hook to get the final result.  */
  return targetm.comp_type_attributes (type1, type2);
}

/* Return a type like TTYPE except that its TYPE_ATTRIBUTE
   is ATTRIBUTE.

   Record such modified types already made so we don't make duplicates.  */

tree
build_type_attribute_variant (tree ttype, tree attribute)
{
  return build_type_attribute_qual_variant (ttype, attribute,
                                            TYPE_QUALS (ttype));
}
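
/* Usage sketch (illustrative; the attribute name is hypothetical):
   attribute handlers typically cons the new attribute onto the existing
   list and ask for the corresponding variant:

     tree attrs = tree_cons (get_identifier ("my_attr"), NULL_TREE,
                             TYPE_ATTRIBUTES (type));
     type = build_type_attribute_variant (type, attrs);

   Qualifiers are preserved because the _qual_variant worker above
   re-applies TYPE_QUALS (ttype).  */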

/* Reset the expression *EXPR_P, a size or position.

   ??? We could reset all non-constant sizes or positions.  But it's cheap
   enough to not do so and refrain from adding workarounds to dwarf2out.c.

   We need to reset self-referential sizes or positions because they cannot
   be gimplified and thus can contain a CALL_EXPR after the gimplification
   is finished, which will run afoul of LTO streaming.  And they need to be
   reset to something essentially dummy but not constant, so as to preserve
   the properties of the object they are attached to.  */

static inline void
free_lang_data_in_one_sizepos (tree *expr_p)
{
  tree expr = *expr_p;
  if (CONTAINS_PLACEHOLDER_P (expr))
    *expr_p = build0 (PLACEHOLDER_EXPR, TREE_TYPE (expr));
}


/* Reset all the fields in a binfo node BINFO.  We only keep
   BINFO_VTABLE, which is used by gimple_fold_obj_type_ref.  */

static void
free_lang_data_in_binfo (tree binfo)
{
  unsigned i;
  tree t;

  gcc_assert (TREE_CODE (binfo) == TREE_BINFO);

  BINFO_VIRTUALS (binfo) = NULL_TREE;
  BINFO_BASE_ACCESSES (binfo) = NULL;
  BINFO_INHERITANCE_CHAIN (binfo) = NULL_TREE;
  BINFO_SUBVTT_INDEX (binfo) = NULL_TREE;

  FOR_EACH_VEC_ELT (tree, BINFO_BASE_BINFOS (binfo), i, t)
    free_lang_data_in_binfo (t);
}


/* Reset all language specific information still present in TYPE.  */

static void
free_lang_data_in_type (tree type)
{
  gcc_assert (TYPE_P (type));

  /* Give the FE a chance to remove its own data first.  */
  lang_hooks.free_lang_data (type);

  TREE_LANG_FLAG_0 (type) = 0;
  TREE_LANG_FLAG_1 (type) = 0;
  TREE_LANG_FLAG_2 (type) = 0;
  TREE_LANG_FLAG_3 (type) = 0;
  TREE_LANG_FLAG_4 (type) = 0;
  TREE_LANG_FLAG_5 (type) = 0;
  TREE_LANG_FLAG_6 (type) = 0;

  if (TREE_CODE (type) == FUNCTION_TYPE)
    {
      /* Remove the const and volatile qualifiers from arguments.  The
         C++ front end removes them, but the C front end does not,
         leading to false ODR violation errors when merging two
         instances of the same function signature compiled by
         different front ends.  */
      tree p;

      for (p = TYPE_ARG_TYPES (type); p; p = TREE_CHAIN (p))
        {
          tree arg_type = TREE_VALUE (p);

          if (TYPE_READONLY (arg_type) || TYPE_VOLATILE (arg_type))
            {
              int quals = TYPE_QUALS (arg_type)
                          & ~TYPE_QUAL_CONST
                          & ~TYPE_QUAL_VOLATILE;
              TREE_VALUE (p) = build_qualified_type (arg_type, quals);
              free_lang_data_in_type (TREE_VALUE (p));
            }
        }
    }

  /* Remove members that are not actually FIELD_DECLs from the field
     list of an aggregate.  These occur in C++.  */
  if (RECORD_OR_UNION_TYPE_P (type))
    {
      tree prev, member;

      /* Note that TYPE_FIELDS can be shared across distinct
         TREE_TYPEs.  Therefore, if the first field of TYPE_FIELDS is
         to be removed, we cannot set its TREE_CHAIN to NULL.
         Otherwise, we would not be able to find all the other fields
         in the other instances of this TREE_TYPE.

         This was causing an ICE in testsuite/g++.dg/lto/20080915.C.  */
      prev = NULL_TREE;
      member = TYPE_FIELDS (type);
      while (member)
        {
          if (TREE_CODE (member) == FIELD_DECL
              || TREE_CODE (member) == TYPE_DECL)
            {
              if (prev)
                TREE_CHAIN (prev) = member;
              else
                TYPE_FIELDS (type) = member;
              prev = member;
            }

          member = TREE_CHAIN (member);
        }

      if (prev)
        TREE_CHAIN (prev) = NULL_TREE;
      else
        TYPE_FIELDS (type) = NULL_TREE;

      TYPE_METHODS (type) = NULL_TREE;
      if (TYPE_BINFO (type))
        free_lang_data_in_binfo (TYPE_BINFO (type));
    }
  else
    {
      /* For non-aggregate types, clear out the language slot (which
         overloads TYPE_BINFO).  */
      TYPE_LANG_SLOT_1 (type) = NULL_TREE;

      if (INTEGRAL_TYPE_P (type)
          || SCALAR_FLOAT_TYPE_P (type)
          || FIXED_POINT_TYPE_P (type))
        {
          free_lang_data_in_one_sizepos (&TYPE_MIN_VALUE (type));
          free_lang_data_in_one_sizepos (&TYPE_MAX_VALUE (type));
        }
    }

  free_lang_data_in_one_sizepos (&TYPE_SIZE (type));
  free_lang_data_in_one_sizepos (&TYPE_SIZE_UNIT (type));

  if (debug_info_level < DINFO_LEVEL_TERSE
      || (TYPE_CONTEXT (type)
          && TREE_CODE (TYPE_CONTEXT (type)) != FUNCTION_DECL
          && TREE_CODE (TYPE_CONTEXT (type)) != NAMESPACE_DECL))
    TYPE_CONTEXT (type) = NULL_TREE;
}

/* Return true if DECL may need an assembler name to be set.  */

static inline bool
need_assembler_name_p (tree decl)
{
  /* Only FUNCTION_DECLs and VAR_DECLs are considered.  */
  if (TREE_CODE (decl) != FUNCTION_DECL
      && TREE_CODE (decl) != VAR_DECL)
    return false;

  /* If DECL already has its assembler name set, it does not need a
     new one.  */
  if (!HAS_DECL_ASSEMBLER_NAME_P (decl)
      || DECL_ASSEMBLER_NAME_SET_P (decl))
    return false;

  /* Abstract decls do not need an assembler name.  */
  if (DECL_ABSTRACT (decl))
    return false;

  /* For VAR_DECLs, only static, public and external symbols need an
     assembler name.  */
  if (TREE_CODE (decl) == VAR_DECL
      && !TREE_STATIC (decl)
      && !TREE_PUBLIC (decl)
      && !DECL_EXTERNAL (decl))
    return false;

  if (TREE_CODE (decl) == FUNCTION_DECL)
    {
      /* Do not set assembler name on builtins.  Allow RTL expansion to
         decide whether to expand inline or via a regular call.  */
      if (DECL_BUILT_IN (decl)
          && DECL_BUILT_IN_CLASS (decl) != BUILT_IN_FRONTEND)
        return false;

      /* Functions represented in the callgraph need an assembler name.  */
      if (cgraph_get_node (decl) != NULL)
        return true;

      /* Unused and not public functions don't need an assembler name.  */
      if (!TREE_USED (decl) && !TREE_PUBLIC (decl))
        return false;
    }

  return true;
}

/* Reset all language specific information still present in symbol
   DECL.  */

static void
free_lang_data_in_decl (tree decl)
{
  gcc_assert (DECL_P (decl));

  /* Give the FE a chance to remove its own data first.  */
  lang_hooks.free_lang_data (decl);

  TREE_LANG_FLAG_0 (decl) = 0;
  TREE_LANG_FLAG_1 (decl) = 0;
  TREE_LANG_FLAG_2 (decl) = 0;
  TREE_LANG_FLAG_3 (decl) = 0;
  TREE_LANG_FLAG_4 (decl) = 0;
  TREE_LANG_FLAG_5 (decl) = 0;
  TREE_LANG_FLAG_6 (decl) = 0;

  free_lang_data_in_one_sizepos (&DECL_SIZE (decl));
  free_lang_data_in_one_sizepos (&DECL_SIZE_UNIT (decl));
  if (TREE_CODE (decl) == FIELD_DECL)
    {
      free_lang_data_in_one_sizepos (&DECL_FIELD_OFFSET (decl));
      DECL_QUALIFIER (decl) = NULL_TREE;
    }

  if (TREE_CODE (decl) == FUNCTION_DECL)
    {
      if (gimple_has_body_p (decl))
        {
          tree t;

          /* If DECL has a gimple body, then the context for its
             arguments must be DECL.  Otherwise, it doesn't really
             matter, as we will not be emitting any code for DECL.  In
             general, there may be other instances of DECL created by
             the front end and since PARM_DECLs are generally shared,
             their DECL_CONTEXT changes as the replicas of DECL are
             created.  The only time where DECL_CONTEXT is important
             is for the FUNCTION_DECLs that have a gimple body (since
             the PARM_DECL will be used in the function's body).  */
          for (t = DECL_ARGUMENTS (decl); t; t = TREE_CHAIN (t))
            DECL_CONTEXT (t) = decl;
        }

      /* DECL_SAVED_TREE holds the GENERIC representation for DECL.
         At this point, it is not needed anymore.  */
      DECL_SAVED_TREE (decl) = NULL_TREE;

      /* Clear the abstract origin if it refers to a method.  Otherwise
         dwarf2out.c will ICE as we clear TYPE_METHODS and thus the
         origin will not be output correctly.  */
      if (DECL_ABSTRACT_ORIGIN (decl)
          && DECL_CONTEXT (DECL_ABSTRACT_ORIGIN (decl))
          && RECORD_OR_UNION_TYPE_P
               (DECL_CONTEXT (DECL_ABSTRACT_ORIGIN (decl))))
        DECL_ABSTRACT_ORIGIN (decl) = NULL_TREE;

      /* Sometimes the C++ frontend doesn't manage to transform a temporary
         DECL_VINDEX referring to itself into a vtable slot number as it
         should.  Happens with functions that are copied and then forgotten
         about.  Just clear it, it won't matter anymore.  */
      if (DECL_VINDEX (decl) && !host_integerp (DECL_VINDEX (decl), 0))
        DECL_VINDEX (decl) = NULL_TREE;
    }
  else if (TREE_CODE (decl) == VAR_DECL)
    {
      if ((DECL_EXTERNAL (decl)
           && (!TREE_STATIC (decl) || !TREE_READONLY (decl)))
          || (decl_function_context (decl) && !TREE_STATIC (decl)))
        DECL_INITIAL (decl) = NULL_TREE;
    }
  else if (TREE_CODE (decl) == TYPE_DECL
           || TREE_CODE (decl) == FIELD_DECL)
    DECL_INITIAL (decl) = NULL_TREE;
  else if (TREE_CODE (decl) == TRANSLATION_UNIT_DECL
           && DECL_INITIAL (decl)
           && TREE_CODE (DECL_INITIAL (decl)) == BLOCK)
    {
      /* Strip builtins from the translation-unit BLOCK.  We still have targets
         without builtin_decl_explicit support and also builtins are shared
         nodes and thus we can't use TREE_CHAIN in multiple lists.  */
      tree *nextp = &BLOCK_VARS (DECL_INITIAL (decl));
      while (*nextp)
        {
          tree var = *nextp;
          if (TREE_CODE (var) == FUNCTION_DECL
              && DECL_BUILT_IN (var))
            *nextp = TREE_CHAIN (var);
          else
            nextp = &TREE_CHAIN (var);
        }
    }
}
|
4672 |
|
|
|
4673 |
|
|
|
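/* Illustrative, standalone sketch (not part of GCC) of the pointer-to-pointer
   unlink idiom used for the BLOCK_VARS loop above: walking a singly linked
   list through a "pointer to the next field" cursor lets matching nodes be
   removed without special-casing the list head.  The node type and the
   is_builtin field are made-up names for the sketch only.  Guarded with
   #if 0 so it is never compiled as part of this file.  */
#if 0
struct node { int is_builtin; struct node *next; };

/* Drop every node whose is_builtin flag is set and return the new head.  */
static struct node *
drop_builtins (struct node *head)
{
  struct node **nextp = &head;
  while (*nextp)
    {
      struct node *var = *nextp;
      if (var->is_builtin)
        *nextp = var->next;     /* Unlink VAR; the cursor does not advance.  */
      else
        nextp = &var->next;     /* Keep VAR; advance the cursor.  */
    }
  return head;
}
#endif
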
/* Data used when collecting DECLs and TYPEs for language data removal.  */

struct free_lang_data_d
{
  /* Worklist to avoid excessive recursion.  */
  VEC(tree,heap) *worklist;

  /* Set of traversed objects.  Used to avoid duplicate visits.  */
  struct pointer_set_t *pset;

  /* Array of symbols to process with free_lang_data_in_decl.  */
  VEC(tree,heap) *decls;

  /* Array of types to process with free_lang_data_in_type.  */
  VEC(tree,heap) *types;
};


/* Save all language fields needed to generate proper debug information
   for DECL.  This saves most fields cleared out by free_lang_data_in_decl.  */

static void
save_debug_info_for_decl (tree t)
{
  /*struct saved_debug_info_d *sdi;*/

  gcc_assert (debug_info_level > DINFO_LEVEL_TERSE && t && DECL_P (t));

  /* FIXME.  Partial implementation for saving debug info removed.  */
}


/* Save all language fields needed to generate proper debug information
   for TYPE.  This saves most fields cleared out by free_lang_data_in_type.  */

static void
save_debug_info_for_type (tree t)
{
  /*struct saved_debug_info_d *sdi;*/

  gcc_assert (debug_info_level > DINFO_LEVEL_TERSE && t && TYPE_P (t));

  /* FIXME.  Partial implementation for saving debug info removed.  */
}


/* Add type or decl T to one of the list of tree nodes that need their
   language data removed.  The lists are held inside FLD.  */

static void
add_tree_to_fld_list (tree t, struct free_lang_data_d *fld)
{
  if (DECL_P (t))
    {
      VEC_safe_push (tree, heap, fld->decls, t);
      if (debug_info_level > DINFO_LEVEL_TERSE)
        save_debug_info_for_decl (t);
    }
  else if (TYPE_P (t))
    {
      VEC_safe_push (tree, heap, fld->types, t);
      if (debug_info_level > DINFO_LEVEL_TERSE)
        save_debug_info_for_type (t);
    }
  else
    gcc_unreachable ();
}

/* Push tree node T into FLD->WORKLIST.  */

static inline void
fld_worklist_push (tree t, struct free_lang_data_d *fld)
{
  if (t && !is_lang_specific (t) && !pointer_set_contains (fld->pset, t))
    VEC_safe_push (tree, heap, fld->worklist, (t));
}


/* Operand callback helper for free_lang_data_in_node.  *TP is the
   subtree operand being considered.  */

static tree
find_decls_types_r (tree *tp, int *ws, void *data)
{
  tree t = *tp;
  struct free_lang_data_d *fld = (struct free_lang_data_d *) data;

  if (TREE_CODE (t) == TREE_LIST)
    return NULL_TREE;

  /* Language specific nodes will be removed, so there is no need
     to gather anything under them.  */
  if (is_lang_specific (t))
    {
      *ws = 0;
      return NULL_TREE;
    }

  if (DECL_P (t))
    {
      /* Note that walk_tree does not traverse every possible field in
         decls, so we have to do our own traversals here.  */
      add_tree_to_fld_list (t, fld);

      fld_worklist_push (DECL_NAME (t), fld);
      fld_worklist_push (DECL_CONTEXT (t), fld);
      fld_worklist_push (DECL_SIZE (t), fld);
      fld_worklist_push (DECL_SIZE_UNIT (t), fld);

      /* We are going to remove everything under DECL_INITIAL for
         TYPE_DECLs.  No point walking them.  */
      if (TREE_CODE (t) != TYPE_DECL)
        fld_worklist_push (DECL_INITIAL (t), fld);

      fld_worklist_push (DECL_ATTRIBUTES (t), fld);
      fld_worklist_push (DECL_ABSTRACT_ORIGIN (t), fld);

      if (TREE_CODE (t) == FUNCTION_DECL)
        {
          fld_worklist_push (DECL_ARGUMENTS (t), fld);
          fld_worklist_push (DECL_RESULT (t), fld);
        }
      else if (TREE_CODE (t) == TYPE_DECL)
        {
          fld_worklist_push (DECL_ARGUMENT_FLD (t), fld);
          fld_worklist_push (DECL_VINDEX (t), fld);
          fld_worklist_push (DECL_ORIGINAL_TYPE (t), fld);
        }
      else if (TREE_CODE (t) == FIELD_DECL)
        {
          fld_worklist_push (DECL_FIELD_OFFSET (t), fld);
          fld_worklist_push (DECL_BIT_FIELD_TYPE (t), fld);
          fld_worklist_push (DECL_FIELD_BIT_OFFSET (t), fld);
          fld_worklist_push (DECL_FCONTEXT (t), fld);
        }
      else if (TREE_CODE (t) == VAR_DECL)
        {
          fld_worklist_push (DECL_SECTION_NAME (t), fld);
          fld_worklist_push (DECL_COMDAT_GROUP (t), fld);
        }

      if ((TREE_CODE (t) == VAR_DECL || TREE_CODE (t) == PARM_DECL)
          && DECL_HAS_VALUE_EXPR_P (t))
        fld_worklist_push (DECL_VALUE_EXPR (t), fld);

      if (TREE_CODE (t) != FIELD_DECL
          && TREE_CODE (t) != TYPE_DECL)
        fld_worklist_push (TREE_CHAIN (t), fld);
      *ws = 0;
    }
  else if (TYPE_P (t))
    {
      /* Note that walk_tree does not traverse every possible field in
         types, so we have to do our own traversals here.  */
      add_tree_to_fld_list (t, fld);

      if (!RECORD_OR_UNION_TYPE_P (t))
        fld_worklist_push (TYPE_CACHED_VALUES (t), fld);
      fld_worklist_push (TYPE_SIZE (t), fld);
      fld_worklist_push (TYPE_SIZE_UNIT (t), fld);
      fld_worklist_push (TYPE_ATTRIBUTES (t), fld);
      fld_worklist_push (TYPE_POINTER_TO (t), fld);
      fld_worklist_push (TYPE_REFERENCE_TO (t), fld);
      fld_worklist_push (TYPE_NAME (t), fld);
      /* Do not walk TYPE_NEXT_PTR_TO or TYPE_NEXT_REF_TO.  We do not stream
         them and thus do not and want not to reach unused pointer types
         this way.  */
      if (!POINTER_TYPE_P (t))
        fld_worklist_push (TYPE_MINVAL (t), fld);
      if (!RECORD_OR_UNION_TYPE_P (t))
        fld_worklist_push (TYPE_MAXVAL (t), fld);
      fld_worklist_push (TYPE_MAIN_VARIANT (t), fld);
      /* Do not walk TYPE_NEXT_VARIANT.  We do not stream it and thus
         do not and want not to reach unused variants this way.  */
      fld_worklist_push (TYPE_CONTEXT (t), fld);
      /* Do not walk TYPE_CANONICAL.  We do not stream it and thus do not
         and want not to reach unused types this way.  */

      if (RECORD_OR_UNION_TYPE_P (t) && TYPE_BINFO (t))
        {
          unsigned i;
          tree tem;
          for (i = 0; VEC_iterate (tree, BINFO_BASE_BINFOS (TYPE_BINFO (t)),
                                   i, tem); ++i)
            fld_worklist_push (TREE_TYPE (tem), fld);
          tem = BINFO_VIRTUALS (TYPE_BINFO (t));
          if (tem
              /* The Java FE overloads BINFO_VIRTUALS for its own purpose.  */
              && TREE_CODE (tem) == TREE_LIST)
            do
              {
                fld_worklist_push (TREE_VALUE (tem), fld);
                tem = TREE_CHAIN (tem);
              }
            while (tem);
        }
      if (RECORD_OR_UNION_TYPE_P (t))
        {
          tree tem;
          /* Push all TYPE_FIELDS - there can be interleaving interesting
             and non-interesting things.  */
          tem = TYPE_FIELDS (t);
          while (tem)
            {
              if (TREE_CODE (tem) == FIELD_DECL
                  || TREE_CODE (tem) == TYPE_DECL)
                fld_worklist_push (tem, fld);
              tem = TREE_CHAIN (tem);
            }
        }

      fld_worklist_push (TYPE_STUB_DECL (t), fld);
      *ws = 0;
    }
  else if (TREE_CODE (t) == BLOCK)
    {
      tree tem;
      for (tem = BLOCK_VARS (t); tem; tem = TREE_CHAIN (tem))
        fld_worklist_push (tem, fld);
      for (tem = BLOCK_SUBBLOCKS (t); tem; tem = BLOCK_CHAIN (tem))
        fld_worklist_push (tem, fld);
      fld_worklist_push (BLOCK_ABSTRACT_ORIGIN (t), fld);
    }

  if (TREE_CODE (t) != IDENTIFIER_NODE
      && CODE_CONTAINS_STRUCT (TREE_CODE (t), TS_TYPED))
    fld_worklist_push (TREE_TYPE (t), fld);

  return NULL_TREE;
}


/* Find decls and types in T.  */

static void
find_decls_types (tree t, struct free_lang_data_d *fld)
{
  while (1)
    {
      if (!pointer_set_contains (fld->pset, t))
        walk_tree (&t, find_decls_types_r, fld, fld->pset);
      if (VEC_empty (tree, fld->worklist))
        break;
      t = VEC_pop (tree, fld->worklist);
    }
}

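/* Illustrative, standalone sketch (not part of GCC) of the pattern used by
   fld_worklist_push and find_decls_types above: keep an explicit worklist
   plus a "seen" set so the traversal is iterative and each node is walked at
   most once.  The toy gnode type, the fixed-size arrays and the linear seen
   lookup are simplifications for the sketch only; the real code uses a
   growable VEC and a pointer set.  Guarded with #if 0 so it is never
   compiled as part of this file.  */
#if 0
#include <stddef.h>

struct gnode { struct gnode *kid[2]; };

static int
seen_p (struct gnode **seen, size_t nseen, struct gnode *g)
{
  size_t i;
  for (i = 0; i < nseen; i++)
    if (seen[i] == g)
      return 1;
  return 0;
}

static void
visit_reachable (struct gnode *root, void (*visit) (struct gnode *))
{
  struct gnode *work[128], *seen[128];
  size_t nwork = 0, nseen = 0;

  work[nwork++] = root;
  while (nwork > 0)
    {
      struct gnode *g = work[--nwork];
      if (g == NULL || nseen == 128 || seen_p (seen, nseen, g))
        continue;
      seen[nseen++] = g;
      visit (g);
      if (nwork + 2 <= 128)
        {
          work[nwork++] = g->kid[0];
          work[nwork++] = g->kid[1];
        }
    }
}
#endif
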
/* Translate all the types in LIST with the corresponding runtime
   types.  */

static tree
get_eh_types_for_runtime (tree list)
{
  tree head, prev;

  if (list == NULL_TREE)
    return NULL_TREE;

  head = build_tree_list (0, lookup_type_for_runtime (TREE_VALUE (list)));
  prev = head;
  list = TREE_CHAIN (list);
  while (list)
    {
      tree n = build_tree_list (0, lookup_type_for_runtime (TREE_VALUE (list)));
      TREE_CHAIN (prev) = n;
      prev = TREE_CHAIN (prev);
      list = TREE_CHAIN (list);
    }

  return head;
}


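/* A standalone sketch (not GCC code) of the list-copying shape used by
   get_eh_types_for_runtime above: build the translated list in order by
   keeping a HEAD pointer for the result and a PREV pointer for its tail, so
   no reversal pass is needed afterwards.  The cell type and the fn callback
   are illustrative names only, and error handling for malloc is omitted.
   Guarded with #if 0 so it is never compiled as part of this file.  */
#if 0
#include <stdlib.h>

struct cell { int value; struct cell *next; };

/* Return a new list whose values are FN applied to LIST's values.  */
static struct cell *
map_list (const struct cell *list, int (*fn) (int))
{
  struct cell *head, *prev;

  if (list == NULL)
    return NULL;

  head = (struct cell *) malloc (sizeof *head);
  head->value = fn (list->value);
  head->next = NULL;
  prev = head;

  for (list = list->next; list; list = list->next)
    {
      struct cell *n = (struct cell *) malloc (sizeof *n);
      n->value = fn (list->value);
      n->next = NULL;
      prev->next = n;
      prev = n;
    }

  return head;
}
#endif
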
/* Find decls and types referenced in EH region R and store them in
   FLD->DECLS and FLD->TYPES.  */

static void
find_decls_types_in_eh_region (eh_region r, struct free_lang_data_d *fld)
{
  switch (r->type)
    {
    case ERT_CLEANUP:
      break;

    case ERT_TRY:
      {
        eh_catch c;

        /* The types referenced in each catch must first be changed to the
           EH types used at runtime.  This removes references to FE types
           in the region.  */
        for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
          {
            c->type_list = get_eh_types_for_runtime (c->type_list);
            walk_tree (&c->type_list, find_decls_types_r, fld, fld->pset);
          }
      }
      break;

    case ERT_ALLOWED_EXCEPTIONS:
      r->u.allowed.type_list
        = get_eh_types_for_runtime (r->u.allowed.type_list);
      walk_tree (&r->u.allowed.type_list, find_decls_types_r, fld, fld->pset);
      break;

    case ERT_MUST_NOT_THROW:
      walk_tree (&r->u.must_not_throw.failure_decl,
                 find_decls_types_r, fld, fld->pset);
      break;
    }
}


/* Find decls and types referenced in cgraph node N and store them in
   FLD->DECLS and FLD->TYPES.  Unlike pass_referenced_vars, this will
   look for *every* kind of DECL and TYPE node reachable from N,
   including those embedded inside types and decls (i.e., TYPE_DECLs,
   NAMESPACE_DECLs, etc).  */

static void
find_decls_types_in_node (struct cgraph_node *n, struct free_lang_data_d *fld)
{
  basic_block bb;
  struct function *fn;
  unsigned ix;
  tree t;

  find_decls_types (n->decl, fld);

  if (!gimple_has_body_p (n->decl))
    return;

  gcc_assert (current_function_decl == NULL_TREE && cfun == NULL);

  fn = DECL_STRUCT_FUNCTION (n->decl);

  /* Traverse locals.  */
  FOR_EACH_LOCAL_DECL (fn, ix, t)
    find_decls_types (t, fld);

  /* Traverse EH regions in FN.  */
  {
    eh_region r;
    FOR_ALL_EH_REGION_FN (r, fn)
      find_decls_types_in_eh_region (r, fld);
  }

  /* Traverse every statement in FN.  */
  FOR_EACH_BB_FN (bb, fn)
    {
      gimple_stmt_iterator si;
      unsigned i;

      for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
        {
          gimple phi = gsi_stmt (si);

          for (i = 0; i < gimple_phi_num_args (phi); i++)
            {
              tree *arg_p = gimple_phi_arg_def_ptr (phi, i);
              find_decls_types (*arg_p, fld);
            }
        }

      for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
        {
          gimple stmt = gsi_stmt (si);

          if (is_gimple_call (stmt))
            find_decls_types (gimple_call_fntype (stmt), fld);

          for (i = 0; i < gimple_num_ops (stmt); i++)
            {
              tree arg = gimple_op (stmt, i);
              find_decls_types (arg, fld);
            }
        }
    }
}


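/* A sketch, not a GCC function: the same block/PHI/statement walking skeleton
   as find_decls_types_in_node above, shown in isolation.  Only iterators
   already used in that function are assumed (FOR_EACH_BB_FN, gsi_start_phis,
   gsi_start_bb, gsi_stmt, gimple_phi_arg_def_ptr, gimple_op); the
   visit_operand callback is a hypothetical placeholder.  Guarded with #if 0
   so it is never compiled.  */
#if 0
static void
walk_every_operand (struct function *fn, void (*visit_operand) (tree))
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, fn)
    {
      gimple_stmt_iterator si;
      unsigned i;

      /* PHI nodes first: visit each incoming argument.  */
      for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
        {
          gimple phi = gsi_stmt (si);
          for (i = 0; i < gimple_phi_num_args (phi); i++)
            visit_operand (*gimple_phi_arg_def_ptr (phi, i));
        }

      /* Then every operand of every ordinary statement.  */
      for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
        {
          gimple stmt = gsi_stmt (si);
          for (i = 0; i < gimple_num_ops (stmt); i++)
            visit_operand (gimple_op (stmt, i));
        }
    }
}
#endif
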
/* Find decls and types referenced in varpool node N and store them in
   FLD->DECLS and FLD->TYPES.  Unlike pass_referenced_vars, this will
   look for *every* kind of DECL and TYPE node reachable from N,
   including those embedded inside types and decls (i.e., TYPE_DECLs,
   NAMESPACE_DECLs, etc).  */

static void
find_decls_types_in_var (struct varpool_node *v, struct free_lang_data_d *fld)
{
  find_decls_types (v->decl, fld);
}

/* If T needs an assembler name, have one created for it.  */

void
assign_assembler_name_if_neeeded (tree t)
{
  if (need_assembler_name_p (t))
    {
      /* When setting DECL_ASSEMBLER_NAME, the C++ mangler may emit
         diagnostics that use input_location to show locus
         information.  The problem here is that, at this point,
         input_location is generally anchored to the end of the file
         (since the parser is long gone), so we don't have a good
         position to pin it to.

         To alleviate this problem, this uses the location of T's
         declaration.  Examples of this are
         testsuite/g++.dg/template/cond2.C and
         testsuite/g++.dg/template/pr35240.C.  */
      location_t saved_location = input_location;
      input_location = DECL_SOURCE_LOCATION (t);

      decl_assembler_name (t);

      input_location = saved_location;
    }
}


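/* A minimal standalone sketch (not GCC code) of the save/override/restore
   shape used above for input_location: point a global at more useful state
   for the duration of one call that may report diagnostics, then put the old
   value back.  current_context and do_work are illustrative names only.
   Guarded with #if 0 so it is never compiled as part of this file.  */
#if 0
static int current_context;

static void
with_context (int new_context, void (*do_work) (void))
{
  int saved_context = current_context;

  current_context = new_context;
  do_work ();                   /* May report errors against NEW_CONTEXT.  */
  current_context = saved_context;
}
#endif
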
/* Free language specific information for every operand and expression
   in every node of the call graph.  This process operates in three stages:

   1- Every callgraph node and varpool node is traversed looking for
      decls and types embedded in them.  This is a more exhaustive
      search than that done by find_referenced_vars, because it will
      also collect individual fields, decls embedded in types, etc.

   2- All the decls found are sent to free_lang_data_in_decl.

   3- All the types found are sent to free_lang_data_in_type.

   The ordering between decls and types is important because
   free_lang_data_in_decl sets assembler names, which includes
   mangling.  So types cannot be freed up until assembler names have
   been set up.  */

static void
free_lang_data_in_cgraph (void)
{
  struct cgraph_node *n;
  struct varpool_node *v;
  struct free_lang_data_d fld;
  tree t;
  unsigned i;
  alias_pair *p;

  /* Initialize sets and arrays to store referenced decls and types.  */
  fld.pset = pointer_set_create ();
  fld.worklist = NULL;
  fld.decls = VEC_alloc (tree, heap, 100);
  fld.types = VEC_alloc (tree, heap, 100);

  /* Find decls and types in the body of every function in the callgraph.  */
  for (n = cgraph_nodes; n; n = n->next)
    find_decls_types_in_node (n, &fld);

  FOR_EACH_VEC_ELT (alias_pair, alias_pairs, i, p)
    find_decls_types (p->decl, &fld);

  /* Find decls and types in every varpool symbol.  */
  for (v = varpool_nodes; v; v = v->next)
    find_decls_types_in_var (v, &fld);

  /* Set the assembler name on every decl found.  We need to do this
     now because free_lang_data_in_decl will invalidate data needed
     for mangling.  This breaks mangling on interdependent decls.  */
  FOR_EACH_VEC_ELT (tree, fld.decls, i, t)
    assign_assembler_name_if_neeeded (t);

  /* Traverse every decl found freeing its language data.  */
  FOR_EACH_VEC_ELT (tree, fld.decls, i, t)
    free_lang_data_in_decl (t);

  /* Traverse every type found freeing its language data.  */
  FOR_EACH_VEC_ELT (tree, fld.types, i, t)
    free_lang_data_in_type (t);

  pointer_set_destroy (fld.pset);
  VEC_free (tree, heap, fld.worklist);
  VEC_free (tree, heap, fld.decls);
  VEC_free (tree, heap, fld.types);
}


/* Free resources that are used by FE but are not needed once they are done.  */

static unsigned
free_lang_data (void)
{
  unsigned i;

  /* If we are the LTO frontend we have freed lang-specific data already.  */
  if (in_lto_p
      || !flag_generate_lto)
    return 0;

  /* Allocate and assign alias sets to the standard integer types
     while the slots are still in the way the frontends generated them.  */
  for (i = 0; i < itk_none; ++i)
    if (integer_types[i])
      TYPE_ALIAS_SET (integer_types[i]) = get_alias_set (integer_types[i]);

  /* Traverse the IL resetting language specific information for
     operands, expressions, etc.  */
  free_lang_data_in_cgraph ();

  /* Create gimple variants for common types.  */
  ptrdiff_type_node = integer_type_node;
  fileptr_type_node = ptr_type_node;

  /* Reset some langhooks.  Do not reset types_compatible_p, it may
     still be used indirectly via the get_alias_set langhook.  */
  lang_hooks.callgraph.analyze_expr = NULL;
  lang_hooks.dwarf_name = lhd_dwarf_name;
  lang_hooks.decl_printable_name = gimple_decl_printable_name;
  /* We do not want the default decl_assembler_name implementation,
     rather if we have fixed everything we want a wrapper around it
     asserting that all non-local symbols already got their assembler
     name and only produce assembler names for local symbols.  Or rather
     make sure we never call decl_assembler_name on local symbols and
     devise a separate, middle-end private scheme for it.  */

  /* Reset diagnostic machinery.  */
  diagnostic_starter (global_dc) = default_tree_diagnostic_starter;
  diagnostic_finalizer (global_dc) = default_diagnostic_finalizer;
  diagnostic_format_decoder (global_dc) = default_tree_printer;

  return 0;
}


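/* A standalone sketch (not GCC's langhooks) of the hook-reset pattern used
   just above: once the front end is finished, overwrite selected function
   pointers in a hooks structure with neutral defaults so later phases never
   call back into front-end code.  The hooks struct and both print functions
   are illustrative names only.  Guarded with #if 0 so it is never compiled
   as part of this file.  */
#if 0
#include <stdio.h>

struct hooks { void (*print_name) (const char *); };

static void default_print (const char *name) { fprintf (stderr, "%s\n", name); }
static void frontend_print (const char *name) { fprintf (stderr, "<%s>\n", name); }

static struct hooks active_hooks = { frontend_print };

/* Called after the front end is done with the IL.  */
static void
reset_hooks (void)
{
  active_hooks.print_name = default_print;
}
#endif
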
struct simple_ipa_opt_pass pass_ipa_free_lang_data =
{
 {
  SIMPLE_IPA_PASS,
  "*free_lang_data",                    /* name */
  NULL,                                 /* gate */
  free_lang_data,                       /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_IPA_FREE_LANG_DATA,                /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_ggc_collect                      /* todo_flags_finish */
 }
};

5226 |
|
|
/* The backbone of is_attribute_p(). ATTR_LEN is the string length of
|
5227 |
|
|
ATTR_NAME. Also used internally by remove_attribute(). */
|
5228 |
|
|
bool
|
5229 |
|
|
private_is_attribute_p (const char *attr_name, size_t attr_len, const_tree ident)
|
5230 |
|
|
{
|
5231 |
|
|
size_t ident_len = IDENTIFIER_LENGTH (ident);
|
5232 |
|
|
|
5233 |
|
|
if (ident_len == attr_len)
|
5234 |
|
|
{
|
5235 |
|
|
if (strcmp (attr_name, IDENTIFIER_POINTER (ident)) == 0)
|
5236 |
|
|
return true;
|
5237 |
|
|
}
|
5238 |
|
|
else if (ident_len == attr_len + 4)
|
5239 |
|
|
{
|
5240 |
|
|
/* There is the possibility that ATTR is 'text' and IDENT is
|
5241 |
|
|
'__text__'. */
|
5242 |
|
|
const char *p = IDENTIFIER_POINTER (ident);
|
5243 |
|
|
if (p[0] == '_' && p[1] == '_'
|
5244 |
|
|
&& p[ident_len - 2] == '_' && p[ident_len - 1] == '_'
|
5245 |
|
|
&& strncmp (attr_name, p + 2, attr_len) == 0)
|
5246 |
|
|
return true;
|
5247 |
|
|
}
|
5248 |
|
|
|
5249 |
|
|
return false;
|
5250 |
|
|
}
|
5251 |
|
|
|
5252 |
|
|
/* The backbone of lookup_attribute(). ATTR_LEN is the string length
|
5253 |
|
|
of ATTR_NAME, and LIST is not NULL_TREE. */
|
5254 |
|
|
tree
|
5255 |
|
|
private_lookup_attribute (const char *attr_name, size_t attr_len, tree list)
|
5256 |
|
|
{
|
5257 |
|
|
while (list)
|
5258 |
|
|
{
|
5259 |
|
|
size_t ident_len = IDENTIFIER_LENGTH (TREE_PURPOSE (list));
|
5260 |
|
|
|
5261 |
|
|
if (ident_len == attr_len)
|
5262 |
|
|
{
|
5263 |
|
|
if (strcmp (attr_name, IDENTIFIER_POINTER (TREE_PURPOSE (list))) == 0)
|
5264 |
|
|
break;
|
5265 |
|
|
}
|
5266 |
|
|
/* TODO: If we made sure that attributes were stored in the
|
5267 |
|
|
canonical form without '__...__' (ie, as in 'text' as opposed
|
5268 |
|
|
to '__text__') then we could avoid the following case. */
|
5269 |
|
|
else if (ident_len == attr_len + 4)
|
5270 |
|
|
{
|
5271 |
|
|
const char *p = IDENTIFIER_POINTER (TREE_PURPOSE (list));
|
5272 |
|
|
if (p[0] == '_' && p[1] == '_'
|
5273 |
|
|
&& p[ident_len - 2] == '_' && p[ident_len - 1] == '_'
|
5274 |
|
|
&& strncmp (attr_name, p + 2, attr_len) == 0)
|
5275 |
|
|
break;
|
5276 |
|
|
}
|
5277 |
|
|
list = TREE_CHAIN (list);
|
5278 |
|
|
}
|
5279 |
|
|
|
5280 |
|
|
return list;
|
5281 |
|
|
}
|
5282 |
|
|
|
5283 |
|
|
/* A variant of lookup_attribute() that can be used with an identifier
|
5284 |
|
|
as the first argument, and where the identifier can be either
|
5285 |
|
|
'text' or '__text__'.
|
5286 |
|
|
|
5287 |
|
|
Given an attribute ATTR_IDENTIFIER, and a list of attributes LIST,
|
5288 |
|
|
return a pointer to the attribute's list element if the attribute
|
5289 |
|
|
is part of the list, or NULL_TREE if not found. If the attribute
|
5290 |
|
|
appears more than once, this only returns the first occurrence; the
|
5291 |
|
|
TREE_CHAIN of the return value should be passed back in if further
|
5292 |
|
|
occurrences are wanted. ATTR_IDENTIFIER must be an identifier but
|
5293 |
|
|
can be in the form 'text' or '__text__'. */
|
5294 |
|
|
static tree
|
5295 |
|
|
lookup_ident_attribute (tree attr_identifier, tree list)
|
5296 |
|
|
{
|
5297 |
|
|
gcc_checking_assert (TREE_CODE (attr_identifier) == IDENTIFIER_NODE);
|
5298 |
|
|
|
5299 |
|
|
while (list)
|
5300 |
|
|
{
|
5301 |
|
|
gcc_checking_assert (TREE_CODE (TREE_PURPOSE (list)) == IDENTIFIER_NODE);
|
5302 |
|
|
|
5303 |
|
|
/* Identifiers can be compared directly for equality. */
|
5304 |
|
|
if (attr_identifier == TREE_PURPOSE (list))
|
5305 |
|
|
break;
|
5306 |
|
|
|
5307 |
|
|
/* If they are not equal, they may still be one in the form
|
5308 |
|
|
'text' while the other one is in the form '__text__'. TODO:
|
5309 |
|
|
If we were storing attributes in normalized 'text' form, then
|
5310 |
|
|
this could all go away and we could take full advantage of
|
5311 |
|
|
the fact that we're comparing identifiers. :-) */
|
5312 |
|
|
{
|
5313 |
|
|
size_t attr_len = IDENTIFIER_LENGTH (attr_identifier);
|
5314 |
|
|
size_t ident_len = IDENTIFIER_LENGTH (TREE_PURPOSE (list));
|
5315 |
|
|
|
5316 |
|
|
if (ident_len == attr_len + 4)
|
5317 |
|
|
{
|
5318 |
|
|
const char *p = IDENTIFIER_POINTER (TREE_PURPOSE (list));
|
5319 |
|
|
const char *q = IDENTIFIER_POINTER (attr_identifier);
|
5320 |
|
|
if (p[0] == '_' && p[1] == '_'
|
5321 |
|
|
&& p[ident_len - 2] == '_' && p[ident_len - 1] == '_'
|
5322 |
|
|
&& strncmp (q, p + 2, attr_len) == 0)
|
5323 |
|
|
break;
|
5324 |
|
|
}
|
5325 |
|
|
else if (ident_len + 4 == attr_len)
|
5326 |
|
|
{
|
5327 |
|
|
const char *p = IDENTIFIER_POINTER (TREE_PURPOSE (list));
|
5328 |
|
|
const char *q = IDENTIFIER_POINTER (attr_identifier);
|
5329 |
|
|
if (q[0] == '_' && q[1] == '_'
|
5330 |
|
|
&& q[attr_len - 2] == '_' && q[attr_len - 1] == '_'
|
5331 |
|
|
&& strncmp (q + 2, p, ident_len) == 0)
|
5332 |
|
|
break;
|
5333 |
|
|
}
|
5334 |
|
|
}
|
5335 |
|
|
list = TREE_CHAIN (list);
|
5336 |
|
|
}
|
5337 |
|
|
|
5338 |
|
|
return list;
|
5339 |
|
|
}
|
5340 |
|
|
|
5341 |
|
|
/* Remove any instances of attribute ATTR_NAME in LIST and return the
|
5342 |
|
|
modified list. */
|
5343 |
|
|
|
5344 |
|
|
tree
|
5345 |
|
|
remove_attribute (const char *attr_name, tree list)
|
5346 |
|
|
{
|
5347 |
|
|
tree *p;
|
5348 |
|
|
size_t attr_len = strlen (attr_name);
|
5349 |
|
|
|
5350 |
|
|
gcc_checking_assert (attr_name[0] != '_');
|
5351 |
|
|
|
5352 |
|
|
for (p = &list; *p; )
|
5353 |
|
|
{
|
5354 |
|
|
tree l = *p;
|
5355 |
|
|
/* TODO: If we were storing attributes in normalized form, here
|
5356 |
|
|
we could use a simple strcmp(). */
|
5357 |
|
|
if (private_is_attribute_p (attr_name, attr_len, TREE_PURPOSE (l)))
|
5358 |
|
|
*p = TREE_CHAIN (l);
|
5359 |
|
|
else
|
5360 |
|
|
p = &TREE_CHAIN (l);
|
5361 |
|
|
}
|
5362 |
|
|
|
5363 |
|
|
return list;
|
5364 |
|
|
}
|
5365 |
|
|
|
5366 |
|
|
/* Return an attribute list that is the union of a1 and a2. */
|
5367 |
|
|
|
5368 |
|
|
tree
|
5369 |
|
|
merge_attributes (tree a1, tree a2)
|
5370 |
|
|
{
|
5371 |
|
|
tree attributes;
|
5372 |
|
|
|
5373 |
|
|
/* Either one unset? Take the set one. */
|
5374 |
|
|
|
5375 |
|
|
if ((attributes = a1) == 0)
|
5376 |
|
|
attributes = a2;
|
5377 |
|
|
|
5378 |
|
|
/* One that completely contains the other? Take it. */
|
5379 |
|
|
|
5380 |
|
|
else if (a2 != 0 && ! attribute_list_contained (a1, a2))
|
5381 |
|
|
{
|
5382 |
|
|
if (attribute_list_contained (a2, a1))
|
5383 |
|
|
attributes = a2;
|
5384 |
|
|
else
|
5385 |
|
|
{
|
5386 |
|
|
/* Pick the longest list, and hang on the other list. */
|
5387 |
|
|
|
5388 |
|
|
if (list_length (a1) < list_length (a2))
|
5389 |
|
|
attributes = a2, a2 = a1;
|
5390 |
|
|
|
5391 |
|
|
for (; a2 != 0; a2 = TREE_CHAIN (a2))
|
5392 |
|
|
{
|
5393 |
|
|
tree a;
|
5394 |
|
|
for (a = lookup_ident_attribute (TREE_PURPOSE (a2), attributes);
|
5395 |
|
|
a != NULL_TREE && !attribute_value_equal (a, a2);
|
5396 |
|
|
a = lookup_ident_attribute (TREE_PURPOSE (a2), TREE_CHAIN (a)))
|
5397 |
|
|
;
|
5398 |
|
|
if (a == NULL_TREE)
|
5399 |
|
|
{
|
5400 |
|
|
a1 = copy_node (a2);
|
5401 |
|
|
TREE_CHAIN (a1) = attributes;
|
5402 |
|
|
attributes = a1;
|
5403 |
|
|
}
|
5404 |
|
|
}
|
5405 |
|
|
}
|
5406 |
|
|
}
|
5407 |
|
|
return attributes;
|
5408 |
|
|
}
|
5409 |
|
|
|
5410 |
|
|
/* Given types T1 and T2, merge their attributes and return
|
5411 |
|
|
the result. */
|
5412 |
|
|
|
5413 |
|
|
tree
|
5414 |
|
|
merge_type_attributes (tree t1, tree t2)
|
5415 |
|
|
{
|
5416 |
|
|
return merge_attributes (TYPE_ATTRIBUTES (t1),
|
5417 |
|
|
TYPE_ATTRIBUTES (t2));
|
5418 |
|
|
}
|
5419 |
|
|
|
5420 |
|
|
/* Given decls OLDDECL and NEWDECL, merge their attributes and return
|
5421 |
|
|
the result. */
|
5422 |
|
|
|
5423 |
|
|
tree
|
5424 |
|
|
merge_decl_attributes (tree olddecl, tree newdecl)
|
5425 |
|
|
{
|
5426 |
|
|
return merge_attributes (DECL_ATTRIBUTES (olddecl),
|
5427 |
|
|
DECL_ATTRIBUTES (newdecl));
|
5428 |
|
|
}
|
5429 |
|
|
|
5430 |
|
|
#if TARGET_DLLIMPORT_DECL_ATTRIBUTES
|
5431 |
|
|
|
5432 |
|
|
/* Specialization of merge_decl_attributes for various Windows targets.
|
5433 |
|
|
|
5434 |
|
|
This handles the following situation:
|
5435 |
|
|
|
5436 |
|
|
__declspec (dllimport) int foo;
|
5437 |
|
|
int foo;
|
5438 |
|
|
|
5439 |
|
|
The second instance of `foo' nullifies the dllimport. */
|
5440 |
|
|
|
5441 |
|
|
tree
|
5442 |
|
|
merge_dllimport_decl_attributes (tree old, tree new_tree)
|
5443 |
|
|
{
|
5444 |
|
|
tree a;
|
5445 |
|
|
int delete_dllimport_p = 1;
|
5446 |
|
|
|
5447 |
|
|
/* What we need to do here is remove from `old' dllimport if it doesn't
|
5448 |
|
|
appear in `new'. dllimport behaves like extern: if a declaration is
|
5449 |
|
|
marked dllimport and a definition appears later, then the object
|
5450 |
|
|
is not dllimport'd. We also remove a `new' dllimport if the old list
|
5451 |
|
|
contains dllexport: dllexport always overrides dllimport, regardless
|
5452 |
|
|
of the order of declaration. */
|
5453 |
|
|
if (!VAR_OR_FUNCTION_DECL_P (new_tree))
|
5454 |
|
|
delete_dllimport_p = 0;
|
5455 |
|
|
else if (DECL_DLLIMPORT_P (new_tree)
|
5456 |
|
|
&& lookup_attribute ("dllexport", DECL_ATTRIBUTES (old)))
|
5457 |
|
|
{
|
5458 |
|
|
DECL_DLLIMPORT_P (new_tree) = 0;
|
5459 |
|
|
warning (OPT_Wattributes, "%q+D already declared with dllexport attribute: "
|
5460 |
|
|
"dllimport ignored", new_tree);
|
5461 |
|
|
}
|
5462 |
|
|
else if (DECL_DLLIMPORT_P (old) && !DECL_DLLIMPORT_P (new_tree))
|
5463 |
|
|
{
|
5464 |
|
|
/* Warn about overriding a symbol that has already been used, e.g.:
|
5465 |
|
|
extern int __attribute__ ((dllimport)) foo;
|
5466 |
|
|
int* bar () {return &foo;}
|
5467 |
|
|
int foo;
|
5468 |
|
|
*/
|
5469 |
|
|
if (TREE_USED (old))
|
5470 |
|
|
{
|
5471 |
|
|
warning (0, "%q+D redeclared without dllimport attribute "
|
5472 |
|
|
"after being referenced with dll linkage", new_tree);
|
5473 |
|
|
/* If we have used a variable's address with dllimport linkage,
|
5474 |
|
|
keep the old DECL_DLLIMPORT_P flag: the ADDR_EXPR using the
|
5475 |
|
|
decl may already have had TREE_CONSTANT computed.
|
5476 |
|
|
We still remove the attribute so that assembler code refers
|
5477 |
|
|
to '&foo rather than '_imp__foo'. */
|
5478 |
|
|
if (TREE_CODE (old) == VAR_DECL && TREE_ADDRESSABLE (old))
|
5479 |
|
|
DECL_DLLIMPORT_P (new_tree) = 1;
|
5480 |
|
|
}
|
5481 |
|
|
|
5482 |
|
|
/* Let an inline definition silently override the external reference,
|
5483 |
|
|
but otherwise warn about attribute inconsistency. */
|
5484 |
|
|
else if (TREE_CODE (new_tree) == VAR_DECL
|
5485 |
|
|
|| !DECL_DECLARED_INLINE_P (new_tree))
|
5486 |
|
|
warning (OPT_Wattributes, "%q+D redeclared without dllimport attribute: "
|
5487 |
|
|
"previous dllimport ignored", new_tree);
|
5488 |
|
|
}
|
5489 |
|
|
else
|
5490 |
|
|
delete_dllimport_p = 0;
|
5491 |
|
|
|
5492 |
|
|
a = merge_attributes (DECL_ATTRIBUTES (old), DECL_ATTRIBUTES (new_tree));
|
5493 |
|
|
|
5494 |
|
|
if (delete_dllimport_p)
|
5495 |
|
|
a = remove_attribute ("dllimport", a);
|
5496 |
|
|
|
5497 |
|
|
return a;
|
5498 |
|
|
}
|
5499 |
|
|
|
5500 |
|
|
/* Handle a "dllimport" or "dllexport" attribute; arguments as in
|
5501 |
|
|
struct attribute_spec.handler. */
|
5502 |
|
|
|
5503 |
|
|
tree
|
5504 |
|
|
handle_dll_attribute (tree * pnode, tree name, tree args, int flags,
|
5505 |
|
|
bool *no_add_attrs)
|
5506 |
|
|
{
|
5507 |
|
|
tree node = *pnode;
|
5508 |
|
|
bool is_dllimport;
|
5509 |
|
|
|
5510 |
|
|
/* These attributes may apply to structure and union types being created,
|
5511 |
|
|
but otherwise should pass to the declaration involved. */
|
5512 |
|
|
if (!DECL_P (node))
|
5513 |
|
|
{
|
5514 |
|
|
if (flags & ((int) ATTR_FLAG_DECL_NEXT | (int) ATTR_FLAG_FUNCTION_NEXT
|
5515 |
|
|
| (int) ATTR_FLAG_ARRAY_NEXT))
|
5516 |
|
|
{
|
5517 |
|
|
*no_add_attrs = true;
|
5518 |
|
|
return tree_cons (name, args, NULL_TREE);
|
5519 |
|
|
}
|
5520 |
|
|
if (TREE_CODE (node) == RECORD_TYPE
|
5521 |
|
|
|| TREE_CODE (node) == UNION_TYPE)
|
5522 |
|
|
{
|
5523 |
|
|
node = TYPE_NAME (node);
|
5524 |
|
|
if (!node)
|
5525 |
|
|
return NULL_TREE;
|
5526 |
|
|
}
|
5527 |
|
|
else
|
5528 |
|
|
{
|
5529 |
|
|
warning (OPT_Wattributes, "%qE attribute ignored",
|
5530 |
|
|
name);
|
5531 |
|
|
*no_add_attrs = true;
|
5532 |
|
|
return NULL_TREE;
|
5533 |
|
|
}
|
5534 |
|
|
}
|
5535 |
|
|
|
5536 |
|
|
if (TREE_CODE (node) != FUNCTION_DECL
|
5537 |
|
|
&& TREE_CODE (node) != VAR_DECL
|
5538 |
|
|
&& TREE_CODE (node) != TYPE_DECL)
|
5539 |
|
|
{
|
5540 |
|
|
*no_add_attrs = true;
|
5541 |
|
|
warning (OPT_Wattributes, "%qE attribute ignored",
|
5542 |
|
|
name);
|
5543 |
|
|
return NULL_TREE;
|
5544 |
|
|
}
|
5545 |
|
|
|
5546 |
|
|
if (TREE_CODE (node) == TYPE_DECL
|
5547 |
|
|
&& TREE_CODE (TREE_TYPE (node)) != RECORD_TYPE
|
5548 |
|
|
&& TREE_CODE (TREE_TYPE (node)) != UNION_TYPE)
|
5549 |
|
|
{
|
5550 |
|
|
*no_add_attrs = true;
|
5551 |
|
|
warning (OPT_Wattributes, "%qE attribute ignored",
|
5552 |
|
|
name);
|
5553 |
|
|
return NULL_TREE;
|
5554 |
|
|
}
|
5555 |
|
|
|
5556 |
|
|
is_dllimport = is_attribute_p ("dllimport", name);
|
5557 |
|
|
|
5558 |
|
|
/* Report error on dllimport ambiguities seen now before they cause
|
5559 |
|
|
any damage. */
|
5560 |
|
|
if (is_dllimport)
|
5561 |
|
|
{
|
5562 |
|
|
/* Honor any target-specific overrides. */
|
5563 |
|
|
if (!targetm.valid_dllimport_attribute_p (node))
|
5564 |
|
|
*no_add_attrs = true;
|
5565 |
|
|
|
5566 |
|
|
else if (TREE_CODE (node) == FUNCTION_DECL
|
5567 |
|
|
&& DECL_DECLARED_INLINE_P (node))
|
5568 |
|
|
{
|
5569 |
|
|
warning (OPT_Wattributes, "inline function %q+D declared as "
|
5570 |
|
|
" dllimport: attribute ignored", node);
|
5571 |
|
|
*no_add_attrs = true;
|
5572 |
|
|
}
|
5573 |
|
|
/* Like MS, treat definition of dllimported variables and
|
5574 |
|
|
non-inlined functions on declaration as syntax errors. */
|
5575 |
|
|
else if (TREE_CODE (node) == FUNCTION_DECL && DECL_INITIAL (node))
|
5576 |
|
|
{
|
5577 |
|
|
error ("function %q+D definition is marked dllimport", node);
|
5578 |
|
|
*no_add_attrs = true;
|
5579 |
|
|
}
|
5580 |
|
|
|
5581 |
|
|
else if (TREE_CODE (node) == VAR_DECL)
|
5582 |
|
|
{
|
5583 |
|
|
if (DECL_INITIAL (node))
|
5584 |
|
|
{
|
5585 |
|
|
error ("variable %q+D definition is marked dllimport",
|
5586 |
|
|
node);
|
5587 |
|
|
*no_add_attrs = true;
|
5588 |
|
|
}
|
5589 |
|
|
|
5590 |
|
|
/* `extern' needn't be specified with dllimport.
|
5591 |
|
|
Specify `extern' now and hope for the best. Sigh. */
|
5592 |
|
|
DECL_EXTERNAL (node) = 1;
|
5593 |
|
|
/* Also, implicitly give dllimport'd variables declared within
|
5594 |
|
|
a function global scope, unless declared static. */
|
5595 |
|
|
if (current_function_decl != NULL_TREE && !TREE_STATIC (node))
|
5596 |
|
|
TREE_PUBLIC (node) = 1;
|
5597 |
|
|
}
|
5598 |
|
|
|
5599 |
|
|
if (*no_add_attrs == false)
|
5600 |
|
|
DECL_DLLIMPORT_P (node) = 1;
|
5601 |
|
|
}
|
5602 |
|
|
else if (TREE_CODE (node) == FUNCTION_DECL
|
5603 |
|
|
&& DECL_DECLARED_INLINE_P (node)
|
5604 |
|
|
&& flag_keep_inline_dllexport)
|
5605 |
|
|
/* An exported function, even if inline, must be emitted. */
|
5606 |
|
|
DECL_EXTERNAL (node) = 0;
|
5607 |
|
|
|
5608 |
|
|
/* Report error if symbol is not accessible at global scope. */
|
5609 |
|
|
if (!TREE_PUBLIC (node)
|
5610 |
|
|
&& (TREE_CODE (node) == VAR_DECL
|
5611 |
|
|
|| TREE_CODE (node) == FUNCTION_DECL))
|
5612 |
|
|
{
|
5613 |
|
|
error ("external linkage required for symbol %q+D because of "
|
5614 |
|
|
"%qE attribute", node, name);
|
5615 |
|
|
*no_add_attrs = true;
|
5616 |
|
|
}
|
5617 |
|
|
|
5618 |
|
|
/* A dllexport'd entity must have default visibility so that other
|
5619 |
|
|
program units (shared libraries or the main executable) can see
|
5620 |
|
|
it. A dllimport'd entity must have default visibility so that
|
5621 |
|
|
the linker knows that undefined references within this program
|
5622 |
|
|
unit can be resolved by the dynamic linker. */
|
5623 |
|
|
if (!*no_add_attrs)
|
5624 |
|
|
{
|
5625 |
|
|
if (DECL_VISIBILITY_SPECIFIED (node)
|
5626 |
|
|
&& DECL_VISIBILITY (node) != VISIBILITY_DEFAULT)
|
5627 |
|
|
error ("%qE implies default visibility, but %qD has already "
|
5628 |
|
|
"been declared with a different visibility",
|
5629 |
|
|
name, node);
|
5630 |
|
|
DECL_VISIBILITY (node) = VISIBILITY_DEFAULT;
|
5631 |
|
|
DECL_VISIBILITY_SPECIFIED (node) = 1;
|
5632 |
|
|
}
|
5633 |
|
|
|
5634 |
|
|
return NULL_TREE;
|
5635 |
|
|
}
|
5636 |
|
|
|
5637 |
|
|
#endif /* TARGET_DLLIMPORT_DECL_ATTRIBUTES */
|
5638 |
|
|
|
5639 |
|
|
/* Set the type qualifiers for TYPE to TYPE_QUALS, which is a bitmask
|
5640 |
|
|
of the various TYPE_QUAL values. */
|
5641 |
|
|
|
5642 |
|
|
static void
|
5643 |
|
|
set_type_quals (tree type, int type_quals)
|
5644 |
|
|
{
|
5645 |
|
|
TYPE_READONLY (type) = (type_quals & TYPE_QUAL_CONST) != 0;
|
5646 |
|
|
TYPE_VOLATILE (type) = (type_quals & TYPE_QUAL_VOLATILE) != 0;
|
5647 |
|
|
TYPE_RESTRICT (type) = (type_quals & TYPE_QUAL_RESTRICT) != 0;
|
5648 |
|
|
TYPE_ADDR_SPACE (type) = DECODE_QUAL_ADDR_SPACE (type_quals);
|
5649 |
|
|
}
|
5650 |
|
|
|
5651 |
|
|
/* Returns true iff CAND is equivalent to BASE with TYPE_QUALS. */
|
5652 |
|
|
|
5653 |
|
|
bool
|
5654 |
|
|
check_qualified_type (const_tree cand, const_tree base, int type_quals)
|
5655 |
|
|
{
|
5656 |
|
|
return (TYPE_QUALS (cand) == type_quals
|
5657 |
|
|
&& TYPE_NAME (cand) == TYPE_NAME (base)
|
5658 |
|
|
/* Apparently this is needed for Objective-C. */
|
5659 |
|
|
&& TYPE_CONTEXT (cand) == TYPE_CONTEXT (base)
|
5660 |
|
|
/* Check alignment. */
|
5661 |
|
|
&& TYPE_ALIGN (cand) == TYPE_ALIGN (base)
|
5662 |
|
|
&& attribute_list_equal (TYPE_ATTRIBUTES (cand),
|
5663 |
|
|
TYPE_ATTRIBUTES (base)));
|
5664 |
|
|
}
|
5665 |
|
|
|
5666 |
|
|
/* Returns true iff CAND is equivalent to BASE with ALIGN. */
|
5667 |
|
|
|
5668 |
|
|
static bool
|
5669 |
|
|
check_aligned_type (const_tree cand, const_tree base, unsigned int align)
|
5670 |
|
|
{
|
5671 |
|
|
return (TYPE_QUALS (cand) == TYPE_QUALS (base)
|
5672 |
|
|
&& TYPE_NAME (cand) == TYPE_NAME (base)
|
5673 |
|
|
/* Apparently this is needed for Objective-C. */
|
5674 |
|
|
&& TYPE_CONTEXT (cand) == TYPE_CONTEXT (base)
|
5675 |
|
|
/* Check alignment. */
|
5676 |
|
|
&& TYPE_ALIGN (cand) == align
|
5677 |
|
|
&& attribute_list_equal (TYPE_ATTRIBUTES (cand),
|
5678 |
|
|
TYPE_ATTRIBUTES (base)));
|
5679 |
|
|
}
|
5680 |
|
|
|
5681 |
|
|
/* Return a version of the TYPE, qualified as indicated by the
|
5682 |
|
|
TYPE_QUALS, if one exists. If no qualified version exists yet,
|
5683 |
|
|
return NULL_TREE. */
|
5684 |
|
|
|
5685 |
|
|
tree
|
5686 |
|
|
get_qualified_type (tree type, int type_quals)
|
5687 |
|
|
{
|
5688 |
|
|
tree t;
|
5689 |
|
|
|
5690 |
|
|
if (TYPE_QUALS (type) == type_quals)
|
5691 |
|
|
return type;
|
5692 |
|
|
|
5693 |
|
|
/* Search the chain of variants to see if there is already one there just
|
5694 |
|
|
like the one we need to have. If so, use that existing one. We must
|
5695 |
|
|
preserve the TYPE_NAME, since there is code that depends on this. */
|
5696 |
|
|
for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
|
5697 |
|
|
if (check_qualified_type (t, type, type_quals))
|
5698 |
|
|
return t;
|
5699 |
|
|
|
5700 |
|
|
return NULL_TREE;
|
5701 |
|
|
}
|
5702 |
|
|
|
5703 |
|
|
/* Like get_qualified_type, but creates the type if it does not
|
5704 |
|
|
exist. This function never returns NULL_TREE. */
|
5705 |
|
|
|
5706 |
|
|
tree
|
5707 |
|
|
build_qualified_type (tree type, int type_quals)
|
5708 |
|
|
{
|
5709 |
|
|
tree t;
|
5710 |
|
|
|
5711 |
|
|
/* See if we already have the appropriate qualified variant. */
|
5712 |
|
|
t = get_qualified_type (type, type_quals);
|
5713 |
|
|
|
5714 |
|
|
/* If not, build it. */
|
5715 |
|
|
if (!t)
|
5716 |
|
|
{
|
5717 |
|
|
t = build_variant_type_copy (type);
|
5718 |
|
|
set_type_quals (t, type_quals);
|
5719 |
|
|
|
5720 |
|
|
if (TYPE_STRUCTURAL_EQUALITY_P (type))
|
5721 |
|
|
/* Propagate structural equality. */
|
5722 |
|
|
SET_TYPE_STRUCTURAL_EQUALITY (t);
|
5723 |
|
|
else if (TYPE_CANONICAL (type) != type)
|
5724 |
|
|
/* Build the underlying canonical type, since it is different
|
5725 |
|
|
from TYPE. */
|
5726 |
|
|
TYPE_CANONICAL (t) = build_qualified_type (TYPE_CANONICAL (type),
|
5727 |
|
|
type_quals);
|
5728 |
|
|
else
|
5729 |
|
|
/* T is its own canonical type. */
|
5730 |
|
|
TYPE_CANONICAL (t) = t;
|
5731 |
|
|
|
5732 |
|
|
}
|
5733 |
|
|
|
5734 |
|
|
return t;
|
5735 |
|
|
}
|
5736 |
|
|
|
5737 |
|
|
/* Create a variant of type T with alignment ALIGN. */
|
5738 |
|
|
|
5739 |
|
|
tree
|
5740 |
|
|
build_aligned_type (tree type, unsigned int align)
|
5741 |
|
|
{
|
5742 |
|
|
tree t;
|
5743 |
|
|
|
5744 |
|
|
if (TYPE_PACKED (type)
|
5745 |
|
|
|| TYPE_ALIGN (type) == align)
|
5746 |
|
|
return type;
|
5747 |
|
|
|
5748 |
|
|
for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
|
5749 |
|
|
if (check_aligned_type (t, type, align))
|
5750 |
|
|
return t;
|
5751 |
|
|
|
5752 |
|
|
t = build_variant_type_copy (type);
|
5753 |
|
|
TYPE_ALIGN (t) = align;
|
5754 |
|
|
|
5755 |
|
|
return t;
|
5756 |
|
|
}
|
5757 |
|
|
|
5758 |
|
|
/* Create a new distinct copy of TYPE. The new type is made its own
|
5759 |
|
|
MAIN_VARIANT. If TYPE requires structural equality checks, the
|
5760 |
|
|
resulting type requires structural equality checks; otherwise, its
|
5761 |
|
|
TYPE_CANONICAL points to itself. */
|
5762 |
|
|
|
5763 |
|
|
tree
|
5764 |
|
|
build_distinct_type_copy (tree type)
|
5765 |
|
|
{
|
5766 |
|
|
tree t = copy_node (type);
|
5767 |
|
|
|
5768 |
|
|
TYPE_POINTER_TO (t) = 0;
|
5769 |
|
|
TYPE_REFERENCE_TO (t) = 0;
|
5770 |
|
|
|
5771 |
|
|
/* Set the canonical type either to a new equivalence class, or
|
5772 |
|
|
propagate the need for structural equality checks. */
|
5773 |
|
|
if (TYPE_STRUCTURAL_EQUALITY_P (type))
|
5774 |
|
|
SET_TYPE_STRUCTURAL_EQUALITY (t);
|
5775 |
|
|
else
|
5776 |
|
|
TYPE_CANONICAL (t) = t;
|
5777 |
|
|
|
5778 |
|
|
/* Make it its own variant. */
|
5779 |
|
|
TYPE_MAIN_VARIANT (t) = t;
|
5780 |
|
|
TYPE_NEXT_VARIANT (t) = 0;
|
5781 |
|
|
|
5782 |
|
|
/* Note that it is now possible for TYPE_MIN_VALUE to be a value
|
5783 |
|
|
whose TREE_TYPE is not t. This can also happen in the Ada
|
5784 |
|
|
frontend when using subtypes. */
|
5785 |
|
|
|
5786 |
|
|
return t;
|
5787 |
|
|
}
|
5788 |
|
|
|
5789 |
|
|
/* Create a new variant of TYPE, equivalent but distinct. This is so
|
5790 |
|
|
the caller can modify it. TYPE_CANONICAL for the return type will
|
5791 |
|
|
be equivalent to TYPE_CANONICAL of TYPE, indicating that the types
|
5792 |
|
|
are considered equal by the language itself (or that both types
|
5793 |
|
|
require structural equality checks). */
|
5794 |
|
|
|
5795 |
|
|
tree
|
5796 |
|
|
build_variant_type_copy (tree type)
|
5797 |
|
|
{
|
5798 |
|
|
tree t, m = TYPE_MAIN_VARIANT (type);
|
5799 |
|
|
|
5800 |
|
|
t = build_distinct_type_copy (type);
|
5801 |
|
|
|
5802 |
|
|
/* Since we're building a variant, assume that it is a non-semantic
|
5803 |
|
|
variant. This also propagates TYPE_STRUCTURAL_EQUALITY_P. */
|
5804 |
|
|
TYPE_CANONICAL (t) = TYPE_CANONICAL (type);
|
5805 |
|
|
|
5806 |
|
|
/* Add the new type to the chain of variants of TYPE. */
|
5807 |
|
|
TYPE_NEXT_VARIANT (t) = TYPE_NEXT_VARIANT (m);
|
5808 |
|
|
TYPE_NEXT_VARIANT (m) = t;
|
5809 |
|
|
TYPE_MAIN_VARIANT (t) = m;
|
5810 |
|
|
|
5811 |
|
|
return t;
|
5812 |
|
|
}
|
5813 |
|
|
|
5814 |
|
|
/* Return true if the from tree in both tree maps are equal. */
|
5815 |
|
|
|
5816 |
|
|
int
|
5817 |
|
|
tree_map_base_eq (const void *va, const void *vb)
|
5818 |
|
|
{
|
5819 |
|
|
const struct tree_map_base *const a = (const struct tree_map_base *) va,
|
5820 |
|
|
*const b = (const struct tree_map_base *) vb;
|
5821 |
|
|
return (a->from == b->from);
|
5822 |
|
|
}
|
5823 |
|
|
|
5824 |
|
|
/* Hash a from tree in a tree_base_map. */
|
5825 |
|
|
|
5826 |
|
|
unsigned int
|
5827 |
|
|
tree_map_base_hash (const void *item)
|
5828 |
|
|
{
|
5829 |
|
|
return htab_hash_pointer (((const struct tree_map_base *)item)->from);
|
5830 |
|
|
}
|
5831 |
|
|
|
5832 |
|
|
/* Return true if this tree map structure is marked for garbage collection
|
5833 |
|
|
purposes. We simply return true if the from tree is marked, so that this
|
5834 |
|
|
structure goes away when the from tree goes away. */
|
5835 |
|
|
|
5836 |
|
|
int
|
5837 |
|
|
tree_map_base_marked_p (const void *p)
|
5838 |
|
|
{
|
5839 |
|
|
return ggc_marked_p (((const struct tree_map_base *) p)->from);
|
5840 |
|
|
}
|
5841 |
|
|
|
5842 |
|
|
/* Hash a from tree in a tree_map. */
|
5843 |
|
|
|
5844 |
|
|
unsigned int
|
5845 |
|
|
tree_map_hash (const void *item)
|
5846 |
|
|
{
|
5847 |
|
|
return (((const struct tree_map *) item)->hash);
|
5848 |
|
|
}
|
5849 |
|
|
|
5850 |
|
|
/* Hash a from tree in a tree_decl_map. */
|
5851 |
|
|
|
5852 |
|
|
unsigned int
|
5853 |
|
|
tree_decl_map_hash (const void *item)
|
5854 |
|
|
{
|
5855 |
|
|
return DECL_UID (((const struct tree_decl_map *) item)->base.from);
|
5856 |
|
|
}
|
5857 |
|
|
|
5858 |
|
|
/* Return the initialization priority for DECL. */
|
5859 |
|
|
|
5860 |
|
|
priority_type
|
5861 |
|
|
decl_init_priority_lookup (tree decl)
|
5862 |
|
|
{
|
5863 |
|
|
struct tree_priority_map *h;
|
5864 |
|
|
struct tree_map_base in;
|
5865 |
|
|
|
5866 |
|
|
gcc_assert (VAR_OR_FUNCTION_DECL_P (decl));
|
5867 |
|
|
in.from = decl;
|
5868 |
|
|
h = (struct tree_priority_map *) htab_find (init_priority_for_decl, &in);
|
5869 |
|
|
return h ? h->init : DEFAULT_INIT_PRIORITY;
|
5870 |
|
|
}
|
5871 |
|
|
|
5872 |
|
|
/* Return the finalization priority for DECL. */
|
5873 |
|
|
|
5874 |
|
|
priority_type
|
5875 |
|
|
decl_fini_priority_lookup (tree decl)
|
5876 |
|
|
{
|
5877 |
|
|
struct tree_priority_map *h;
|
5878 |
|
|
struct tree_map_base in;
|
5879 |
|
|
|
5880 |
|
|
gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
|
5881 |
|
|
in.from = decl;
|
5882 |
|
|
h = (struct tree_priority_map *) htab_find (init_priority_for_decl, &in);
|
5883 |
|
|
return h ? h->fini : DEFAULT_INIT_PRIORITY;
|
5884 |
|
|
}
|
5885 |
|
|
|
5886 |
|
|
/* Return the initialization and finalization priority information for
|
5887 |
|
|
DECL. If there is no previous priority information, a freshly
|
5888 |
|
|
allocated structure is returned. */
|
5889 |
|
|
|
5890 |
|
|
static struct tree_priority_map *
|
5891 |
|
|
decl_priority_info (tree decl)
|
5892 |
|
|
{
|
5893 |
|
|
struct tree_priority_map in;
|
5894 |
|
|
struct tree_priority_map *h;
|
5895 |
|
|
void **loc;
|
5896 |
|
|
|
5897 |
|
|
in.base.from = decl;
|
5898 |
|
|
loc = htab_find_slot (init_priority_for_decl, &in, INSERT);
|
5899 |
|
|
h = (struct tree_priority_map *) *loc;
|
5900 |
|
|
if (!h)
|
5901 |
|
|
{
|
5902 |
|
|
h = ggc_alloc_cleared_tree_priority_map ();
|
5903 |
|
|
*loc = h;
|
5904 |
|
|
h->base.from = decl;
|
5905 |
|
|
h->init = DEFAULT_INIT_PRIORITY;
|
5906 |
|
|
h->fini = DEFAULT_INIT_PRIORITY;
|
5907 |
|
|
}
|
5908 |
|
|
|
5909 |
|
|
return h;
|
5910 |
|
|
}
|
5911 |
|
|
|
5912 |
|
|
/* Set the initialization priority for DECL to PRIORITY. */
|
5913 |
|
|
|
5914 |
|
|
void
|
5915 |
|
|
decl_init_priority_insert (tree decl, priority_type priority)
|
5916 |
|
|
{
|
5917 |
|
|
struct tree_priority_map *h;
|
5918 |
|
|
|
5919 |
|
|
gcc_assert (VAR_OR_FUNCTION_DECL_P (decl));
|
5920 |
|
|
if (priority == DEFAULT_INIT_PRIORITY)
|
5921 |
|
|
return;
|
5922 |
|
|
h = decl_priority_info (decl);
|
5923 |
|
|
h->init = priority;
|
5924 |
|
|
}
|
5925 |
|
|
|
5926 |
|
|
/* Set the finalization priority for DECL to PRIORITY. */
|
5927 |
|
|
|
5928 |
|
|
void
|
5929 |
|
|
decl_fini_priority_insert (tree decl, priority_type priority)
|
5930 |
|
|
{
|
5931 |
|
|
struct tree_priority_map *h;
|
5932 |
|
|
|
5933 |
|
|
gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
|
5934 |
|
|
if (priority == DEFAULT_INIT_PRIORITY)
|
5935 |
|
|
return;
|
5936 |
|
|
h = decl_priority_info (decl);
|
5937 |
|
|
h->fini = priority;
|
5938 |
|
|
}
|
5939 |
|
|
|
5940 |
|
|
/* Print out the statistics for the DECL_DEBUG_EXPR hash table. */
|
5941 |
|
|
|
5942 |
|
|
static void
|
5943 |
|
|
print_debug_expr_statistics (void)
|
5944 |
|
|
{
|
5945 |
|
|
fprintf (stderr, "DECL_DEBUG_EXPR hash: size %ld, %ld elements, %f collisions\n",
|
5946 |
|
|
(long) htab_size (debug_expr_for_decl),
|
5947 |
|
|
(long) htab_elements (debug_expr_for_decl),
|
5948 |
|
|
htab_collisions (debug_expr_for_decl));
|
5949 |
|
|
}
|
5950 |
|
|
|
5951 |
|
|
/* Print out the statistics for the DECL_VALUE_EXPR hash table. */
|
5952 |
|
|
|
5953 |
|
|
static void
|
5954 |
|
|
print_value_expr_statistics (void)
|
5955 |
|
|
{
|
5956 |
|
|
fprintf (stderr, "DECL_VALUE_EXPR hash: size %ld, %ld elements, %f collisions\n",
|
5957 |
|
|
(long) htab_size (value_expr_for_decl),
|
5958 |
|
|
(long) htab_elements (value_expr_for_decl),
|
5959 |
|
|
htab_collisions (value_expr_for_decl));
|
5960 |
|
|
}
|
5961 |
|
|
|
5962 |
|
|
/* Lookup a debug expression for FROM, and return it if we find one. */
|
5963 |
|
|
|
5964 |
|
|
tree
|
5965 |
|
|
decl_debug_expr_lookup (tree from)
|
5966 |
|
|
{
|
5967 |
|
|
struct tree_decl_map *h, in;
|
5968 |
|
|
in.base.from = from;
|
5969 |
|
|
|
5970 |
|
|
h = (struct tree_decl_map *)
|
5971 |
|
|
htab_find_with_hash (debug_expr_for_decl, &in, DECL_UID (from));
|
5972 |
|
|
if (h)
|
5973 |
|
|
return h->to;
|
5974 |
|
|
return NULL_TREE;
|
5975 |
|
|
}
|
5976 |
|
|
|
5977 |
|
|
/* Insert a mapping FROM->TO in the debug expression hashtable. */
|
5978 |
|
|
|
5979 |
|
|
void
|
5980 |
|
|
decl_debug_expr_insert (tree from, tree to)
|
5981 |
|
|
{
|
5982 |
|
|
struct tree_decl_map *h;
|
5983 |
|
|
void **loc;
|
5984 |
|
|
|
5985 |
|
|
h = ggc_alloc_tree_decl_map ();
|
5986 |
|
|
h->base.from = from;
|
5987 |
|
|
h->to = to;
|
5988 |
|
|
loc = htab_find_slot_with_hash (debug_expr_for_decl, h, DECL_UID (from),
|
5989 |
|
|
INSERT);
|
5990 |
|
|
*(struct tree_decl_map **) loc = h;
|
5991 |
|
|
}
|
5992 |
|
|
|
5993 |
|
|
/* Lookup a value expression for FROM, and return it if we find one. */
|
5994 |
|
|
|
5995 |
|
|
tree
|
5996 |
|
|
decl_value_expr_lookup (tree from)
|
5997 |
|
|
{
|
5998 |
|
|
struct tree_decl_map *h, in;
|
5999 |
|
|
in.base.from = from;
|
6000 |
|
|
|
6001 |
|
|
h = (struct tree_decl_map *)
|
6002 |
|
|
htab_find_with_hash (value_expr_for_decl, &in, DECL_UID (from));
|
6003 |
|
|
if (h)
|
6004 |
|
|
return h->to;
|
6005 |
|
|
return NULL_TREE;
|
6006 |
|
|
}
|
6007 |
|
|
|
6008 |
|
|
/* Insert a mapping FROM->TO in the value expression hashtable. */
|
6009 |
|
|
|
6010 |
|
|
void
|
6011 |
|
|
decl_value_expr_insert (tree from, tree to)
|
6012 |
|
|
{
|
6013 |
|
|
struct tree_decl_map *h;
|
6014 |
|
|
void **loc;
|
6015 |
|
|
|
6016 |
|
|
h = ggc_alloc_tree_decl_map ();
|
6017 |
|
|
h->base.from = from;
|
6018 |
|
|
h->to = to;
|
6019 |
|
|
loc = htab_find_slot_with_hash (value_expr_for_decl, h, DECL_UID (from),
|
6020 |
|
|
INSERT);
|
6021 |
|
|
*(struct tree_decl_map **) loc = h;
|
6022 |
|
|
}
|
6023 |
|
|
|
6024 |
|
|
/* Lookup a vector of debug arguments for FROM, and return it if we
|
6025 |
|
|
find one. */
|
6026 |
|
|
|
6027 |
|
|
VEC(tree, gc) **
|
6028 |
|
|
decl_debug_args_lookup (tree from)
|
6029 |
|
|
{
|
6030 |
|
|
struct tree_vec_map *h, in;
|
6031 |
|
|
|
6032 |
|
|
if (!DECL_HAS_DEBUG_ARGS_P (from))
|
6033 |
|
|
return NULL;
|
6034 |
|
|
gcc_checking_assert (debug_args_for_decl != NULL);
|
6035 |
|
|
in.base.from = from;
|
6036 |
|
|
h = (struct tree_vec_map *)
|
6037 |
|
|
htab_find_with_hash (debug_args_for_decl, &in, DECL_UID (from));
|
6038 |
|
|
if (h)
|
6039 |
|
|
return &h->to;
|
6040 |
|
|
return NULL;
|
6041 |
|
|
}
|
6042 |
|
|
|
6043 |
|
|
/* Insert a mapping FROM->empty vector of debug arguments in the value
|
6044 |
|
|
expression hashtable. */
|
6045 |
|
|
|
6046 |
|
|
VEC(tree, gc) **
|
6047 |
|
|
decl_debug_args_insert (tree from)
|
6048 |
|
|
{
|
6049 |
|
|
struct tree_vec_map *h;
|
6050 |
|
|
void **loc;
|
6051 |
|
|
|
6052 |
|
|
if (DECL_HAS_DEBUG_ARGS_P (from))
|
6053 |
|
|
return decl_debug_args_lookup (from);
|
6054 |
|
|
if (debug_args_for_decl == NULL)
|
6055 |
|
|
debug_args_for_decl = htab_create_ggc (64, tree_vec_map_hash,
|
6056 |
|
|
tree_vec_map_eq, 0);
|
6057 |
|
|
h = ggc_alloc_tree_vec_map ();
|
6058 |
|
|
h->base.from = from;
|
6059 |
|
|
h->to = NULL;
|
6060 |
|
|
loc = htab_find_slot_with_hash (debug_args_for_decl, h, DECL_UID (from),
|
6061 |
|
|
INSERT);
|
6062 |
|
|
*(struct tree_vec_map **) loc = h;
|
6063 |
|
|
DECL_HAS_DEBUG_ARGS_P (from) = 1;
|
6064 |
|
|
return &h->to;
|
6065 |
|
|
}
|
6066 |
|
|
|
6067 |
|
|
/* Hashing of types so that we don't make duplicates.
|
6068 |
|
|
The entry point is `type_hash_canon'. */
|
6069 |
|
|
|
6070 |
|
|
/* Compute a hash code for a list of types (chain of TREE_LIST nodes
|
6071 |
|
|
with types in the TREE_VALUE slots), by adding the hash codes
|
6072 |
|
|
of the individual types. */
|
6073 |
|
|
|
6074 |
|
|
static unsigned int
|
6075 |
|
|
type_hash_list (const_tree list, hashval_t hashcode)
|
6076 |
|
|
{
|
6077 |
|
|
const_tree tail;
|
6078 |
|
|
|
6079 |
|
|
for (tail = list; tail; tail = TREE_CHAIN (tail))
|
6080 |
|
|
if (TREE_VALUE (tail) != error_mark_node)
|
6081 |
|
|
hashcode = iterative_hash_object (TYPE_HASH (TREE_VALUE (tail)),
|
6082 |
|
|
hashcode);
|
6083 |
|
|
|
6084 |
|
|
return hashcode;
|
6085 |
|
|
}
|
6086 |
|
|
|
6087 |
|
|
/* These are the Hashtable callback functions. */
|
6088 |
|
|
|
6089 |
|
|
/* Returns true iff the types are equivalent. */
|
6090 |
|
|
|
6091 |
|
|
static int
|
6092 |
|
|
type_hash_eq (const void *va, const void *vb)
|
6093 |
|
|
{
|
6094 |
|
|
const struct type_hash *const a = (const struct type_hash *) va,
|
6095 |
|
|
*const b = (const struct type_hash *) vb;
|
6096 |
|
|
|
6097 |
|
|
/* First test the things that are the same for all types. */
|
6098 |
|
|
if (a->hash != b->hash
|
6099 |
|
|
|| TREE_CODE (a->type) != TREE_CODE (b->type)
|
6100 |
|
|
|| TREE_TYPE (a->type) != TREE_TYPE (b->type)
|
6101 |
|
|
|| !attribute_list_equal (TYPE_ATTRIBUTES (a->type),
|
6102 |
|
|
TYPE_ATTRIBUTES (b->type))
|
6103 |
|
|
|| (TREE_CODE (a->type) != COMPLEX_TYPE
|
6104 |
|
|
&& TYPE_NAME (a->type) != TYPE_NAME (b->type)))
|
6105 |
|
|
return 0;
|
6106 |
|
|
|
6107 |
|
|
/* Be careful about comparing arrays before and after the element type
|
6108 |
|
|
has been completed; don't compare TYPE_ALIGN unless both types are
|
6109 |
|
|
complete. */
|
6110 |
|
|
if (COMPLETE_TYPE_P (a->type) && COMPLETE_TYPE_P (b->type)
|
6111 |
|
|
&& (TYPE_ALIGN (a->type) != TYPE_ALIGN (b->type)
|
6112 |
|
|
|| TYPE_MODE (a->type) != TYPE_MODE (b->type)))
|
6113 |
|
|
return 0;
|
6114 |
|
|
|
6115 |
|
|
switch (TREE_CODE (a->type))
|
6116 |
|
|
{
|
6117 |
|
|
case VOID_TYPE:
|
6118 |
|
|
case COMPLEX_TYPE:
|
6119 |
|
|
case POINTER_TYPE:
|
6120 |
|
|
case REFERENCE_TYPE:
|
6121 |
|
|
return 1;
|
6122 |
|
|
|
6123 |
|
|
case VECTOR_TYPE:
|
6124 |
|
|
return TYPE_VECTOR_SUBPARTS (a->type) == TYPE_VECTOR_SUBPARTS (b->type);
|
6125 |
|
|
|
6126 |
|
|
case ENUMERAL_TYPE:
|
6127 |
|
|
if (TYPE_VALUES (a->type) != TYPE_VALUES (b->type)
|
6128 |
|
|
&& !(TYPE_VALUES (a->type)
|
6129 |
|
|
&& TREE_CODE (TYPE_VALUES (a->type)) == TREE_LIST
|
6130 |
|
|
&& TYPE_VALUES (b->type)
|
6131 |
|
|
&& TREE_CODE (TYPE_VALUES (b->type)) == TREE_LIST
|
6132 |
|
|
&& type_list_equal (TYPE_VALUES (a->type),
|
6133 |
|
|
TYPE_VALUES (b->type))))
|
6134 |
|
|
return 0;
|
6135 |
|
|
|
6136 |
|
|
/* ... fall through ... */
|
6137 |
|
|
|
6138 |
|
|
case INTEGER_TYPE:
|
6139 |
|
|
case REAL_TYPE:
|
6140 |
|
|
case BOOLEAN_TYPE:
|
6141 |
|
|
return ((TYPE_MAX_VALUE (a->type) == TYPE_MAX_VALUE (b->type)
|
6142 |
|
|
|| tree_int_cst_equal (TYPE_MAX_VALUE (a->type),
|
6143 |
|
|
TYPE_MAX_VALUE (b->type)))
|
6144 |
|
|
&& (TYPE_MIN_VALUE (a->type) == TYPE_MIN_VALUE (b->type)
|
6145 |
|
|
|| tree_int_cst_equal (TYPE_MIN_VALUE (a->type),
|
6146 |
|
|
TYPE_MIN_VALUE (b->type))));
|
6147 |
|
|
|
6148 |
|
|
case FIXED_POINT_TYPE:
|
6149 |
|
|
return TYPE_SATURATING (a->type) == TYPE_SATURATING (b->type);
|
6150 |
|
|
|
6151 |
|
|
case OFFSET_TYPE:
|
6152 |
|
|
return TYPE_OFFSET_BASETYPE (a->type) == TYPE_OFFSET_BASETYPE (b->type);
|
6153 |
|
|
|
6154 |
|
|
case METHOD_TYPE:
|
6155 |
|
|
if (TYPE_METHOD_BASETYPE (a->type) == TYPE_METHOD_BASETYPE (b->type)
|
6156 |
|
|
&& (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
|
6157 |
|
|
|| (TYPE_ARG_TYPES (a->type)
|
6158 |
|
|
&& TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
|
6159 |
|
|
&& TYPE_ARG_TYPES (b->type)
|
6160 |
|
|
&& TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
|
6161 |
|
|
&& type_list_equal (TYPE_ARG_TYPES (a->type),
|
6162 |
|
|
TYPE_ARG_TYPES (b->type)))))
|
6163 |
|
|
break;
|
6164 |
|
|
return 0;
|
6165 |
|
|
case ARRAY_TYPE:
|
6166 |
|
|
return TYPE_DOMAIN (a->type) == TYPE_DOMAIN (b->type);
|
6167 |
|
|
|
6168 |
|
|
case RECORD_TYPE:
|
6169 |
|
|
case UNION_TYPE:
|
6170 |
|
|
case QUAL_UNION_TYPE:
|
6171 |
|
|
return (TYPE_FIELDS (a->type) == TYPE_FIELDS (b->type)
|
6172 |
|
|
|| (TYPE_FIELDS (a->type)
|
6173 |
|
|
&& TREE_CODE (TYPE_FIELDS (a->type)) == TREE_LIST
|
6174 |
|
|
&& TYPE_FIELDS (b->type)
|
6175 |
|
|
&& TREE_CODE (TYPE_FIELDS (b->type)) == TREE_LIST
|
6176 |
|
|
&& type_list_equal (TYPE_FIELDS (a->type),
|
6177 |
|
|
TYPE_FIELDS (b->type))));
|
6178 |
|
|
|
6179 |
|
|
case FUNCTION_TYPE:
|
6180 |
|
|
if (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
|
6181 |
|
|
|| (TYPE_ARG_TYPES (a->type)
|
6182 |
|
|
&& TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
|
6183 |
|
|
&& TYPE_ARG_TYPES (b->type)
|
6184 |
|
|
&& TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
|
6185 |
|
|
&& type_list_equal (TYPE_ARG_TYPES (a->type),
|
6186 |
|
|
TYPE_ARG_TYPES (b->type))))
|
6187 |
|
|
break;
|
6188 |
|
|
return 0;
|
6189 |
|
|
|
6190 |
|
|
default:
|
6191 |
|
|
return 0;
|
6192 |
|
|
}
|
6193 |
|
|
|
6194 |
|
|
if (lang_hooks.types.type_hash_eq != NULL)
|
6195 |
|
|
return lang_hooks.types.type_hash_eq (a->type, b->type);
|
6196 |
|
|
|
6197 |
|
|
return 1;
|
6198 |
|
|
}
|
6199 |
|
|
|
6200 |
|
|
/* Return the cached hash value. */
|
6201 |
|
|
|
6202 |
|
|
static hashval_t
|
6203 |
|
|
type_hash_hash (const void *item)
|
6204 |
|
|
{
|
6205 |
|
|
return ((const struct type_hash *) item)->hash;
|
6206 |
|
|
}
|
6207 |
|
|
|
6208 |
|
|
/* Look in the type hash table for a type isomorphic to TYPE.
|
6209 |
|
|
If one is found, return it. Otherwise return 0. */
|
6210 |
|
|
|
6211 |
|
|
tree
|
6212 |
|
|
type_hash_lookup (hashval_t hashcode, tree type)
|
6213 |
|
|
{
|
6214 |
|
|
struct type_hash *h, in;
|
6215 |
|
|
|
6216 |
|
|
/* The TYPE_ALIGN field of a type is set by layout_type(), so we
|
6217 |
|
|
must call that routine before comparing TYPE_ALIGNs. */
|
6218 |
|
|
layout_type (type);
|
6219 |
|
|
|
6220 |
|
|
in.hash = hashcode;
|
6221 |
|
|
in.type = type;
|
6222 |
|
|
|
6223 |
|
|
h = (struct type_hash *) htab_find_with_hash (type_hash_table, &in,
|
6224 |
|
|
hashcode);
|
6225 |
|
|
if (h)
|
6226 |
|
|
return h->type;
|
6227 |
|
|
return NULL_TREE;
|
6228 |
|
|
}
|
6229 |
|
|
|
6230 |
|
|
/* Add an entry to the type-hash-table
|
6231 |
|
|
for a type TYPE whose hash code is HASHCODE. */
|
6232 |
|
|
|
6233 |
|
|
void
|
6234 |
|
|
type_hash_add (hashval_t hashcode, tree type)
|
6235 |
|
|
{
|
6236 |
|
|
struct type_hash *h;
|
6237 |
|
|
void **loc;
|
6238 |
|
|
|
6239 |
|
|
h = ggc_alloc_type_hash ();
|
6240 |
|
|
h->hash = hashcode;
|
6241 |
|
|
h->type = type;
|
6242 |
|
|
loc = htab_find_slot_with_hash (type_hash_table, h, hashcode, INSERT);
|
6243 |
|
|
*loc = (void *)h;
|
6244 |
|
|
}
|
6245 |
|
|
|
6246 |
|
|
/* Given TYPE, and HASHCODE its hash code, return the canonical
|
6247 |
|
|
object for an identical type if one already exists.
|
6248 |
|
|
Otherwise, return TYPE, and record it as the canonical object.
|
6249 |
|
|
|
6250 |
|
|
To use this function, first create a type of the sort you want.
|
6251 |
|
|
Then compute its hash code from the fields of the type that
|
6252 |
|
|
make it different from other similar types.
|
6253 |
|
|
Then call this function and use the value. */
|
6254 |
|
|
|
6255 |
|
|
tree
|
6256 |
|
|
type_hash_canon (unsigned int hashcode, tree type)
|
6257 |
|
|
{
|
6258 |
|
|
tree t1;
|
6259 |
|
|
|
6260 |
|
|
/* The hash table only contains main variants, so ensure that's what we're
|
6261 |
|
|
being passed. */
|
6262 |
|
|
gcc_assert (TYPE_MAIN_VARIANT (type) == type);
|
6263 |
|
|
|
6264 |
|
|
/* See if the type is in the hash table already. If so, return it.
|
6265 |
|
|
Otherwise, add the type. */
|
6266 |
|
|
t1 = type_hash_lookup (hashcode, type);
|
6267 |
|
|
if (t1 != 0)
|
6268 |
|
|
{
|
6269 |
|
|
#ifdef GATHER_STATISTICS
|
6270 |
|
|
tree_code_counts[(int) TREE_CODE (type)]--;
|
6271 |
|
|
tree_node_counts[(int) t_kind]--;
|
6272 |
|
|
tree_node_sizes[(int) t_kind] -= sizeof (struct tree_type_non_common);
|
6273 |
|
|
#endif
|
6274 |
|
|
return t1;
|
6275 |
|
|
}
|
6276 |
|
|
else
|
6277 |
|
|
{
|
6278 |
|
|
type_hash_add (hashcode, type);
|
6279 |
|
|
return type;
|
6280 |
|
|
}
|
6281 |
|
|
}
|
6282 |
|
|
|
6283 |
|
|
/* See if the data pointed to by the type hash table is marked. We consider
|
6284 |
|
|
it marked if the type is marked or if a debug type number or symbol
|
6285 |
|
|
table entry has been made for the type. */
|
6286 |
|
|
|
6287 |
|
|
static int
|
6288 |
|
|
type_hash_marked_p (const void *p)
|
6289 |
|
|
{
|
6290 |
|
|
const_tree const type = ((const struct type_hash *) p)->type;
|
6291 |
|
|
|
6292 |
|
|
return ggc_marked_p (type);
|
6293 |
|
|
}
|
6294 |
|
|
|
6295 |
|
|
static void
|
6296 |
|
|
print_type_hash_statistics (void)
|
6297 |
|
|
{
|
6298 |
|
|
fprintf (stderr, "Type hash: size %ld, %ld elements, %f collisions\n",
|
6299 |
|
|
(long) htab_size (type_hash_table),
|
6300 |
|
|
(long) htab_elements (type_hash_table),
|
6301 |
|
|
htab_collisions (type_hash_table));
|
6302 |
|
|
}
|
6303 |
|
|
|
6304 |
|
|
/* Compute a hash code for a list of attributes (chain of TREE_LIST nodes
|
6305 |
|
|
with names in the TREE_PURPOSE slots and args in the TREE_VALUE slots),
|
6306 |
|
|
by adding the hash codes of the individual attributes. */
|
6307 |
|
|
|
6308 |
|
|
static unsigned int
|
6309 |
|
|
attribute_hash_list (const_tree list, hashval_t hashcode)
|
6310 |
|
|
{
|
6311 |
|
|
const_tree tail;
|
6312 |
|
|
|
6313 |
|
|
for (tail = list; tail; tail = TREE_CHAIN (tail))
|
6314 |
|
|
/* ??? Do we want to add in TREE_VALUE too? */
|
6315 |
|
|
hashcode = iterative_hash_object
|
6316 |
|
|
(IDENTIFIER_HASH_VALUE (TREE_PURPOSE (tail)), hashcode);
|
6317 |
|
|
return hashcode;
|
6318 |
|
|
}
|
6319 |
|
|
|
6320 |
|
|
/* Given two lists of attributes, return true if list l2 is
|
6321 |
|
|
equivalent to l1. */
|
6322 |
|
|
|
6323 |
|
|
int
|
6324 |
|
|
attribute_list_equal (const_tree l1, const_tree l2)
|
6325 |
|
|
{
|
6326 |
|
|
if (l1 == l2)
|
6327 |
|
|
return 1;
|
6328 |
|
|
|
6329 |
|
|
return attribute_list_contained (l1, l2)
|
6330 |
|
|
&& attribute_list_contained (l2, l1);
|
6331 |
|
|
}
|
6332 |
|
|
|
6333 |
|
|
/* Given two lists of attributes, return true if list L2 is
|
6334 |
|
|
completely contained within L1. */
|
6335 |
|
|
/* ??? This would be faster if attribute names were stored in a canonicalized
|
6336 |
|
|
form. Otherwise, if L1 uses `foo' and L2 uses `__foo__', the long method
|
6337 |
|
|
must be used to show these elements are equivalent (which they are). */
|
6338 |
|
|
/* ??? It's not clear that attributes with arguments will always be handled
|
6339 |
|
|
correctly. */
|
6340 |
|
|
|
6341 |
|
|
int
|
6342 |
|
|
attribute_list_contained (const_tree l1, const_tree l2)
|
6343 |
|
|
{
|
6344 |
|
|
const_tree t1, t2;
|
6345 |
|
|
|
6346 |
|
|
/* First check the obvious, maybe the lists are identical. */
|
6347 |
|
|
if (l1 == l2)
|
6348 |
|
|
return 1;
|
6349 |
|
|
|
6350 |
|
|
/* Maybe the lists are similar. */
|
6351 |
|
|
for (t1 = l1, t2 = l2;
|
6352 |
|
|
t1 != 0 && t2 != 0
|
6353 |
|
|
&& TREE_PURPOSE (t1) == TREE_PURPOSE (t2)
|
6354 |
|
|
&& TREE_VALUE (t1) == TREE_VALUE (t2);
|
6355 |
|
|
t1 = TREE_CHAIN (t1), t2 = TREE_CHAIN (t2))
|
6356 |
|
|
;
|
6357 |
|
|
|
6358 |
|
|
/* Maybe the lists are equal. */
|
6359 |
|
|
if (t1 == 0 && t2 == 0)
|
6360 |
|
|
return 1;
|
6361 |
|
|
|
6362 |
|
|
for (; t2 != 0; t2 = TREE_CHAIN (t2))
|
6363 |
|
|
{
|
6364 |
|
|
const_tree attr;
|
6365 |
|
|
/* This CONST_CAST is okay because lookup_attribute does not
|
6366 |
|
|
modify its argument and the return value is assigned to a
|
6367 |
|
|
const_tree. */
|
6368 |
|
|
for (attr = lookup_ident_attribute (TREE_PURPOSE (t2), CONST_CAST_TREE(l1));
|
6369 |
|
|
attr != NULL_TREE && !attribute_value_equal (t2, attr);
|
6370 |
|
|
attr = lookup_ident_attribute (TREE_PURPOSE (t2), TREE_CHAIN (attr)))
|
6371 |
|
|
;
|
6372 |
|
|
|
6373 |
|
|
if (attr == NULL_TREE)
|
6374 |
|
|
return 0;
|
6375 |
|
|
}
|
6376 |
|
|
|
6377 |
|
|
return 1;
|
6378 |
|
|
}
|
6379 |
|
|
|
6380 |
|
|
/* Given two lists of types
|
6381 |
|
|
(chains of TREE_LIST nodes with types in the TREE_VALUE slots)
|
6382 |
|
|
return 1 if the lists contain the same types in the same order.
|
6383 |
|
|
Also, the TREE_PURPOSEs must match. */
|
6384 |
|
|
|
6385 |
|
|
int
|
6386 |
|
|
type_list_equal (const_tree l1, const_tree l2)
|
6387 |
|
|
{
|
6388 |
|
|
const_tree t1, t2;
|
6389 |
|
|
|
6390 |
|
|
for (t1 = l1, t2 = l2; t1 && t2; t1 = TREE_CHAIN (t1), t2 = TREE_CHAIN (t2))
|
6391 |
|
|
if (TREE_VALUE (t1) != TREE_VALUE (t2)
|
6392 |
|
|
|| (TREE_PURPOSE (t1) != TREE_PURPOSE (t2)
|
6393 |
|
|
&& ! (1 == simple_cst_equal (TREE_PURPOSE (t1), TREE_PURPOSE (t2))
|
6394 |
|
|
&& (TREE_TYPE (TREE_PURPOSE (t1))
|
6395 |
|
|
== TREE_TYPE (TREE_PURPOSE (t2))))))
|
6396 |
|
|
return 0;
|
6397 |
|
|
|
6398 |
|
|
return t1 == t2;
|
6399 |
|
|
}
|
6400 |
|
|
|
6401 |
|
|
/* Returns the number of arguments to the FUNCTION_TYPE or METHOD_TYPE
|
6402 |
|
|
given by TYPE. If the argument list accepts variable arguments,
|
6403 |
|
|
then this function counts only the ordinary arguments. */
|
6404 |
|
|
|
6405 |
|
|
int
|
6406 |
|
|
type_num_arguments (const_tree type)
|
6407 |
|
|
{
|
6408 |
|
|
int i = 0;
|
6409 |
|
|
tree t;
|
6410 |
|
|
|
6411 |
|
|
for (t = TYPE_ARG_TYPES (type); t; t = TREE_CHAIN (t))
|
6412 |
|
|
/* If the function does not take a variable number of arguments,
|
6413 |
|
|
the last element in the list will have type `void'. */
|
6414 |
|
|
if (VOID_TYPE_P (TREE_VALUE (t)))
|
6415 |
|
|
break;
|
6416 |
|
|
else
|
6417 |
|
|
++i;
|
6418 |
|
|
|
6419 |
|
|
return i;
|
6420 |
|
|
}
|
6421 |
|
|
|
6422 |
|
|
/* Nonzero if integer constants T1 and T2
|
6423 |
|
|
represent the same constant value. */
|
6424 |
|
|
|
6425 |
|
|
int
|
6426 |
|
|
tree_int_cst_equal (const_tree t1, const_tree t2)
|
6427 |
|
|
{
|
6428 |
|
|
if (t1 == t2)
|
6429 |
|
|
return 1;
|
6430 |
|
|
|
6431 |
|
|
if (t1 == 0 || t2 == 0)
|
6432 |
|
|
return 0;
|
6433 |
|
|
|
6434 |
|
|
if (TREE_CODE (t1) == INTEGER_CST
|
6435 |
|
|
&& TREE_CODE (t2) == INTEGER_CST
|
6436 |
|
|
&& TREE_INT_CST_LOW (t1) == TREE_INT_CST_LOW (t2)
|
6437 |
|
|
&& TREE_INT_CST_HIGH (t1) == TREE_INT_CST_HIGH (t2))
|
6438 |
|
|
return 1;
|
6439 |
|
|
|
6440 |
|
|
return 0;
|
6441 |
|
|
}
|
6442 |
|
|
|
6443 |
|
|
/* Nonzero if integer constants T1 and T2 represent values that satisfy <.
|
6444 |
|
|
The precise way of comparison depends on their data type. */
|
6445 |
|
|
|
6446 |
|
|
int
|
6447 |
|
|
tree_int_cst_lt (const_tree t1, const_tree t2)
|
6448 |
|
|
{
|
6449 |
|
|
if (t1 == t2)
|
6450 |
|
|
return 0;
|
6451 |
|
|
|
6452 |
|
|
if (TYPE_UNSIGNED (TREE_TYPE (t1)) != TYPE_UNSIGNED (TREE_TYPE (t2)))
|
6453 |
|
|
{
|
6454 |
|
|
int t1_sgn = tree_int_cst_sgn (t1);
|
6455 |
|
|
int t2_sgn = tree_int_cst_sgn (t2);
|
6456 |
|
|
|
6457 |
|
|
if (t1_sgn < t2_sgn)
|
6458 |
|
|
return 1;
|
6459 |
|
|
else if (t1_sgn > t2_sgn)
|
6460 |
|
|
return 0;
|
6461 |
|
|
/* Otherwise, both are non-negative, so we compare them as
|
6462 |
|
|
unsigned just in case one of them would overflow a signed
|
6463 |
|
|
type. */
|
6464 |
|
|
}
|
6465 |
|
|
else if (!TYPE_UNSIGNED (TREE_TYPE (t1)))
|
6466 |
|
|
return INT_CST_LT (t1, t2);
|
6467 |
|
|
|
6468 |
|
|
return INT_CST_LT_UNSIGNED (t1, t2);
|
6469 |
|
|
}
|
6470 |
|
|
|
6471 |
|
|
/* Returns -1 if T1 < T2, 0 if T1 == T2, and 1 if T1 > T2. */
|
6472 |
|
|
|
6473 |
|
|
int
|
6474 |
|
|
tree_int_cst_compare (const_tree t1, const_tree t2)
|
6475 |
|
|
{
|
6476 |
|
|
if (tree_int_cst_lt (t1, t2))
|
6477 |
|
|
return -1;
|
6478 |
|
|
else if (tree_int_cst_lt (t2, t1))
|
6479 |
|
|
return 1;
|
6480 |
|
|
else
|
6481 |
|
|
return 0;
|
6482 |
|
|
}
|
6483 |
|
|
|
6484 |
|
|
/* Return 1 if T is an INTEGER_CST that can be manipulated efficiently on
|
6485 |
|
|
the host. If POS is zero, the value can be represented in a single
|
6486 |
|
|
HOST_WIDE_INT. If POS is nonzero, the value must be non-negative and can
|
6487 |
|
|
be represented in a single unsigned HOST_WIDE_INT. */
|
6488 |
|
|
|
6489 |
|
|
int
|
6490 |
|
|
host_integerp (const_tree t, int pos)
|
6491 |
|
|
{
|
6492 |
|
|
if (t == NULL_TREE)
|
6493 |
|
|
return 0;
|
6494 |
|
|
|
6495 |
|
|
return (TREE_CODE (t) == INTEGER_CST
|
6496 |
|
|
&& ((TREE_INT_CST_HIGH (t) == 0
|
6497 |
|
|
&& (HOST_WIDE_INT) TREE_INT_CST_LOW (t) >= 0)
|
6498 |
|
|
|| (! pos && TREE_INT_CST_HIGH (t) == -1
|
6499 |
|
|
&& (HOST_WIDE_INT) TREE_INT_CST_LOW (t) < 0
|
6500 |
|
|
&& (!TYPE_UNSIGNED (TREE_TYPE (t))
|
6501 |
|
|
|| (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
|
6502 |
|
|
&& TYPE_IS_SIZETYPE (TREE_TYPE (t)))))
|
6503 |
|
|
|| (pos && TREE_INT_CST_HIGH (t) == 0)));
|
6504 |
|
|
}
|
6505 |
|
|
|
6506 |
|
|
/* Return the HOST_WIDE_INT least significant bits of T if it is an
|
6507 |
|
|
INTEGER_CST and there is no overflow. POS is nonzero if the result must
|
6508 |
|
|
be non-negative. We must be able to satisfy the above conditions. */
|
6509 |
|
|
|
6510 |
|
|
HOST_WIDE_INT
|
6511 |
|
|
tree_low_cst (const_tree t, int pos)
|
6512 |
|
|
{
|
6513 |
|
|
gcc_assert (host_integerp (t, pos));
|
6514 |
|
|
return TREE_INT_CST_LOW (t);
|
6515 |
|
|
}
|
6516 |
|
|
|
6517 |
|
|
/* Return the HOST_WIDE_INT least significant bits of T, a sizetype
|
6518 |
|
|
kind INTEGER_CST. This makes sure to properly sign-extend the
|
6519 |
|
|
constant. */
|
6520 |
|
|
|
6521 |
|
|
HOST_WIDE_INT
|
6522 |
|
|
size_low_cst (const_tree t)
|
6523 |
|
|
{
|
6524 |
|
|
double_int d = tree_to_double_int (t);
|
6525 |
|
|
return double_int_sext (d, TYPE_PRECISION (TREE_TYPE (t))).low;
|
6526 |
|
|
}
|
6527 |
|
|
|
6528 |
|
|
/* Return the most significant (sign) bit of T. */
|
6529 |
|
|
|
6530 |
|
|
int
|
6531 |
|
|
tree_int_cst_sign_bit (const_tree t)
|
6532 |
|
|
{
|
6533 |
|
|
unsigned bitno = TYPE_PRECISION (TREE_TYPE (t)) - 1;
|
6534 |
|
|
unsigned HOST_WIDE_INT w;
|
6535 |
|
|
|
6536 |
|
|
if (bitno < HOST_BITS_PER_WIDE_INT)
|
6537 |
|
|
w = TREE_INT_CST_LOW (t);
|
6538 |
|
|
else
|
6539 |
|
|
{
|
6540 |
|
|
w = TREE_INT_CST_HIGH (t);
|
6541 |
|
|
bitno -= HOST_BITS_PER_WIDE_INT;
|
6542 |
|
|
}
|
6543 |
|
|
|
6544 |
|
|
return (w >> bitno) & 1;
|
6545 |
|
|
}
|
6546 |
|
|
|
6547 |
|
|
/* Return an indication of the sign of the integer constant T.
|
6548 |
|
|
The return value is -1 if T < 0, 0 if T == 0, and 1 if T > 0.
|
6549 |
|
|
Note that -1 will never be returned if T's type is unsigned. */
|
6550 |
|
|
|
6551 |
|
|
int
|
6552 |
|
|
tree_int_cst_sgn (const_tree t)
|
6553 |
|
|
{
|
6554 |
|
|
if (TREE_INT_CST_LOW (t) == 0 && TREE_INT_CST_HIGH (t) == 0)
|
6555 |
|
|
return 0;
|
6556 |
|
|
else if (TYPE_UNSIGNED (TREE_TYPE (t)))
|
6557 |
|
|
return 1;
|
6558 |
|
|
else if (TREE_INT_CST_HIGH (t) < 0)
|
6559 |
|
|
return -1;
|
6560 |
|
|
else
|
6561 |
|
|
return 1;
|
6562 |
|
|
}
|
6563 |
|
|
|
6564 |
|
|
/* Return the minimum number of bits needed to represent VALUE in a
|
6565 |
|
|
signed or unsigned type, UNSIGNEDP says which. */
|
6566 |
|
|
|
6567 |
|
|
unsigned int
|
6568 |
|
|
tree_int_cst_min_precision (tree value, bool unsignedp)
|
6569 |
|
|
{
|
6570 |
|
|
int log;
|
6571 |
|
|
|
6572 |
|
|
/* If the value is negative, compute its negative minus 1. The latter
|
6573 |
|
|
adjustment is because the absolute value of the largest negative value
|
6574 |
|
|
is one larger than the largest positive value. This is equivalent to
|
6575 |
|
|
a bit-wise negation, so use that operation instead. */
|
6576 |
|
|
|
6577 |
|
|
if (tree_int_cst_sgn (value) < 0)
|
6578 |
|
|
value = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (value), value);
|
6579 |
|
|
|
6580 |
|
|
/* Return the number of bits needed, taking into account the fact
|
6581 |
|
|
that we need one more bit for a signed than unsigned type. */
|
6582 |
|
|
|
6583 |
|
|
if (integer_zerop (value))
|
6584 |
|
|
log = 0;
|
6585 |
|
|
else
|
6586 |
|
|
log = tree_floor_log2 (value);
|
6587 |
|
|
|
6588 |
|
|
return log + 1 + !unsignedp;
|
6589 |
|
|
}
|
6590 |
|
|
|
6591 |
|
|
/* Compare two constructor-element-type constants. Return 1 if the lists
|
6592 |
|
|
are known to be equal; otherwise return 0. */
|
6593 |
|
|
|
6594 |
|
|
int
|
6595 |
|
|
simple_cst_list_equal (const_tree l1, const_tree l2)
|
6596 |
|
|
{
|
6597 |
|
|
while (l1 != NULL_TREE && l2 != NULL_TREE)
|
6598 |
|
|
{
|
6599 |
|
|
if (simple_cst_equal (TREE_VALUE (l1), TREE_VALUE (l2)) != 1)
|
6600 |
|
|
return 0;
|
6601 |
|
|
|
6602 |
|
|
l1 = TREE_CHAIN (l1);
|
6603 |
|
|
l2 = TREE_CHAIN (l2);
|
6604 |
|
|
}
|
6605 |
|
|
|
6606 |
|
|
return l1 == l2;
|
6607 |
|
|
}
|
6608 |
|
|
|
6609 |
|
|
/* Return truthvalue of whether T1 is the same tree structure as T2.
|
6610 |
|
|
Return 1 if they are the same.
|
6611 |
|
|
Return 0 if they are understandably different.
|
6612 |
|
|
Return -1 if either contains tree structure not understood by
|
6613 |
|
|
this function. */
|
6614 |
|
|
|
6615 |
|
|
int
|
6616 |
|
|
simple_cst_equal (const_tree t1, const_tree t2)
|
6617 |
|
|
{
|
6618 |
|
|
enum tree_code code1, code2;
|
6619 |
|
|
int cmp;
|
6620 |
|
|
int i;
|
6621 |
|
|
|
6622 |
|
|
if (t1 == t2)
|
6623 |
|
|
return 1;
|
6624 |
|
|
if (t1 == 0 || t2 == 0)
|
6625 |
|
|
return 0;
|
6626 |
|
|
|
6627 |
|
|
code1 = TREE_CODE (t1);
|
6628 |
|
|
code2 = TREE_CODE (t2);
|
6629 |
|
|
|
6630 |
|
|
if (CONVERT_EXPR_CODE_P (code1) || code1 == NON_LVALUE_EXPR)
|
6631 |
|
|
{
|
6632 |
|
|
if (CONVERT_EXPR_CODE_P (code2)
|
6633 |
|
|
|| code2 == NON_LVALUE_EXPR)
|
6634 |
|
|
return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
|
6635 |
|
|
else
|
6636 |
|
|
return simple_cst_equal (TREE_OPERAND (t1, 0), t2);
|
6637 |
|
|
}
|
6638 |
|
|
|
6639 |
|
|
else if (CONVERT_EXPR_CODE_P (code2)
|
6640 |
|
|
|| code2 == NON_LVALUE_EXPR)
|
6641 |
|
|
return simple_cst_equal (t1, TREE_OPERAND (t2, 0));
|
6642 |
|
|
|
6643 |
|
|
if (code1 != code2)
|
6644 |
|
|
return 0;
|
6645 |
|
|
|
6646 |
|
|
switch (code1)
|
6647 |
|
|
{
|
6648 |
|
|
case INTEGER_CST:
|
6649 |
|
|
return (TREE_INT_CST_LOW (t1) == TREE_INT_CST_LOW (t2)
|
6650 |
|
|
&& TREE_INT_CST_HIGH (t1) == TREE_INT_CST_HIGH (t2));
|
6651 |
|
|
|
6652 |
|
|
case REAL_CST:
|
6653 |
|
|
return REAL_VALUES_IDENTICAL (TREE_REAL_CST (t1), TREE_REAL_CST (t2));
|
6654 |
|
|
|
6655 |
|
|
case FIXED_CST:
|
6656 |
|
|
return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (t1), TREE_FIXED_CST (t2));
|
6657 |
|
|
|
6658 |
|
|
case STRING_CST:
|
6659 |
|
|
return (TREE_STRING_LENGTH (t1) == TREE_STRING_LENGTH (t2)
|
6660 |
|
|
&& ! memcmp (TREE_STRING_POINTER (t1), TREE_STRING_POINTER (t2),
|
6661 |
|
|
TREE_STRING_LENGTH (t1)));
|
6662 |
|
|
|
6663 |
|
|
case CONSTRUCTOR:
|
6664 |
|
|
{
|
6665 |
|
|
unsigned HOST_WIDE_INT idx;
|
6666 |
|
|
VEC(constructor_elt, gc) *v1 = CONSTRUCTOR_ELTS (t1);
|
6667 |
|
|
VEC(constructor_elt, gc) *v2 = CONSTRUCTOR_ELTS (t2);
|
6668 |
|
|
|
6669 |
|
|
if (VEC_length (constructor_elt, v1) != VEC_length (constructor_elt, v2))
|
6670 |
|
|
return false;
|
6671 |
|
|
|
6672 |
|
|
for (idx = 0; idx < VEC_length (constructor_elt, v1); ++idx)
|
6673 |
|
|
/* ??? Should we handle also fields here? */
|
6674 |
|
|
if (!simple_cst_equal (VEC_index (constructor_elt, v1, idx)->value,
|
6675 |
|
|
VEC_index (constructor_elt, v2, idx)->value))
|
6676 |
|
|
return false;
|
6677 |
|
|
return true;
|
6678 |
|
|
}
|
6679 |
|
|
|
6680 |
|
|
case SAVE_EXPR:
|
6681 |
|
|
return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
|
6682 |
|
|
|
6683 |
|
|
case CALL_EXPR:
|
6684 |
|
|
cmp = simple_cst_equal (CALL_EXPR_FN (t1), CALL_EXPR_FN (t2));
|
6685 |
|
|
if (cmp <= 0)
|
6686 |
|
|
return cmp;
|
6687 |
|
|
if (call_expr_nargs (t1) != call_expr_nargs (t2))
|
6688 |
|
|
return 0;
|
6689 |
|
|
{
|
6690 |
|
|
const_tree arg1, arg2;
|
6691 |
|
|
const_call_expr_arg_iterator iter1, iter2;
|
6692 |
|
|
for (arg1 = first_const_call_expr_arg (t1, &iter1),
|
6693 |
|
|
arg2 = first_const_call_expr_arg (t2, &iter2);
|
6694 |
|
|
arg1 && arg2;
|
6695 |
|
|
arg1 = next_const_call_expr_arg (&iter1),
|
6696 |
|
|
arg2 = next_const_call_expr_arg (&iter2))
|
6697 |
|
|
{
|
6698 |
|
|
cmp = simple_cst_equal (arg1, arg2);
|
6699 |
|
|
if (cmp <= 0)
|
6700 |
|
|
return cmp;
|
6701 |
|
|
}
|
6702 |
|
|
return arg1 == arg2;
|
6703 |
|
|
}
|
6704 |
|
|
|
6705 |
|
|
case TARGET_EXPR:
|
6706 |
|
|
/* Special case: if either target is an unallocated VAR_DECL,
|
6707 |
|
|
it means that it's going to be unified with whatever the
|
6708 |
|
|
TARGET_EXPR is really supposed to initialize, so treat it
|
6709 |
|
|
as being equivalent to anything. */
|
6710 |
|
|
if ((TREE_CODE (TREE_OPERAND (t1, 0)) == VAR_DECL
|
6711 |
|
|
&& DECL_NAME (TREE_OPERAND (t1, 0)) == NULL_TREE
|
6712 |
|
|
&& !DECL_RTL_SET_P (TREE_OPERAND (t1, 0)))
|
6713 |
|
|
|| (TREE_CODE (TREE_OPERAND (t2, 0)) == VAR_DECL
|
6714 |
|
|
&& DECL_NAME (TREE_OPERAND (t2, 0)) == NULL_TREE
|
6715 |
|
|
&& !DECL_RTL_SET_P (TREE_OPERAND (t2, 0))))
|
6716 |
|
|
cmp = 1;
|
6717 |
|
|
else
|
6718 |
|
|
cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
|
6719 |
|
|
|
6720 |
|
|
if (cmp <= 0)
|
6721 |
|
|
return cmp;
|
6722 |
|
|
|
6723 |
|
|
return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
|
6724 |
|
|
|
6725 |
|
|
case WITH_CLEANUP_EXPR:
|
6726 |
|
|
cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
|
6727 |
|
|
if (cmp <= 0)
|
6728 |
|
|
return cmp;
|
6729 |
|
|
|
6730 |
|
|
return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t1, 1));
|
6731 |
|
|
|
6732 |
|
|
case COMPONENT_REF:
|
6733 |
|
|
if (TREE_OPERAND (t1, 1) == TREE_OPERAND (t2, 1))
|
6734 |
|
|
return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
|
6735 |
|
|
|
6736 |
|
|
return 0;
|
6737 |
|
|
|
6738 |
|
|
case VAR_DECL:
|
6739 |
|
|
case PARM_DECL:
|
6740 |
|
|
case CONST_DECL:
|
6741 |
|
|
case FUNCTION_DECL:
|
6742 |
|
|
return 0;
|
6743 |
|
|
|
6744 |
|
|
default:
|
6745 |
|
|
break;
|
6746 |
|
|
}
|
6747 |
|
|
|
6748 |
|
|
/* This general rule works for most tree codes. All exceptions should be
|
6749 |
|
|
handled above. If this is a language-specific tree code, we can't
|
6750 |
|
|
trust what might be in the operand, so say we don't know
|
6751 |
|
|
the situation. */
|
6752 |
|
|
if ((int) code1 >= (int) LAST_AND_UNUSED_TREE_CODE)
|
6753 |
|
|
return -1;
|
6754 |
|
|
|
6755 |
|
|
switch (TREE_CODE_CLASS (code1))
|
6756 |
|
|
{
|
6757 |
|
|
case tcc_unary:
|
6758 |
|
|
case tcc_binary:
|
6759 |
|
|
case tcc_comparison:
|
6760 |
|
|
case tcc_expression:
|
6761 |
|
|
case tcc_reference:
|
6762 |
|
|
case tcc_statement:
|
6763 |
|
|
cmp = 1;
|
6764 |
|
|
for (i = 0; i < TREE_CODE_LENGTH (code1); i++)
|
6765 |
|
|
{
|
6766 |
|
|
cmp = simple_cst_equal (TREE_OPERAND (t1, i), TREE_OPERAND (t2, i));
|
6767 |
|
|
if (cmp <= 0)
|
6768 |
|
|
return cmp;
|
6769 |
|
|
}
|
6770 |
|
|
|
6771 |
|
|
return cmp;
|
6772 |
|
|
|
6773 |
|
|
default:
|
6774 |
|
|
return -1;
|
6775 |
|
|
}
|
6776 |
|
|
}
|
6777 |
|
|
|
6778 |
|
|
/* Compare the value of T, an INTEGER_CST, with U, an unsigned integer value.
|
6779 |
|
|
Return -1, 0, or 1 if the value of T is less than, equal to, or greater
|
6780 |
|
|
than U, respectively. */
|
6781 |
|
|
|
6782 |
|
|
int
|
6783 |
|
|
compare_tree_int (const_tree t, unsigned HOST_WIDE_INT u)
|
6784 |
|
|
{
|
6785 |
|
|
if (tree_int_cst_sgn (t) < 0)
|
6786 |
|
|
return -1;
|
6787 |
|
|
else if (TREE_INT_CST_HIGH (t) != 0)
|
6788 |
|
|
return 1;
|
6789 |
|
|
else if (TREE_INT_CST_LOW (t) == u)
|
6790 |
|
|
return 0;
|
6791 |
|
|
else if (TREE_INT_CST_LOW (t) < u)
|
6792 |
|
|
return -1;
|
6793 |
|
|
else
|
6794 |
|
|
return 1;
|
6795 |
|
|
}
|
6796 |
|
|
|
6797 |
|
|
/* Return true if CODE represents an associative tree code. Otherwise
|
6798 |
|
|
return false. */
|
6799 |
|
|
bool
|
6800 |
|
|
associative_tree_code (enum tree_code code)
|
6801 |
|
|
{
|
6802 |
|
|
switch (code)
|
6803 |
|
|
{
|
6804 |
|
|
case BIT_IOR_EXPR:
|
6805 |
|
|
case BIT_AND_EXPR:
|
6806 |
|
|
case BIT_XOR_EXPR:
|
6807 |
|
|
case PLUS_EXPR:
|
6808 |
|
|
case MULT_EXPR:
|
6809 |
|
|
case MIN_EXPR:
|
6810 |
|
|
case MAX_EXPR:
|
6811 |
|
|
return true;
|
6812 |
|
|
|
6813 |
|
|
default:
|
6814 |
|
|
break;
|
6815 |
|
|
}
|
6816 |
|
|
return false;
|
6817 |
|
|
}
|
6818 |
|
|
|
6819 |
|
|
/* Return true if CODE represents a commutative tree code. Otherwise
|
6820 |
|
|
return false. */
|
6821 |
|
|
bool
|
6822 |
|
|
commutative_tree_code (enum tree_code code)
|
6823 |
|
|
{
|
6824 |
|
|
switch (code)
|
6825 |
|
|
{
|
6826 |
|
|
case PLUS_EXPR:
|
6827 |
|
|
case MULT_EXPR:
|
6828 |
|
|
case MIN_EXPR:
|
6829 |
|
|
case MAX_EXPR:
|
6830 |
|
|
case BIT_IOR_EXPR:
|
6831 |
|
|
case BIT_XOR_EXPR:
|
6832 |
|
|
case BIT_AND_EXPR:
|
6833 |
|
|
case NE_EXPR:
|
6834 |
|
|
case EQ_EXPR:
|
6835 |
|
|
case UNORDERED_EXPR:
|
6836 |
|
|
case ORDERED_EXPR:
|
6837 |
|
|
case UNEQ_EXPR:
|
6838 |
|
|
case LTGT_EXPR:
|
6839 |
|
|
case TRUTH_AND_EXPR:
|
6840 |
|
|
case TRUTH_XOR_EXPR:
|
6841 |
|
|
case TRUTH_OR_EXPR:
|
6842 |
|
|
return true;
|
6843 |
|
|
|
6844 |
|
|
default:
|
6845 |
|
|
break;
|
6846 |
|
|
}
|
6847 |
|
|
return false;
|
6848 |
|
|
}
|
6849 |
|
|
|
6850 |
|
|
/* Return true if CODE represents a ternary tree code for which the
|
6851 |
|
|
first two operands are commutative. Otherwise return false. */
|
6852 |
|
|
bool
|
6853 |
|
|
commutative_ternary_tree_code (enum tree_code code)
|
6854 |
|
|
{
|
6855 |
|
|
switch (code)
|
6856 |
|
|
{
|
6857 |
|
|
case WIDEN_MULT_PLUS_EXPR:
|
6858 |
|
|
case WIDEN_MULT_MINUS_EXPR:
|
6859 |
|
|
return true;
|
6860 |
|
|
|
6861 |
|
|
default:
|
6862 |
|
|
break;
|
6863 |
|
|
}
|
6864 |
|
|
return false;
|
6865 |
|
|
}
|
6866 |
|
|
|
6867 |
|
|
/* Generate a hash value for an expression. This can be used iteratively
|
6868 |
|
|
by passing a previous result as the VAL argument.
|
6869 |
|
|
|
6870 |
|
|
This function is intended to produce the same hash for expressions which
|
6871 |
|
|
would compare equal using operand_equal_p. */
|
6872 |
|
|
|
6873 |
|
|
hashval_t
|
6874 |
|
|
iterative_hash_expr (const_tree t, hashval_t val)
|
6875 |
|
|
{
|
6876 |
|
|
int i;
|
6877 |
|
|
enum tree_code code;
|
6878 |
|
|
char tclass;
|
6879 |
|
|
|
6880 |
|
|
if (t == NULL_TREE)
|
6881 |
|
|
return iterative_hash_hashval_t (0, val);
|
6882 |
|
|
|
6883 |
|
|
code = TREE_CODE (t);
|
6884 |
|
|
|
6885 |
|
|
switch (code)
|
6886 |
|
|
{
|
6887 |
|
|
/* Alas, constants aren't shared, so we can't rely on pointer
|
6888 |
|
|
identity. */
|
6889 |
|
|
case INTEGER_CST:
|
6890 |
|
|
val = iterative_hash_host_wide_int (TREE_INT_CST_LOW (t), val);
|
6891 |
|
|
return iterative_hash_host_wide_int (TREE_INT_CST_HIGH (t), val);
|
6892 |
|
|
case REAL_CST:
|
6893 |
|
|
{
|
6894 |
|
|
unsigned int val2 = real_hash (TREE_REAL_CST_PTR (t));
|
6895 |
|
|
|
6896 |
|
|
return iterative_hash_hashval_t (val2, val);
|
6897 |
|
|
}
|
6898 |
|
|
case FIXED_CST:
|
6899 |
|
|
{
|
6900 |
|
|
unsigned int val2 = fixed_hash (TREE_FIXED_CST_PTR (t));
|
6901 |
|
|
|
6902 |
|
|
return iterative_hash_hashval_t (val2, val);
|
6903 |
|
|
}
|
6904 |
|
|
case STRING_CST:
|
6905 |
|
|
return iterative_hash (TREE_STRING_POINTER (t),
|
6906 |
|
|
TREE_STRING_LENGTH (t), val);
|
6907 |
|
|
case COMPLEX_CST:
|
6908 |
|
|
val = iterative_hash_expr (TREE_REALPART (t), val);
|
6909 |
|
|
return iterative_hash_expr (TREE_IMAGPART (t), val);
|
6910 |
|
|
case VECTOR_CST:
|
6911 |
|
|
return iterative_hash_expr (TREE_VECTOR_CST_ELTS (t), val);
|
6912 |
|
|
case SSA_NAME:
|
6913 |
|
|
/* We can just compare by pointer. */
|
6914 |
|
|
return iterative_hash_host_wide_int (SSA_NAME_VERSION (t), val);
|
6915 |
|
|
case PLACEHOLDER_EXPR:
|
6916 |
|
|
/* The node itself doesn't matter. */
|
6917 |
|
|
return val;
|
6918 |
|
|
case TREE_LIST:
|
6919 |
|
|
/* A list of expressions, for a CALL_EXPR or as the elements of a
|
6920 |
|
|
VECTOR_CST. */
|
6921 |
|
|
for (; t; t = TREE_CHAIN (t))
|
6922 |
|
|
val = iterative_hash_expr (TREE_VALUE (t), val);
|
6923 |
|
|
return val;
|
6924 |
|
|
case CONSTRUCTOR:
|
6925 |
|
|
{
|
6926 |
|
|
unsigned HOST_WIDE_INT idx;
|
6927 |
|
|
tree field, value;
|
6928 |
|
|
FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), idx, field, value)
|
6929 |
|
|
{
|
6930 |
|
|
val = iterative_hash_expr (field, val);
|
6931 |
|
|
val = iterative_hash_expr (value, val);
|
6932 |
|
|
}
|
6933 |
|
|
return val;
|
6934 |
|
|
}
|
6935 |
|
|
case MEM_REF:
|
6936 |
|
|
{
|
6937 |
|
|
/* The type of the second operand is relevant, except for
|
6938 |
|
|
its top-level qualifiers. */
|
6939 |
|
|
tree type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (t, 1)));
|
6940 |
|
|
|
6941 |
|
|
val = iterative_hash_object (TYPE_HASH (type), val);
|
6942 |
|
|
|
6943 |
|
|
/* We could use the standard hash computation from this point
|
6944 |
|
|
on. */
|
6945 |
|
|
val = iterative_hash_object (code, val);
|
6946 |
|
|
val = iterative_hash_expr (TREE_OPERAND (t, 1), val);
|
6947 |
|
|
val = iterative_hash_expr (TREE_OPERAND (t, 0), val);
|
6948 |
|
|
return val;
|
6949 |
|
|
}
|
6950 |
|
|
case FUNCTION_DECL:
|
6951 |
|
|
/* When referring to a built-in FUNCTION_DECL, use the __builtin__ form.
|
6952 |
|
|
Otherwise nodes that compare equal according to operand_equal_p might
|
6953 |
|
|
get different hash codes. However, don't do this for machine specific
|
6954 |
|
|
or front end builtins, since the function code is overloaded in those
|
6955 |
|
|
cases. */
|
6956 |
|
|
if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL
|
6957 |
|
|
&& builtin_decl_explicit_p (DECL_FUNCTION_CODE (t)))
|
6958 |
|
|
{
|
6959 |
|
|
t = builtin_decl_explicit (DECL_FUNCTION_CODE (t));
|
6960 |
|
|
code = TREE_CODE (t);
|
6961 |
|
|
}
|
6962 |
|
|
/* FALL THROUGH */
|
6963 |
|
|
default:
|
6964 |
|
|
tclass = TREE_CODE_CLASS (code);
|
6965 |
|
|
|
6966 |
|
|
if (tclass == tcc_declaration)
|
6967 |
|
|
{
|
6968 |
|
|
/* DECL's have a unique ID */
|
6969 |
|
|
val = iterative_hash_host_wide_int (DECL_UID (t), val);
|
6970 |
|
|
}
|
6971 |
|
|
else
|
6972 |
|
|
{
|
6973 |
|
|
gcc_assert (IS_EXPR_CODE_CLASS (tclass));
|
6974 |
|
|
|
6975 |
|
|
val = iterative_hash_object (code, val);
|
6976 |
|
|
|
6977 |
|
|
/* Don't hash the type, that can lead to having nodes which
|
6978 |
|
|
compare equal according to operand_equal_p, but which
|
6979 |
|
|
have different hash codes. */
|
6980 |
|
|
if (CONVERT_EXPR_CODE_P (code)
|
6981 |
|
|
|| code == NON_LVALUE_EXPR)
|
6982 |
|
|
{
|
6983 |
|
|
/* Make sure to include signness in the hash computation. */
|
6984 |
|
|
val += TYPE_UNSIGNED (TREE_TYPE (t));
|
6985 |
|
|
val = iterative_hash_expr (TREE_OPERAND (t, 0), val);
|
6986 |
|
|
}
|
6987 |
|
|
|
6988 |
|
|
else if (commutative_tree_code (code))
|
6989 |
|
|
{
|
6990 |
|
|
/* It's a commutative expression. We want to hash it the same
|
6991 |
|
|
however it appears. We do this by first hashing both operands
|
6992 |
|
|
and then rehashing based on the order of their independent
|
6993 |
|
|
hashes. */
|
6994 |
|
|
hashval_t one = iterative_hash_expr (TREE_OPERAND (t, 0), 0);
|
6995 |
|
|
hashval_t two = iterative_hash_expr (TREE_OPERAND (t, 1), 0);
|
6996 |
|
|
hashval_t t;
|
6997 |
|
|
|
6998 |
|
|
if (one > two)
|
6999 |
|
|
t = one, one = two, two = t;
|
7000 |
|
|
|
7001 |
|
|
val = iterative_hash_hashval_t (one, val);
|
7002 |
|
|
val = iterative_hash_hashval_t (two, val);
|
7003 |
|
|
}
|
7004 |
|
|
else
|
7005 |
|
|
for (i = TREE_OPERAND_LENGTH (t) - 1; i >= 0; --i)
|
7006 |
|
|
val = iterative_hash_expr (TREE_OPERAND (t, i), val);
|
7007 |
|
|
}
|
7008 |
|
|
return val;
|
7009 |
|
|
}
|
7010 |
|
|
}
|
7011 |
|
|
|
7012 |
|
|
/* Generate a hash value for a pair of expressions. This can be used
|
7013 |
|
|
iteratively by passing a previous result as the VAL argument.
|
7014 |
|
|
|
7015 |
|
|
The same hash value is always returned for a given pair of expressions,
|
7016 |
|
|
regardless of the order in which they are presented. This is useful in
|
7017 |
|
|
hashing the operands of commutative functions. */
|
7018 |
|
|
|
7019 |
|
|
hashval_t
|
7020 |
|
|
iterative_hash_exprs_commutative (const_tree t1,
|
7021 |
|
|
const_tree t2, hashval_t val)
|
7022 |
|
|
{
|
7023 |
|
|
hashval_t one = iterative_hash_expr (t1, 0);
|
7024 |
|
|
hashval_t two = iterative_hash_expr (t2, 0);
|
7025 |
|
|
hashval_t t;
|
7026 |
|
|
|
7027 |
|
|
if (one > two)
|
7028 |
|
|
t = one, one = two, two = t;
|
7029 |
|
|
val = iterative_hash_hashval_t (one, val);
|
7030 |
|
|
val = iterative_hash_hashval_t (two, val);
|
7031 |
|
|
|
7032 |
|
|
return val;
|
7033 |
|
|
}
|
7034 |
|
|
|
7035 |
|
|
/* Constructors for pointer, array and function types.
|
7036 |
|
|
(RECORD_TYPE, UNION_TYPE and ENUMERAL_TYPE nodes are
|
7037 |
|
|
constructed by language-dependent code, not here.) */
|
7038 |
|
|
|
7039 |
|
|
/* Construct, lay out and return the type of pointers to TO_TYPE with
|
7040 |
|
|
mode MODE. If CAN_ALIAS_ALL is TRUE, indicate this type can
|
7041 |
|
|
reference all of memory. If such a type has already been
|
7042 |
|
|
constructed, reuse it. */
|
7043 |
|
|
|
7044 |
|
|
tree
|
7045 |
|
|
build_pointer_type_for_mode (tree to_type, enum machine_mode mode,
|
7046 |
|
|
bool can_alias_all)
|
7047 |
|
|
{
|
7048 |
|
|
tree t;
|
7049 |
|
|
|
7050 |
|
|
if (to_type == error_mark_node)
|
7051 |
|
|
return error_mark_node;
|
7052 |
|
|
|
7053 |
|
|
/* If the pointed-to type has the may_alias attribute set, force
|
7054 |
|
|
a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */
|
7055 |
|
|
if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
|
7056 |
|
|
can_alias_all = true;
|
7057 |
|
|
|
7058 |
|
|
/* In some cases, languages will have things that aren't a POINTER_TYPE
|
7059 |
|
|
(such as a RECORD_TYPE for fat pointers in Ada) as TYPE_POINTER_TO.
|
7060 |
|
|
In that case, return that type without regard to the rest of our
|
7061 |
|
|
operands.
|
7062 |
|
|
|
7063 |
|
|
??? This is a kludge, but consistent with the way this function has
|
7064 |
|
|
always operated and there doesn't seem to be a good way to avoid this
|
7065 |
|
|
at the moment. */
|
7066 |
|
|
if (TYPE_POINTER_TO (to_type) != 0
|
7067 |
|
|
&& TREE_CODE (TYPE_POINTER_TO (to_type)) != POINTER_TYPE)
|
7068 |
|
|
return TYPE_POINTER_TO (to_type);
|
7069 |
|
|
|
7070 |
|
|
/* First, if we already have a type for pointers to TO_TYPE and it's
|
7071 |
|
|
the proper mode, use it. */
|
7072 |
|
|
for (t = TYPE_POINTER_TO (to_type); t; t = TYPE_NEXT_PTR_TO (t))
|
7073 |
|
|
if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
|
7074 |
|
|
return t;
|
7075 |
|
|
|
7076 |
|
|
t = make_node (POINTER_TYPE);
|
7077 |
|
|
|
7078 |
|
|
TREE_TYPE (t) = to_type;
|
7079 |
|
|
SET_TYPE_MODE (t, mode);
|
7080 |
|
|
TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
|
7081 |
|
|
TYPE_NEXT_PTR_TO (t) = TYPE_POINTER_TO (to_type);
|
7082 |
|
|
TYPE_POINTER_TO (to_type) = t;
|
7083 |
|
|
|
7084 |
|
|
if (TYPE_STRUCTURAL_EQUALITY_P (to_type))
|
7085 |
|
|
SET_TYPE_STRUCTURAL_EQUALITY (t);
|
7086 |
|
|
else if (TYPE_CANONICAL (to_type) != to_type)
|
7087 |
|
|
TYPE_CANONICAL (t)
|
7088 |
|
|
= build_pointer_type_for_mode (TYPE_CANONICAL (to_type),
|
7089 |
|
|
mode, can_alias_all);
|
7090 |
|
|
|
7091 |
|
|
/* Lay out the type. This function has many callers that are concerned
|
7092 |
|
|
with expression-construction, and this simplifies them all. */
|
7093 |
|
|
layout_type (t);
|
7094 |
|
|
|
7095 |
|
|
return t;
|
7096 |
|
|
}
|
7097 |
|
|
|
7098 |
|
|
/* By default build pointers in ptr_mode. */
|
7099 |
|
|
|
7100 |
|
|
tree
|
7101 |
|
|
build_pointer_type (tree to_type)
|
7102 |
|
|
{
|
7103 |
|
|
addr_space_t as = to_type == error_mark_node? ADDR_SPACE_GENERIC
|
7104 |
|
|
: TYPE_ADDR_SPACE (to_type);
|
7105 |
|
|
enum machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
|
7106 |
|
|
return build_pointer_type_for_mode (to_type, pointer_mode, false);
|
7107 |
|
|
}
|
7108 |
|
|
|
7109 |
|
|
/* Same as build_pointer_type_for_mode, but for REFERENCE_TYPE. */
|
7110 |
|
|
|
7111 |
|
|
tree
|
7112 |
|
|
build_reference_type_for_mode (tree to_type, enum machine_mode mode,
|
7113 |
|
|
bool can_alias_all)
|
7114 |
|
|
{
|
7115 |
|
|
tree t;
|
7116 |
|
|
|
7117 |
|
|
if (to_type == error_mark_node)
|
7118 |
|
|
return error_mark_node;
|
7119 |
|
|
|
7120 |
|
|
/* If the pointed-to type has the may_alias attribute set, force
|
7121 |
|
|
a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */
|
7122 |
|
|
if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
|
7123 |
|
|
can_alias_all = true;
|
7124 |
|
|
|
7125 |
|
|
/* In some cases, languages will have things that aren't a REFERENCE_TYPE
|
7126 |
|
|
(such as a RECORD_TYPE for fat pointers in Ada) as TYPE_REFERENCE_TO.
|
7127 |
|
|
In that case, return that type without regard to the rest of our
|
7128 |
|
|
operands.
|
7129 |
|
|
|
7130 |
|
|
??? This is a kludge, but consistent with the way this function has
|
7131 |
|
|
always operated and there doesn't seem to be a good way to avoid this
|
7132 |
|
|
at the moment. */
|
7133 |
|
|
if (TYPE_REFERENCE_TO (to_type) != 0
|
7134 |
|
|
&& TREE_CODE (TYPE_REFERENCE_TO (to_type)) != REFERENCE_TYPE)
|
7135 |
|
|
return TYPE_REFERENCE_TO (to_type);
|
7136 |
|
|
|
7137 |
|
|
/* First, if we already have a type for pointers to TO_TYPE and it's
|
7138 |
|
|
the proper mode, use it. */
|
7139 |
|
|
for (t = TYPE_REFERENCE_TO (to_type); t; t = TYPE_NEXT_REF_TO (t))
|
7140 |
|
|
if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
|
7141 |
|
|
return t;
|
7142 |
|
|
|
7143 |
|
|
t = make_node (REFERENCE_TYPE);
|
7144 |
|
|
|
7145 |
|
|
TREE_TYPE (t) = to_type;
|
7146 |
|
|
SET_TYPE_MODE (t, mode);
|
7147 |
|
|
TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
|
7148 |
|
|
TYPE_NEXT_REF_TO (t) = TYPE_REFERENCE_TO (to_type);
|
7149 |
|
|
TYPE_REFERENCE_TO (to_type) = t;
|
7150 |
|
|
|
7151 |
|
|
if (TYPE_STRUCTURAL_EQUALITY_P (to_type))
|
7152 |
|
|
SET_TYPE_STRUCTURAL_EQUALITY (t);
|
7153 |
|
|
else if (TYPE_CANONICAL (to_type) != to_type)
|
7154 |
|
|
TYPE_CANONICAL (t)
|
7155 |
|
|
= build_reference_type_for_mode (TYPE_CANONICAL (to_type),
|
7156 |
|
|
mode, can_alias_all);
|
7157 |
|
|
|
7158 |
|
|
layout_type (t);
|
7159 |
|
|
|
7160 |
|
|
return t;
|
7161 |
|
|
}
|
7162 |
|
|
|
7163 |
|
|
|
7164 |
|
|
/* Build the node for the type of references-to-TO_TYPE by default
|
7165 |
|
|
in ptr_mode. */
|
7166 |
|
|
|
7167 |
|
|
tree
|
7168 |
|
|
build_reference_type (tree to_type)
|
7169 |
|
|
{
|
7170 |
|
|
addr_space_t as = to_type == error_mark_node? ADDR_SPACE_GENERIC
|
7171 |
|
|
: TYPE_ADDR_SPACE (to_type);
|
7172 |
|
|
enum machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
|
7173 |
|
|
return build_reference_type_for_mode (to_type, pointer_mode, false);
|
7174 |
|
|
}
|
7175 |
|
|
|
7176 |
|
|
/* Build a type that is compatible with t but has no cv quals anywhere
|
7177 |
|
|
in its type, thus
|
7178 |
|
|
|
7179 |
|
|
const char *const *const * -> char ***. */
|
7180 |
|
|
|
7181 |
|
|
tree
|
7182 |
|
|
build_type_no_quals (tree t)
|
7183 |
|
|
{
|
7184 |
|
|
switch (TREE_CODE (t))
|
7185 |
|
|
{
|
7186 |
|
|
case POINTER_TYPE:
|
7187 |
|
|
return build_pointer_type_for_mode (build_type_no_quals (TREE_TYPE (t)),
|
7188 |
|
|
TYPE_MODE (t),
|
7189 |
|
|
TYPE_REF_CAN_ALIAS_ALL (t));
|
7190 |
|
|
case REFERENCE_TYPE:
|
7191 |
|
|
return
|
7192 |
|
|
build_reference_type_for_mode (build_type_no_quals (TREE_TYPE (t)),
|
7193 |
|
|
TYPE_MODE (t),
|
7194 |
|
|
TYPE_REF_CAN_ALIAS_ALL (t));
|
7195 |
|
|
default:
|
7196 |
|
|
return TYPE_MAIN_VARIANT (t);
|
7197 |
|
|
}
|
7198 |
|
|
}
|
7199 |
|
|
|
7200 |
|
|
#define MAX_INT_CACHED_PREC \
|
7201 |
|
|
(HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64)
|
7202 |
|
|
static GTY(()) tree nonstandard_integer_type_cache[2 * MAX_INT_CACHED_PREC + 2];
|
7203 |
|
|
|
7204 |
|
|
/* Builds a signed or unsigned integer type of precision PRECISION.
|
7205 |
|
|
Used for C bitfields whose precision does not match that of
|
7206 |
|
|
built-in target types. */
|
7207 |
|
|
tree
|
7208 |
|
|
build_nonstandard_integer_type (unsigned HOST_WIDE_INT precision,
|
7209 |
|
|
int unsignedp)
|
7210 |
|
|
{
|
7211 |
|
|
tree itype, ret;
|
7212 |
|
|
|
7213 |
|
|
if (unsignedp)
|
7214 |
|
|
unsignedp = MAX_INT_CACHED_PREC + 1;
|
7215 |
|
|
|
7216 |
|
|
if (precision <= MAX_INT_CACHED_PREC)
|
7217 |
|
|
{
|
7218 |
|
|
itype = nonstandard_integer_type_cache[precision + unsignedp];
|
7219 |
|
|
if (itype)
|
7220 |
|
|
return itype;
|
7221 |
|
|
}
|
7222 |
|
|
|
7223 |
|
|
itype = make_node (INTEGER_TYPE);
|
7224 |
|
|
TYPE_PRECISION (itype) = precision;
|
7225 |
|
|
|
7226 |
|
|
if (unsignedp)
|
7227 |
|
|
fixup_unsigned_type (itype);
|
7228 |
|
|
else
|
7229 |
|
|
fixup_signed_type (itype);
|
7230 |
|
|
|
7231 |
|
|
ret = itype;
|
7232 |
|
|
if (host_integerp (TYPE_MAX_VALUE (itype), 1))
|
7233 |
|
|
ret = type_hash_canon (tree_low_cst (TYPE_MAX_VALUE (itype), 1), itype);
|
7234 |
|
|
if (precision <= MAX_INT_CACHED_PREC)
|
7235 |
|
|
nonstandard_integer_type_cache[precision + unsignedp] = ret;
|
7236 |
|
|
|
7237 |
|
|
return ret;
|
7238 |
|
|
}
|
7239 |
|
|
|
7240 |
|
|
/* Create a range of some discrete type TYPE (an INTEGER_TYPE, ENUMERAL_TYPE
|
7241 |
|
|
or BOOLEAN_TYPE) with low bound LOWVAL and high bound HIGHVAL. If SHARED
|
7242 |
|
|
is true, reuse such a type that has already been constructed. */
|
7243 |
|
|
|
7244 |
|
|
static tree
|
7245 |
|
|
build_range_type_1 (tree type, tree lowval, tree highval, bool shared)
|
7246 |
|
|
{
|
7247 |
|
|
tree itype = make_node (INTEGER_TYPE);
|
7248 |
|
|
hashval_t hashcode = 0;
|
7249 |
|
|
|
7250 |
|
|
TREE_TYPE (itype) = type;
|
7251 |
|
|
|
7252 |
|
|
TYPE_MIN_VALUE (itype) = fold_convert (type, lowval);
|
7253 |
|
|
TYPE_MAX_VALUE (itype) = highval ? fold_convert (type, highval) : NULL;
|
7254 |
|
|
|
7255 |
|
|
TYPE_PRECISION (itype) = TYPE_PRECISION (type);
|
7256 |
|
|
SET_TYPE_MODE (itype, TYPE_MODE (type));
|
7257 |
|
|
TYPE_SIZE (itype) = TYPE_SIZE (type);
|
7258 |
|
|
TYPE_SIZE_UNIT (itype) = TYPE_SIZE_UNIT (type);
|
7259 |
|
|
TYPE_ALIGN (itype) = TYPE_ALIGN (type);
|
7260 |
|
|
TYPE_USER_ALIGN (itype) = TYPE_USER_ALIGN (type);
|
7261 |
|
|
|
7262 |
|
|
if (!shared)
|
7263 |
|
|
return itype;
|
7264 |
|
|
|
7265 |
|
|
if ((TYPE_MIN_VALUE (itype)
|
7266 |
|
|
&& TREE_CODE (TYPE_MIN_VALUE (itype)) != INTEGER_CST)
|
7267 |
|
|
|| (TYPE_MAX_VALUE (itype)
|
7268 |
|
|
&& TREE_CODE (TYPE_MAX_VALUE (itype)) != INTEGER_CST))
|
7269 |
|
|
{
|
7270 |
|
|
/* Since we cannot reliably merge this type, we need to compare it using
|
7271 |
|
|
structural equality checks. */
|
7272 |
|
|
SET_TYPE_STRUCTURAL_EQUALITY (itype);
|
7273 |
|
|
return itype;
|
7274 |
|
|
}
|
7275 |
|
|
|
7276 |
|
|
hashcode = iterative_hash_expr (TYPE_MIN_VALUE (itype), hashcode);
|
7277 |
|
|
hashcode = iterative_hash_expr (TYPE_MAX_VALUE (itype), hashcode);
|
7278 |
|
|
hashcode = iterative_hash_hashval_t (TYPE_HASH (type), hashcode);
|
7279 |
|
|
itype = type_hash_canon (hashcode, itype);
|
7280 |
|
|
|
7281 |
|
|
return itype;
|
7282 |
|
|
}
|
7283 |
|
|
|
7284 |
|
|
/* Wrapper around build_range_type_1 with SHARED set to true. */
|
7285 |
|
|
|
7286 |
|
|
tree
|
7287 |
|
|
build_range_type (tree type, tree lowval, tree highval)
|
7288 |
|
|
{
|
7289 |
|
|
return build_range_type_1 (type, lowval, highval, true);
|
7290 |
|
|
}
|
7291 |
|
|
|
7292 |
|
|
/* Wrapper around build_range_type_1 with SHARED set to false. */
|
7293 |
|
|
|
7294 |
|
|
tree
|
7295 |
|
|
build_nonshared_range_type (tree type, tree lowval, tree highval)
|
7296 |
|
|
{
|
7297 |
|
|
return build_range_type_1 (type, lowval, highval, false);
|
7298 |
|
|
}
|
7299 |
|
|
|
7300 |
|
|
/* Create a type of integers to be the TYPE_DOMAIN of an ARRAY_TYPE.
|
7301 |
|
|
MAXVAL should be the maximum value in the domain
|
7302 |
|
|
(one less than the length of the array).
|
7303 |
|
|
|
7304 |
|
|
The maximum value that MAXVAL can have is INT_MAX for a HOST_WIDE_INT.
|
7305 |
|
|
We don't enforce this limit, that is up to caller (e.g. language front end).
|
7306 |
|
|
The limit exists because the result is a signed type and we don't handle
|
7307 |
|
|
sizes that use more than one HOST_WIDE_INT. */
|
7308 |
|
|
|
7309 |
|
|
tree
|
7310 |
|
|
build_index_type (tree maxval)
|
7311 |
|
|
{
|
7312 |
|
|
return build_range_type (sizetype, size_zero_node, maxval);
|
7313 |
|
|
}
|
7314 |
|
|
|
7315 |
|
|
/* Return true if the debug information for TYPE, a subtype, should be emitted
|
7316 |
|
|
as a subrange type. If so, set LOWVAL to the low bound and HIGHVAL to the
|
7317 |
|
|
high bound, respectively. Sometimes doing so unnecessarily obfuscates the
|
7318 |
|
|
debug info and doesn't reflect the source code. */
|
7319 |
|
|
|
7320 |
|
|
bool
|
7321 |
|
|
subrange_type_for_debug_p (const_tree type, tree *lowval, tree *highval)
|
7322 |
|
|
{
|
7323 |
|
|
tree base_type = TREE_TYPE (type), low, high;
|
7324 |
|
|
|
7325 |
|
|
/* Subrange types have a base type which is an integral type. */
|
7326 |
|
|
if (!INTEGRAL_TYPE_P (base_type))
|
7327 |
|
|
return false;
|
7328 |
|
|
|
7329 |
|
|
/* Get the real bounds of the subtype. */
|
7330 |
|
|
if (lang_hooks.types.get_subrange_bounds)
|
7331 |
|
|
lang_hooks.types.get_subrange_bounds (type, &low, &high);
|
7332 |
|
|
else
|
7333 |
|
|
{
|
7334 |
|
|
low = TYPE_MIN_VALUE (type);
|
7335 |
|
|
high = TYPE_MAX_VALUE (type);
|
7336 |
|
|
}
|
7337 |
|
|
|
7338 |
|
|
/* If the type and its base type have the same representation and the same
|
7339 |
|
|
name, then the type is not a subrange but a copy of the base type. */
|
7340 |
|
|
if ((TREE_CODE (base_type) == INTEGER_TYPE
|
7341 |
|
|
|| TREE_CODE (base_type) == BOOLEAN_TYPE)
|
7342 |
|
|
&& int_size_in_bytes (type) == int_size_in_bytes (base_type)
|
7343 |
|
|
&& tree_int_cst_equal (low, TYPE_MIN_VALUE (base_type))
|
7344 |
|
|
&& tree_int_cst_equal (high, TYPE_MAX_VALUE (base_type)))
|
7345 |
|
|
{
|
7346 |
|
|
tree type_name = TYPE_NAME (type);
|
7347 |
|
|
tree base_type_name = TYPE_NAME (base_type);
|
7348 |
|
|
|
7349 |
|
|
if (type_name && TREE_CODE (type_name) == TYPE_DECL)
|
7350 |
|
|
type_name = DECL_NAME (type_name);
|
7351 |
|
|
|
7352 |
|
|
if (base_type_name && TREE_CODE (base_type_name) == TYPE_DECL)
|
7353 |
|
|
base_type_name = DECL_NAME (base_type_name);
|
7354 |
|
|
|
7355 |
|
|
if (type_name == base_type_name)
|
7356 |
|
|
return false;
|
7357 |
|
|
}
|
7358 |
|
|
|
7359 |
|
|
if (lowval)
|
7360 |
|
|
*lowval = low;
|
7361 |
|
|
if (highval)
|
7362 |
|
|
*highval = high;
|
7363 |
|
|
return true;
|
7364 |
|
|
}
|
7365 |
|
|
|
7366 |
|
|
/* Construct, lay out and return the type of arrays of elements with ELT_TYPE
|
7367 |
|
|
and number of elements specified by the range of values of INDEX_TYPE.
|
7368 |
|
|
If SHARED is true, reuse such a type that has already been constructed. */
|
7369 |
|
|
|
7370 |
|
|
static tree
|
7371 |
|
|
build_array_type_1 (tree elt_type, tree index_type, bool shared)
|
7372 |
|
|
{
|
7373 |
|
|
tree t;
|
7374 |
|
|
|
7375 |
|
|
if (TREE_CODE (elt_type) == FUNCTION_TYPE)
|
7376 |
|
|
{
|
7377 |
|
|
error ("arrays of functions are not meaningful");
|
7378 |
|
|
elt_type = integer_type_node;
|
7379 |
|
|
}
|
7380 |
|
|
|
7381 |
|
|
t = make_node (ARRAY_TYPE);
|
7382 |
|
|
TREE_TYPE (t) = elt_type;
|
7383 |
|
|
TYPE_DOMAIN (t) = index_type;
|
7384 |
|
|
TYPE_ADDR_SPACE (t) = TYPE_ADDR_SPACE (elt_type);
|
7385 |
|
|
layout_type (t);
|
7386 |
|
|
|
7387 |
|
|
/* If the element type is incomplete at this point we get marked for
|
7388 |
|
|
structural equality. Do not record these types in the canonical
|
7389 |
|
|
type hashtable. */
|
7390 |
|
|
if (TYPE_STRUCTURAL_EQUALITY_P (t))
|
7391 |
|
|
return t;
|
7392 |
|
|
|
7393 |
|
|
if (shared)
|
7394 |
|
|
{
|
7395 |
|
|
hashval_t hashcode = iterative_hash_object (TYPE_HASH (elt_type), 0);
|
7396 |
|
|
if (index_type)
|
7397 |
|
|
hashcode = iterative_hash_object (TYPE_HASH (index_type), hashcode);
|
7398 |
|
|
t = type_hash_canon (hashcode, t);
|
7399 |
|
|
}
|
7400 |
|
|
|
7401 |
|
|
if (TYPE_CANONICAL (t) == t)
|
7402 |
|
|
{
|
7403 |
|
|
if (TYPE_STRUCTURAL_EQUALITY_P (elt_type)
|
7404 |
|
|
|| (index_type && TYPE_STRUCTURAL_EQUALITY_P (index_type)))
|
7405 |
|
|
SET_TYPE_STRUCTURAL_EQUALITY (t);
|
7406 |
|
|
else if (TYPE_CANONICAL (elt_type) != elt_type
|
7407 |
|
|
|| (index_type && TYPE_CANONICAL (index_type) != index_type))
|
7408 |
|
|
TYPE_CANONICAL (t)
|
7409 |
|
|
= build_array_type_1 (TYPE_CANONICAL (elt_type),
|
7410 |
|
|
index_type
|
7411 |
|
|
? TYPE_CANONICAL (index_type) : NULL_TREE,
|
7412 |
|
|
shared);
|
7413 |
|
|
}
|
7414 |
|
|
|
7415 |
|
|
return t;
|
7416 |
|
|
}
|
7417 |
|
|
|
7418 |
|
|
/* Wrapper around build_array_type_1 with SHARED set to true. */
|
7419 |
|
|
|
7420 |
|
|
tree
|
7421 |
|
|
build_array_type (tree elt_type, tree index_type)
|
7422 |
|
|
{
|
7423 |
|
|
return build_array_type_1 (elt_type, index_type, true);
|
7424 |
|
|
}
|
7425 |
|
|
|
7426 |
|
|
/* Wrapper around build_array_type_1 with SHARED set to false. */
|
7427 |
|
|
|
7428 |
|
|
tree
|
7429 |
|
|
build_nonshared_array_type (tree elt_type, tree index_type)
|
7430 |
|
|
{
|
7431 |
|
|
return build_array_type_1 (elt_type, index_type, false);
|
7432 |
|
|
}
|
7433 |
|
|
|
7434 |
|
|
/* Return a representation of ELT_TYPE[NELTS], using indices of type
|
7435 |
|
|
sizetype. */
|
7436 |
|
|
|
7437 |
|
|
tree
|
7438 |
|
|
build_array_type_nelts (tree elt_type, unsigned HOST_WIDE_INT nelts)
|
7439 |
|
|
{
|
7440 |
|
|
return build_array_type (elt_type, build_index_type (size_int (nelts - 1)));
|
7441 |
|
|
}
|
7442 |
|
|
|
7443 |
|
|
/* Recursively examines the array elements of TYPE, until a non-array
|
7444 |
|
|
element type is found. */
|
7445 |
|
|
|
7446 |
|
|
tree
|
7447 |
|
|
strip_array_types (tree type)
|
7448 |
|
|
{
|
7449 |
|
|
while (TREE_CODE (type) == ARRAY_TYPE)
|
7450 |
|
|
type = TREE_TYPE (type);
|
7451 |
|
|
|
7452 |
|
|
return type;
|
7453 |
|
|
}
|
7454 |
|
|
|
7455 |
|
|
/* Computes the canonical argument types from the argument type list
   ARGTYPES.

   Upon return, *ANY_STRUCTURAL_P will be true iff either it was true
   on entry to this function, or if any of the ARGTYPES are
   structural.

   Upon return, *ANY_NONCANONICAL_P will be true iff either it was
   true on entry to this function, or if any of the ARGTYPES are
   non-canonical.

   Returns a canonical argument list, which may be ARGTYPES when the
   canonical argument list is unneeded (i.e., *ANY_STRUCTURAL_P is
   true) or would not differ from ARGTYPES.  */

static tree
maybe_canonicalize_argtypes (tree argtypes,
			     bool *any_structural_p,
			     bool *any_noncanonical_p)
{
  tree arg;
  bool any_noncanonical_argtypes_p = false;

  for (arg = argtypes; arg && !(*any_structural_p); arg = TREE_CHAIN (arg))
    {
      if (!TREE_VALUE (arg) || TREE_VALUE (arg) == error_mark_node)
	/* Fail gracefully by stating that the type is structural.  */
	*any_structural_p = true;
      else if (TYPE_STRUCTURAL_EQUALITY_P (TREE_VALUE (arg)))
	*any_structural_p = true;
      else if (TYPE_CANONICAL (TREE_VALUE (arg)) != TREE_VALUE (arg)
	       || TREE_PURPOSE (arg))
	/* If the argument has a default argument, we consider it
	   non-canonical even though the type itself is canonical.
	   That way, different variants of function and method types
	   with default arguments will all point to the variant with
	   no defaults as their canonical type.  */
	any_noncanonical_argtypes_p = true;
    }

  if (*any_structural_p)
    return argtypes;

  if (any_noncanonical_argtypes_p)
    {
      /* Build the canonical list of argument types.  */
      tree canon_argtypes = NULL_TREE;
      bool is_void = false;

      for (arg = argtypes; arg; arg = TREE_CHAIN (arg))
	{
	  if (arg == void_list_node)
	    is_void = true;
	  else
	    canon_argtypes = tree_cons (NULL_TREE,
					TYPE_CANONICAL (TREE_VALUE (arg)),
					canon_argtypes);
	}

      canon_argtypes = nreverse (canon_argtypes);
      if (is_void)
	canon_argtypes = chainon (canon_argtypes, void_list_node);

      /* There is a non-canonical type.  */
      *any_noncanonical_p = true;
      return canon_argtypes;
    }

  /* The canonical argument types are the same as ARGTYPES.  */
  return argtypes;
}

/* Construct, lay out and return
   the type of functions returning type VALUE_TYPE
   given arguments of types ARG_TYPES.
   ARG_TYPES is a chain of TREE_LIST nodes whose TREE_VALUEs
   are data type nodes for the arguments of the function.
   If such a type has already been constructed, reuse it.  */

tree
build_function_type (tree value_type, tree arg_types)
{
  tree t;
  hashval_t hashcode = 0;
  bool any_structural_p, any_noncanonical_p;
  tree canon_argtypes;

  if (TREE_CODE (value_type) == FUNCTION_TYPE)
    {
      error ("function return type cannot be function");
      value_type = integer_type_node;
    }

  /* Make a node of the sort we want.  */
  t = make_node (FUNCTION_TYPE);
  TREE_TYPE (t) = value_type;
  TYPE_ARG_TYPES (t) = arg_types;

  /* If we already have such a type, use the old one.  */
  hashcode = iterative_hash_object (TYPE_HASH (value_type), hashcode);
  hashcode = type_hash_list (arg_types, hashcode);
  t = type_hash_canon (hashcode, t);

  /* Set up the canonical type.  */
  any_structural_p = TYPE_STRUCTURAL_EQUALITY_P (value_type);
  any_noncanonical_p = TYPE_CANONICAL (value_type) != value_type;
  canon_argtypes = maybe_canonicalize_argtypes (arg_types,
						&any_structural_p,
						&any_noncanonical_p);
  if (any_structural_p)
    SET_TYPE_STRUCTURAL_EQUALITY (t);
  else if (any_noncanonical_p)
    TYPE_CANONICAL (t) = build_function_type (TYPE_CANONICAL (value_type),
					      canon_argtypes);

  if (!COMPLETE_TYPE_P (t))
    layout_type (t);
  return t;
}

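/* Editor's note: an illustrative sketch, not part of the original source.
   ARG_TYPES is a TREE_LIST terminated by void_list_node for a
   prototyped function, so the type "int (int)" can be built as

     tree args = tree_cons (NULL_TREE, integer_type_node, void_list_node);
     tree fntype = build_function_type (integer_type_node, args);

   Because of the type_hash_canon step above, repeating these calls
   returns the same node rather than allocating a new one.  */
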
/* Build variant of function type ORIG_TYPE skipping ARGS_TO_SKIP and the
   return value if SKIP_RETURN is true.  */

static tree
build_function_type_skip_args (tree orig_type, bitmap args_to_skip,
			       bool skip_return)
{
  tree new_type = NULL;
  tree args, new_args = NULL, t;
  tree new_reversed;
  int i = 0;

  for (args = TYPE_ARG_TYPES (orig_type); args && args != void_list_node;
       args = TREE_CHAIN (args), i++)
    if (!args_to_skip || !bitmap_bit_p (args_to_skip, i))
      new_args = tree_cons (NULL_TREE, TREE_VALUE (args), new_args);

  new_reversed = nreverse (new_args);
  if (args)
    {
      if (new_reversed)
	TREE_CHAIN (new_args) = void_list_node;
      else
	new_reversed = void_list_node;
    }

  /* Use copy_node to preserve as much as possible from the original type
     (debug info, attribute lists etc.).
     The exception is that METHOD_TYPEs must have a THIS argument;
     when we are asked to remove it, we need to build a new FUNCTION_TYPE
     instead.  */
  if (TREE_CODE (orig_type) != METHOD_TYPE
      || !args_to_skip
      || !bitmap_bit_p (args_to_skip, 0))
    {
      new_type = build_distinct_type_copy (orig_type);
      TYPE_ARG_TYPES (new_type) = new_reversed;
    }
  else
    {
      new_type
	= build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type),
							  new_reversed));
      TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type);
    }

  if (skip_return)
    TREE_TYPE (new_type) = void_type_node;

  /* This is a new type, not a copy of an old type.  Need to reassociate
     variants.  We can handle everything except the main variant lazily.  */
  t = TYPE_MAIN_VARIANT (orig_type);
  if (t != orig_type)
    {
      t = build_function_type_skip_args (t, args_to_skip, skip_return);
      TYPE_MAIN_VARIANT (new_type) = t;
      TYPE_NEXT_VARIANT (new_type) = TYPE_NEXT_VARIANT (t);
      TYPE_NEXT_VARIANT (t) = new_type;
    }
  else
    {
      TYPE_MAIN_VARIANT (new_type) = new_type;
      TYPE_NEXT_VARIANT (new_type) = NULL;
    }

  return new_type;
}

/* Build variant of function decl ORIG_DECL skipping ARGS_TO_SKIP and the
   return value if SKIP_RETURN is true.

   Arguments from the DECL_ARGUMENTS list can't be removed now, since they
   are linked by TREE_CHAIN directly.  The caller is responsible for
   eliminating them when they are being duplicated
   (i.e. copy_arguments_for_versioning).  */

tree
build_function_decl_skip_args (tree orig_decl, bitmap args_to_skip,
			       bool skip_return)
{
  tree new_decl = copy_node (orig_decl);
  tree new_type;

  new_type = TREE_TYPE (orig_decl);
  if (prototype_p (new_type)
      || (skip_return && !VOID_TYPE_P (TREE_TYPE (new_type))))
    new_type
      = build_function_type_skip_args (new_type, args_to_skip, skip_return);
  TREE_TYPE (new_decl) = new_type;

  /* For declarations setting DECL_VINDEX (i.e. methods)
     we expect the first argument to be the THIS pointer.  */
  if (args_to_skip && bitmap_bit_p (args_to_skip, 0))
    DECL_VINDEX (new_decl) = NULL_TREE;

  /* When the signature changes, we need to clear builtin info.  */
  if (DECL_BUILT_IN (new_decl)
      && args_to_skip
      && !bitmap_empty_p (args_to_skip))
    {
      DECL_BUILT_IN_CLASS (new_decl) = NOT_BUILT_IN;
      DECL_FUNCTION_CODE (new_decl) = (enum built_in_function) 0;
    }
  return new_decl;
}

/* Build a function type.  The RETURN_TYPE is the type returned by the
   function.  If VAARGS is set, no void_type_node is appended to the
   list.  ARGP must always be terminated by a NULL_TREE.  */

static tree
build_function_type_list_1 (bool vaargs, tree return_type, va_list argp)
{
  tree t, args, last;

  t = va_arg (argp, tree);
  for (args = NULL_TREE; t != NULL_TREE; t = va_arg (argp, tree))
    args = tree_cons (NULL_TREE, t, args);

  if (vaargs)
    {
      last = args;
      if (args != NULL_TREE)
	args = nreverse (args);
      gcc_assert (last != void_list_node);
    }
  else if (args == NULL_TREE)
    args = void_list_node;
  else
    {
      last = args;
      args = nreverse (args);
      TREE_CHAIN (last) = void_list_node;
    }
  args = build_function_type (return_type, args);

  return args;
}

/* Build a function type.  The RETURN_TYPE is the type returned by the
   function.  If additional arguments are provided, they are
   additional argument types.  The list of argument types must always
   be terminated by NULL_TREE.  */

tree
build_function_type_list (tree return_type, ...)
{
  tree args;
  va_list p;

  va_start (p, return_type);
  args = build_function_type_list_1 (false, return_type, p);
  va_end (p);
  return args;
}

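/* Editor's note: an illustrative sketch, not part of the original source.
   The variadic interface above lets the "int (int, double)" type be
   written without building the TREE_LIST by hand:

     tree fntype = build_function_type_list (integer_type_node,
					     integer_type_node,
					     double_type_node,
					     NULL_TREE);

   The trailing NULL_TREE is mandatory; it terminates the argument
   scan in build_function_type_list_1.  */
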
/* Build a variable argument function type.  The RETURN_TYPE is the
   type returned by the function.  If additional arguments are provided,
   they are additional argument types.  The list of argument types must
   always be terminated by NULL_TREE.  */

tree
build_varargs_function_type_list (tree return_type, ...)
{
  tree args;
  va_list p;

  va_start (p, return_type);
  args = build_function_type_list_1 (true, return_type, p);
  va_end (p);

  return args;
}

/* Build a function type.  RETURN_TYPE is the type returned by the
   function; VAARGS indicates whether the function takes varargs.  The
   function takes N named arguments, the types of which are provided in
   ARG_TYPES.  */

static tree
build_function_type_array_1 (bool vaargs, tree return_type, int n,
			     tree *arg_types)
{
  int i;
  tree t = vaargs ? NULL_TREE : void_list_node;

  for (i = n - 1; i >= 0; i--)
    t = tree_cons (NULL_TREE, arg_types[i], t);

  return build_function_type (return_type, t);
}

/* Build a function type.  RETURN_TYPE is the type returned by the
   function.  The function takes N named arguments, the types of which
   are provided in ARG_TYPES.  */

tree
build_function_type_array (tree return_type, int n, tree *arg_types)
{
  return build_function_type_array_1 (false, return_type, n, arg_types);
}

/* Build a variable argument function type.  RETURN_TYPE is the type
   returned by the function.  The function takes N named arguments, the
   types of which are provided in ARG_TYPES.  */

tree
build_varargs_function_type_array (tree return_type, int n, tree *arg_types)
{
  return build_function_type_array_1 (true, return_type, n, arg_types);
}

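/* Editor's note: an illustrative sketch, not part of the original source.
   The array-based variants are convenient when the argument types are
   already collected in a vector, e.g. a varargs type returning int and
   taking one pointer argument:

     tree argtv[1] = { const_ptr_type_node };
     tree fntype = build_varargs_function_type_array (integer_type_node,
						      1, argtv);

   Passing vaargs as true keeps the list unterminated by
   void_list_node, which is what marks the type as taking "...".  */
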
/* Build a METHOD_TYPE for a member of BASETYPE.  The RETTYPE (a TYPE)
   and ARGTYPES (a TREE_LIST) are the return type and argument types
   for the method.  An implicit additional parameter (of type
   pointer-to-BASETYPE) is added to the ARGTYPES.  */

tree
build_method_type_directly (tree basetype,
			    tree rettype,
			    tree argtypes)
{
  tree t;
  tree ptype;
  int hashcode = 0;
  bool any_structural_p, any_noncanonical_p;
  tree canon_argtypes;

  /* Make a node of the sort we want.  */
  t = make_node (METHOD_TYPE);

  TYPE_METHOD_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
  TREE_TYPE (t) = rettype;
  ptype = build_pointer_type (basetype);

  /* The actual arglist for this function includes a "hidden" argument
     which is "this".  Put it into the list of argument types.  */
  argtypes = tree_cons (NULL_TREE, ptype, argtypes);
  TYPE_ARG_TYPES (t) = argtypes;

  /* If we already have such a type, use the old one.  */
  hashcode = iterative_hash_object (TYPE_HASH (basetype), hashcode);
  hashcode = iterative_hash_object (TYPE_HASH (rettype), hashcode);
  hashcode = type_hash_list (argtypes, hashcode);
  t = type_hash_canon (hashcode, t);

  /* Set up the canonical type.  */
  any_structural_p
    = (TYPE_STRUCTURAL_EQUALITY_P (basetype)
       || TYPE_STRUCTURAL_EQUALITY_P (rettype));
  any_noncanonical_p
    = (TYPE_CANONICAL (basetype) != basetype
       || TYPE_CANONICAL (rettype) != rettype);
  canon_argtypes = maybe_canonicalize_argtypes (TREE_CHAIN (argtypes),
						&any_structural_p,
						&any_noncanonical_p);
  if (any_structural_p)
    SET_TYPE_STRUCTURAL_EQUALITY (t);
  else if (any_noncanonical_p)
    TYPE_CANONICAL (t)
      = build_method_type_directly (TYPE_CANONICAL (basetype),
				    TYPE_CANONICAL (rettype),
				    canon_argtypes);
  if (!COMPLETE_TYPE_P (t))
    layout_type (t);

  return t;
}

/* Construct, lay out and return the type of methods belonging to class
   BASETYPE and whose arguments and values are described by TYPE.
   If that type exists already, reuse it.
   TYPE must be a FUNCTION_TYPE node.  */

tree
build_method_type (tree basetype, tree type)
{
  gcc_assert (TREE_CODE (type) == FUNCTION_TYPE);

  return build_method_type_directly (basetype,
				     TREE_TYPE (type),
				     TYPE_ARG_TYPES (type));
}

/* Construct, lay out and return the type of offsets to a value
   of type TYPE, within an object of type BASETYPE.
   If a suitable offset type exists already, reuse it.  */

tree
build_offset_type (tree basetype, tree type)
{
  tree t;
  hashval_t hashcode = 0;

  /* Make a node of the sort we want.  */
  t = make_node (OFFSET_TYPE);

  TYPE_OFFSET_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
  TREE_TYPE (t) = type;

  /* If we already have such a type, use the old one.  */
  hashcode = iterative_hash_object (TYPE_HASH (basetype), hashcode);
  hashcode = iterative_hash_object (TYPE_HASH (type), hashcode);
  t = type_hash_canon (hashcode, t);

  if (!COMPLETE_TYPE_P (t))
    layout_type (t);

  if (TYPE_CANONICAL (t) == t)
    {
      if (TYPE_STRUCTURAL_EQUALITY_P (basetype)
	  || TYPE_STRUCTURAL_EQUALITY_P (type))
	SET_TYPE_STRUCTURAL_EQUALITY (t);
      else if (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)) != basetype
	       || TYPE_CANONICAL (type) != type)
	TYPE_CANONICAL (t)
	  = build_offset_type (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)),
			       TYPE_CANONICAL (type));
    }

  return t;
}

/* Create a complex type whose components are COMPONENT_TYPE.  */

tree
build_complex_type (tree component_type)
{
  tree t;
  hashval_t hashcode;

  gcc_assert (INTEGRAL_TYPE_P (component_type)
	      || SCALAR_FLOAT_TYPE_P (component_type)
	      || FIXED_POINT_TYPE_P (component_type));

  /* Make a node of the sort we want.  */
  t = make_node (COMPLEX_TYPE);

  TREE_TYPE (t) = TYPE_MAIN_VARIANT (component_type);

  /* If we already have such a type, use the old one.  */
  hashcode = iterative_hash_object (TYPE_HASH (component_type), 0);
  t = type_hash_canon (hashcode, t);

  if (!COMPLETE_TYPE_P (t))
    layout_type (t);

  if (TYPE_CANONICAL (t) == t)
    {
      if (TYPE_STRUCTURAL_EQUALITY_P (component_type))
	SET_TYPE_STRUCTURAL_EQUALITY (t);
      else if (TYPE_CANONICAL (component_type) != component_type)
	TYPE_CANONICAL (t)
	  = build_complex_type (TYPE_CANONICAL (component_type));
    }

  /* We need to create a name, since complex is a fundamental type.  */
  if (! TYPE_NAME (t))
    {
      const char *name;
      if (component_type == char_type_node)
	name = "complex char";
      else if (component_type == signed_char_type_node)
	name = "complex signed char";
      else if (component_type == unsigned_char_type_node)
	name = "complex unsigned char";
      else if (component_type == short_integer_type_node)
	name = "complex short int";
      else if (component_type == short_unsigned_type_node)
	name = "complex short unsigned int";
      else if (component_type == integer_type_node)
	name = "complex int";
      else if (component_type == unsigned_type_node)
	name = "complex unsigned int";
      else if (component_type == long_integer_type_node)
	name = "complex long int";
      else if (component_type == long_unsigned_type_node)
	name = "complex long unsigned int";
      else if (component_type == long_long_integer_type_node)
	name = "complex long long int";
      else if (component_type == long_long_unsigned_type_node)
	name = "complex long long unsigned int";
      else
	name = 0;

      if (name != 0)
	TYPE_NAME (t) = build_decl (UNKNOWN_LOCATION, TYPE_DECL,
				    get_identifier (name), t);
    }

  return build_qualified_type (t, TYPE_QUALS (component_type));
}

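/* Editor's note: an illustrative sketch, not part of the original source.

     tree c = build_complex_type (double_type_node);

   returns the COMPLEX_TYPE whose component type is double; because
   the result goes through type_hash_canon, repeated calls with the
   same component type yield the same node, and the component's
   qualifiers are propagated by the final build_qualified_type call.  */
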
/* If TYPE is a real or complex floating-point type and the target
   does not directly support arithmetic on TYPE then return the wider
   type to be used for arithmetic on TYPE.  Otherwise, return
   NULL_TREE.  */

tree
excess_precision_type (tree type)
{
  if (flag_excess_precision != EXCESS_PRECISION_FAST)
    {
      int flt_eval_method = TARGET_FLT_EVAL_METHOD;
      switch (TREE_CODE (type))
	{
	case REAL_TYPE:
	  switch (flt_eval_method)
	    {
	    case 1:
	      if (TYPE_MODE (type) == TYPE_MODE (float_type_node))
		return double_type_node;
	      break;
	    case 2:
	      if (TYPE_MODE (type) == TYPE_MODE (float_type_node)
		  || TYPE_MODE (type) == TYPE_MODE (double_type_node))
		return long_double_type_node;
	      break;
	    default:
	      gcc_unreachable ();
	    }
	  break;
	case COMPLEX_TYPE:
	  if (TREE_CODE (TREE_TYPE (type)) != REAL_TYPE)
	    return NULL_TREE;
	  switch (flt_eval_method)
	    {
	    case 1:
	      if (TYPE_MODE (TREE_TYPE (type)) == TYPE_MODE (float_type_node))
		return complex_double_type_node;
	      break;
	    case 2:
	      if (TYPE_MODE (TREE_TYPE (type)) == TYPE_MODE (float_type_node)
		  || (TYPE_MODE (TREE_TYPE (type))
		      == TYPE_MODE (double_type_node)))
		return complex_long_double_type_node;
	      break;
	    default:
	      gcc_unreachable ();
	    }
	  break;
	default:
	  break;
	}
    }
  return NULL_TREE;
}

/* Return OP, stripped of any conversions to wider types as much as is safe.
   Converting the value back to OP's type makes a value equivalent to OP.

   If FOR_TYPE is nonzero, we return a value which, if converted to
   type FOR_TYPE, would be equivalent to converting OP to type FOR_TYPE.

   OP must have integer, real or enumeral type.  Pointers are not allowed!

   There are some cases where the obvious value we could return
   would regenerate to OP if converted to OP's type,
   but would not extend like OP to wider types.
   If FOR_TYPE indicates such extension is contemplated, we eschew such values.
   For example, if OP is (unsigned short)(signed char)-1,
   we avoid returning (signed char)-1 if FOR_TYPE is int,
   even though extending that to an unsigned short would regenerate OP,
   since the result of extending (signed char)-1 to (int)
   is different from (int) OP.  */

tree
get_unwidened (tree op, tree for_type)
{
  /* Set UNS initially if converting OP to FOR_TYPE is a zero-extension.  */
  tree type = TREE_TYPE (op);
  unsigned final_prec
    = TYPE_PRECISION (for_type != 0 ? for_type : type);
  int uns
    = (for_type != 0 && for_type != type
       && final_prec > TYPE_PRECISION (type)
       && TYPE_UNSIGNED (type));
  tree win = op;

  while (CONVERT_EXPR_P (op))
    {
      int bitschange;

      /* TYPE_PRECISION on vector types has different meaning
	 (TYPE_VECTOR_SUBPARTS) and casts from vectors are view conversions,
	 so avoid them here.  */
      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (op, 0))) == VECTOR_TYPE)
	break;

      bitschange = TYPE_PRECISION (TREE_TYPE (op))
		   - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0)));

      /* Truncations are many-one so cannot be removed.
	 Unless we are later going to truncate down even farther.  */
      if (bitschange < 0
	  && final_prec > TYPE_PRECISION (TREE_TYPE (op)))
	break;

      /* See what's inside this conversion.  If we decide to strip it,
	 we will set WIN.  */
      op = TREE_OPERAND (op, 0);

      /* If we have not stripped any zero-extensions (uns is 0),
	 we can strip any kind of extension.
	 If we have previously stripped a zero-extension,
	 only zero-extensions can safely be stripped.
	 Any extension can be stripped if the bits it would produce
	 are all going to be discarded later by truncating to FOR_TYPE.  */

      if (bitschange > 0)
	{
	  if (! uns || final_prec <= TYPE_PRECISION (TREE_TYPE (op)))
	    win = op;
	  /* TYPE_UNSIGNED says whether this is a zero-extension.
	     Let's avoid computing it if it does not affect WIN
	     and if UNS will not be needed again.  */
	  if ((uns
	       || CONVERT_EXPR_P (op))
	      && TYPE_UNSIGNED (TREE_TYPE (op)))
	    {
	      uns = 1;
	      win = op;
	    }
	}
    }

  /* If we finally reach a constant, see if it fits in FOR_TYPE and
     in that case convert it.  */
  if (for_type
      && TREE_CODE (win) == INTEGER_CST
      && TREE_TYPE (win) != for_type
      && int_fits_type_p (win, for_type))
    win = fold_convert (for_type, win);

  return win;
}

/* Return OP or a simpler expression for a narrower value
   which can be sign-extended or zero-extended to give back OP.
   Store in *UNSIGNEDP_PTR either 1 if the value should be zero-extended
   or 0 if the value should be sign-extended.  */

tree
get_narrower (tree op, int *unsignedp_ptr)
{
  int uns = 0;
  int first = 1;
  tree win = op;
  bool integral_p = INTEGRAL_TYPE_P (TREE_TYPE (op));

  while (TREE_CODE (op) == NOP_EXPR)
    {
      int bitschange
	= (TYPE_PRECISION (TREE_TYPE (op))
	   - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0))));

      /* Truncations are many-one so cannot be removed.  */
      if (bitschange < 0)
	break;

      /* See what's inside this conversion.  If we decide to strip it,
	 we will set WIN.  */

      if (bitschange > 0)
	{
	  op = TREE_OPERAND (op, 0);
	  /* An extension: the outermost one can be stripped,
	     but remember whether it is zero or sign extension.  */
	  if (first)
	    uns = TYPE_UNSIGNED (TREE_TYPE (op));
	  /* Otherwise, if a sign extension has been stripped,
	     only sign extensions can now be stripped;
	     if a zero extension has been stripped, only zero-extensions.  */
	  else if (uns != TYPE_UNSIGNED (TREE_TYPE (op)))
	    break;
	  first = 0;
	}
      else /* bitschange == 0 */
	{
	  /* A change in nominal type can always be stripped, but we must
	     preserve the unsignedness.  */
	  if (first)
	    uns = TYPE_UNSIGNED (TREE_TYPE (op));
	  first = 0;
	  op = TREE_OPERAND (op, 0);
	  /* Keep trying to narrow, but don't assign op to win if it
	     would turn an integral type into something else.  */
	  if (INTEGRAL_TYPE_P (TREE_TYPE (op)) != integral_p)
	    continue;
	}

      win = op;
    }

  if (TREE_CODE (op) == COMPONENT_REF
      /* Since type_for_size always gives an integer type.  */
      && TREE_CODE (TREE_TYPE (op)) != REAL_TYPE
      && TREE_CODE (TREE_TYPE (op)) != FIXED_POINT_TYPE
      /* Ensure field is laid out already.  */
      && DECL_SIZE (TREE_OPERAND (op, 1)) != 0
      && host_integerp (DECL_SIZE (TREE_OPERAND (op, 1)), 1))
    {
      unsigned HOST_WIDE_INT innerprec
	= tree_low_cst (DECL_SIZE (TREE_OPERAND (op, 1)), 1);
      int unsignedp = (DECL_UNSIGNED (TREE_OPERAND (op, 1))
		       || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (op, 1))));
      tree type = lang_hooks.types.type_for_size (innerprec, unsignedp);

      /* We can get this structure field in a narrower type that fits it,
	 but the resulting extension to its nominal type (a fullword type)
	 must satisfy the same conditions as for other extensions.

	 Do this only for fields that are aligned (not bit-fields),
	 because when bit-field insns will be used there is no
	 advantage in doing this.  */

      if (innerprec < TYPE_PRECISION (TREE_TYPE (op))
	  && ! DECL_BIT_FIELD (TREE_OPERAND (op, 1))
	  && (first || uns == DECL_UNSIGNED (TREE_OPERAND (op, 1)))
	  && type != 0)
	{
	  if (first)
	    uns = DECL_UNSIGNED (TREE_OPERAND (op, 1));
	  win = fold_convert (type, op);
	}
    }

  *unsignedp_ptr = uns;
  return win;
}

/* Returns true if integer constant C has a value that is permissible
   for type TYPE (an INTEGER_TYPE).  */

bool
int_fits_type_p (const_tree c, const_tree type)
{
  tree type_low_bound, type_high_bound;
  bool ok_for_low_bound, ok_for_high_bound, unsc;
  double_int dc, dd;

  dc = tree_to_double_int (c);
  unsc = TYPE_UNSIGNED (TREE_TYPE (c));

  if (TREE_CODE (TREE_TYPE (c)) == INTEGER_TYPE
      && TYPE_IS_SIZETYPE (TREE_TYPE (c))
      && unsc)
    /* So c is an unsigned integer whose type is sizetype and type is not.
       sizetype'd integers are sign extended even though they are
       unsigned.  If the integer value fits in the lower end word of c,
       and if the higher end word has all its bits set to 1, that
       means the higher end bits are set to 1 only for sign extension.
       So let's convert c into an equivalent zero extended unsigned
       integer.  */
    dc = double_int_zext (dc, TYPE_PRECISION (TREE_TYPE (c)));

retry:
  type_low_bound = TYPE_MIN_VALUE (type);
  type_high_bound = TYPE_MAX_VALUE (type);

  /* If at least one bound of the type is a constant integer, we can check
     ourselves and maybe make a decision.  If no such decision is possible, but
     this type is a subtype, try checking against that.  Otherwise, use
     double_int_fits_to_tree_p, which checks against the precision.

     Compute the status for each possibly constant bound, and return if we see
     one does not match.  Use ok_for_xxx_bound for this purpose, assigning -1
     for "unknown if constant fits", 0 for "constant known *not* to fit" and 1
     for "constant known to fit".  */

  /* Check if c >= type_low_bound.  */
  if (type_low_bound && TREE_CODE (type_low_bound) == INTEGER_CST)
    {
      dd = tree_to_double_int (type_low_bound);
      if (TREE_CODE (type) == INTEGER_TYPE
	  && TYPE_IS_SIZETYPE (type)
	  && TYPE_UNSIGNED (type))
	dd = double_int_zext (dd, TYPE_PRECISION (type));
      if (unsc != TYPE_UNSIGNED (TREE_TYPE (type_low_bound)))
	{
	  int c_neg = (!unsc && double_int_negative_p (dc));
	  int t_neg = (unsc && double_int_negative_p (dd));

	  if (c_neg && !t_neg)
	    return false;
	  if ((c_neg || !t_neg) && double_int_ucmp (dc, dd) < 0)
	    return false;
	}
      else if (double_int_cmp (dc, dd, unsc) < 0)
	return false;
      ok_for_low_bound = true;
    }
  else
    ok_for_low_bound = false;

  /* Check if c <= type_high_bound.  */
  if (type_high_bound && TREE_CODE (type_high_bound) == INTEGER_CST)
    {
      dd = tree_to_double_int (type_high_bound);
      if (TREE_CODE (type) == INTEGER_TYPE
	  && TYPE_IS_SIZETYPE (type)
	  && TYPE_UNSIGNED (type))
	dd = double_int_zext (dd, TYPE_PRECISION (type));
      if (unsc != TYPE_UNSIGNED (TREE_TYPE (type_high_bound)))
	{
	  int c_neg = (!unsc && double_int_negative_p (dc));
	  int t_neg = (unsc && double_int_negative_p (dd));

	  if (t_neg && !c_neg)
	    return false;
	  if ((t_neg || !c_neg) && double_int_ucmp (dc, dd) > 0)
	    return false;
	}
      else if (double_int_cmp (dc, dd, unsc) > 0)
	return false;
      ok_for_high_bound = true;
    }
  else
    ok_for_high_bound = false;

  /* If the constant fits both bounds, the result is known.  */
  if (ok_for_low_bound && ok_for_high_bound)
    return true;

  /* Perform some generic filtering which may allow making a decision
     even if the bounds are not constant.  First, negative integers
     never fit in unsigned types.  */
  if (TYPE_UNSIGNED (type) && !unsc && double_int_negative_p (dc))
    return false;

  /* Second, narrower types always fit in wider ones.  */
  if (TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (c)))
    return true;

  /* Third, unsigned integers with top bit set never fit signed types.  */
  if (! TYPE_UNSIGNED (type) && unsc)
    {
      int prec = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (c))) - 1;
      if (prec < HOST_BITS_PER_WIDE_INT)
	{
	  if (((((unsigned HOST_WIDE_INT) 1) << prec) & dc.low) != 0)
	    return false;
	}
      else if (((((unsigned HOST_WIDE_INT) 1)
		 << (prec - HOST_BITS_PER_WIDE_INT)) & dc.high) != 0)
	return false;
    }

  /* If we haven't been able to decide at this point, there is nothing more
     we can check ourselves here.  Look at the base type if we have one and
     it has the same precision.  */
  if (TREE_CODE (type) == INTEGER_TYPE
      && TREE_TYPE (type) != 0
      && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (type)))
    {
      type = TREE_TYPE (type);
      goto retry;
    }

  /* Or to double_int_fits_to_tree_p, if nothing else.  */
  return double_int_fits_to_tree_p (type, dc);
}

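/* Editor's note: an illustrative sketch, not part of the original source.
   For instance,

     tree three_hundred = build_int_cst (integer_type_node, 300);
     bool fits = int_fits_type_p (three_hundred, signed_char_type_node);

   yields false on a target with an 8-bit signed char, since 300
   exceeds the TYPE_MAX_VALUE checked above, while the same constant
   checked against short_integer_type_node would fit.  */
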
/* Stores bounds of an integer TYPE in MIN and MAX.  If TYPE has non-constant
   bounds or is a POINTER_TYPE, the maximum and/or minimum values that can be
   represented (assuming two's-complement arithmetic) within the bit
   precision of the type are returned instead.  */

void
get_type_static_bounds (const_tree type, mpz_t min, mpz_t max)
{
  if (!POINTER_TYPE_P (type) && TYPE_MIN_VALUE (type)
      && TREE_CODE (TYPE_MIN_VALUE (type)) == INTEGER_CST)
    mpz_set_double_int (min, tree_to_double_int (TYPE_MIN_VALUE (type)),
			TYPE_UNSIGNED (type));
  else
    {
      if (TYPE_UNSIGNED (type))
	mpz_set_ui (min, 0);
      else
	{
	  double_int mn;
	  mn = double_int_mask (TYPE_PRECISION (type) - 1);
	  mn = double_int_sext (double_int_add (mn, double_int_one),
				TYPE_PRECISION (type));
	  mpz_set_double_int (min, mn, false);
	}
    }

  if (!POINTER_TYPE_P (type) && TYPE_MAX_VALUE (type)
      && TREE_CODE (TYPE_MAX_VALUE (type)) == INTEGER_CST)
    mpz_set_double_int (max, tree_to_double_int (TYPE_MAX_VALUE (type)),
			TYPE_UNSIGNED (type));
  else
    {
      if (TYPE_UNSIGNED (type))
	mpz_set_double_int (max, double_int_mask (TYPE_PRECISION (type)),
			    true);
      else
	mpz_set_double_int (max, double_int_mask (TYPE_PRECISION (type) - 1),
			    true);
    }
}

/* Return true if VAR is an automatic variable defined in function FN.  */

bool
auto_var_in_fn_p (const_tree var, const_tree fn)
{
  return (DECL_P (var) && DECL_CONTEXT (var) == fn
	  && ((((TREE_CODE (var) == VAR_DECL && ! DECL_EXTERNAL (var))
		|| TREE_CODE (var) == PARM_DECL)
	       && ! TREE_STATIC (var))
	      || TREE_CODE (var) == LABEL_DECL
	      || TREE_CODE (var) == RESULT_DECL));
}

/* Subprogram of following function.  Called by walk_tree.

   Return *TP if it is an automatic variable or parameter of the
   function passed in as DATA.  */

static tree
find_var_from_fn (tree *tp, int *walk_subtrees, void *data)
{
  tree fn = (tree) data;

  if (TYPE_P (*tp))
    *walk_subtrees = 0;

  else if (DECL_P (*tp)
	   && auto_var_in_fn_p (*tp, fn))
    return *tp;

  return NULL_TREE;
}

/* Returns true if T is, contains, or refers to a type with variable
   size.  For METHOD_TYPEs and FUNCTION_TYPEs we exclude the
   arguments, but not the return type.  If FN is nonzero, only return
   true if a modifier of the type or position of FN is a variable or
   parameter inside FN.

   This concept is more general than that of C99 'variably modified types':
   in C99, a struct type is never variably modified because a VLA may not
   appear as a structure member.  However, in GNU C, code like:

     struct S { int i[f()]; };

   is valid, and other languages may define similar constructs.  */

bool
variably_modified_type_p (tree type, tree fn)
{
  tree t;

  /* Test if T is either variable (if FN is zero) or an expression containing
     a variable in FN.  */
#define RETURN_TRUE_IF_VAR(T)						\
  do { tree _t = (T);							\
    if (_t && _t != error_mark_node && TREE_CODE (_t) != INTEGER_CST	\
	&& (!fn || walk_tree (&_t, find_var_from_fn, fn, NULL)))	\
      return true;  } while (0)

  if (type == error_mark_node)
    return false;

  /* If TYPE itself has variable size, it is variably modified.  */
  RETURN_TRUE_IF_VAR (TYPE_SIZE (type));
  RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (type));

  switch (TREE_CODE (type))
    {
    case POINTER_TYPE:
    case REFERENCE_TYPE:
    case VECTOR_TYPE:
      if (variably_modified_type_p (TREE_TYPE (type), fn))
	return true;
      break;

    case FUNCTION_TYPE:
    case METHOD_TYPE:
      /* If TYPE is a function type, it is variably modified if the
	 return type is variably modified.  */
      if (variably_modified_type_p (TREE_TYPE (type), fn))
	return true;
      break;

    case INTEGER_TYPE:
    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
      /* Scalar types are variably modified if their end points
	 aren't constant.  */
      RETURN_TRUE_IF_VAR (TYPE_MIN_VALUE (type));
      RETURN_TRUE_IF_VAR (TYPE_MAX_VALUE (type));
      break;

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      /* We can't see if any of the fields are variably-modified by the
	 definition we normally use, since that would produce infinite
	 recursion via pointers.  */
      /* This is variably modified if some field's type is.  */
      for (t = TYPE_FIELDS (type); t; t = DECL_CHAIN (t))
	if (TREE_CODE (t) == FIELD_DECL)
	  {
	    RETURN_TRUE_IF_VAR (DECL_FIELD_OFFSET (t));
	    RETURN_TRUE_IF_VAR (DECL_SIZE (t));
	    RETURN_TRUE_IF_VAR (DECL_SIZE_UNIT (t));

	    if (TREE_CODE (type) == QUAL_UNION_TYPE)
	      RETURN_TRUE_IF_VAR (DECL_QUALIFIER (t));
	  }
      break;

    case ARRAY_TYPE:
      /* Do not call ourselves to avoid infinite recursion.  This is
	 variably modified if the element type is.  */
      RETURN_TRUE_IF_VAR (TYPE_SIZE (TREE_TYPE (type)));
      RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (TREE_TYPE (type)));
      break;

    default:
      break;
    }

  /* The current language may have other cases to check, but in general,
     all other types are not variably modified.  */
  return lang_hooks.tree_inlining.var_mod_type_p (type, fn);

#undef RETURN_TRUE_IF_VAR
}

/* Given a DECL or TYPE, return the scope in which it was declared, or
   NULL_TREE if there is no containing scope.  */

tree
get_containing_scope (const_tree t)
{
  return (TYPE_P (t) ? TYPE_CONTEXT (t) : DECL_CONTEXT (t));
}

/* Return the innermost context enclosing DECL that is
   a FUNCTION_DECL, or zero if none.  */

tree
decl_function_context (const_tree decl)
{
  tree context;

  if (TREE_CODE (decl) == ERROR_MARK)
    return 0;

  /* C++ virtual functions use DECL_CONTEXT for the class of the vtable
     where we look up the function at runtime.  Such functions always take
     a first argument of type 'pointer to real context'.

     C++ should really be fixed to use DECL_CONTEXT for the real context,
     and use something else for the "virtual context".  */
  else if (TREE_CODE (decl) == FUNCTION_DECL && DECL_VINDEX (decl))
    context
      = TYPE_MAIN_VARIANT
	  (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (decl)))));
  else
    context = DECL_CONTEXT (decl);

  while (context && TREE_CODE (context) != FUNCTION_DECL)
    {
      if (TREE_CODE (context) == BLOCK)
	context = BLOCK_SUPERCONTEXT (context);
      else
	context = get_containing_scope (context);
    }

  return context;
}

/* Return the innermost context enclosing DECL that is
   a RECORD_TYPE, UNION_TYPE or QUAL_UNION_TYPE, or zero if none.
   TYPE_DECLs and FUNCTION_DECLs are transparent to this function.  */

tree
decl_type_context (const_tree decl)
{
  tree context = DECL_CONTEXT (decl);

  while (context)
    switch (TREE_CODE (context))
      {
      case NAMESPACE_DECL:
      case TRANSLATION_UNIT_DECL:
	return NULL_TREE;

      case RECORD_TYPE:
      case UNION_TYPE:
      case QUAL_UNION_TYPE:
	return context;

      case TYPE_DECL:
      case FUNCTION_DECL:
	context = DECL_CONTEXT (context);
	break;

      case BLOCK:
	context = BLOCK_SUPERCONTEXT (context);
	break;

      default:
	gcc_unreachable ();
      }

  return NULL_TREE;
}

/* CALL is a CALL_EXPR.  Return the declaration for the function
   called, or NULL_TREE if the called function cannot be
   determined.  */

tree
get_callee_fndecl (const_tree call)
{
  tree addr;

  if (call == error_mark_node)
    return error_mark_node;

  /* It's invalid to call this function with anything but a
     CALL_EXPR.  */
  gcc_assert (TREE_CODE (call) == CALL_EXPR);

  /* The first operand to the CALL is the address of the function
     called.  */
  addr = CALL_EXPR_FN (call);

  STRIP_NOPS (addr);

  /* If this is a readonly function pointer, extract its initial value.  */
  if (DECL_P (addr) && TREE_CODE (addr) != FUNCTION_DECL
      && TREE_READONLY (addr) && ! TREE_THIS_VOLATILE (addr)
      && DECL_INITIAL (addr))
    addr = DECL_INITIAL (addr);

  /* If the address is just `&f' for some function `f', then we know
     that `f' is being called.  */
  if (TREE_CODE (addr) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (addr, 0)) == FUNCTION_DECL)
    return TREE_OPERAND (addr, 0);

  /* We couldn't figure out what was being called.  */
  return NULL_TREE;
}

/* Print debugging information about tree nodes generated during the compile,
   and any language-specific information.  */

void
dump_tree_statistics (void)
{
#ifdef GATHER_STATISTICS
  int i;
  int total_nodes, total_bytes;
#endif

  fprintf (stderr, "\n??? tree nodes created\n\n");
#ifdef GATHER_STATISTICS
  fprintf (stderr, "Kind Nodes Bytes\n");
  fprintf (stderr, "---------------------------------------\n");
  total_nodes = total_bytes = 0;
  for (i = 0; i < (int) all_kinds; i++)
    {
      fprintf (stderr, "%-20s %7d %10d\n", tree_node_kind_names[i],
	       tree_node_counts[i], tree_node_sizes[i]);
      total_nodes += tree_node_counts[i];
      total_bytes += tree_node_sizes[i];
    }
  fprintf (stderr, "---------------------------------------\n");
  fprintf (stderr, "%-20s %7d %10d\n", "Total", total_nodes, total_bytes);
  fprintf (stderr, "---------------------------------------\n");
  fprintf (stderr, "Code Nodes\n");
  fprintf (stderr, "----------------------------\n");
  for (i = 0; i < (int) MAX_TREE_CODES; i++)
    fprintf (stderr, "%-20s %7d\n", tree_code_name[i], tree_code_counts[i]);
  fprintf (stderr, "----------------------------\n");
  ssanames_print_statistics ();
  phinodes_print_statistics ();
#else
  fprintf (stderr, "(No per-node statistics)\n");
#endif
  print_type_hash_statistics ();
  print_debug_expr_statistics ();
  print_value_expr_statistics ();
  lang_hooks.print_statistics ();
}

#define FILE_FUNCTION_FORMAT "_GLOBAL__%s_%s"

/* Generate a crc32 of a byte.  */

unsigned
crc32_byte (unsigned chksum, char byte)
{
  unsigned value = (unsigned) byte << 24;
  unsigned ix;

  for (ix = 8; ix--; value <<= 1)
    {
      unsigned feedback;

      feedback = (value ^ chksum) & 0x80000000 ? 0x04c11db7 : 0;
      chksum <<= 1;
      chksum ^= feedback;
    }
  return chksum;
}


/* Generate a crc32 of a string.  */

unsigned
crc32_string (unsigned chksum, const char *string)
{
  do
    {
      chksum = crc32_byte (chksum, *string);
    }
  while (*string++);
  return chksum;
}

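/* Editor's note: an illustrative sketch, not part of the original source.
   The two helpers compose: a running checksum can be threaded through
   successive calls, much as get_file_function_name below folds the weak
   global object name into its random suffix:

     unsigned chk = crc32_string (0, "foo");
     chk = crc32_string (chk, "bar");

   which accumulates both strings (including their terminating NUL
   bytes) into one CRC-32 value.  */
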
/* P is a string that will be used in a symbol. Mask out any characters
|
8707 |
|
|
that are not valid in that context. */
|
8708 |
|
|
|
8709 |
|
|
void
|
8710 |
|
|
clean_symbol_name (char *p)
|
8711 |
|
|
{
|
8712 |
|
|
for (; *p; p++)
|
8713 |
|
|
if (! (ISALNUM (*p)
|
8714 |
|
|
#ifndef NO_DOLLAR_IN_LABEL /* this for `$'; unlikely, but... -- kr */
|
8715 |
|
|
|| *p == '$'
|
8716 |
|
|
#endif
|
8717 |
|
|
#ifndef NO_DOT_IN_LABEL /* this for `.'; unlikely, but... */
|
8718 |
|
|
|| *p == '.'
|
8719 |
|
|
#endif
|
8720 |
|
|
))
|
8721 |
|
|
*p = '_';
|
8722 |
|
|
}
|
8723 |
|
|
|
8724 |
|
|
/* Generate a name for a special-purpose function.
|
8725 |
|
|
The generated name may need to be unique across the whole link.
|
8726 |
|
|
Changes to this function may also require corresponding changes to
|
8727 |
|
|
xstrdup_mask_random.
|
8728 |
|
|
TYPE is some string to identify the purpose of this function to the
|
8729 |
|
|
linker or collect2; it must start with an uppercase letter,
|
8730 |
|
|
one of:
|
8731 |
|
|
I - for constructors
|
8732 |
|
|
D - for destructors
|
8733 |
|
|
N - for C++ anonymous namespaces
|
8734 |
|
|
F - for DWARF unwind frame information. */
|
8735 |
|
|
|
8736 |
|
|
tree
|
8737 |
|
|
get_file_function_name (const char *type)
|
8738 |
|
|
{
|
8739 |
|
|
char *buf;
|
8740 |
|
|
const char *p;
|
8741 |
|
|
char *q;
|
8742 |
|
|
|
8743 |
|
|
/* If we already have a name we know to be unique, just use that. */
|
8744 |
|
|
if (first_global_object_name)
|
8745 |
|
|
p = q = ASTRDUP (first_global_object_name);
|
8746 |
|
|
/* If the target is handling the constructors/destructors, they
|
8747 |
|
|
will be local to this file and the name is only necessary for
|
8748 |
|
|
debugging purposes.
|
8749 |
|
|
We also assign sub_I and sub_D sufixes to constructors called from
|
8750 |
|
|
the global static constructors. These are always local. */
|
8751 |
|
|
else if (((type[0] == 'I' || type[0] == 'D') && targetm.have_ctors_dtors)
|
8752 |
|
|
|| (strncmp (type, "sub_", 4) == 0
|
8753 |
|
|
&& (type[4] == 'I' || type[4] == 'D')))
|
8754 |
|
|
{
|
8755 |
|
|
const char *file = main_input_filename;
|
8756 |
|
|
if (! file)
|
8757 |
|
|
file = input_filename;
|
8758 |
|
|
/* Just use the file's basename, because the full pathname
|
8759 |
|
|
might be quite long. */
|
8760 |
|
|
p = q = ASTRDUP (lbasename (file));
|
8761 |
|
|
}
|
8762 |
|
|
else
|
8763 |
|
|
{
|
8764 |
|
|
/* Otherwise, the name must be unique across the entire link.
|
8765 |
|
|
We don't have anything that we know to be unique to this translation
|
8766 |
|
|
unit, so use what we do have and throw in some randomness. */
|
8767 |
|
|
unsigned len;
|
8768 |
|
|
const char *name = weak_global_object_name;
|
8769 |
|
|
const char *file = main_input_filename;
|
8770 |
|
|
|
8771 |
|
|
if (! name)
|
8772 |
|
|
name = "";
|
8773 |
|
|
if (! file)
|
8774 |
|
|
file = input_filename;
|
8775 |
|
|
|
8776 |
|
|
len = strlen (file);
|
8777 |
|
|
q = (char *) alloca (9 + 17 + len + 1);
|
8778 |
|
|
memcpy (q, file, len + 1);
|
8779 |
|
|
|
8780 |
|
|
snprintf (q + len, 9 + 17 + 1, "_%08X_" HOST_WIDE_INT_PRINT_HEX,
|
8781 |
|
|
crc32_string (0, name), get_random_seed (false));
|
8782 |
|
|
|
8783 |
|
|
p = q;
|
8784 |
|
|
}
|
8785 |
|
|
|
8786 |
|
|
clean_symbol_name (q);
|
8787 |
|
|
buf = (char *) alloca (sizeof (FILE_FUNCTION_FORMAT) + strlen (p)
|
8788 |
|
|
+ strlen (type));
|
8789 |
|
|
|
8790 |
|
|
/* Set up the name of the file-level functions we may need.
|
8791 |
|
|
Use a global object (which is already required to be unique over
|
8792 |
|
|
the program) rather than the file name (which imposes extra
|
8793 |
|
|
constraints). */
|
8794 |
|
|
sprintf (buf, FILE_FUNCTION_FORMAT, type, p);
|
8795 |
|
|
|
8796 |
|
|
return get_identifier (buf);
|
8797 |
|
|
}
|
8798 |
|
|
|
8799 |
|
|
#if defined ENABLE_TREE_CHECKING && (GCC_VERSION >= 2007)
|
8800 |
|
|
|
8801 |
|
|
/* Complain that the tree code of NODE does not match the expected 0
|
8802 |
|
|
terminated list of trailing codes. The trailing code list can be
|
8803 |
|
|
empty, for a more vague error message. FILE, LINE, and FUNCTION
|
8804 |
|
|
are of the caller. */
|
8805 |
|
|
|
8806 |
|
|
void
|
8807 |
|
|
tree_check_failed (const_tree node, const char *file,
|
8808 |
|
|
int line, const char *function, ...)
|
8809 |
|
|
{
|
8810 |
|
|
va_list args;
|
8811 |
|
|
const char *buffer;
|
8812 |
|
|
unsigned length = 0;
|
8813 |
|
|
int code;
|
8814 |
|
|
|
8815 |
|
|
va_start (args, function);
|
8816 |
|
|
while ((code = va_arg (args, int)))
|
8817 |
|
|
length += 4 + strlen (tree_code_name[code]);
|
8818 |
|
|
va_end (args);
|
8819 |
|
|
if (length)
|
8820 |
|
|
{
|
8821 |
|
|
char *tmp;
|
8822 |
|
|
va_start (args, function);
|
8823 |
|
|
length += strlen ("expected ");
|
8824 |
|
|
buffer = tmp = (char *) alloca (length);
|
8825 |
|
|
length = 0;
|
8826 |
|
|
while ((code = va_arg (args, int)))
|
8827 |
|
|
{
|
8828 |
|
|
const char *prefix = length ? " or " : "expected ";
|
8829 |
|
|
|
8830 |
|
|
strcpy (tmp + length, prefix);
|
8831 |
|
|
length += strlen (prefix);
|
8832 |
|
|
strcpy (tmp + length, tree_code_name[code]);
|
8833 |
|
|
length += strlen (tree_code_name[code]);
|
8834 |
|
|
}
|
8835 |
|
|
va_end (args);
|
8836 |
|
|
}
|
8837 |
|
|
else
|
8838 |
|
|
buffer = "unexpected node";
|
8839 |
|
|
|
8840 |
|
|
internal_error ("tree check: %s, have %s in %s, at %s:%d",
|
8841 |
|
|
buffer, tree_code_name[TREE_CODE (node)],
|
8842 |
|
|
function, trim_filename (file), line);
|
8843 |
|
|
}
|
8844 |
|
|
|
8845 |
|
|
/* Complain that the tree code of NODE does match the expected 0
|
8846 |
|
|
terminated list of trailing codes. FILE, LINE, and FUNCTION are of
|
8847 |
|
|
the caller. */
|
8848 |
|
|
|
8849 |
|
|
void
|
8850 |
|
|
tree_not_check_failed (const_tree node, const char *file,
|
8851 |
|
|
int line, const char *function, ...)
|
8852 |
|
|
{
|
8853 |
|
|
va_list args;
|
8854 |
|
|
char *buffer;
|
8855 |
|
|
unsigned length = 0;
|
8856 |
|
|
int code;
|
8857 |
|
|
|
8858 |
|
|
va_start (args, function);
|
8859 |
|
|
while ((code = va_arg (args, int)))
|
8860 |
|
|
length += 4 + strlen (tree_code_name[code]);
|
8861 |
|
|
va_end (args);
|
8862 |
|
|
va_start (args, function);
|
8863 |
|
|
buffer = (char *) alloca (length);
|
8864 |
|
|
length = 0;
|
8865 |
|
|
while ((code = va_arg (args, int)))
|
8866 |
|
|
{
|
8867 |
|
|
if (length)
|
8868 |
|
|
{
|
8869 |
|
|
strcpy (buffer + length, " or ");
|
8870 |
|
|
length += 4;
|
8871 |
|
|
}
|
8872 |
|
|
strcpy (buffer + length, tree_code_name[code]);
|
8873 |
|
|
length += strlen (tree_code_name[code]);
|
8874 |
|
|
}
|
8875 |
|
|
va_end (args);
|
8876 |
|
|
|
8877 |
|
|
internal_error ("tree check: expected none of %s, have %s in %s, at %s:%d",
|
8878 |
|
|
buffer, tree_code_name[TREE_CODE (node)],
|
8879 |
|
|
function, trim_filename (file), line);
|
8880 |
|
|
}
|
8881 |
|
|
|
8882 |
|
|
/* Similar to tree_check_failed, except that we check for a class of tree
|
8883 |
|
|
code, given in CL. */
|
8884 |
|
|
|
8885 |
|
|
void
|
8886 |
|
|
tree_class_check_failed (const_tree node, const enum tree_code_class cl,
|
8887 |
|
|
const char *file, int line, const char *function)
|
8888 |
|
|
{
|
8889 |
|
|
internal_error
|
8890 |
|
|
("tree check: expected class %qs, have %qs (%s) in %s, at %s:%d",
|
8891 |
|
|
TREE_CODE_CLASS_STRING (cl),
|
8892 |
|
|
TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
|
8893 |
|
|
tree_code_name[TREE_CODE (node)], function, trim_filename (file), line);
|
8894 |
|
|
}
|
8895 |
|
|
|
8896 |
|
|
/* Similar to tree_check_failed, except that instead of specifying a
|
8897 |
|
|
dozen codes, use the knowledge that they're all sequential. */
|
8898 |
|
|
|
8899 |
|
|
void
|
8900 |
|
|
tree_range_check_failed (const_tree node, const char *file, int line,
|
8901 |
|
|
const char *function, enum tree_code c1,
|
8902 |
|
|
enum tree_code c2)
|
8903 |
|
|
{
|
8904 |
|
|
char *buffer;
|
8905 |
|
|
unsigned length = 0;
|
8906 |
|
|
unsigned int c;
|
8907 |
|
|
|
8908 |
|
|
for (c = c1; c <= c2; ++c)
|
8909 |
|
|
length += 4 + strlen (tree_code_name[c]);
|
8910 |
|
|
|
8911 |
|
|
length += strlen ("expected ");
|
8912 |
|
|
buffer = (char *) alloca (length);
|
8913 |
|
|
length = 0;
|
8914 |
|
|
|
8915 |
|
|
for (c = c1; c <= c2; ++c)
|
8916 |
|
|
{
|
8917 |
|
|
const char *prefix = length ? " or " : "expected ";
|
8918 |
|
|
|
8919 |
|
|
strcpy (buffer + length, prefix);
|
8920 |
|
|
length += strlen (prefix);
|
8921 |
|
|
strcpy (buffer + length, tree_code_name[c]);
|
8922 |
|
|
length += strlen (tree_code_name[c]);
|
8923 |
|
|
}
|
8924 |
|
|
|
8925 |
|
|
internal_error ("tree check: %s, have %s in %s, at %s:%d",
|
8926 |
|
|
buffer, tree_code_name[TREE_CODE (node)],
|
8927 |
|
|
function, trim_filename (file), line);
|
8928 |
|
|
}
|
8929 |
|
|
|
8930 |
|
|
|
8931 |
|
|
/* Similar to tree_check_failed, except that we check that a tree does
|
8932 |
|
|
not belong to the specified class, given in CL.  */
|
8933 |
|
|
|
8934 |
|
|
void
|
8935 |
|
|
tree_not_class_check_failed (const_tree node, const enum tree_code_class cl,
|
8936 |
|
|
const char *file, int line, const char *function)
|
8937 |
|
|
{
|
8938 |
|
|
internal_error
|
8939 |
|
|
("tree check: did not expect class %qs, have %qs (%s) in %s, at %s:%d",
|
8940 |
|
|
TREE_CODE_CLASS_STRING (cl),
|
8941 |
|
|
TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
|
8942 |
|
|
tree_code_name[TREE_CODE (node)], function, trim_filename (file), line);
|
8943 |
|
|
}
|
8944 |
|
|
|
8945 |
|
|
|
8946 |
|
|
/* Similar to tree_check_failed but applied to OMP_CLAUSE codes. */
|
8947 |
|
|
|
8948 |
|
|
void
|
8949 |
|
|
omp_clause_check_failed (const_tree node, const char *file, int line,
|
8950 |
|
|
const char *function, enum omp_clause_code code)
|
8951 |
|
|
{
|
8952 |
|
|
internal_error ("tree check: expected omp_clause %s, have %s in %s, at %s:%d",
|
8953 |
|
|
omp_clause_code_name[code], tree_code_name[TREE_CODE (node)],
|
8954 |
|
|
function, trim_filename (file), line);
|
8955 |
|
|
}
|
8956 |
|
|
|
8957 |
|
|
|
8958 |
|
|
/* Similar to tree_range_check_failed but applied to OMP_CLAUSE codes. */
|
8959 |
|
|
|
8960 |
|
|
void
|
8961 |
|
|
omp_clause_range_check_failed (const_tree node, const char *file, int line,
|
8962 |
|
|
const char *function, enum omp_clause_code c1,
|
8963 |
|
|
enum omp_clause_code c2)
|
8964 |
|
|
{
|
8965 |
|
|
char *buffer;
|
8966 |
|
|
unsigned length = 0;
|
8967 |
|
|
unsigned int c;
|
8968 |
|
|
|
8969 |
|
|
for (c = c1; c <= c2; ++c)
|
8970 |
|
|
length += 4 + strlen (omp_clause_code_name[c]);
|
8971 |
|
|
|
8972 |
|
|
length += strlen ("expected ");
|
8973 |
|
|
buffer = (char *) alloca (length);
|
8974 |
|
|
length = 0;
|
8975 |
|
|
|
8976 |
|
|
for (c = c1; c <= c2; ++c)
|
8977 |
|
|
{
|
8978 |
|
|
const char *prefix = length ? " or " : "expected ";
|
8979 |
|
|
|
8980 |
|
|
strcpy (buffer + length, prefix);
|
8981 |
|
|
length += strlen (prefix);
|
8982 |
|
|
strcpy (buffer + length, omp_clause_code_name[c]);
|
8983 |
|
|
length += strlen (omp_clause_code_name[c]);
|
8984 |
|
|
}
|
8985 |
|
|
|
8986 |
|
|
internal_error ("tree check: %s, have %s in %s, at %s:%d",
|
8987 |
|
|
buffer, omp_clause_code_name[TREE_CODE (node)],
|
8988 |
|
|
function, trim_filename (file), line);
|
8989 |
|
|
}
|
8990 |
|
|
|
8991 |
|
|
|
8992 |
|
|
#undef DEFTREESTRUCT
|
8993 |
|
|
#define DEFTREESTRUCT(VAL, NAME) NAME,
|
8994 |
|
|
|
8995 |
|
|
static const char *ts_enum_names[] = {
|
8996 |
|
|
#include "treestruct.def"
|
8997 |
|
|
};
|
8998 |
|
|
#undef DEFTREESTRUCT
|
8999 |
|
|
|
9000 |
|
|
#define TS_ENUM_NAME(EN) (ts_enum_names[(EN)])
|
9001 |
|
|
|
9002 |
|
|
/* Similar to tree_class_check_failed, except that we check for
|
9003 |
|
|
whether CODE contains the tree structure identified by EN. */
|
9004 |
|
|
|
9005 |
|
|
void
|
9006 |
|
|
tree_contains_struct_check_failed (const_tree node,
|
9007 |
|
|
const enum tree_node_structure_enum en,
|
9008 |
|
|
const char *file, int line,
|
9009 |
|
|
const char *function)
|
9010 |
|
|
{
|
9011 |
|
|
internal_error
|
9012 |
|
|
("tree check: expected tree that contains %qs structure, have %qs in %s, at %s:%d",
|
9013 |
|
|
TS_ENUM_NAME(en),
|
9014 |
|
|
tree_code_name[TREE_CODE (node)], function, trim_filename (file), line);
|
9015 |
|
|
}
|
9016 |
|
|
|
9017 |
|
|
|
9018 |
|
|
/* Similar to above, except that the check is for the bounds of a TREE_VEC's
|
9019 |
|
|
(dynamically sized) vector. */
|
9020 |
|
|
|
9021 |
|
|
void
|
9022 |
|
|
tree_vec_elt_check_failed (int idx, int len, const char *file, int line,
|
9023 |
|
|
const char *function)
|
9024 |
|
|
{
|
9025 |
|
|
internal_error
|
9026 |
|
|
("tree check: accessed elt %d of tree_vec with %d elts in %s, at %s:%d",
|
9027 |
|
|
idx + 1, len, function, trim_filename (file), line);
|
9028 |
|
|
}
|
9029 |
|
|
|
9030 |
|
|
/* Similar to above, except that the check is for the bounds of the operand
|
9031 |
|
|
vector of an expression node EXP. */
|
9032 |
|
|
|
9033 |
|
|
void
|
9034 |
|
|
tree_operand_check_failed (int idx, const_tree exp, const char *file,
|
9035 |
|
|
int line, const char *function)
|
9036 |
|
|
{
|
9037 |
|
|
int code = TREE_CODE (exp);
|
9038 |
|
|
internal_error
|
9039 |
|
|
("tree check: accessed operand %d of %s with %d operands in %s, at %s:%d",
|
9040 |
|
|
idx + 1, tree_code_name[code], TREE_OPERAND_LENGTH (exp),
|
9041 |
|
|
function, trim_filename (file), line);
|
9042 |
|
|
}
|
9043 |
|
|
|
9044 |
|
|
/* Similar to above, except that the check is for the number of
|
9045 |
|
|
operands of an OMP_CLAUSE node. */
|
9046 |
|
|
|
9047 |
|
|
void
|
9048 |
|
|
omp_clause_operand_check_failed (int idx, const_tree t, const char *file,
|
9049 |
|
|
int line, const char *function)
|
9050 |
|
|
{
|
9051 |
|
|
internal_error
|
9052 |
|
|
("tree check: accessed operand %d of omp_clause %s with %d operands "
|
9053 |
|
|
"in %s, at %s:%d", idx + 1, omp_clause_code_name[OMP_CLAUSE_CODE (t)],
|
9054 |
|
|
omp_clause_num_ops [OMP_CLAUSE_CODE (t)], function,
|
9055 |
|
|
trim_filename (file), line);
|
9056 |
|
|
}
|
9057 |
|
|
#endif /* ENABLE_TREE_CHECKING */
|
9058 |
|
|
|
9059 |
|
|
/* Create a new vector type node holding SUBPARTS units of type INNERTYPE,
|
9060 |
|
|
and mapped to the machine mode MODE. Initialize its fields and build
|
9061 |
|
|
the information necessary for debugging output. */
|
9062 |
|
|
|
9063 |
|
|
static tree
|
9064 |
|
|
make_vector_type (tree innertype, int nunits, enum machine_mode mode)
|
9065 |
|
|
{
|
9066 |
|
|
tree t;
|
9067 |
|
|
hashval_t hashcode = 0;
|
9068 |
|
|
|
9069 |
|
|
t = make_node (VECTOR_TYPE);
|
9070 |
|
|
TREE_TYPE (t) = TYPE_MAIN_VARIANT (innertype);
|
9071 |
|
|
SET_TYPE_VECTOR_SUBPARTS (t, nunits);
|
9072 |
|
|
SET_TYPE_MODE (t, mode);
|
9073 |
|
|
|
9074 |
|
|
if (TYPE_STRUCTURAL_EQUALITY_P (innertype))
|
9075 |
|
|
SET_TYPE_STRUCTURAL_EQUALITY (t);
|
9076 |
|
|
else if (TYPE_CANONICAL (innertype) != innertype
|
9077 |
|
|
|| mode != VOIDmode)
|
9078 |
|
|
TYPE_CANONICAL (t)
|
9079 |
|
|
= make_vector_type (TYPE_CANONICAL (innertype), nunits, VOIDmode);
|
9080 |
|
|
|
9081 |
|
|
layout_type (t);
|
9082 |
|
|
|
9083 |
|
|
hashcode = iterative_hash_host_wide_int (VECTOR_TYPE, hashcode);
|
9084 |
|
|
hashcode = iterative_hash_host_wide_int (nunits, hashcode);
|
9085 |
|
|
hashcode = iterative_hash_host_wide_int (mode, hashcode);
|
9086 |
|
|
hashcode = iterative_hash_object (TYPE_HASH (TREE_TYPE (t)), hashcode);
|
9087 |
|
|
t = type_hash_canon (hashcode, t);
|
9088 |
|
|
|
9089 |
|
|
/* We have built a main variant, based on the main variant of the
|
9090 |
|
|
inner type. Use it to build the variant we return. */
|
9091 |
|
|
if ((TYPE_ATTRIBUTES (innertype) || TYPE_QUALS (innertype))
|
9092 |
|
|
&& TREE_TYPE (t) != innertype)
|
9093 |
|
|
return build_type_attribute_qual_variant (t,
|
9094 |
|
|
TYPE_ATTRIBUTES (innertype),
|
9095 |
|
|
TYPE_QUALS (innertype));
|
9096 |
|
|
|
9097 |
|
|
return t;
|
9098 |
|
|
}
|
9099 |
|
|
|
9100 |
|
|
static tree
|
9101 |
|
|
make_or_reuse_type (unsigned size, int unsignedp)
|
9102 |
|
|
{
|
9103 |
|
|
if (size == INT_TYPE_SIZE)
|
9104 |
|
|
return unsignedp ? unsigned_type_node : integer_type_node;
|
9105 |
|
|
if (size == CHAR_TYPE_SIZE)
|
9106 |
|
|
return unsignedp ? unsigned_char_type_node : signed_char_type_node;
|
9107 |
|
|
if (size == SHORT_TYPE_SIZE)
|
9108 |
|
|
return unsignedp ? short_unsigned_type_node : short_integer_type_node;
|
9109 |
|
|
if (size == LONG_TYPE_SIZE)
|
9110 |
|
|
return unsignedp ? long_unsigned_type_node : long_integer_type_node;
|
9111 |
|
|
if (size == LONG_LONG_TYPE_SIZE)
|
9112 |
|
|
return (unsignedp ? long_long_unsigned_type_node
|
9113 |
|
|
: long_long_integer_type_node);
|
9114 |
|
|
if (size == 128 && int128_integer_type_node)
|
9115 |
|
|
return (unsignedp ? int128_unsigned_type_node
|
9116 |
|
|
: int128_integer_type_node);
|
9117 |
|
|
|
9118 |
|
|
if (unsignedp)
|
9119 |
|
|
return make_unsigned_type (size);
|
9120 |
|
|
else
|
9121 |
|
|
return make_signed_type (size);
|
9122 |
|
|
}
|
9123 |
|
|
|
9124 |
|
|
/* Create or reuse a fract type by SIZE, UNSIGNEDP, and SATP. */
|
9125 |
|
|
|
9126 |
|
|
static tree
|
9127 |
|
|
make_or_reuse_fract_type (unsigned size, int unsignedp, int satp)
|
9128 |
|
|
{
|
9129 |
|
|
if (satp)
|
9130 |
|
|
{
|
9131 |
|
|
if (size == SHORT_FRACT_TYPE_SIZE)
|
9132 |
|
|
return unsignedp ? sat_unsigned_short_fract_type_node
|
9133 |
|
|
: sat_short_fract_type_node;
|
9134 |
|
|
if (size == FRACT_TYPE_SIZE)
|
9135 |
|
|
return unsignedp ? sat_unsigned_fract_type_node : sat_fract_type_node;
|
9136 |
|
|
if (size == LONG_FRACT_TYPE_SIZE)
|
9137 |
|
|
return unsignedp ? sat_unsigned_long_fract_type_node
|
9138 |
|
|
: sat_long_fract_type_node;
|
9139 |
|
|
if (size == LONG_LONG_FRACT_TYPE_SIZE)
|
9140 |
|
|
return unsignedp ? sat_unsigned_long_long_fract_type_node
|
9141 |
|
|
: sat_long_long_fract_type_node;
|
9142 |
|
|
}
|
9143 |
|
|
else
|
9144 |
|
|
{
|
9145 |
|
|
if (size == SHORT_FRACT_TYPE_SIZE)
|
9146 |
|
|
return unsignedp ? unsigned_short_fract_type_node
|
9147 |
|
|
: short_fract_type_node;
|
9148 |
|
|
if (size == FRACT_TYPE_SIZE)
|
9149 |
|
|
return unsignedp ? unsigned_fract_type_node : fract_type_node;
|
9150 |
|
|
if (size == LONG_FRACT_TYPE_SIZE)
|
9151 |
|
|
return unsignedp ? unsigned_long_fract_type_node
|
9152 |
|
|
: long_fract_type_node;
|
9153 |
|
|
if (size == LONG_LONG_FRACT_TYPE_SIZE)
|
9154 |
|
|
return unsignedp ? unsigned_long_long_fract_type_node
|
9155 |
|
|
: long_long_fract_type_node;
|
9156 |
|
|
}
|
9157 |
|
|
|
9158 |
|
|
return make_fract_type (size, unsignedp, satp);
|
9159 |
|
|
}
|
9160 |
|
|
|
9161 |
|
|
/* Create or reuse an accum type by SIZE, UNSIGNEDP, and SATP. */
|
9162 |
|
|
|
9163 |
|
|
static tree
|
9164 |
|
|
make_or_reuse_accum_type (unsigned size, int unsignedp, int satp)
|
9165 |
|
|
{
|
9166 |
|
|
if (satp)
|
9167 |
|
|
{
|
9168 |
|
|
if (size == SHORT_ACCUM_TYPE_SIZE)
|
9169 |
|
|
return unsignedp ? sat_unsigned_short_accum_type_node
|
9170 |
|
|
: sat_short_accum_type_node;
|
9171 |
|
|
if (size == ACCUM_TYPE_SIZE)
|
9172 |
|
|
return unsignedp ? sat_unsigned_accum_type_node : sat_accum_type_node;
|
9173 |
|
|
if (size == LONG_ACCUM_TYPE_SIZE)
|
9174 |
|
|
return unsignedp ? sat_unsigned_long_accum_type_node
|
9175 |
|
|
: sat_long_accum_type_node;
|
9176 |
|
|
if (size == LONG_LONG_ACCUM_TYPE_SIZE)
|
9177 |
|
|
return unsignedp ? sat_unsigned_long_long_accum_type_node
|
9178 |
|
|
: sat_long_long_accum_type_node;
|
9179 |
|
|
}
|
9180 |
|
|
else
|
9181 |
|
|
{
|
9182 |
|
|
if (size == SHORT_ACCUM_TYPE_SIZE)
|
9183 |
|
|
return unsignedp ? unsigned_short_accum_type_node
|
9184 |
|
|
: short_accum_type_node;
|
9185 |
|
|
if (size == ACCUM_TYPE_SIZE)
|
9186 |
|
|
return unsignedp ? unsigned_accum_type_node : accum_type_node;
|
9187 |
|
|
if (size == LONG_ACCUM_TYPE_SIZE)
|
9188 |
|
|
return unsignedp ? unsigned_long_accum_type_node
|
9189 |
|
|
: long_accum_type_node;
|
9190 |
|
|
if (size == LONG_LONG_ACCUM_TYPE_SIZE)
|
9191 |
|
|
return unsignedp ? unsigned_long_long_accum_type_node
|
9192 |
|
|
: long_long_accum_type_node;
|
9193 |
|
|
}
|
9194 |
|
|
|
9195 |
|
|
return make_accum_type (size, unsignedp, satp);
|
9196 |
|
|
}
|
9197 |
|
|
|
9198 |
|
|
/* Create nodes for all integer types (and error_mark_node) using the sizes
|
9199 |
|
|
of C datatypes. SIGNED_CHAR specifies whether char is signed,
|
9200 |
|
|
SHORT_DOUBLE specifies whether double should be of the same precision
|
9201 |
|
|
as float. */
|
9202 |
|
|
|
9203 |
|
|
void
|
9204 |
|
|
build_common_tree_nodes (bool signed_char, bool short_double)
|
9205 |
|
|
{
|
9206 |
|
|
error_mark_node = make_node (ERROR_MARK);
|
9207 |
|
|
TREE_TYPE (error_mark_node) = error_mark_node;
|
9208 |
|
|
|
9209 |
|
|
initialize_sizetypes ();
|
9210 |
|
|
|
9211 |
|
|
/* Define both `signed char' and `unsigned char'. */
|
9212 |
|
|
signed_char_type_node = make_signed_type (CHAR_TYPE_SIZE);
|
9213 |
|
|
TYPE_STRING_FLAG (signed_char_type_node) = 1;
|
9214 |
|
|
unsigned_char_type_node = make_unsigned_type (CHAR_TYPE_SIZE);
|
9215 |
|
|
TYPE_STRING_FLAG (unsigned_char_type_node) = 1;
|
9216 |
|
|
|
9217 |
|
|
/* Define `char', which is like either `signed char' or `unsigned char'
|
9218 |
|
|
but not the same as either. */
|
9219 |
|
|
char_type_node
|
9220 |
|
|
= (signed_char
|
9221 |
|
|
? make_signed_type (CHAR_TYPE_SIZE)
|
9222 |
|
|
: make_unsigned_type (CHAR_TYPE_SIZE));
|
9223 |
|
|
TYPE_STRING_FLAG (char_type_node) = 1;
|
9224 |
|
|
|
9225 |
|
|
short_integer_type_node = make_signed_type (SHORT_TYPE_SIZE);
|
9226 |
|
|
short_unsigned_type_node = make_unsigned_type (SHORT_TYPE_SIZE);
|
9227 |
|
|
integer_type_node = make_signed_type (INT_TYPE_SIZE);
|
9228 |
|
|
unsigned_type_node = make_unsigned_type (INT_TYPE_SIZE);
|
9229 |
|
|
long_integer_type_node = make_signed_type (LONG_TYPE_SIZE);
|
9230 |
|
|
long_unsigned_type_node = make_unsigned_type (LONG_TYPE_SIZE);
|
9231 |
|
|
long_long_integer_type_node = make_signed_type (LONG_LONG_TYPE_SIZE);
|
9232 |
|
|
long_long_unsigned_type_node = make_unsigned_type (LONG_LONG_TYPE_SIZE);
|
9233 |
|
|
#if HOST_BITS_PER_WIDE_INT >= 64
|
9234 |
|
|
/* TODO: This isn't correct; at the moment the logic depends on the
|
9235 |
|
|
host's wide integers instead of the target's.
|
9236 |
|
|
If there is a target that does not support TImode but has a 128-bit
|
9237 |
|
|
integer-scalar register, this target check needs to be adjusted. */
|
9238 |
|
|
if (targetm.scalar_mode_supported_p (TImode))
|
9239 |
|
|
{
|
9240 |
|
|
int128_integer_type_node = make_signed_type (128);
|
9241 |
|
|
int128_unsigned_type_node = make_unsigned_type (128);
|
9242 |
|
|
}
|
9243 |
|
|
#endif
|
9244 |
|
|
|
9245 |
|
|
/* Define a boolean type. This type only represents boolean values but
|
9246 |
|
|
may be larger than char depending on the value of BOOL_TYPE_SIZE.
|
9247 |
|
|
Front ends which want to override this size (e.g. Java) can redefine
|
9248 |
|
|
boolean_type_node before calling build_common_tree_nodes_2. */
|
9249 |
|
|
boolean_type_node = make_unsigned_type (BOOL_TYPE_SIZE);
|
9250 |
|
|
TREE_SET_CODE (boolean_type_node, BOOLEAN_TYPE);
|
9251 |
|
|
TYPE_MAX_VALUE (boolean_type_node) = build_int_cst (boolean_type_node, 1);
|
9252 |
|
|
TYPE_PRECISION (boolean_type_node) = 1;
|
9253 |
|
|
|
9254 |
|
|
/* Define what type to use for size_t. */
|
9255 |
|
|
if (strcmp (SIZE_TYPE, "unsigned int") == 0)
|
9256 |
|
|
size_type_node = unsigned_type_node;
|
9257 |
|
|
else if (strcmp (SIZE_TYPE, "long unsigned int") == 0)
|
9258 |
|
|
size_type_node = long_unsigned_type_node;
|
9259 |
|
|
else if (strcmp (SIZE_TYPE, "long long unsigned int") == 0)
|
9260 |
|
|
size_type_node = long_long_unsigned_type_node;
|
9261 |
|
|
else if (strcmp (SIZE_TYPE, "short unsigned int") == 0)
|
9262 |
|
|
size_type_node = short_unsigned_type_node;
|
9263 |
|
|
else
|
9264 |
|
|
gcc_unreachable ();
|
9265 |
|
|
|
9266 |
|
|
/* Fill in the rest of the sized types. Reuse existing type nodes
|
9267 |
|
|
when possible. */
|
9268 |
|
|
intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 0);
|
9269 |
|
|
intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 0);
|
9270 |
|
|
intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 0);
|
9271 |
|
|
intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 0);
|
9272 |
|
|
intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 0);
|
9273 |
|
|
|
9274 |
|
|
unsigned_intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 1);
|
9275 |
|
|
unsigned_intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 1);
|
9276 |
|
|
unsigned_intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 1);
|
9277 |
|
|
unsigned_intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 1);
|
9278 |
|
|
unsigned_intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 1);
|
9279 |
|
|
|
9280 |
|
|
access_public_node = get_identifier ("public");
|
9281 |
|
|
access_protected_node = get_identifier ("protected");
|
9282 |
|
|
access_private_node = get_identifier ("private");
|
9283 |
|
|
|
9284 |
|
|
/* Define these next since types below may use them. */
|
9285 |
|
|
integer_zero_node = build_int_cst (integer_type_node, 0);
|
9286 |
|
|
integer_one_node = build_int_cst (integer_type_node, 1);
|
9287 |
|
|
integer_three_node = build_int_cst (integer_type_node, 3);
|
9288 |
|
|
integer_minus_one_node = build_int_cst (integer_type_node, -1);
|
9289 |
|
|
|
9290 |
|
|
size_zero_node = size_int (0);
|
9291 |
|
|
size_one_node = size_int (1);
|
9292 |
|
|
bitsize_zero_node = bitsize_int (0);
|
9293 |
|
|
bitsize_one_node = bitsize_int (1);
|
9294 |
|
|
bitsize_unit_node = bitsize_int (BITS_PER_UNIT);
|
9295 |
|
|
|
9296 |
|
|
boolean_false_node = TYPE_MIN_VALUE (boolean_type_node);
|
9297 |
|
|
boolean_true_node = TYPE_MAX_VALUE (boolean_type_node);
|
9298 |
|
|
|
9299 |
|
|
void_type_node = make_node (VOID_TYPE);
|
9300 |
|
|
layout_type (void_type_node);
|
9301 |
|
|
|
9302 |
|
|
/* We are not going to have real types in C with less than byte alignment,
|
9303 |
|
|
so we might as well not have any types that claim to have it. */
|
9304 |
|
|
TYPE_ALIGN (void_type_node) = BITS_PER_UNIT;
|
9305 |
|
|
TYPE_USER_ALIGN (void_type_node) = 0;
|
9306 |
|
|
|
9307 |
|
|
null_pointer_node = build_int_cst (build_pointer_type (void_type_node), 0);
|
9308 |
|
|
layout_type (TREE_TYPE (null_pointer_node));
|
9309 |
|
|
|
9310 |
|
|
ptr_type_node = build_pointer_type (void_type_node);
|
9311 |
|
|
const_ptr_type_node
|
9312 |
|
|
= build_pointer_type (build_type_variant (void_type_node, 1, 0));
|
9313 |
|
|
fileptr_type_node = ptr_type_node;
|
9314 |
|
|
|
9315 |
|
|
float_type_node = make_node (REAL_TYPE);
|
9316 |
|
|
TYPE_PRECISION (float_type_node) = FLOAT_TYPE_SIZE;
|
9317 |
|
|
layout_type (float_type_node);
|
9318 |
|
|
|
9319 |
|
|
double_type_node = make_node (REAL_TYPE);
|
9320 |
|
|
if (short_double)
|
9321 |
|
|
TYPE_PRECISION (double_type_node) = FLOAT_TYPE_SIZE;
|
9322 |
|
|
else
|
9323 |
|
|
TYPE_PRECISION (double_type_node) = DOUBLE_TYPE_SIZE;
|
9324 |
|
|
layout_type (double_type_node);
|
9325 |
|
|
|
9326 |
|
|
long_double_type_node = make_node (REAL_TYPE);
|
9327 |
|
|
TYPE_PRECISION (long_double_type_node) = LONG_DOUBLE_TYPE_SIZE;
|
9328 |
|
|
layout_type (long_double_type_node);
|
9329 |
|
|
|
9330 |
|
|
float_ptr_type_node = build_pointer_type (float_type_node);
|
9331 |
|
|
double_ptr_type_node = build_pointer_type (double_type_node);
|
9332 |
|
|
long_double_ptr_type_node = build_pointer_type (long_double_type_node);
|
9333 |
|
|
integer_ptr_type_node = build_pointer_type (integer_type_node);
|
9334 |
|
|
|
9335 |
|
|
/* Fixed size integer types. */
|
9336 |
|
|
uint32_type_node = build_nonstandard_integer_type (32, true);
|
9337 |
|
|
uint64_type_node = build_nonstandard_integer_type (64, true);
|
9338 |
|
|
|
9339 |
|
|
/* Decimal float types. */
|
9340 |
|
|
dfloat32_type_node = make_node (REAL_TYPE);
|
9341 |
|
|
TYPE_PRECISION (dfloat32_type_node) = DECIMAL32_TYPE_SIZE;
|
9342 |
|
|
layout_type (dfloat32_type_node);
|
9343 |
|
|
SET_TYPE_MODE (dfloat32_type_node, SDmode);
|
9344 |
|
|
dfloat32_ptr_type_node = build_pointer_type (dfloat32_type_node);
|
9345 |
|
|
|
9346 |
|
|
dfloat64_type_node = make_node (REAL_TYPE);
|
9347 |
|
|
TYPE_PRECISION (dfloat64_type_node) = DECIMAL64_TYPE_SIZE;
|
9348 |
|
|
layout_type (dfloat64_type_node);
|
9349 |
|
|
SET_TYPE_MODE (dfloat64_type_node, DDmode);
|
9350 |
|
|
dfloat64_ptr_type_node = build_pointer_type (dfloat64_type_node);
|
9351 |
|
|
|
9352 |
|
|
dfloat128_type_node = make_node (REAL_TYPE);
|
9353 |
|
|
TYPE_PRECISION (dfloat128_type_node) = DECIMAL128_TYPE_SIZE;
|
9354 |
|
|
layout_type (dfloat128_type_node);
|
9355 |
|
|
SET_TYPE_MODE (dfloat128_type_node, TDmode);
|
9356 |
|
|
dfloat128_ptr_type_node = build_pointer_type (dfloat128_type_node);
|
9357 |
|
|
|
9358 |
|
|
complex_integer_type_node = build_complex_type (integer_type_node);
|
9359 |
|
|
complex_float_type_node = build_complex_type (float_type_node);
|
9360 |
|
|
complex_double_type_node = build_complex_type (double_type_node);
|
9361 |
|
|
complex_long_double_type_node = build_complex_type (long_double_type_node);
|
9362 |
|
|
|
9363 |
|
|
/* Make fixed-point nodes based on sat/non-sat and signed/unsigned. */
|
9364 |
|
|
#define MAKE_FIXED_TYPE_NODE(KIND,SIZE) \
|
9365 |
|
|
sat_ ## KIND ## _type_node = \
|
9366 |
|
|
make_sat_signed_ ## KIND ## _type (SIZE); \
|
9367 |
|
|
sat_unsigned_ ## KIND ## _type_node = \
|
9368 |
|
|
make_sat_unsigned_ ## KIND ## _type (SIZE); \
|
9369 |
|
|
KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
|
9370 |
|
|
unsigned_ ## KIND ## _type_node = \
|
9371 |
|
|
make_unsigned_ ## KIND ## _type (SIZE);
|
9372 |
|
|
|
9373 |
|
|
#define MAKE_FIXED_TYPE_NODE_WIDTH(KIND,WIDTH,SIZE) \
|
9374 |
|
|
sat_ ## WIDTH ## KIND ## _type_node = \
|
9375 |
|
|
make_sat_signed_ ## KIND ## _type (SIZE); \
|
9376 |
|
|
sat_unsigned_ ## WIDTH ## KIND ## _type_node = \
|
9377 |
|
|
make_sat_unsigned_ ## KIND ## _type (SIZE); \
|
9378 |
|
|
WIDTH ## KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
|
9379 |
|
|
unsigned_ ## WIDTH ## KIND ## _type_node = \
|
9380 |
|
|
make_unsigned_ ## KIND ## _type (SIZE);
|
9381 |
|
|
|
9382 |
|
|
/* Make fixed-point type nodes based on four different widths. */
|
9383 |
|
|
#define MAKE_FIXED_TYPE_NODE_FAMILY(N1,N2) \
|
9384 |
|
|
MAKE_FIXED_TYPE_NODE_WIDTH (N1, short_, SHORT_ ## N2 ## _TYPE_SIZE) \
|
9385 |
|
|
MAKE_FIXED_TYPE_NODE (N1, N2 ## _TYPE_SIZE) \
|
9386 |
|
|
MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_, LONG_ ## N2 ## _TYPE_SIZE) \
|
9387 |
|
|
MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_long_, LONG_LONG_ ## N2 ## _TYPE_SIZE)
|
9388 |
|
|
|
9389 |
|
|
/* Make fixed-point mode nodes based on sat/non-sat and signed/unsigned. */
|
9390 |
|
|
#define MAKE_FIXED_MODE_NODE(KIND,NAME,MODE) \
|
9391 |
|
|
NAME ## _type_node = \
|
9392 |
|
|
make_or_reuse_signed_ ## KIND ## _type (GET_MODE_BITSIZE (MODE ## mode)); \
|
9393 |
|
|
u ## NAME ## _type_node = \
|
9394 |
|
|
make_or_reuse_unsigned_ ## KIND ## _type \
|
9395 |
|
|
(GET_MODE_BITSIZE (U ## MODE ## mode)); \
|
9396 |
|
|
sat_ ## NAME ## _type_node = \
|
9397 |
|
|
make_or_reuse_sat_signed_ ## KIND ## _type \
|
9398 |
|
|
(GET_MODE_BITSIZE (MODE ## mode)); \
|
9399 |
|
|
sat_u ## NAME ## _type_node = \
|
9400 |
|
|
make_or_reuse_sat_unsigned_ ## KIND ## _type \
|
9401 |
|
|
(GET_MODE_BITSIZE (U ## MODE ## mode));
|
9402 |
|
|
|
9403 |
|
|
/* Fixed-point type and mode nodes. */
|
9404 |
|
|
MAKE_FIXED_TYPE_NODE_FAMILY (fract, FRACT)
|
9405 |
|
|
MAKE_FIXED_TYPE_NODE_FAMILY (accum, ACCUM)
|
9406 |
|
|
MAKE_FIXED_MODE_NODE (fract, qq, QQ)
|
9407 |
|
|
MAKE_FIXED_MODE_NODE (fract, hq, HQ)
|
9408 |
|
|
MAKE_FIXED_MODE_NODE (fract, sq, SQ)
|
9409 |
|
|
MAKE_FIXED_MODE_NODE (fract, dq, DQ)
|
9410 |
|
|
MAKE_FIXED_MODE_NODE (fract, tq, TQ)
|
9411 |
|
|
MAKE_FIXED_MODE_NODE (accum, ha, HA)
|
9412 |
|
|
MAKE_FIXED_MODE_NODE (accum, sa, SA)
|
9413 |
|
|
MAKE_FIXED_MODE_NODE (accum, da, DA)
|
9414 |
|
|
MAKE_FIXED_MODE_NODE (accum, ta, TA)
|
9415 |
|
|
|
9416 |
|
|
{
|
9417 |
|
|
tree t = targetm.build_builtin_va_list ();
|
9418 |
|
|
|
9419 |
|
|
/* Many back-ends define record types without setting TYPE_NAME.
|
9420 |
|
|
If we copied the record type here, we'd keep the original
|
9421 |
|
|
record type without a name. This breaks name mangling. So,
|
9422 |
|
|
don't copy record types and let c_common_nodes_and_builtins()
|
9423 |
|
|
declare the type to be __builtin_va_list. */
|
9424 |
|
|
if (TREE_CODE (t) != RECORD_TYPE)
|
9425 |
|
|
t = build_variant_type_copy (t);
|
9426 |
|
|
|
9427 |
|
|
va_list_type_node = t;
|
9428 |
|
|
}
|
9429 |
|
|
}
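
/* A minimal usage sketch, kept under "#if 0" so it does not enter the
   build: a language front end would normally call build_common_tree_nodes
   once during initialization and then build_common_builtin_nodes (defined
   below).  Deciding char's signedness from flag_signed_char is an
   assumption about the caller, not something this file mandates.  */
#if 0
static void
example_init_common_nodes (void)
{
  /* Creates error_mark_node, the integer/char/boolean nodes,
     size_type_node, the float nodes and va_list_type_node.  */
  build_common_tree_nodes (flag_signed_char, false);

  /* Now that the basic type nodes exist, create the builtins the
     middle-end relies on.  */
  build_common_builtin_nodes ();
}
#endif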
|
9430 |
|
|
|
9431 |
|
|
/* A subroutine of build_common_builtin_nodes. Define a builtin function. */
|
9432 |
|
|
|
9433 |
|
|
static void
|
9434 |
|
|
local_define_builtin (const char *name, tree type, enum built_in_function code,
|
9435 |
|
|
const char *library_name, int ecf_flags)
|
9436 |
|
|
{
|
9437 |
|
|
tree decl;
|
9438 |
|
|
|
9439 |
|
|
decl = add_builtin_function (name, type, code, BUILT_IN_NORMAL,
|
9440 |
|
|
library_name, NULL_TREE);
|
9441 |
|
|
if (ecf_flags & ECF_CONST)
|
9442 |
|
|
TREE_READONLY (decl) = 1;
|
9443 |
|
|
if (ecf_flags & ECF_PURE)
|
9444 |
|
|
DECL_PURE_P (decl) = 1;
|
9445 |
|
|
if (ecf_flags & ECF_LOOPING_CONST_OR_PURE)
|
9446 |
|
|
DECL_LOOPING_CONST_OR_PURE_P (decl) = 1;
|
9447 |
|
|
if (ecf_flags & ECF_NORETURN)
|
9448 |
|
|
TREE_THIS_VOLATILE (decl) = 1;
|
9449 |
|
|
if (ecf_flags & ECF_NOTHROW)
|
9450 |
|
|
TREE_NOTHROW (decl) = 1;
|
9451 |
|
|
if (ecf_flags & ECF_MALLOC)
|
9452 |
|
|
DECL_IS_MALLOC (decl) = 1;
|
9453 |
|
|
if (ecf_flags & ECF_LEAF)
|
9454 |
|
|
DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("leaf"),
|
9455 |
|
|
NULL, DECL_ATTRIBUTES (decl));
|
9456 |
|
|
if ((ecf_flags & ECF_TM_PURE) && flag_tm)
|
9457 |
|
|
apply_tm_attr (decl, get_identifier ("transaction_pure"));
|
9458 |
|
|
|
9459 |
|
|
set_builtin_decl (code, decl, true);
|
9460 |
|
|
}
|
9461 |
|
|
|
9462 |
|
|
/* Call this function after instantiating all builtins that the language
|
9463 |
|
|
front end cares about. This will build the rest of the builtins that
|
9464 |
|
|
are relied upon by the tree optimizers and the middle-end. */
|
9465 |
|
|
|
9466 |
|
|
void
|
9467 |
|
|
build_common_builtin_nodes (void)
|
9468 |
|
|
{
|
9469 |
|
|
tree tmp, ftype;
|
9470 |
|
|
int ecf_flags;
|
9471 |
|
|
|
9472 |
|
|
if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY)
|
9473 |
|
|
|| !builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
|
9474 |
|
|
{
|
9475 |
|
|
ftype = build_function_type_list (ptr_type_node,
|
9476 |
|
|
ptr_type_node, const_ptr_type_node,
|
9477 |
|
|
size_type_node, NULL_TREE);
|
9478 |
|
|
|
9479 |
|
|
if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY))
|
9480 |
|
|
local_define_builtin ("__builtin_memcpy", ftype, BUILT_IN_MEMCPY,
|
9481 |
|
|
"memcpy", ECF_NOTHROW | ECF_LEAF);
|
9482 |
|
|
if (!builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
|
9483 |
|
|
local_define_builtin ("__builtin_memmove", ftype, BUILT_IN_MEMMOVE,
|
9484 |
|
|
"memmove", ECF_NOTHROW | ECF_LEAF);
|
9485 |
|
|
}
|
9486 |
|
|
|
9487 |
|
|
if (!builtin_decl_explicit_p (BUILT_IN_MEMCMP))
|
9488 |
|
|
{
|
9489 |
|
|
ftype = build_function_type_list (integer_type_node, const_ptr_type_node,
|
9490 |
|
|
const_ptr_type_node, size_type_node,
|
9491 |
|
|
NULL_TREE);
|
9492 |
|
|
local_define_builtin ("__builtin_memcmp", ftype, BUILT_IN_MEMCMP,
|
9493 |
|
|
"memcmp", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
|
9494 |
|
|
}
|
9495 |
|
|
|
9496 |
|
|
if (!builtin_decl_explicit_p (BUILT_IN_MEMSET))
|
9497 |
|
|
{
|
9498 |
|
|
ftype = build_function_type_list (ptr_type_node,
|
9499 |
|
|
ptr_type_node, integer_type_node,
|
9500 |
|
|
size_type_node, NULL_TREE);
|
9501 |
|
|
local_define_builtin ("__builtin_memset", ftype, BUILT_IN_MEMSET,
|
9502 |
|
|
"memset", ECF_NOTHROW | ECF_LEAF);
|
9503 |
|
|
}
|
9504 |
|
|
|
9505 |
|
|
if (!builtin_decl_explicit_p (BUILT_IN_ALLOCA))
|
9506 |
|
|
{
|
9507 |
|
|
ftype = build_function_type_list (ptr_type_node,
|
9508 |
|
|
size_type_node, NULL_TREE);
|
9509 |
|
|
local_define_builtin ("__builtin_alloca", ftype, BUILT_IN_ALLOCA,
|
9510 |
|
|
"alloca", ECF_MALLOC | ECF_NOTHROW | ECF_LEAF);
|
9511 |
|
|
}
|
9512 |
|
|
|
9513 |
|
|
ftype = build_function_type_list (ptr_type_node, size_type_node,
|
9514 |
|
|
size_type_node, NULL_TREE);
|
9515 |
|
|
local_define_builtin ("__builtin_alloca_with_align", ftype,
|
9516 |
|
|
BUILT_IN_ALLOCA_WITH_ALIGN, "alloca",
|
9517 |
|
|
ECF_MALLOC | ECF_NOTHROW | ECF_LEAF);
|
9518 |
|
|
|
9519 |
|
|
/* If we're checking the stack, `alloca' can throw. */
|
9520 |
|
|
if (flag_stack_check)
|
9521 |
|
|
{
|
9522 |
|
|
TREE_NOTHROW (builtin_decl_explicit (BUILT_IN_ALLOCA)) = 0;
|
9523 |
|
|
TREE_NOTHROW (builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN)) = 0;
|
9524 |
|
|
}
|
9525 |
|
|
|
9526 |
|
|
ftype = build_function_type_list (void_type_node,
|
9527 |
|
|
ptr_type_node, ptr_type_node,
|
9528 |
|
|
ptr_type_node, NULL_TREE);
|
9529 |
|
|
local_define_builtin ("__builtin_init_trampoline", ftype,
|
9530 |
|
|
BUILT_IN_INIT_TRAMPOLINE,
|
9531 |
|
|
"__builtin_init_trampoline", ECF_NOTHROW | ECF_LEAF);
|
9532 |
|
|
local_define_builtin ("__builtin_init_heap_trampoline", ftype,
|
9533 |
|
|
BUILT_IN_INIT_HEAP_TRAMPOLINE,
|
9534 |
|
|
"__builtin_init_heap_trampoline",
|
9535 |
|
|
ECF_NOTHROW | ECF_LEAF);
|
9536 |
|
|
|
9537 |
|
|
ftype = build_function_type_list (ptr_type_node, ptr_type_node, NULL_TREE);
|
9538 |
|
|
local_define_builtin ("__builtin_adjust_trampoline", ftype,
|
9539 |
|
|
BUILT_IN_ADJUST_TRAMPOLINE,
|
9540 |
|
|
"__builtin_adjust_trampoline",
|
9541 |
|
|
ECF_CONST | ECF_NOTHROW);
|
9542 |
|
|
|
9543 |
|
|
ftype = build_function_type_list (void_type_node,
|
9544 |
|
|
ptr_type_node, ptr_type_node, NULL_TREE);
|
9545 |
|
|
local_define_builtin ("__builtin_nonlocal_goto", ftype,
|
9546 |
|
|
BUILT_IN_NONLOCAL_GOTO,
|
9547 |
|
|
"__builtin_nonlocal_goto",
|
9548 |
|
|
ECF_NORETURN | ECF_NOTHROW);
|
9549 |
|
|
|
9550 |
|
|
ftype = build_function_type_list (void_type_node,
|
9551 |
|
|
ptr_type_node, ptr_type_node, NULL_TREE);
|
9552 |
|
|
local_define_builtin ("__builtin_setjmp_setup", ftype,
|
9553 |
|
|
BUILT_IN_SETJMP_SETUP,
|
9554 |
|
|
"__builtin_setjmp_setup", ECF_NOTHROW);
|
9555 |
|
|
|
9556 |
|
|
ftype = build_function_type_list (ptr_type_node, ptr_type_node, NULL_TREE);
|
9557 |
|
|
local_define_builtin ("__builtin_setjmp_dispatcher", ftype,
|
9558 |
|
|
BUILT_IN_SETJMP_DISPATCHER,
|
9559 |
|
|
"__builtin_setjmp_dispatcher",
|
9560 |
|
|
ECF_PURE | ECF_NOTHROW);
|
9561 |
|
|
|
9562 |
|
|
ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
|
9563 |
|
|
local_define_builtin ("__builtin_setjmp_receiver", ftype,
|
9564 |
|
|
BUILT_IN_SETJMP_RECEIVER,
|
9565 |
|
|
"__builtin_setjmp_receiver", ECF_NOTHROW);
|
9566 |
|
|
|
9567 |
|
|
ftype = build_function_type_list (ptr_type_node, NULL_TREE);
|
9568 |
|
|
local_define_builtin ("__builtin_stack_save", ftype, BUILT_IN_STACK_SAVE,
|
9569 |
|
|
"__builtin_stack_save", ECF_NOTHROW | ECF_LEAF);
|
9570 |
|
|
|
9571 |
|
|
ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
|
9572 |
|
|
local_define_builtin ("__builtin_stack_restore", ftype,
|
9573 |
|
|
BUILT_IN_STACK_RESTORE,
|
9574 |
|
|
"__builtin_stack_restore", ECF_NOTHROW | ECF_LEAF);
|
9575 |
|
|
|
9576 |
|
|
/* If there's a possibility that we might use the ARM EABI, build the
|
9577 |
|
|
alternate __cxa_end_cleanup node used to resume from C++ and Java. */
|
9578 |
|
|
if (targetm.arm_eabi_unwinder)
|
9579 |
|
|
{
|
9580 |
|
|
ftype = build_function_type_list (void_type_node, NULL_TREE);
|
9581 |
|
|
local_define_builtin ("__builtin_cxa_end_cleanup", ftype,
|
9582 |
|
|
BUILT_IN_CXA_END_CLEANUP,
|
9583 |
|
|
"__cxa_end_cleanup", ECF_NORETURN | ECF_LEAF);
|
9584 |
|
|
}
|
9585 |
|
|
|
9586 |
|
|
ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
|
9587 |
|
|
local_define_builtin ("__builtin_unwind_resume", ftype,
|
9588 |
|
|
BUILT_IN_UNWIND_RESUME,
|
9589 |
|
|
((targetm_common.except_unwind_info (&global_options)
|
9590 |
|
|
== UI_SJLJ)
|
9591 |
|
|
? "_Unwind_SjLj_Resume" : "_Unwind_Resume"),
|
9592 |
|
|
ECF_NORETURN);
|
9593 |
|
|
|
9594 |
|
|
if (builtin_decl_explicit (BUILT_IN_RETURN_ADDRESS) == NULL_TREE)
|
9595 |
|
|
{
|
9596 |
|
|
ftype = build_function_type_list (ptr_type_node, integer_type_node,
|
9597 |
|
|
NULL_TREE);
|
9598 |
|
|
local_define_builtin ("__builtin_return_address", ftype,
|
9599 |
|
|
BUILT_IN_RETURN_ADDRESS,
|
9600 |
|
|
"__builtin_return_address",
|
9601 |
|
|
ECF_NOTHROW);
|
9602 |
|
|
}
|
9603 |
|
|
|
9604 |
|
|
if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER)
|
9605 |
|
|
|| !builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
|
9606 |
|
|
{
|
9607 |
|
|
ftype = build_function_type_list (void_type_node, ptr_type_node,
|
9608 |
|
|
ptr_type_node, NULL_TREE);
|
9609 |
|
|
if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER))
|
9610 |
|
|
local_define_builtin ("__cyg_profile_func_enter", ftype,
|
9611 |
|
|
BUILT_IN_PROFILE_FUNC_ENTER,
|
9612 |
|
|
"__cyg_profile_func_enter", 0);
|
9613 |
|
|
if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
|
9614 |
|
|
local_define_builtin ("__cyg_profile_func_exit", ftype,
|
9615 |
|
|
BUILT_IN_PROFILE_FUNC_EXIT,
|
9616 |
|
|
"__cyg_profile_func_exit", 0);
|
9617 |
|
|
}
|
9618 |
|
|
|
9619 |
|
|
/* The exception object and filter values from the runtime. The argument
|
9620 |
|
|
must be zero before exception lowering, i.e. from the front end. After
|
9621 |
|
|
exception lowering, it will be the region number for the exception
|
9622 |
|
|
landing pad. These functions are PURE instead of CONST to prevent
|
9623 |
|
|
them from being hoisted past the exception edge that will initialize
|
9624 |
|
|
its value in the landing pad. */
|
9625 |
|
|
ftype = build_function_type_list (ptr_type_node,
|
9626 |
|
|
integer_type_node, NULL_TREE);
|
9627 |
|
|
ecf_flags = ECF_PURE | ECF_NOTHROW | ECF_LEAF;
|
9628 |
|
|
/* Only use TM_PURE if we have TM language support. */
|
9629 |
|
|
if (builtin_decl_explicit_p (BUILT_IN_TM_LOAD_1))
|
9630 |
|
|
ecf_flags |= ECF_TM_PURE;
|
9631 |
|
|
local_define_builtin ("__builtin_eh_pointer", ftype, BUILT_IN_EH_POINTER,
|
9632 |
|
|
"__builtin_eh_pointer", ecf_flags);
|
9633 |
|
|
|
9634 |
|
|
tmp = lang_hooks.types.type_for_mode (targetm.eh_return_filter_mode (), 0);
|
9635 |
|
|
ftype = build_function_type_list (tmp, integer_type_node, NULL_TREE);
|
9636 |
|
|
local_define_builtin ("__builtin_eh_filter", ftype, BUILT_IN_EH_FILTER,
|
9637 |
|
|
"__builtin_eh_filter", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
|
9638 |
|
|
|
9639 |
|
|
ftype = build_function_type_list (void_type_node,
|
9640 |
|
|
integer_type_node, integer_type_node,
|
9641 |
|
|
NULL_TREE);
|
9642 |
|
|
local_define_builtin ("__builtin_eh_copy_values", ftype,
|
9643 |
|
|
BUILT_IN_EH_COPY_VALUES,
|
9644 |
|
|
"__builtin_eh_copy_values", ECF_NOTHROW);
|
9645 |
|
|
|
9646 |
|
|
/* Complex multiplication and division. These are handled as builtins
|
9647 |
|
|
rather than optabs because emit_library_call_value doesn't support
|
9648 |
|
|
complex. Further, we can do slightly better with folding these
|
9649 |
|
|
beasties if the real and imaginary parts of the arguments are separate. */
|
9650 |
|
|
{
|
9651 |
|
|
int mode;
|
9652 |
|
|
|
9653 |
|
|
for (mode = MIN_MODE_COMPLEX_FLOAT; mode <= MAX_MODE_COMPLEX_FLOAT; ++mode)
|
9654 |
|
|
{
|
9655 |
|
|
char mode_name_buf[4], *q;
|
9656 |
|
|
const char *p;
|
9657 |
|
|
enum built_in_function mcode, dcode;
|
9658 |
|
|
tree type, inner_type;
|
9659 |
|
|
const char *prefix = "__";
|
9660 |
|
|
|
9661 |
|
|
if (targetm.libfunc_gnu_prefix)
|
9662 |
|
|
prefix = "__gnu_";
|
9663 |
|
|
|
9664 |
|
|
type = lang_hooks.types.type_for_mode ((enum machine_mode) mode, 0);
|
9665 |
|
|
if (type == NULL)
|
9666 |
|
|
continue;
|
9667 |
|
|
inner_type = TREE_TYPE (type);
|
9668 |
|
|
|
9669 |
|
|
ftype = build_function_type_list (type, inner_type, inner_type,
|
9670 |
|
|
inner_type, inner_type, NULL_TREE);
|
9671 |
|
|
|
9672 |
|
|
mcode = ((enum built_in_function)
|
9673 |
|
|
(BUILT_IN_COMPLEX_MUL_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
|
9674 |
|
|
dcode = ((enum built_in_function)
|
9675 |
|
|
(BUILT_IN_COMPLEX_DIV_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
|
9676 |
|
|
|
9677 |
|
|
for (p = GET_MODE_NAME (mode), q = mode_name_buf; *p; p++, q++)
|
9678 |
|
|
*q = TOLOWER (*p);
|
9679 |
|
|
*q = '\0';
|
9680 |
|
|
|
9681 |
|
|
built_in_names[mcode] = concat (prefix, "mul", mode_name_buf, "3",
|
9682 |
|
|
NULL);
|
9683 |
|
|
local_define_builtin (built_in_names[mcode], ftype, mcode,
|
9684 |
|
|
built_in_names[mcode],
|
9685 |
|
|
ECF_CONST | ECF_NOTHROW | ECF_LEAF);
|
9686 |
|
|
|
9687 |
|
|
built_in_names[dcode] = concat (prefix, "div", mode_name_buf, "3",
|
9688 |
|
|
NULL);
|
9689 |
|
|
local_define_builtin (built_in_names[dcode], ftype, dcode,
|
9690 |
|
|
built_in_names[dcode],
|
9691 |
|
|
ECF_CONST | ECF_NOTHROW | ECF_LEAF);
|
9692 |
|
|
}
|
9693 |
|
|
}
|
9694 |
|
|
}
|
9695 |
|
|
|
9696 |
|
|
/* HACK. GROSS. This is absolutely disgusting. I wish there was a
|
9697 |
|
|
better way.
|
9698 |
|
|
|
9699 |
|
|
If we requested a pointer to a vector, build up the pointers that
|
9700 |
|
|
we stripped off while looking for the inner type. Similarly for
|
9701 |
|
|
return values from functions.
|
9702 |
|
|
|
9703 |
|
|
The argument TYPE is the top of the chain, and BOTTOM is the
|
9704 |
|
|
new type which we will point to. */
|
9705 |
|
|
|
9706 |
|
|
tree
|
9707 |
|
|
reconstruct_complex_type (tree type, tree bottom)
|
9708 |
|
|
{
|
9709 |
|
|
tree inner, outer;
|
9710 |
|
|
|
9711 |
|
|
if (TREE_CODE (type) == POINTER_TYPE)
|
9712 |
|
|
{
|
9713 |
|
|
inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
|
9714 |
|
|
outer = build_pointer_type_for_mode (inner, TYPE_MODE (type),
|
9715 |
|
|
TYPE_REF_CAN_ALIAS_ALL (type));
|
9716 |
|
|
}
|
9717 |
|
|
else if (TREE_CODE (type) == REFERENCE_TYPE)
|
9718 |
|
|
{
|
9719 |
|
|
inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
|
9720 |
|
|
outer = build_reference_type_for_mode (inner, TYPE_MODE (type),
|
9721 |
|
|
TYPE_REF_CAN_ALIAS_ALL (type));
|
9722 |
|
|
}
|
9723 |
|
|
else if (TREE_CODE (type) == ARRAY_TYPE)
|
9724 |
|
|
{
|
9725 |
|
|
inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
|
9726 |
|
|
outer = build_array_type (inner, TYPE_DOMAIN (type));
|
9727 |
|
|
}
|
9728 |
|
|
else if (TREE_CODE (type) == FUNCTION_TYPE)
|
9729 |
|
|
{
|
9730 |
|
|
inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
|
9731 |
|
|
outer = build_function_type (inner, TYPE_ARG_TYPES (type));
|
9732 |
|
|
}
|
9733 |
|
|
else if (TREE_CODE (type) == METHOD_TYPE)
|
9734 |
|
|
{
|
9735 |
|
|
inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
|
9736 |
|
|
/* The build_method_type_directly() routine prepends 'this' to argument list,
|
9737 |
|
|
so we must compensate by getting rid of it. */
|
9738 |
|
|
outer
|
9739 |
|
|
= build_method_type_directly
|
9740 |
|
|
(TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (type))),
|
9741 |
|
|
inner,
|
9742 |
|
|
TREE_CHAIN (TYPE_ARG_TYPES (type)));
|
9743 |
|
|
}
|
9744 |
|
|
else if (TREE_CODE (type) == OFFSET_TYPE)
|
9745 |
|
|
{
|
9746 |
|
|
inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
|
9747 |
|
|
outer = build_offset_type (TYPE_OFFSET_BASETYPE (type), inner);
|
9748 |
|
|
}
|
9749 |
|
|
else
|
9750 |
|
|
return bottom;
|
9751 |
|
|
|
9752 |
|
|
return build_type_attribute_qual_variant (outer, TYPE_ATTRIBUTES (type),
|
9753 |
|
|
TYPE_QUALS (type));
|
9754 |
|
|
}
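
/* A minimal usage sketch, kept under "#if 0" so it does not enter the
   build: rebuilding the wrapper chain around a new innermost type.  Given
   a pointer to float and a 4-element float vector as the new bottom, the
   result is a pointer to that vector type.  */
#if 0
static tree
example_reconstruct_complex_type (void)
{
  tree float_ptr = build_pointer_type (float_type_node);
  tree v4sf = build_vector_type (float_type_node, 4);

  /* Returns a POINTER_TYPE whose TREE_TYPE is the vector type.  */
  return reconstruct_complex_type (float_ptr, v4sf);
}
#endif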
|
9755 |
|
|
|
9756 |
|
|
/* Returns a vector tree node given a mode (integer, vector, or BLKmode) and
|
9757 |
|
|
the inner type. */
|
9758 |
|
|
tree
|
9759 |
|
|
build_vector_type_for_mode (tree innertype, enum machine_mode mode)
|
9760 |
|
|
{
|
9761 |
|
|
int nunits;
|
9762 |
|
|
|
9763 |
|
|
switch (GET_MODE_CLASS (mode))
|
9764 |
|
|
{
|
9765 |
|
|
case MODE_VECTOR_INT:
|
9766 |
|
|
case MODE_VECTOR_FLOAT:
|
9767 |
|
|
case MODE_VECTOR_FRACT:
|
9768 |
|
|
case MODE_VECTOR_UFRACT:
|
9769 |
|
|
case MODE_VECTOR_ACCUM:
|
9770 |
|
|
case MODE_VECTOR_UACCUM:
|
9771 |
|
|
nunits = GET_MODE_NUNITS (mode);
|
9772 |
|
|
break;
|
9773 |
|
|
|
9774 |
|
|
case MODE_INT:
|
9775 |
|
|
/* Check that there are no leftover bits. */
|
9776 |
|
|
gcc_assert (GET_MODE_BITSIZE (mode)
|
9777 |
|
|
% TREE_INT_CST_LOW (TYPE_SIZE (innertype)) == 0);
|
9778 |
|
|
|
9779 |
|
|
nunits = GET_MODE_BITSIZE (mode)
|
9780 |
|
|
/ TREE_INT_CST_LOW (TYPE_SIZE (innertype));
|
9781 |
|
|
break;
|
9782 |
|
|
|
9783 |
|
|
default:
|
9784 |
|
|
gcc_unreachable ();
|
9785 |
|
|
}
|
9786 |
|
|
|
9787 |
|
|
return make_vector_type (innertype, nunits, mode);
|
9788 |
|
|
}
|
9789 |
|
|
|
9790 |
|
|
/* Similarly, but takes the inner type and number of units, which must be
|
9791 |
|
|
a power of two. */
|
9792 |
|
|
|
9793 |
|
|
tree
|
9794 |
|
|
build_vector_type (tree innertype, int nunits)
|
9795 |
|
|
{
|
9796 |
|
|
return make_vector_type (innertype, nunits, VOIDmode);
|
9797 |
|
|
}
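
/* A minimal usage sketch, kept under "#if 0" so it does not enter the
   build: a 4-element float vector built without naming a machine mode;
   make_vector_type and layout_type pick a suitable vector mode (e.g.
   V4SFmode on targets that provide one).  */
#if 0
static tree
example_build_v4sf (void)
{
  tree v4sf = build_vector_type (float_type_node, 4);
  gcc_assert (TYPE_VECTOR_SUBPARTS (v4sf) == 4);
  return v4sf;
}
#endif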
|
9798 |
|
|
|
9799 |
|
|
/* Similarly, but builds a variant type with TYPE_VECTOR_OPAQUE set. */
|
9800 |
|
|
|
9801 |
|
|
tree
|
9802 |
|
|
build_opaque_vector_type (tree innertype, int nunits)
|
9803 |
|
|
{
|
9804 |
|
|
tree t = make_vector_type (innertype, nunits, VOIDmode);
|
9805 |
|
|
tree cand;
|
9806 |
|
|
/* We always build the non-opaque variant before the opaque one,
|
9807 |
|
|
so if it already exists, it is TYPE_NEXT_VARIANT of this one. */
|
9808 |
|
|
cand = TYPE_NEXT_VARIANT (t);
|
9809 |
|
|
if (cand
|
9810 |
|
|
&& TYPE_VECTOR_OPAQUE (cand)
|
9811 |
|
|
&& check_qualified_type (cand, t, TYPE_QUALS (t)))
|
9812 |
|
|
return cand;
|
9813 |
|
|
/* Otherwise build a variant type and make sure to queue it after
|
9814 |
|
|
the non-opaque type. */
|
9815 |
|
|
cand = build_distinct_type_copy (t);
|
9816 |
|
|
TYPE_VECTOR_OPAQUE (cand) = true;
|
9817 |
|
|
TYPE_CANONICAL (cand) = TYPE_CANONICAL (t);
|
9818 |
|
|
TYPE_NEXT_VARIANT (cand) = TYPE_NEXT_VARIANT (t);
|
9819 |
|
|
TYPE_NEXT_VARIANT (t) = cand;
|
9820 |
|
|
TYPE_MAIN_VARIANT (cand) = TYPE_MAIN_VARIANT (t);
|
9821 |
|
|
return cand;
|
9822 |
|
|
}
|
9823 |
|
|
|
9824 |
|
|
|
9825 |
|
|
/* Given an initializer INIT, return TRUE if INIT is zero or some
|
9826 |
|
|
aggregate of zeros. Otherwise return FALSE. */
|
9827 |
|
|
bool
|
9828 |
|
|
initializer_zerop (const_tree init)
|
9829 |
|
|
{
|
9830 |
|
|
tree elt;
|
9831 |
|
|
|
9832 |
|
|
STRIP_NOPS (init);
|
9833 |
|
|
|
9834 |
|
|
switch (TREE_CODE (init))
|
9835 |
|
|
{
|
9836 |
|
|
case INTEGER_CST:
|
9837 |
|
|
return integer_zerop (init);
|
9838 |
|
|
|
9839 |
|
|
case REAL_CST:
|
9840 |
|
|
/* ??? Note that this is not correct for C4X float formats. There,
|
9841 |
|
|
a bit pattern of all zeros is 1.0; 0.0 is encoded with the most
|
9842 |
|
|
negative exponent. */
|
9843 |
|
|
return real_zerop (init)
|
9844 |
|
|
&& ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (init));
|
9845 |
|
|
|
9846 |
|
|
case FIXED_CST:
|
9847 |
|
|
return fixed_zerop (init);
|
9848 |
|
|
|
9849 |
|
|
case COMPLEX_CST:
|
9850 |
|
|
return integer_zerop (init)
|
9851 |
|
|
|| (real_zerop (init)
|
9852 |
|
|
&& ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_REALPART (init)))
|
9853 |
|
|
&& ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_IMAGPART (init))));
|
9854 |
|
|
|
9855 |
|
|
case VECTOR_CST:
|
9856 |
|
|
for (elt = TREE_VECTOR_CST_ELTS (init); elt; elt = TREE_CHAIN (elt))
|
9857 |
|
|
if (!initializer_zerop (TREE_VALUE (elt)))
|
9858 |
|
|
return false;
|
9859 |
|
|
return true;
|
9860 |
|
|
|
9861 |
|
|
case CONSTRUCTOR:
|
9862 |
|
|
{
|
9863 |
|
|
unsigned HOST_WIDE_INT idx;
|
9864 |
|
|
|
9865 |
|
|
FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (init), idx, elt)
|
9866 |
|
|
if (!initializer_zerop (elt))
|
9867 |
|
|
return false;
|
9868 |
|
|
return true;
|
9869 |
|
|
}
|
9870 |
|
|
|
9871 |
|
|
case STRING_CST:
|
9872 |
|
|
{
|
9873 |
|
|
int i;
|
9874 |
|
|
|
9875 |
|
|
/* We need to loop through all elements to handle cases like
|
9876 |
|
|
"\0" and "\0foobar". */
|
9877 |
|
|
for (i = 0; i < TREE_STRING_LENGTH (init); ++i)
|
9878 |
|
|
if (TREE_STRING_POINTER (init)[i] != '\0')
|
9879 |
|
|
return false;
|
9880 |
|
|
|
9881 |
|
|
return true;
|
9882 |
|
|
}
|
9883 |
|
|
|
9884 |
|
|
default:
|
9885 |
|
|
return false;
|
9886 |
|
|
}
|
9887 |
|
|
}
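
/* A minimal usage sketch, kept under "#if 0" so it does not enter the
   build: an integer zero is accepted, while a STRING_CST containing any
   non-NUL byte is rejected by the loop above.  */
#if 0
static void
example_initializer_zerop (void)
{
  tree zero = build_int_cst (integer_type_node, 0);
  tree str = build_string (3, "\0a\0");	/* Contains a non-NUL byte.  */

  gcc_assert (initializer_zerop (zero));
  gcc_assert (!initializer_zerop (str));
}
#endif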
|
9888 |
|
|
|
9889 |
|
|
/* Build an empty statement at location LOC. */
|
9890 |
|
|
|
9891 |
|
|
tree
|
9892 |
|
|
build_empty_stmt (location_t loc)
|
9893 |
|
|
{
|
9894 |
|
|
tree t = build1 (NOP_EXPR, void_type_node, size_zero_node);
|
9895 |
|
|
SET_EXPR_LOCATION (t, loc);
|
9896 |
|
|
return t;
|
9897 |
|
|
}
|
9898 |
|
|
|
9899 |
|
|
|
9900 |
|
|
/* Build an OpenMP clause with code CODE. LOC is the location of the
|
9901 |
|
|
clause. */
|
9902 |
|
|
|
9903 |
|
|
tree
|
9904 |
|
|
build_omp_clause (location_t loc, enum omp_clause_code code)
|
9905 |
|
|
{
|
9906 |
|
|
tree t;
|
9907 |
|
|
int size, length;
|
9908 |
|
|
|
9909 |
|
|
length = omp_clause_num_ops[code];
|
9910 |
|
|
size = (sizeof (struct tree_omp_clause) + (length - 1) * sizeof (tree));
|
9911 |
|
|
|
9912 |
|
|
record_node_allocation_statistics (OMP_CLAUSE, size);
|
9913 |
|
|
|
9914 |
|
|
t = ggc_alloc_tree_node (size);
|
9915 |
|
|
memset (t, 0, size);
|
9916 |
|
|
TREE_SET_CODE (t, OMP_CLAUSE);
|
9917 |
|
|
OMP_CLAUSE_SET_CODE (t, code);
|
9918 |
|
|
OMP_CLAUSE_LOCATION (t) = loc;
|
9919 |
|
|
|
9920 |
|
|
return t;
|
9921 |
|
|
}
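
/* A minimal usage sketch, kept under "#if 0" so it does not enter the
   build: create a private clause for DECL (assumed to be an existing
   VAR_DECL supplied by the caller) and leave it unchained.  */
#if 0
static tree
example_private_clause (tree decl)
{
  tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_PRIVATE);
  OMP_CLAUSE_DECL (c) = decl;		/* The variable being privatized.  */
  OMP_CLAUSE_CHAIN (c) = NULL_TREE;	/* Clauses chain via TREE_CHAIN.  */
  return c;
}
#endif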
|
9922 |
|
|
|
9923 |
|
|
/* Build a tcc_vl_exp object with code CODE and room for LEN operands. LEN
|
9924 |
|
|
includes the implicit operand count in TREE_OPERAND 0, and so must be >= 1.
|
9925 |
|
|
Except for the CODE and operand count field, other storage for the
|
9926 |
|
|
object is initialized to zeros. */
|
9927 |
|
|
|
9928 |
|
|
tree
|
9929 |
|
|
build_vl_exp_stat (enum tree_code code, int len MEM_STAT_DECL)
|
9930 |
|
|
{
|
9931 |
|
|
tree t;
|
9932 |
|
|
int length = (len - 1) * sizeof (tree) + sizeof (struct tree_exp);
|
9933 |
|
|
|
9934 |
|
|
gcc_assert (TREE_CODE_CLASS (code) == tcc_vl_exp);
|
9935 |
|
|
gcc_assert (len >= 1);
|
9936 |
|
|
|
9937 |
|
|
record_node_allocation_statistics (code, length);
|
9938 |
|
|
|
9939 |
|
|
t = ggc_alloc_zone_cleared_tree_node_stat (&tree_zone, length PASS_MEM_STAT);
|
9940 |
|
|
|
9941 |
|
|
TREE_SET_CODE (t, code);
|
9942 |
|
|
|
9943 |
|
|
/* Can't use TREE_OPERAND to store the length because if checking is
|
9944 |
|
|
enabled, it will try to check the length before we store it. :-P */
|
9945 |
|
|
t->exp.operands[0] = build_int_cst (sizetype, len);
|
9946 |
|
|
|
9947 |
|
|
return t;
|
9948 |
|
|
}
|
9949 |
|
|
|
9950 |
|
|
/* Helper function for build_call_* functions; build a CALL_EXPR with
|
9951 |
|
|
indicated RETURN_TYPE, FN, and NARGS, but do not initialize any of
|
9952 |
|
|
the argument slots. */
|
9953 |
|
|
|
9954 |
|
|
static tree
|
9955 |
|
|
build_call_1 (tree return_type, tree fn, int nargs)
|
9956 |
|
|
{
|
9957 |
|
|
tree t;
|
9958 |
|
|
|
9959 |
|
|
t = build_vl_exp (CALL_EXPR, nargs + 3);
|
9960 |
|
|
TREE_TYPE (t) = return_type;
|
9961 |
|
|
CALL_EXPR_FN (t) = fn;
|
9962 |
|
|
CALL_EXPR_STATIC_CHAIN (t) = NULL;
|
9963 |
|
|
|
9964 |
|
|
return t;
|
9965 |
|
|
}
|
9966 |
|
|
|
9967 |
|
|
/* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
|
9968 |
|
|
FN and a null static chain slot. NARGS is the number of call arguments
|
9969 |
|
|
which are specified as "..." arguments. */
|
9970 |
|
|
|
9971 |
|
|
tree
|
9972 |
|
|
build_call_nary (tree return_type, tree fn, int nargs, ...)
|
9973 |
|
|
{
|
9974 |
|
|
tree ret;
|
9975 |
|
|
va_list args;
|
9976 |
|
|
va_start (args, nargs);
|
9977 |
|
|
ret = build_call_valist (return_type, fn, nargs, args);
|
9978 |
|
|
va_end (args);
|
9979 |
|
|
return ret;
|
9980 |
|
|
}
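
/* A minimal usage sketch, kept under "#if 0" so it does not enter the
   build: a CALL_EXPR to the alloca builtin with one argument.  The FN
   operand of a CALL_EXPR is an address, hence the ADDR_EXPR wrapped
   around the decl.  */
#if 0
static tree
example_call_alloca (void)
{
  tree fndecl = builtin_decl_explicit (BUILT_IN_ALLOCA);
  tree arg = build_int_cst (size_type_node, 64);
  return build_call_nary (ptr_type_node, build_fold_addr_expr (fndecl),
			  1, arg);
}
#endif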
|
9981 |
|
|
|
9982 |
|
|
/* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
|
9983 |
|
|
FN and a null static chain slot. NARGS is the number of call arguments
|
9984 |
|
|
which are specified as a va_list ARGS. */
|
9985 |
|
|
|
9986 |
|
|
tree
|
9987 |
|
|
build_call_valist (tree return_type, tree fn, int nargs, va_list args)
|
9988 |
|
|
{
|
9989 |
|
|
tree t;
|
9990 |
|
|
int i;
|
9991 |
|
|
|
9992 |
|
|
t = build_call_1 (return_type, fn, nargs);
|
9993 |
|
|
for (i = 0; i < nargs; i++)
|
9994 |
|
|
CALL_EXPR_ARG (t, i) = va_arg (args, tree);
|
9995 |
|
|
process_call_operands (t);
|
9996 |
|
|
return t;
|
9997 |
|
|
}
|
9998 |
|
|
|
9999 |
|
|
/* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
|
10000 |
|
|
FN and a null static chain slot. NARGS is the number of call arguments
|
10001 |
|
|
which are specified as a tree array ARGS. */
|
10002 |
|
|
|
10003 |
|
|
tree
|
10004 |
|
|
build_call_array_loc (location_t loc, tree return_type, tree fn,
|
10005 |
|
|
int nargs, const tree *args)
|
10006 |
|
|
{
|
10007 |
|
|
tree t;
|
10008 |
|
|
int i;
|
10009 |
|
|
|
10010 |
|
|
t = build_call_1 (return_type, fn, nargs);
|
10011 |
|
|
for (i = 0; i < nargs; i++)
|
10012 |
|
|
CALL_EXPR_ARG (t, i) = args[i];
|
10013 |
|
|
process_call_operands (t);
|
10014 |
|
|
SET_EXPR_LOCATION (t, loc);
|
10015 |
|
|
return t;
|
10016 |
|
|
}
|
10017 |
|
|
|
10018 |
|
|
/* Like build_call_array, but takes a VEC. */
|
10019 |
|
|
|
10020 |
|
|
tree
|
10021 |
|
|
build_call_vec (tree return_type, tree fn, VEC(tree,gc) *args)
|
10022 |
|
|
{
|
10023 |
|
|
tree ret, t;
|
10024 |
|
|
unsigned int ix;
|
10025 |
|
|
|
10026 |
|
|
ret = build_call_1 (return_type, fn, VEC_length (tree, args));
|
10027 |
|
|
FOR_EACH_VEC_ELT (tree, args, ix, t)
|
10028 |
|
|
CALL_EXPR_ARG (ret, ix) = t;
|
10029 |
|
|
process_call_operands (ret);
|
10030 |
|
|
return ret;
|
10031 |
|
|
}
|
10032 |
|
|
|
10033 |
|
|
|
10034 |
|
|
/* Returns true if it is possible to prove that the index of
|
10035 |
|
|
an array access REF (an ARRAY_REF expression) falls into the
|
10036 |
|
|
array bounds. */
|
10037 |
|
|
|
10038 |
|
|
bool
|
10039 |
|
|
in_array_bounds_p (tree ref)
|
10040 |
|
|
{
|
10041 |
|
|
tree idx = TREE_OPERAND (ref, 1);
|
10042 |
|
|
tree min, max;
|
10043 |
|
|
|
10044 |
|
|
if (TREE_CODE (idx) != INTEGER_CST)
|
10045 |
|
|
return false;
|
10046 |
|
|
|
10047 |
|
|
min = array_ref_low_bound (ref);
|
10048 |
|
|
max = array_ref_up_bound (ref);
|
10049 |
|
|
if (!min
|
10050 |
|
|
|| !max
|
10051 |
|
|
|| TREE_CODE (min) != INTEGER_CST
|
10052 |
|
|
|| TREE_CODE (max) != INTEGER_CST)
|
10053 |
|
|
return false;
|
10054 |
|
|
|
10055 |
|
|
if (tree_int_cst_lt (idx, min)
|
10056 |
|
|
|| tree_int_cst_lt (max, idx))
|
10057 |
|
|
return false;
|
10058 |
|
|
|
10059 |
|
|
return true;
|
10060 |
|
|
}
|
10061 |
|
|
|
10062 |
|
|
/* Returns true if it is possible to prove that the range of
|
10063 |
|
|
an array access REF (an ARRAY_RANGE_REF expression) falls
|
10064 |
|
|
into the array bounds. */
|
10065 |
|
|
|
10066 |
|
|
bool
|
10067 |
|
|
range_in_array_bounds_p (tree ref)
|
10068 |
|
|
{
|
10069 |
|
|
tree domain_type = TYPE_DOMAIN (TREE_TYPE (ref));
|
10070 |
|
|
tree range_min, range_max, min, max;
|
10071 |
|
|
|
10072 |
|
|
range_min = TYPE_MIN_VALUE (domain_type);
|
10073 |
|
|
range_max = TYPE_MAX_VALUE (domain_type);
|
10074 |
|
|
if (!range_min
|
10075 |
|
|
|| !range_max
|
10076 |
|
|
|| TREE_CODE (range_min) != INTEGER_CST
|
10077 |
|
|
|| TREE_CODE (range_max) != INTEGER_CST)
|
10078 |
|
|
return false;
|
10079 |
|
|
|
10080 |
|
|
min = array_ref_low_bound (ref);
|
10081 |
|
|
max = array_ref_up_bound (ref);
|
10082 |
|
|
if (!min
|
10083 |
|
|
|| !max
|
10084 |
|
|
|| TREE_CODE (min) != INTEGER_CST
|
10085 |
|
|
|| TREE_CODE (max) != INTEGER_CST)
|
10086 |
|
|
return false;
|
10087 |
|
|
|
10088 |
|
|
if (tree_int_cst_lt (range_min, min)
|
10089 |
|
|
|| tree_int_cst_lt (max, range_max))
|
10090 |
|
|
return false;
|
10091 |
|
|
|
10092 |
|
|
return true;
|
10093 |
|
|
}
|
10094 |
|
|
|
10095 |
|
|
/* Return true if T (assumed to be a DECL) must be assigned a memory
|
10096 |
|
|
location. */
|
10097 |
|
|
|
10098 |
|
|
bool
|
10099 |
|
|
needs_to_live_in_memory (const_tree t)
|
10100 |
|
|
{
|
10101 |
|
|
if (TREE_CODE (t) == SSA_NAME)
|
10102 |
|
|
t = SSA_NAME_VAR (t);
|
10103 |
|
|
|
10104 |
|
|
return (TREE_ADDRESSABLE (t)
|
10105 |
|
|
|| is_global_var (t)
|
10106 |
|
|
|| (TREE_CODE (t) == RESULT_DECL
|
10107 |
|
|
&& !DECL_BY_REFERENCE (t)
|
10108 |
|
|
&& aggregate_value_p (t, current_function_decl)));
|
10109 |
|
|
}
|
10110 |
|
|
|
10111 |
|
|
/* Return the value of the constant X, sign-extended. */
|
10112 |
|
|
|
10113 |
|
|
HOST_WIDE_INT
|
10114 |
|
|
int_cst_value (const_tree x)
|
10115 |
|
|
{
|
10116 |
|
|
unsigned bits = TYPE_PRECISION (TREE_TYPE (x));
|
10117 |
|
|
unsigned HOST_WIDE_INT val = TREE_INT_CST_LOW (x);
|
10118 |
|
|
|
10119 |
|
|
/* Make sure the sign-extended value will fit in a HOST_WIDE_INT. */
|
10120 |
|
|
gcc_assert (TREE_INT_CST_HIGH (x) == 0
|
10121 |
|
|
|| TREE_INT_CST_HIGH (x) == -1);
|
10122 |
|
|
|
10123 |
|
|
if (bits < HOST_BITS_PER_WIDE_INT)
|
10124 |
|
|
{
|
10125 |
|
|
bool negative = ((val >> (bits - 1)) & 1) != 0;
|
10126 |
|
|
if (negative)
|
10127 |
|
|
val |= (~(unsigned HOST_WIDE_INT) 0) << (bits - 1) << 1;
|
10128 |
|
|
else
|
10129 |
|
|
val &= ~((~(unsigned HOST_WIDE_INT) 0) << (bits - 1) << 1);
|
10130 |
|
|
}
|
10131 |
|
|
|
10132 |
|
|
return val;
|
10133 |
|
|
}
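
/* A minimal usage sketch, kept under "#if 0" so it does not enter the
   build, assuming a target with an 8-bit char: the sign extension is
   driven purely by TYPE_PRECISION, so an 8-bit constant with the top bit
   set comes back negative even when its type is unsigned.  */
#if 0
static void
example_int_cst_value (void)
{
  tree c = build_int_cst (unsigned_char_type_node, 0xff);
  gcc_assert (int_cst_value (c) == -1);
}
#endif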
|
10134 |
|
|
|
10135 |
|
|
/* Return the value of the constant X, sign-extended. */
|
10136 |
|
|
|
10137 |
|
|
HOST_WIDEST_INT
|
10138 |
|
|
widest_int_cst_value (const_tree x)
|
10139 |
|
|
{
|
10140 |
|
|
unsigned bits = TYPE_PRECISION (TREE_TYPE (x));
|
10141 |
|
|
unsigned HOST_WIDEST_INT val = TREE_INT_CST_LOW (x);
|
10142 |
|
|
|
10143 |
|
|
#if HOST_BITS_PER_WIDEST_INT > HOST_BITS_PER_WIDE_INT
|
10144 |
|
|
gcc_assert (HOST_BITS_PER_WIDEST_INT >= 2 * HOST_BITS_PER_WIDE_INT);
|
10145 |
|
|
val |= (((unsigned HOST_WIDEST_INT) TREE_INT_CST_HIGH (x))
|
10146 |
|
|
<< HOST_BITS_PER_WIDE_INT);
|
10147 |
|
|
#else
|
10148 |
|
|
/* Make sure the sign-extended value will fit in a HOST_WIDE_INT. */
|
10149 |
|
|
gcc_assert (TREE_INT_CST_HIGH (x) == 0
|
10150 |
|
|
|| TREE_INT_CST_HIGH (x) == -1);
|
10151 |
|
|
#endif
|
10152 |
|
|
|
10153 |
|
|
if (bits < HOST_BITS_PER_WIDEST_INT)
|
10154 |
|
|
{
|
10155 |
|
|
bool negative = ((val >> (bits - 1)) & 1) != 0;
|
10156 |
|
|
if (negative)
|
10157 |
|
|
val |= (~(unsigned HOST_WIDEST_INT) 0) << (bits - 1) << 1;
|
10158 |
|
|
else
|
10159 |
|
|
val &= ~((~(unsigned HOST_WIDEST_INT) 0) << (bits - 1) << 1);
|
10160 |
|
|
}
|
10161 |
|
|
|
10162 |
|
|
return val;
|
10163 |
|
|
}
|
10164 |
|
|
|
10165 |
|
|
/* If TYPE is an integral type, return an equivalent type which is
|
10166 |
|
|
unsigned iff UNSIGNEDP is true. If TYPE is not an integral type,
|
10167 |
|
|
return TYPE itself. */
|
10168 |
|
|
|
10169 |
|
|
tree
|
10170 |
|
|
signed_or_unsigned_type_for (int unsignedp, tree type)
|
10171 |
|
|
{
|
10172 |
|
|
tree t = type;
|
10173 |
|
|
if (POINTER_TYPE_P (type))
|
10174 |
|
|
{
|
10175 |
|
|
/* If the pointer points to the normal address space, use the
|
10176 |
|
|
size_type_node. Otherwise use an appropriate size for the pointer
|
10177 |
|
|
based on the named address space it points to. */
|
10178 |
|
|
if (!TYPE_ADDR_SPACE (TREE_TYPE (t)))
|
10179 |
|
|
t = size_type_node;
|
10180 |
|
|
else
|
10181 |
|
|
return lang_hooks.types.type_for_size (TYPE_PRECISION (t), unsignedp);
|
10182 |
|
|
}
|
10183 |
|
|
|
10184 |
|
|
if (!INTEGRAL_TYPE_P (t) || TYPE_UNSIGNED (t) == unsignedp)
|
10185 |
|
|
return t;
|
10186 |
|
|
|
10187 |
|
|
return lang_hooks.types.type_for_size (TYPE_PRECISION (t), unsignedp);
|
10188 |
|
|
}
|
10189 |
|
|
|
10190 |
|
|
/* Returns unsigned variant of TYPE. */
|
10191 |
|
|
|
10192 |
|
|
tree
|
10193 |
|
|
unsigned_type_for (tree type)
|
10194 |
|
|
{
|
10195 |
|
|
return signed_or_unsigned_type_for (1, type);
|
10196 |
|
|
}
|
10197 |
|
|
|
10198 |
|
|
/* Returns signed variant of TYPE. */
|
10199 |
|
|
|
10200 |
|
|
tree
|
10201 |
|
|
signed_type_for (tree type)
|
10202 |
|
|
{
|
10203 |
|
|
return signed_or_unsigned_type_for (0, type);
|
10204 |
|
|
}
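
/* A minimal usage sketch, kept under "#if 0" so it does not enter the
   build, assuming a C family front end's type_for_size hook: the unsigned
   variant of int is the ordinary unsigned type node, and a generic
   pointer maps to the size_t-sized integer type.  */
#if 0
static void
example_unsigned_type_for (void)
{
  gcc_assert (unsigned_type_for (integer_type_node) == unsigned_type_node);
  gcc_assert (unsigned_type_for (ptr_type_node) == size_type_node);
}
#endif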
|
10205 |
|
|
|
10206 |
|
|
/* Returns the largest value obtainable by casting something in INNER type to
   OUTER type.  */

tree
upper_bound_in_type (tree outer, tree inner)
{
  double_int high;
  unsigned int det = 0;
  unsigned oprec = TYPE_PRECISION (outer);
  unsigned iprec = TYPE_PRECISION (inner);
  unsigned prec;

  /* Compute a unique number for every combination.  */
  det |= (oprec > iprec) ? 4 : 0;
  det |= TYPE_UNSIGNED (outer) ? 2 : 0;
  det |= TYPE_UNSIGNED (inner) ? 1 : 0;

  /* Determine the exponent to use.  */
  switch (det)
    {
    case 0:
    case 1:
      /* oprec <= iprec, outer: signed, inner: don't care.  */
      prec = oprec - 1;
      break;
    case 2:
    case 3:
      /* oprec <= iprec, outer: unsigned, inner: don't care.  */
      prec = oprec;
      break;
    case 4:
      /* oprec > iprec, outer: signed, inner: signed.  */
      prec = iprec - 1;
      break;
    case 5:
      /* oprec > iprec, outer: signed, inner: unsigned.  */
      prec = iprec;
      break;
    case 6:
      /* oprec > iprec, outer: unsigned, inner: signed.  */
      prec = oprec;
      break;
    case 7:
      /* oprec > iprec, outer: unsigned, inner: unsigned.  */
      prec = iprec;
      break;
    default:
      gcc_unreachable ();
    }

  /* Compute 2^prec - 1.  */
  if (prec <= HOST_BITS_PER_WIDE_INT)
    {
      high.high = 0;
      high.low = ((~(unsigned HOST_WIDE_INT) 0)
                  >> (HOST_BITS_PER_WIDE_INT - prec));
    }
  else
    {
      high.high = ((~(unsigned HOST_WIDE_INT) 0)
                   >> (2 * HOST_BITS_PER_WIDE_INT - prec));
      high.low = ~(unsigned HOST_WIDE_INT) 0;
    }

  return double_int_to_tree (outer, high);
}

/* Returns the smallest value obtainable by casting something in INNER type to
   OUTER type.  */

tree
lower_bound_in_type (tree outer, tree inner)
{
  double_int low;
  unsigned oprec = TYPE_PRECISION (outer);
  unsigned iprec = TYPE_PRECISION (inner);

  /* If OUTER type is unsigned, we can definitely cast 0 to OUTER type
     and obtain 0.  */
  if (TYPE_UNSIGNED (outer)
      /* If we are widening something of an unsigned type, OUTER type
         contains all values of INNER type.  In particular, both INNER
         and OUTER types have zero in common.  */
      || (oprec > iprec && TYPE_UNSIGNED (inner)))
    low.low = low.high = 0;
  else
    {
      /* If we are widening a signed type to another signed type, we
         want to obtain -2^(iprec-1).  If we are keeping the
         precision or narrowing to a signed type, we want to obtain
         -2^(oprec-1).  */
      unsigned prec = oprec > iprec ? iprec : oprec;

      if (prec <= HOST_BITS_PER_WIDE_INT)
        {
          low.high = ~(unsigned HOST_WIDE_INT) 0;
          low.low = (~(unsigned HOST_WIDE_INT) 0) << (prec - 1);
        }
      else
        {
          low.high = ((~(unsigned HOST_WIDE_INT) 0)
                      << (prec - HOST_BITS_PER_WIDE_INT - 1));
          low.low = 0;
        }
    }

  return double_int_to_tree (outer, low);
}

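/* Worked example for the two bound functions above (illustrative precisions
   only): casting an 8-bit signed INNER to a 32-bit unsigned OUTER is case 6
   in upper_bound_in_type (oprec > iprec, outer unsigned, inner signed), so
   PREC is 32 and the upper bound is 0xffffffff, while lower_bound_in_type
   returns 0 because OUTER is unsigned.  Widening 8-bit signed to 32-bit
   signed is case 4, giving an upper bound of 2^7 - 1 == 127 and a lower
   bound of -2^7 == -128.  */
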
/* Return nonzero if two operands that are suitable for PHI nodes are
   necessarily equal.  Specifically, both ARG0 and ARG1 must be either
   SSA_NAME or invariant.  Note that this is strictly an optimization.
   That is, callers of this function can directly call operand_equal_p
   and get the same result, only slower.  */

int
operand_equal_for_phi_arg_p (const_tree arg0, const_tree arg1)
{
  if (arg0 == arg1)
    return 1;
  if (TREE_CODE (arg0) == SSA_NAME || TREE_CODE (arg1) == SSA_NAME)
    return 0;
  return operand_equal_p (arg0, arg1, 0);
}

/* Returns number of zeros at the end of binary representation of X.

   ??? Use ffs if available?  */

tree
num_ending_zeros (const_tree x)
{
  unsigned HOST_WIDE_INT fr, nfr;
  unsigned num, abits;
  tree type = TREE_TYPE (x);

  if (TREE_INT_CST_LOW (x) == 0)
    {
      num = HOST_BITS_PER_WIDE_INT;
      fr = TREE_INT_CST_HIGH (x);
    }
  else
    {
      num = 0;
      fr = TREE_INT_CST_LOW (x);
    }

  for (abits = HOST_BITS_PER_WIDE_INT / 2; abits; abits /= 2)
    {
      nfr = fr >> abits;
      if (nfr << abits == fr)
        {
          num += abits;
          fr = nfr;
        }
    }

  if (num > TYPE_PRECISION (type))
    num = TYPE_PRECISION (type);

  return build_int_cst_type (type, num);
}


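/* Worked example for num_ending_zeros (illustrative, assuming a 64-bit
   HOST_WIDE_INT): for X == 48 the low word is nonzero, so NUM starts at 0
   and FR at 48 (binary 110000); the halving loop fails for ABITS of 32, 16
   and 8, succeeds for 4 (48 == 3 << 4, leaving FR == 3), then fails for 2
   and 1, so the result is the INTEGER_CST 4 -- the number of trailing zero
   bits of 48.  */
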
#define WALK_SUBTREE(NODE)                              \
  do                                                    \
    {                                                   \
      result = walk_tree_1 (&(NODE), func, data, pset, lh);     \
      if (result)                                       \
        return result;                                  \
    }                                                   \
  while (0)

/* This is a subroutine of walk_tree that walks fields of TYPE that are to
   be walked whenever a type is seen in the tree.  Rest of operands and return
   value are as for walk_tree.  */

static tree
walk_type_fields (tree type, walk_tree_fn func, void *data,
                  struct pointer_set_t *pset, walk_tree_lh lh)
{
  tree result = NULL_TREE;

  switch (TREE_CODE (type))
    {
    case POINTER_TYPE:
    case REFERENCE_TYPE:
      /* We have to worry about mutually recursive pointers.  These can't
         be written in C.  They can in Ada.  It's pathological, but
         there's an ACATS test (c38102a) that checks it.  Deal with this
         by checking if we're pointing to another pointer, that one
         points to another pointer, that one does too, and we have no htab.
         If so, get a hash table.  We check three levels deep to avoid
         the cost of the hash table if we don't need one.  */
      if (POINTER_TYPE_P (TREE_TYPE (type))
          && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (type)))
          && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (TREE_TYPE (type))))
          && !pset)
        {
          result = walk_tree_without_duplicates (&TREE_TYPE (type),
                                                 func, data);
          if (result)
            return result;

          break;
        }

      /* ... fall through ... */

    case COMPLEX_TYPE:
      WALK_SUBTREE (TREE_TYPE (type));
      break;

    case METHOD_TYPE:
      WALK_SUBTREE (TYPE_METHOD_BASETYPE (type));

      /* Fall through.  */

    case FUNCTION_TYPE:
      WALK_SUBTREE (TREE_TYPE (type));
      {
        tree arg;

        /* We never want to walk into default arguments.  */
        for (arg = TYPE_ARG_TYPES (type); arg; arg = TREE_CHAIN (arg))
          WALK_SUBTREE (TREE_VALUE (arg));
      }
      break;

    case ARRAY_TYPE:
      /* Don't follow this node's type if it is a pointer, for fear that
         we'll have infinite recursion.  If we have a PSET, then we
         need not fear.  */
      if (pset
          || (!POINTER_TYPE_P (TREE_TYPE (type))
              && TREE_CODE (TREE_TYPE (type)) != OFFSET_TYPE))
        WALK_SUBTREE (TREE_TYPE (type));
      WALK_SUBTREE (TYPE_DOMAIN (type));
      break;

    case OFFSET_TYPE:
      WALK_SUBTREE (TREE_TYPE (type));
      WALK_SUBTREE (TYPE_OFFSET_BASETYPE (type));
      break;

    default:
      break;
    }

  return NULL_TREE;
}

/* Apply FUNC to all the sub-trees of TP in a pre-order traversal.  FUNC is
   called with the DATA and the address of each sub-tree.  If FUNC returns a
   non-NULL value, the traversal is stopped, and the value returned by FUNC
   is returned.  If PSET is non-NULL it is used to record the nodes visited,
   and to avoid visiting a node more than once.  */

tree
walk_tree_1 (tree *tp, walk_tree_fn func, void *data,
             struct pointer_set_t *pset, walk_tree_lh lh)
{
  enum tree_code code;
  int walk_subtrees;
  tree result;

#define WALK_SUBTREE_TAIL(NODE)                         \
  do                                                    \
    {                                                   \
      tp = & (NODE);                                    \
      goto tail_recurse;                                \
    }                                                   \
  while (0)

 tail_recurse:
  /* Skip empty subtrees.  */
  if (!*tp)
    return NULL_TREE;

  /* Don't walk the same tree twice, if the user has requested
     that we avoid doing so.  */
  if (pset && pointer_set_insert (pset, *tp))
    return NULL_TREE;

  /* Call the function.  */
  walk_subtrees = 1;
  result = (*func) (tp, &walk_subtrees, data);

  /* If we found something, return it.  */
  if (result)
    return result;

  code = TREE_CODE (*tp);

  /* Even if we didn't, FUNC may have decided that there was nothing
     interesting below this point in the tree.  */
  if (!walk_subtrees)
    {
      /* But we still need to check our siblings.  */
      if (code == TREE_LIST)
        WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
      else if (code == OMP_CLAUSE)
        WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
      else
        return NULL_TREE;
    }

  if (lh)
    {
      result = (*lh) (tp, &walk_subtrees, func, data, pset);
      if (result || !walk_subtrees)
        return result;
    }

  switch (code)
    {
    case ERROR_MARK:
    case IDENTIFIER_NODE:
    case INTEGER_CST:
    case REAL_CST:
    case FIXED_CST:
    case VECTOR_CST:
    case STRING_CST:
    case BLOCK:
    case PLACEHOLDER_EXPR:
    case SSA_NAME:
    case FIELD_DECL:
    case RESULT_DECL:
      /* None of these have subtrees other than those already walked
         above.  */
      break;

    case TREE_LIST:
      WALK_SUBTREE (TREE_VALUE (*tp));
      WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
      break;

    case TREE_VEC:
      {
        int len = TREE_VEC_LENGTH (*tp);

        if (len == 0)
          break;

        /* Walk all elements but the first.  */
        while (--len)
          WALK_SUBTREE (TREE_VEC_ELT (*tp, len));

        /* Now walk the first one as a tail call.  */
        WALK_SUBTREE_TAIL (TREE_VEC_ELT (*tp, 0));
      }

    case COMPLEX_CST:
      WALK_SUBTREE (TREE_REALPART (*tp));
      WALK_SUBTREE_TAIL (TREE_IMAGPART (*tp));

    case CONSTRUCTOR:
      {
        unsigned HOST_WIDE_INT idx;
        constructor_elt *ce;

        for (idx = 0;
             VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (*tp), idx, ce);
             idx++)
          WALK_SUBTREE (ce->value);
      }
      break;

    case SAVE_EXPR:
      WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, 0));

    case BIND_EXPR:
      {
        tree decl;
        for (decl = BIND_EXPR_VARS (*tp); decl; decl = DECL_CHAIN (decl))
          {
            /* Walk the DECL_INITIAL and DECL_SIZE.  We don't want to walk
               into declarations that are just mentioned, rather than
               declared; they don't really belong to this part of the tree.
               And, we can see cycles: the initializer for a declaration
               can refer to the declaration itself.  */
            WALK_SUBTREE (DECL_INITIAL (decl));
            WALK_SUBTREE (DECL_SIZE (decl));
            WALK_SUBTREE (DECL_SIZE_UNIT (decl));
          }
        WALK_SUBTREE_TAIL (BIND_EXPR_BODY (*tp));
      }

    case STATEMENT_LIST:
      {
        tree_stmt_iterator i;
        for (i = tsi_start (*tp); !tsi_end_p (i); tsi_next (&i))
          WALK_SUBTREE (*tsi_stmt_ptr (i));
      }
      break;

    case OMP_CLAUSE:
      switch (OMP_CLAUSE_CODE (*tp))
        {
        case OMP_CLAUSE_PRIVATE:
        case OMP_CLAUSE_SHARED:
        case OMP_CLAUSE_FIRSTPRIVATE:
        case OMP_CLAUSE_COPYIN:
        case OMP_CLAUSE_COPYPRIVATE:
        case OMP_CLAUSE_FINAL:
        case OMP_CLAUSE_IF:
        case OMP_CLAUSE_NUM_THREADS:
        case OMP_CLAUSE_SCHEDULE:
          WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 0));
          /* FALLTHRU */

        case OMP_CLAUSE_NOWAIT:
        case OMP_CLAUSE_ORDERED:
        case OMP_CLAUSE_DEFAULT:
        case OMP_CLAUSE_UNTIED:
        case OMP_CLAUSE_MERGEABLE:
          WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));

        case OMP_CLAUSE_LASTPRIVATE:
          WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
          WALK_SUBTREE (OMP_CLAUSE_LASTPRIVATE_STMT (*tp));
          WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));

        case OMP_CLAUSE_COLLAPSE:
          {
            int i;
            for (i = 0; i < 3; i++)
              WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, i));
            WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
          }

        case OMP_CLAUSE_REDUCTION:
          {
            int i;
            for (i = 0; i < 4; i++)
              WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, i));
            WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
          }

        default:
          gcc_unreachable ();
        }
      break;

    case TARGET_EXPR:
      {
        int i, len;

        /* TARGET_EXPRs are peculiar: operands 1 and 3 can be the same.
           But, we only want to walk once.  */
        len = (TREE_OPERAND (*tp, 3) == TREE_OPERAND (*tp, 1)) ? 2 : 3;
        for (i = 0; i < len; ++i)
          WALK_SUBTREE (TREE_OPERAND (*tp, i));
        WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len));
      }

    case DECL_EXPR:
      /* If this is a TYPE_DECL, walk into the fields of the type that it's
         defining.  We only want to walk into these fields of a type in this
         case and not in the general case of a mere reference to the type.

         The criterion is as follows: if the field can be an expression, it
         must be walked only here.  This should be in keeping with the fields
         that are directly gimplified in gimplify_type_sizes in order for the
         mark/copy-if-shared/unmark machinery of the gimplifier to work with
         variable-sized types.

         Note that DECLs get walked as part of processing the BIND_EXPR.  */
      if (TREE_CODE (DECL_EXPR_DECL (*tp)) == TYPE_DECL)
        {
          tree *type_p = &TREE_TYPE (DECL_EXPR_DECL (*tp));
          if (TREE_CODE (*type_p) == ERROR_MARK)
            return NULL_TREE;

          /* Call the function for the type.  See if it returns anything or
             doesn't want us to continue.  If we are to continue, walk both
             the normal fields and those for the declaration case.  */
          result = (*func) (type_p, &walk_subtrees, data);
          if (result || !walk_subtrees)
            return result;

          /* But do not walk a pointed-to type since it may itself need to
             be walked in the declaration case if it isn't anonymous.  */
          if (!POINTER_TYPE_P (*type_p))
            {
              result = walk_type_fields (*type_p, func, data, pset, lh);
              if (result)
                return result;
            }

          /* If this is a record type, also walk the fields.  */
          if (RECORD_OR_UNION_TYPE_P (*type_p))
            {
              tree field;

              for (field = TYPE_FIELDS (*type_p); field;
                   field = DECL_CHAIN (field))
                {
                  /* We'd like to look at the type of the field, but we can
                     easily get infinite recursion.  So assume it's pointed
                     to elsewhere in the tree.  Also, ignore things that
                     aren't fields.  */
                  if (TREE_CODE (field) != FIELD_DECL)
                    continue;

                  WALK_SUBTREE (DECL_FIELD_OFFSET (field));
                  WALK_SUBTREE (DECL_SIZE (field));
                  WALK_SUBTREE (DECL_SIZE_UNIT (field));
                  if (TREE_CODE (*type_p) == QUAL_UNION_TYPE)
                    WALK_SUBTREE (DECL_QUALIFIER (field));
                }
            }

          /* Same for scalar types.  */
          else if (TREE_CODE (*type_p) == BOOLEAN_TYPE
                   || TREE_CODE (*type_p) == ENUMERAL_TYPE
                   || TREE_CODE (*type_p) == INTEGER_TYPE
                   || TREE_CODE (*type_p) == FIXED_POINT_TYPE
                   || TREE_CODE (*type_p) == REAL_TYPE)
            {
              WALK_SUBTREE (TYPE_MIN_VALUE (*type_p));
              WALK_SUBTREE (TYPE_MAX_VALUE (*type_p));
            }

          WALK_SUBTREE (TYPE_SIZE (*type_p));
          WALK_SUBTREE_TAIL (TYPE_SIZE_UNIT (*type_p));
        }
      /* FALLTHRU */

    default:
      if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
        {
          int i, len;

          /* Walk over all the sub-trees of this operand.  */
          len = TREE_OPERAND_LENGTH (*tp);

          /* Go through the subtrees.  We need to do this in forward order so
             that the scope of a FOR_EXPR is handled properly.  */
          if (len)
            {
              for (i = 0; i < len - 1; ++i)
                WALK_SUBTREE (TREE_OPERAND (*tp, i));
              WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len - 1));
            }
        }
      /* If this is a type, walk the needed fields in the type.  */
      else if (TYPE_P (*tp))
        return walk_type_fields (*tp, func, data, pset, lh);
      break;
    }

  /* We didn't find what we were looking for.  */
  return NULL_TREE;

#undef WALK_SUBTREE_TAIL
}
#undef WALK_SUBTREE

/* Like walk_tree, but does not walk duplicate nodes more than once.  */

tree
walk_tree_without_duplicates_1 (tree *tp, walk_tree_fn func, void *data,
                                walk_tree_lh lh)
{
  tree result;
  struct pointer_set_t *pset;

  pset = pointer_set_create ();
  result = walk_tree_1 (tp, func, data, pset, lh);
  pointer_set_destroy (pset);
  return result;
}


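/* A minimal sketch of a walk_tree callback (hypothetical helper, not part of
   this file): it counts the SSA_NAMEs reachable from an expression and
   prunes the walk below any type node.

     static tree
     count_ssa_names_r (tree *tp, int *walk_subtrees, void *data)
     {
       if (TREE_CODE (*tp) == SSA_NAME)
         ++*(unsigned *) data;
       else if (TYPE_P (*tp))
         *walk_subtrees = 0;
       return NULL_TREE;
     }

   A caller would then do

     unsigned count = 0;
     walk_tree_without_duplicates_1 (&expr, count_ssa_names_r, &count, NULL);

   relying on the NULL_TREE return value to keep the traversal going.  */
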
tree *
tree_block (tree t)
{
  char const c = TREE_CODE_CLASS (TREE_CODE (t));

  if (IS_EXPR_CODE_CLASS (c))
    return &t->exp.block;
  gcc_unreachable ();
  return NULL;
}

/* Create a nameless artificial label and put it in the current
   function context.  The label has a location of LOC.  Returns the
   newly created label.  */

tree
create_artificial_label (location_t loc)
{
  tree lab = build_decl (loc,
                         LABEL_DECL, NULL_TREE, void_type_node);

  DECL_ARTIFICIAL (lab) = 1;
  DECL_IGNORED_P (lab) = 1;
  DECL_CONTEXT (lab) = current_function_decl;
  return lab;
}

/* Given a tree, try to return a useful variable name that we can use
   to prefix a temporary that is being assigned the value of the tree.
   I.e. given  <temp> = &A, return A.  */

const char *
get_name (tree t)
{
  tree stripped_decl;

  stripped_decl = t;
  STRIP_NOPS (stripped_decl);
  if (DECL_P (stripped_decl) && DECL_NAME (stripped_decl))
    return IDENTIFIER_POINTER (DECL_NAME (stripped_decl));
  else
    {
      switch (TREE_CODE (stripped_decl))
        {
        case ADDR_EXPR:
          return get_name (TREE_OPERAND (stripped_decl, 0));
        default:
          return NULL;
        }
    }
}

/* Return true if TYPE has a variable argument list.  */

bool
stdarg_p (const_tree fntype)
{
  function_args_iterator args_iter;
  tree n = NULL_TREE, t;

  if (!fntype)
    return false;

  FOREACH_FUNCTION_ARGS (fntype, t, args_iter)
    {
      n = t;
    }

  return n != NULL_TREE && n != void_type_node;
}

/* Return true if TYPE has a prototype.  */

bool
prototype_p (tree fntype)
{
  tree t;

  gcc_assert (fntype != NULL_TREE);

  t = TYPE_ARG_TYPES (fntype);
  return (t != NULL_TREE);
}

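/* Illustrative behaviour of the two predicates above (hypothetical C
   declarations): for "int f (int, ...)" both stdarg_p and prototype_p are
   true; for "int h (void)" the argument list ends in void_type_node, so only
   prototype_p is true; for an unprototyped K&R-style "int g ()" the argument
   list is empty and both return false.  */
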
/* If BLOCK is inlined from an __attribute__((__artificial__))
   routine, return a pointer to the location from which it has been
   called.  */
location_t *
block_nonartificial_location (tree block)
{
  location_t *ret = NULL;

  while (block && TREE_CODE (block) == BLOCK
         && BLOCK_ABSTRACT_ORIGIN (block))
    {
      tree ao = BLOCK_ABSTRACT_ORIGIN (block);

      while (TREE_CODE (ao) == BLOCK
             && BLOCK_ABSTRACT_ORIGIN (ao)
             && BLOCK_ABSTRACT_ORIGIN (ao) != ao)
        ao = BLOCK_ABSTRACT_ORIGIN (ao);

      if (TREE_CODE (ao) == FUNCTION_DECL)
        {
          /* If AO is an artificial inline, point RET to the
             call site locus at which it has been inlined and continue
             the loop, in case AO's caller is also an artificial
             inline.  */
          if (DECL_DECLARED_INLINE_P (ao)
              && lookup_attribute ("artificial", DECL_ATTRIBUTES (ao)))
            ret = &BLOCK_SOURCE_LOCATION (block);
          else
            break;
        }
      else if (TREE_CODE (ao) != BLOCK)
        break;

      block = BLOCK_SUPERCONTEXT (block);
    }
  return ret;
}


/* If EXP is inlined from an __attribute__((__artificial__))
   function, return the location of the original call expression.  */

location_t
tree_nonartificial_location (tree exp)
{
  location_t *loc = block_nonartificial_location (TREE_BLOCK (exp));

  if (loc)
    return *loc;
  else
    return EXPR_LOCATION (exp);
}


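/* Illustrative scenario for the two helpers above (hypothetical user code):
   fortified wrappers are commonly declared along the lines of

     extern __inline __attribute__ ((__always_inline__, __artificial__))
     char *strcpy (char *dest, const char *src) { ... }

   and when a diagnostic is issued for a tree inlined from such a wrapper,
   tree_nonartificial_location yields the location of the user's call rather
   than a line inside the wrapper's body.  */
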
/* These are the hash table functions for the hash table of OPTIMIZATION_NODE
   nodes.  */

/* Return the hash code for X, an OPTIMIZATION_NODE or TARGET_OPTION_NODE.  */

static hashval_t
cl_option_hash_hash (const void *x)
{
  const_tree const t = (const_tree) x;
  const char *p;
  size_t i;
  size_t len = 0;
  hashval_t hash = 0;

  if (TREE_CODE (t) == OPTIMIZATION_NODE)
    {
      p = (const char *)TREE_OPTIMIZATION (t);
      len = sizeof (struct cl_optimization);
    }

  else if (TREE_CODE (t) == TARGET_OPTION_NODE)
    {
      p = (const char *)TREE_TARGET_OPTION (t);
      len = sizeof (struct cl_target_option);
    }

  else
    gcc_unreachable ();

  /* Assume most opt flags are just 0/1, some are 2-3, and a few might be
     something else.  */
  for (i = 0; i < len; i++)
    if (p[i])
      hash = (hash << 4) ^ ((i << 2) | p[i]);

  return hash;
}

/* Return nonzero if the value represented by *X (an OPTIMIZATION or
   TARGET_OPTION tree node) is the same as that given by *Y, a node of
   the same kind.  */

static int
cl_option_hash_eq (const void *x, const void *y)
{
  const_tree const xt = (const_tree) x;
  const_tree const yt = (const_tree) y;
  const char *xp;
  const char *yp;
  size_t len;

  if (TREE_CODE (xt) != TREE_CODE (yt))
    return 0;

  if (TREE_CODE (xt) == OPTIMIZATION_NODE)
    {
      xp = (const char *)TREE_OPTIMIZATION (xt);
      yp = (const char *)TREE_OPTIMIZATION (yt);
      len = sizeof (struct cl_optimization);
    }

  else if (TREE_CODE (xt) == TARGET_OPTION_NODE)
    {
      xp = (const char *)TREE_TARGET_OPTION (xt);
      yp = (const char *)TREE_TARGET_OPTION (yt);
      len = sizeof (struct cl_target_option);
    }

  else
    gcc_unreachable ();

  return (memcmp (xp, yp, len) == 0);
}

/* Build an OPTIMIZATION_NODE based on the current options.  */

tree
build_optimization_node (void)
{
  tree t;
  void **slot;

  /* Use the cache of optimization nodes.  */

  cl_optimization_save (TREE_OPTIMIZATION (cl_optimization_node),
                        &global_options);

  slot = htab_find_slot (cl_option_hash_table, cl_optimization_node, INSERT);
  t = (tree) *slot;
  if (!t)
    {
      /* Insert this one into the hash table.  */
      t = cl_optimization_node;
      *slot = t;

      /* Make a new node for next time round.  */
      cl_optimization_node = make_node (OPTIMIZATION_NODE);
    }

  return t;
}

/* Build a TARGET_OPTION_NODE based on the current options.  */

tree
build_target_option_node (void)
{
  tree t;
  void **slot;

  /* Use the cache of target option nodes.  */

  cl_target_option_save (TREE_TARGET_OPTION (cl_target_option_node),
                         &global_options);

  slot = htab_find_slot (cl_option_hash_table, cl_target_option_node, INSERT);
  t = (tree) *slot;
  if (!t)
    {
      /* Insert this one into the hash table.  */
      t = cl_target_option_node;
      *slot = t;

      /* Make a new node for next time round.  */
      cl_target_option_node = make_node (TARGET_OPTION_NODE);
    }

  return t;
}

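/* Sketch of how the cached option nodes above are typically consumed
   (hypothetical caller): after updating global_options, a front end can do

     tree opts = build_optimization_node ();
     DECL_FUNCTION_SPECIFIC_OPTIMIZATION (fndecl) = opts;

   Functions compiled with identical option sets receive the very same
   OPTIMIZATION_NODE, because the hash table driven by cl_option_hash_hash
   and cl_option_hash_eq deduplicates the nodes.  */
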
/* Determine the "ultimate origin" of a block.  The block may be an inlined
   instance of an inlined instance of a block which is local to an inline
   function, so we have to trace all of the way back through the origin chain
   to find out what sort of node actually served as the original seed for the
   given block.  */

tree
block_ultimate_origin (const_tree block)
{
  tree immediate_origin = BLOCK_ABSTRACT_ORIGIN (block);

  /* output_inline_function sets BLOCK_ABSTRACT_ORIGIN for all the
     nodes in the function to point to themselves; ignore that if
     we're trying to output the abstract instance of this function.  */
  if (BLOCK_ABSTRACT (block) && immediate_origin == block)
    return NULL_TREE;

  if (immediate_origin == NULL_TREE)
    return NULL_TREE;
  else
    {
      tree ret_val;
      tree lookahead = immediate_origin;

      do
        {
          ret_val = lookahead;
          lookahead = (TREE_CODE (ret_val) == BLOCK
                       ? BLOCK_ABSTRACT_ORIGIN (ret_val) : NULL);
        }
      while (lookahead != NULL && lookahead != ret_val);

      /* The block's abstract origin chain may not be the *ultimate* origin of
         the block.  It could lead to a DECL that has an abstract origin set.
         If so, we want that DECL's abstract origin (which is what DECL_ORIGIN
         will give us if it has one).  Note that DECL's abstract origins are
         supposed to be the most distant ancestor (or so decl_ultimate_origin
         claims), so we don't need to loop following the DECL origins.  */
      if (DECL_P (ret_val))
        return DECL_ORIGIN (ret_val);

      return ret_val;
    }
}

/* Return true if T1 and T2 are equivalent lists.  */

bool
list_equal_p (const_tree t1, const_tree t2)
{
  for (; t1 && t2; t1 = TREE_CHAIN (t1), t2 = TREE_CHAIN (t2))
    if (TREE_VALUE (t1) != TREE_VALUE (t2))
      return false;
  return !t1 && !t2;
}

/* Return true iff conversion in EXP generates no instruction.  Mark
   it inline so that we fully inline into the stripping functions even
   though we have two uses of this function.  */

static inline bool
tree_nop_conversion (const_tree exp)
{
  tree outer_type, inner_type;

  if (!CONVERT_EXPR_P (exp)
      && TREE_CODE (exp) != NON_LVALUE_EXPR)
    return false;
  if (TREE_OPERAND (exp, 0) == error_mark_node)
    return false;

  outer_type = TREE_TYPE (exp);
  inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));

  if (!inner_type)
    return false;

  /* Use precision rather than machine mode when we can, which gives
     the correct answer even for submode (bit-field) types.  */
  if ((INTEGRAL_TYPE_P (outer_type)
       || POINTER_TYPE_P (outer_type)
       || TREE_CODE (outer_type) == OFFSET_TYPE)
      && (INTEGRAL_TYPE_P (inner_type)
          || POINTER_TYPE_P (inner_type)
          || TREE_CODE (inner_type) == OFFSET_TYPE))
    return TYPE_PRECISION (outer_type) == TYPE_PRECISION (inner_type);

  /* Otherwise fall back on comparing machine modes (e.g. for
     aggregate types, floats).  */
  return TYPE_MODE (outer_type) == TYPE_MODE (inner_type);
}

/* Return true iff conversion in EXP generates no instruction.  Don't
   consider conversions changing the signedness.  */

static bool
tree_sign_nop_conversion (const_tree exp)
{
  tree outer_type, inner_type;

  if (!tree_nop_conversion (exp))
    return false;

  outer_type = TREE_TYPE (exp);
  inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));

  return (TYPE_UNSIGNED (outer_type) == TYPE_UNSIGNED (inner_type)
          && POINTER_TYPE_P (outer_type) == POINTER_TYPE_P (inner_type));
}

/* Strip conversions from EXP according to tree_nop_conversion and
   return the resulting expression.  */

tree
tree_strip_nop_conversions (tree exp)
{
  while (tree_nop_conversion (exp))
    exp = TREE_OPERAND (exp, 0);
  return exp;
}

/* Strip conversions from EXP according to tree_sign_nop_conversion
   and return the resulting expression.  */

tree
tree_strip_sign_nop_conversions (tree exp)
{
  while (tree_sign_nop_conversion (exp))
    exp = TREE_OPERAND (exp, 0);
  return exp;
}

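/* Illustrative use of the strippers above (hypothetical trees): for
   EXP == (unsigned int) n with "n" of type int, the conversion changes only
   the sign, so tree_strip_nop_conversions returns "n" itself while
   tree_strip_sign_nop_conversions keeps the conversion; for
   EXP == (int) (long long) n, where the two precisions differ, neither
   function strips anything.  */
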
|
/* Strip out all handled components that produce invariant
|
11182 |
|
|
offsets. */
|
11183 |
|
|
|
11184 |
|
|
const_tree
|
11185 |
|
|
strip_invariant_refs (const_tree op)
|
11186 |
|
|
{
|
11187 |
|
|
while (handled_component_p (op))
|
11188 |
|
|
{
|
11189 |
|
|
switch (TREE_CODE (op))
|
11190 |
|
|
{
|
11191 |
|
|
case ARRAY_REF:
|
11192 |
|
|
case ARRAY_RANGE_REF:
|
11193 |
|
|
if (!is_gimple_constant (TREE_OPERAND (op, 1))
|
11194 |
|
|
|| TREE_OPERAND (op, 2) != NULL_TREE
|
11195 |
|
|
|| TREE_OPERAND (op, 3) != NULL_TREE)
|
11196 |
|
|
return NULL;
|
11197 |
|
|
break;
|
11198 |
|
|
|
11199 |
|
|
case COMPONENT_REF:
|
11200 |
|
|
if (TREE_OPERAND (op, 2) != NULL_TREE)
|
11201 |
|
|
return NULL;
|
11202 |
|
|
break;
|
11203 |
|
|
|
11204 |
|
|
default:;
|
11205 |
|
|
}
|
11206 |
|
|
op = TREE_OPERAND (op, 0);
|
11207 |
|
|
}
|
11208 |
|
|
|
11209 |
|
|
return op;
|
11210 |
|
|
}
|
11211 |
|
|
|
11212 |
|
|
static GTY(()) tree gcc_eh_personality_decl;
|
11213 |
|
|
|
11214 |
|
|
/* Return the GCC personality function decl. */
|
11215 |
|
|
|
11216 |
|
|
tree
|
11217 |
|
|
lhd_gcc_personality (void)
|
11218 |
|
|
{
|
11219 |
|
|
if (!gcc_eh_personality_decl)
|
11220 |
|
|
gcc_eh_personality_decl = build_personality_function ("gcc");
|
11221 |
|
|
return gcc_eh_personality_decl;
|
11222 |
|
|
}
|
11223 |
|
|
|
11224 |
|
|
/* Try to find a base info of BINFO that would have its field decl at offset
|
11225 |
|
|
OFFSET within the BINFO type and which is of EXPECTED_TYPE. If it can be
|
11226 |
|
|
found, return, otherwise return NULL_TREE. */
|
11227 |
|
|
|
11228 |
|
|
tree
|
11229 |
|
|
get_binfo_at_offset (tree binfo, HOST_WIDE_INT offset, tree expected_type)
|
11230 |
|
|
{
|
11231 |
|
|
tree type = BINFO_TYPE (binfo);
|
11232 |
|
|
|
11233 |
|
|
while (true)
|
11234 |
|
|
{
|
11235 |
|
|
HOST_WIDE_INT pos, size;
|
11236 |
|
|
tree fld;
|
11237 |
|
|
int i;
|
11238 |
|
|
|
11239 |
|
|
if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (expected_type))
|
11240 |
|
|
return binfo;
|
11241 |
|
|
if (offset < 0)
|
11242 |
|
|
return NULL_TREE;
|
11243 |
|
|
|
11244 |
|
|
for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
|
11245 |
|
|
{
|
11246 |
|
|
if (TREE_CODE (fld) != FIELD_DECL)
|
11247 |
|
|
continue;
|
11248 |
|
|
|
11249 |
|
|
pos = int_bit_position (fld);
|
11250 |
|
|
size = tree_low_cst (DECL_SIZE (fld), 1);
|
11251 |
|
|
if (pos <= offset && (pos + size) > offset)
|
11252 |
|
|
break;
|
11253 |
|
|
}
|
11254 |
|
|
if (!fld || TREE_CODE (TREE_TYPE (fld)) != RECORD_TYPE)
|
11255 |
|
|
return NULL_TREE;
|
11256 |
|
|
|
11257 |
|
|
if (!DECL_ARTIFICIAL (fld))
|
11258 |
|
|
{
|
11259 |
|
|
binfo = TYPE_BINFO (TREE_TYPE (fld));
|
11260 |
|
|
if (!binfo)
|
11261 |
|
|
return NULL_TREE;
|
11262 |
|
|
}
|
11263 |
|
|
/* Offset 0 indicates the primary base, whose vtable contents are
|
11264 |
|
|
represented in the binfo for the derived class. */
|
11265 |
|
|
else if (offset != 0)
|
11266 |
|
|
{
|
11267 |
|
|
tree base_binfo, found_binfo = NULL_TREE;
|
11268 |
|
|
for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
|
11269 |
|
|
if (TREE_TYPE (base_binfo) == TREE_TYPE (fld))
|
11270 |
|
|
{
|
11271 |
|
|
found_binfo = base_binfo;
|
11272 |
|
|
break;
|
11273 |
|
|
}
|
11274 |
|
|
if (!found_binfo)
|
11275 |
|
|
return NULL_TREE;
|
11276 |
|
|
binfo = found_binfo;
|
11277 |
|
|
}
|
11278 |
|
|
|
11279 |
|
|
type = TREE_TYPE (fld);
|
11280 |
|
|
offset -= pos;
|
11281 |
|
|
}
|
11282 |
|
|
}
|
11283 |
|
|
|
11284 |
|
|
/* Returns true if X is a typedef decl. */
|
11285 |
|
|
|
11286 |
|
|
bool
|
11287 |
|
|
is_typedef_decl (tree x)
|
11288 |
|
|
{
|
11289 |
|
|
return (x && TREE_CODE (x) == TYPE_DECL
|
11290 |
|
|
&& DECL_ORIGINAL_TYPE (x) != NULL_TREE);
|
11291 |
|
|
}
|
11292 |
|
|
|
11293 |
|
|
/* Returns true iff TYPE is a type variant created for a typedef. */
|
11294 |
|
|
|
11295 |
|
|
bool
|
11296 |
|
|
typedef_variant_p (tree type)
|
11297 |
|
|
{
|
11298 |
|
|
return is_typedef_decl (TYPE_NAME (type));
|
11299 |
|
|
}
|
11300 |
|
|
|
11301 |
|
|
/* Warn about a use of an identifier which was marked deprecated. */
|
11302 |
|
|
void
|
11303 |
|
|
warn_deprecated_use (tree node, tree attr)
|
11304 |
|
|
{
|
11305 |
|
|
const char *msg;
|
11306 |
|
|
|
11307 |
|
|
if (node == 0 || !warn_deprecated_decl)
|
11308 |
|
|
return;
|
11309 |
|
|
|
11310 |
|
|
if (!attr)
|
11311 |
|
|
{
|
11312 |
|
|
if (DECL_P (node))
|
11313 |
|
|
attr = DECL_ATTRIBUTES (node);
|
11314 |
|
|
else if (TYPE_P (node))
|
11315 |
|
|
{
|
11316 |
|
|
tree decl = TYPE_STUB_DECL (node);
|
11317 |
|
|
if (decl)
|
11318 |
|
|
attr = lookup_attribute ("deprecated",
|
11319 |
|
|
TYPE_ATTRIBUTES (TREE_TYPE (decl)));
|
11320 |
|
|
}
|
11321 |
|
|
}
|
11322 |
|
|
|
11323 |
|
|
if (attr)
|
11324 |
|
|
attr = lookup_attribute ("deprecated", attr);
|
11325 |
|
|
|
11326 |
|
|
if (attr)
|
11327 |
|
|
msg = TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr)));
|
11328 |
|
|
else
|
11329 |
|
|
msg = NULL;
|
11330 |
|
|
|
11331 |
|
|
if (DECL_P (node))
|
11332 |
|
|
{
|
11333 |
|
|
expanded_location xloc = expand_location (DECL_SOURCE_LOCATION (node));
|
11334 |
|
|
if (msg)
|
11335 |
|
|
warning (OPT_Wdeprecated_declarations,
|
11336 |
|
|
"%qD is deprecated (declared at %s:%d): %s",
|
11337 |
|
|
node, xloc.file, xloc.line, msg);
|
11338 |
|
|
else
|
11339 |
|
|
warning (OPT_Wdeprecated_declarations,
|
11340 |
|
|
"%qD is deprecated (declared at %s:%d)",
|
11341 |
|
|
node, xloc.file, xloc.line);
|
11342 |
|
|
}
|
11343 |
|
|
else if (TYPE_P (node))
|
11344 |
|
|
{
|
11345 |
|
|
tree what = NULL_TREE;
|
11346 |
|
|
tree decl = TYPE_STUB_DECL (node);
|
11347 |
|
|
|
11348 |
|
|
if (TYPE_NAME (node))
|
11349 |
|
|
{
|
11350 |
|
|
if (TREE_CODE (TYPE_NAME (node)) == IDENTIFIER_NODE)
|
11351 |
|
|
what = TYPE_NAME (node);
|
11352 |
|
|
else if (TREE_CODE (TYPE_NAME (node)) == TYPE_DECL
|
11353 |
|
|
&& DECL_NAME (TYPE_NAME (node)))
|
11354 |
|
|
what = DECL_NAME (TYPE_NAME (node));
|
11355 |
|
|
}
|
11356 |
|
|
|
11357 |
|
|
if (decl)
|
11358 |
|
|
{
|
11359 |
|
|
expanded_location xloc
|
11360 |
|
|
= expand_location (DECL_SOURCE_LOCATION (decl));
|
11361 |
|
|
if (what)
|
11362 |
|
|
{
|
11363 |
|
|
if (msg)
|
11364 |
|
|
warning (OPT_Wdeprecated_declarations,
|
11365 |
|
|
"%qE is deprecated (declared at %s:%d): %s",
|
11366 |
|
|
what, xloc.file, xloc.line, msg);
|
11367 |
|
|
else
|
11368 |
|
|
warning (OPT_Wdeprecated_declarations,
|
11369 |
|
|
"%qE is deprecated (declared at %s:%d)", what,
|
11370 |
|
|
xloc.file, xloc.line);
|
11371 |
|
|
}
|
11372 |
|
|
else
|
11373 |
|
|
{
|
11374 |
|
|
if (msg)
|
11375 |
|
|
warning (OPT_Wdeprecated_declarations,
|
11376 |
|
|
"type is deprecated (declared at %s:%d): %s",
|
11377 |
|
|
xloc.file, xloc.line, msg);
|
11378 |
|
|
else
|
11379 |
|
|
warning (OPT_Wdeprecated_declarations,
|
11380 |
|
|
"type is deprecated (declared at %s:%d)",
|
11381 |
|
|
xloc.file, xloc.line);
|
11382 |
|
|
}
|
11383 |
|
|
}
|
11384 |
|
|
else
|
11385 |
|
|
{
|
11386 |
|
|
if (what)
|
11387 |
|
|
{
|
11388 |
|
|
if (msg)
|
11389 |
|
|
warning (OPT_Wdeprecated_declarations, "%qE is deprecated: %s",
|
11390 |
|
|
what, msg);
|
11391 |
|
|
else
|
11392 |
|
|
warning (OPT_Wdeprecated_declarations, "%qE is deprecated", what);
|
11393 |
|
|
}
|
11394 |
|
|
else
|
11395 |
|
|
{
|
11396 |
|
|
if (msg)
|
11397 |
|
|
warning (OPT_Wdeprecated_declarations, "type is deprecated: %s",
|
11398 |
|
|
msg);
|
11399 |
|
|
else
|
11400 |
|
|
warning (OPT_Wdeprecated_declarations, "type is deprecated");
|
11401 |
|
|
}
|
11402 |
|
|
}
|
11403 |
|
|
}
|
11404 |
|
|
}
|
11405 |
|
|
|
11406 |
|
|
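/* Example of the source construct the function above reacts to (hypothetical
   user code):

     int old_api (void) __attribute__ ((deprecated ("use new_api instead")));

   A use of old_api reaches warn_deprecated_use with the attribute's message
   string and produces a -Wdeprecated-declarations warning of the form
   "'old_api' is deprecated (declared at f.c:1): use new_api instead".  */
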
#include "gt-tree.h"