/* gcc/ada/gcc-interface/utils.c (or1k-gcc trunk, rev 706) */
/****************************************************************************
 *                                                                          *
 *                         GNAT COMPILER COMPONENTS                         *
 *                                                                          *
 *                                U T I L S                                 *
 *                                                                          *
 *                          C Implementation File                           *
 *                                                                          *
 *          Copyright (C) 1992-2012, Free Software Foundation, Inc.         *
 *                                                                          *
 * GNAT is free software;  you can  redistribute it  and/or modify it under *
 * terms of the  GNU General Public License as published  by the Free Soft- *
 * ware  Foundation;  either version 3,  or (at your option) any later ver- *
 * sion.  GNAT is distributed in the hope that it will be useful, but WITH- *
 * OUT ANY WARRANTY;  without even the  implied warranty of MERCHANTABILITY *
 * or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License *
 * for  more details.  You should have received a copy of the GNU General   *
 * Public License along with GCC; see the file COPYING3.  If not see        *
 * <http://www.gnu.org/licenses/>.                                          *
 *                                                                          *
 * GNAT was originally developed  by the GNAT team at  New York University. *
 * Extensive contributions were provided by Ada Core Technologies Inc.      *
 *                                                                          *
 ****************************************************************************/

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "flags.h"
#include "toplev.h"
#include "diagnostic-core.h"
#include "output.h"
#include "ggc.h"
#include "debug.h"
#include "convert.h"
#include "target.h"
#include "common/common-target.h"
#include "langhooks.h"
#include "cgraph.h"
#include "diagnostic.h"
#include "tree-dump.h"
#include "tree-inline.h"
#include "tree-iterator.h"

#include "ada.h"
#include "types.h"
#include "atree.h"
#include "elists.h"
#include "namet.h"
#include "nlists.h"
#include "stringt.h"
#include "uintp.h"
#include "fe.h"
#include "sinfo.h"
#include "einfo.h"
#include "ada-tree.h"
#include "gigi.h"

#ifndef MAX_BITS_PER_WORD
#define MAX_BITS_PER_WORD  BITS_PER_WORD
#endif

/* If nonzero, pretend we are allocating at global level.  */
int force_global;

/* The default alignment of "double" floating-point types, i.e. floating
   point types whose size is equal to 64 bits, or 0 if this alignment is
   not specifically capped.  */
int double_float_alignment;

/* The default alignment of "double" or larger scalar types, i.e. scalar
   types whose size is greater or equal to 64 bits, or 0 if this alignment
   is not specifically capped.  */
int double_scalar_alignment;

/* Tree nodes for the various types and decls we create.  */
tree gnat_std_decls[(int) ADT_LAST];

/* Functions to call for each of the possible raise reasons.  */
tree gnat_raise_decls[(int) LAST_REASON_CODE + 1];

/* Likewise, but with extra info for each of the possible raise reasons.  */
tree gnat_raise_decls_ext[(int) LAST_REASON_CODE + 1];

/* Forward declarations for handlers of attributes.  */
static tree handle_const_attribute (tree *, tree, tree, int, bool *);
static tree handle_nothrow_attribute (tree *, tree, tree, int, bool *);
static tree handle_pure_attribute (tree *, tree, tree, int, bool *);
static tree handle_novops_attribute (tree *, tree, tree, int, bool *);
static tree handle_nonnull_attribute (tree *, tree, tree, int, bool *);
static tree handle_sentinel_attribute (tree *, tree, tree, int, bool *);
static tree handle_noreturn_attribute (tree *, tree, tree, int, bool *);
static tree handle_leaf_attribute (tree *, tree, tree, int, bool *);
static tree handle_malloc_attribute (tree *, tree, tree, int, bool *);
static tree handle_type_generic_attribute (tree *, tree, tree, int, bool *);
static tree handle_vector_size_attribute (tree *, tree, tree, int, bool *);
static tree handle_vector_type_attribute (tree *, tree, tree, int, bool *);

/* Fake handler for attributes we don't properly support, typically because
   they'd require dragging a lot of the common C front-end circuitry.  */
static tree fake_attribute_handler (tree *, tree, tree, int, bool *);

/* Table of machine-independent internal attributes for Ada.  We support
   this minimal set of attributes to accommodate the needs of builtins.  */
const struct attribute_spec gnat_internal_attribute_table[] =
{
  /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
       affects_type_identity } */
  { "const",        0, 0,  true,  false, false, handle_const_attribute,
    false },
  { "nothrow",      0, 0,  true,  false, false, handle_nothrow_attribute,
    false },
  { "pure",         0, 0,  true,  false, false, handle_pure_attribute,
    false },
  { "no vops",      0, 0,  true,  false, false, handle_novops_attribute,
    false },
  { "nonnull",      0, -1, false, true,  true,  handle_nonnull_attribute,
    false },
  { "sentinel",     0, 1,  false, true,  true,  handle_sentinel_attribute,
    false },
  { "noreturn",     0, 0,  true,  false, false, handle_noreturn_attribute,
    false },
  { "leaf",         0, 0,  true,  false, false, handle_leaf_attribute,
    false },
  { "malloc",       0, 0,  true,  false, false, handle_malloc_attribute,
    false },
  { "type generic", 0, 0,  false, true, true, handle_type_generic_attribute,
    false },

  { "vector_size",  1, 1,  false, true, false,  handle_vector_size_attribute,
    false },
  { "vector_type",  0, 0,  false, true, false,  handle_vector_type_attribute,
    false },
  { "may_alias",    0, 0, false, true, false, NULL, false },

  /* ??? format and format_arg are heavy and not supported, which actually
     prevents support for stdio builtins, which we however declare as part
     of the common builtins.def contents.  */
  { "format",     3, 3,  false, true,  true,  fake_attribute_handler, false },
  { "format_arg", 1, 1,  false, true,  true,  fake_attribute_handler, false },

  { NULL,         0, 0, false, false, false, NULL, false }
};
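
/* As an illustration of how an entry above is read against the field list
   { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
     affects_type_identity }: the "nonnull" row takes any number of
   arguments (min_len 0, max_len -1), need not be attached to a declaration
   (decl_req false), must be attached to a type and more precisely to a
   function type (type_req and fn_type_req true), is processed by
   handle_nonnull_attribute, and does not affect type identity.  */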

/* Associates a GNAT tree node to a GCC tree node. It is used in
   `save_gnu_tree', `get_gnu_tree' and `present_gnu_tree'. See documentation
   of `save_gnu_tree' for more info.  */
static GTY((length ("max_gnat_nodes"))) tree *associate_gnat_to_gnu;

#define GET_GNU_TREE(GNAT_ENTITY)       \
  associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id]

#define SET_GNU_TREE(GNAT_ENTITY,VAL)   \
  associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] = (VAL)

#define PRESENT_GNU_TREE(GNAT_ENTITY)   \
  (associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)
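
/* A typical calling pattern, sketched for illustration: the wrappers defined
   below test the slot before reading or writing it.

     if (present_gnu_tree (gnat_entity))
       gnu_decl = get_gnu_tree (gnat_entity);
     else
       save_gnu_tree (gnat_entity, gnu_decl, false);

   Both this table and dummy_node_table below are indexed by
   GNAT_ENTITY - First_Node_Id and sized by max_gnat_nodes.  */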

/* Associates a GNAT entity to a GCC tree node used as a dummy, if any.  */
static GTY((length ("max_gnat_nodes"))) tree *dummy_node_table;

#define GET_DUMMY_NODE(GNAT_ENTITY)     \
  dummy_node_table[(GNAT_ENTITY) - First_Node_Id]

#define SET_DUMMY_NODE(GNAT_ENTITY,VAL) \
  dummy_node_table[(GNAT_ENTITY) - First_Node_Id] = (VAL)

#define PRESENT_DUMMY_NODE(GNAT_ENTITY) \
  (dummy_node_table[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)

/* This variable keeps a table for types for each precision so that we only
   allocate each of them once. Signed and unsigned types are kept separate.

   Note that these types are only used when fold-const requests something
   special.  Perhaps we should NOT share these types; we'll see how it
   goes later.  */
static GTY(()) tree signed_and_unsigned_types[2 * MAX_BITS_PER_WORD + 1][2];

/* Likewise for float types, but record these by mode.  */
static GTY(()) tree float_types[NUM_MACHINE_MODES];

/* For each binding contour we allocate a binding_level structure to indicate
   the binding depth.  */

struct GTY((chain_next ("%h.chain"))) gnat_binding_level {
  /* The binding level containing this one (the enclosing binding level). */
  struct gnat_binding_level *chain;
  /* The BLOCK node for this level.  */
  tree block;
  /* If nonzero, the setjmp buffer that needs to be updated for any
     variable-sized definition within this context.  */
  tree jmpbuf_decl;
};

/* The binding level currently in effect.  */
static GTY(()) struct gnat_binding_level *current_binding_level;

/* A chain of gnat_binding_level structures awaiting reuse.  */
static GTY((deletable)) struct gnat_binding_level *free_binding_level;

/* The context to be used for global declarations.  */
static GTY(()) tree global_context;

/* An array of global declarations.  */
static GTY(()) VEC(tree,gc) *global_decls;

/* An array of builtin function declarations.  */
static GTY(()) VEC(tree,gc) *builtin_decls;

/* An array of global renaming pointers.  */
static GTY(()) VEC(tree,gc) *global_renaming_pointers;

/* A chain of unused BLOCK nodes. */
static GTY((deletable)) tree free_block_chain;

static tree merge_sizes (tree, tree, tree, bool, bool);
static tree compute_related_constant (tree, tree);
static tree split_plus (tree, tree *);
static tree float_type_for_precision (int, enum machine_mode);
static tree convert_to_fat_pointer (tree, tree);
static tree convert_to_thin_pointer (tree, tree);
static bool potential_alignment_gap (tree, tree, tree);
static void process_attributes (tree, struct attrib *);

/* Initialize the association of GNAT nodes to GCC trees.  */

void
init_gnat_to_gnu (void)
{
  associate_gnat_to_gnu = ggc_alloc_cleared_vec_tree (max_gnat_nodes);
}

/* GNAT_ENTITY is a GNAT tree node for an entity.  Associate GNU_DECL, a GCC
   tree node, with GNAT_ENTITY.  If GNU_DECL is not a ..._DECL node, abort.
   If NO_CHECK is true, the latter check is suppressed.

   If GNU_DECL is zero, reset a previous association.  */

void
save_gnu_tree (Entity_Id gnat_entity, tree gnu_decl, bool no_check)
{
  /* Check that GNAT_ENTITY is not already defined and that it is being set
     to something which is a decl.  If that is not the case, this usually
     means GNAT_ENTITY is defined twice, but occasionally is due to some
     Gigi problem.  */
  gcc_assert (!(gnu_decl
                && (PRESENT_GNU_TREE (gnat_entity)
                    || (!no_check && !DECL_P (gnu_decl)))));

  SET_GNU_TREE (gnat_entity, gnu_decl);
}

/* GNAT_ENTITY is a GNAT tree node for an entity.  Return the GCC tree node
   that was associated with it.  If there is no such tree node, abort.

   In some cases, such as delayed elaboration or expressions that need to
   be elaborated only once, GNAT_ENTITY is really not an entity.  */

tree
get_gnu_tree (Entity_Id gnat_entity)
{
  gcc_assert (PRESENT_GNU_TREE (gnat_entity));
  return GET_GNU_TREE (gnat_entity);
}

/* Return nonzero if a GCC tree has been associated with GNAT_ENTITY.  */

bool
present_gnu_tree (Entity_Id gnat_entity)
{
  return PRESENT_GNU_TREE (gnat_entity);
}

/* Initialize the association of GNAT nodes to GCC trees as dummies.  */

void
init_dummy_type (void)
{
  dummy_node_table = ggc_alloc_cleared_vec_tree (max_gnat_nodes);
}

/* Make a dummy type corresponding to GNAT_TYPE.  */

tree
make_dummy_type (Entity_Id gnat_type)
{
  Entity_Id gnat_underlying = Gigi_Equivalent_Type (gnat_type);
  tree gnu_type;

  /* If there is an equivalent type, get its underlying type.  */
  if (Present (gnat_underlying))
    gnat_underlying = Gigi_Equivalent_Type (Underlying_Type (gnat_underlying));

  /* If there was no equivalent type (can only happen when just annotating
     types) or underlying type, go back to the original type.  */
  if (No (gnat_underlying))
    gnat_underlying = gnat_type;

  /* If there is already a dummy type, use that one.  Else make one.  */
  if (PRESENT_DUMMY_NODE (gnat_underlying))
    return GET_DUMMY_NODE (gnat_underlying);

  /* If this is a record, make a RECORD_TYPE or UNION_TYPE; else make
     an ENUMERAL_TYPE.  */
  gnu_type = make_node (Is_Record_Type (gnat_underlying)
                        ? tree_code_for_record_type (gnat_underlying)
                        : ENUMERAL_TYPE);
  TYPE_NAME (gnu_type) = get_entity_name (gnat_type);
  TYPE_DUMMY_P (gnu_type) = 1;
  TYPE_STUB_DECL (gnu_type)
    = create_type_stub_decl (TYPE_NAME (gnu_type), gnu_type);
  if (Is_By_Reference_Type (gnat_underlying))
    TYPE_BY_REFERENCE_P (gnu_type) = 1;

  SET_DUMMY_NODE (gnat_underlying, gnu_type);

  return gnu_type;
}

/* Return the dummy type that was made for GNAT_TYPE, if any.  */

tree
get_dummy_type (Entity_Id gnat_type)
{
  return GET_DUMMY_NODE (gnat_type);
}

/* Build dummy fat and thin pointer types whose designated type is specified
   by GNAT_DESIG_TYPE/GNU_DESIG_TYPE and attach them to the latter.  */

void
build_dummy_unc_pointer_types (Entity_Id gnat_desig_type, tree gnu_desig_type)
{
  tree gnu_template_type, gnu_ptr_template, gnu_array_type, gnu_ptr_array;
  tree gnu_fat_type, fields, gnu_object_type;

  gnu_template_type = make_node (RECORD_TYPE);
  TYPE_NAME (gnu_template_type) = create_concat_name (gnat_desig_type, "XUB");
  TYPE_DUMMY_P (gnu_template_type) = 1;
  gnu_ptr_template = build_pointer_type (gnu_template_type);

  gnu_array_type = make_node (ENUMERAL_TYPE);
  TYPE_NAME (gnu_array_type) = create_concat_name (gnat_desig_type, "XUA");
  TYPE_DUMMY_P (gnu_array_type) = 1;
  gnu_ptr_array = build_pointer_type (gnu_array_type);

  gnu_fat_type = make_node (RECORD_TYPE);
  /* Build a stub DECL to trigger the special processing for fat pointer types
     in gnat_pushdecl.  */
  TYPE_NAME (gnu_fat_type)
    = create_type_stub_decl (create_concat_name (gnat_desig_type, "XUP"),
                             gnu_fat_type);
  fields = create_field_decl (get_identifier ("P_ARRAY"), gnu_ptr_array,
                              gnu_fat_type, NULL_TREE, NULL_TREE, 0, 0);
  DECL_CHAIN (fields)
    = create_field_decl (get_identifier ("P_BOUNDS"), gnu_ptr_template,
                         gnu_fat_type, NULL_TREE, NULL_TREE, 0, 0);
  finish_fat_pointer_type (gnu_fat_type, fields);
  SET_TYPE_UNCONSTRAINED_ARRAY (gnu_fat_type, gnu_desig_type);
  /* Suppress debug info until after the type is completed.  */
  TYPE_DECL_SUPPRESS_DEBUG (TYPE_STUB_DECL (gnu_fat_type)) = 1;

  gnu_object_type = make_node (RECORD_TYPE);
  TYPE_NAME (gnu_object_type) = create_concat_name (gnat_desig_type, "XUT");
  TYPE_DUMMY_P (gnu_object_type) = 1;

  TYPE_POINTER_TO (gnu_desig_type) = gnu_fat_type;
  TYPE_OBJECT_RECORD_TYPE (gnu_desig_type) = gnu_object_type;
}
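
/* For illustration, the dummy fat pointer built above amounts to a
   two-field record along these lines:

     struct <desig_type>___XUP           -- the fat pointer itself
     {
       <XUA array type>    *P_ARRAY;     -- pointer to the dummy array type
       <XUB bounds record> *P_BOUNDS;    -- pointer to the dummy bounds template
     };

   and the XUT record created last stands for the object record that the
   corresponding thin pointer designates.  */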

/* Return true if we are in the global binding level.  */

bool
global_bindings_p (void)
{
  return force_global || current_function_decl == NULL_TREE;
}

/* Enter a new binding level.  */

void
gnat_pushlevel (void)
{
  struct gnat_binding_level *newlevel = NULL;

  /* Reuse a struct for this binding level, if there is one.  */
  if (free_binding_level)
    {
      newlevel = free_binding_level;
      free_binding_level = free_binding_level->chain;
    }
  else
    newlevel = ggc_alloc_gnat_binding_level ();

  /* Use a free BLOCK, if any; otherwise, allocate one.  */
  if (free_block_chain)
    {
      newlevel->block = free_block_chain;
      free_block_chain = BLOCK_CHAIN (free_block_chain);
      BLOCK_CHAIN (newlevel->block) = NULL_TREE;
    }
  else
    newlevel->block = make_node (BLOCK);

  /* Point the BLOCK we just made to its parent.  */
  if (current_binding_level)
    BLOCK_SUPERCONTEXT (newlevel->block) = current_binding_level->block;

  BLOCK_VARS (newlevel->block) = NULL_TREE;
  BLOCK_SUBBLOCKS (newlevel->block) = NULL_TREE;
  TREE_USED (newlevel->block) = 1;

  /* Add this level to the front of the chain (stack) of active levels.  */
  newlevel->chain = current_binding_level;
  newlevel->jmpbuf_decl = NULL_TREE;
  current_binding_level = newlevel;
}

/* Set SUPERCONTEXT of the BLOCK for the current binding level to FNDECL
   and point FNDECL to this BLOCK.  */

void
set_current_block_context (tree fndecl)
{
  BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
  DECL_INITIAL (fndecl) = current_binding_level->block;
  set_block_for_group (current_binding_level->block);
}

/* Set the jmpbuf_decl for the current binding level to DECL.  */

void
set_block_jmpbuf_decl (tree decl)
{
  current_binding_level->jmpbuf_decl = decl;
}

/* Get the jmpbuf_decl, if any, for the current binding level.  */

tree
get_block_jmpbuf_decl (void)
{
  return current_binding_level->jmpbuf_decl;
}

/* Exit a binding level.  Set any BLOCK into the current code group.  */

void
gnat_poplevel (void)
{
  struct gnat_binding_level *level = current_binding_level;
  tree block = level->block;

  BLOCK_VARS (block) = nreverse (BLOCK_VARS (block));
  BLOCK_SUBBLOCKS (block) = blocks_nreverse (BLOCK_SUBBLOCKS (block));

  /* If this is a function-level BLOCK, don't do anything.  Otherwise, if
     there are no variables, free the block and merge its subblocks into
     those of its parent block.  Otherwise, add it to the list of its
     parent.  */
  if (TREE_CODE (BLOCK_SUPERCONTEXT (block)) == FUNCTION_DECL)
    ;
  else if (BLOCK_VARS (block) == NULL_TREE)
    {
      BLOCK_SUBBLOCKS (level->chain->block)
        = block_chainon (BLOCK_SUBBLOCKS (block),
                         BLOCK_SUBBLOCKS (level->chain->block));
      BLOCK_CHAIN (block) = free_block_chain;
      free_block_chain = block;
    }
  else
    {
      BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (level->chain->block);
      BLOCK_SUBBLOCKS (level->chain->block) = block;
      TREE_USED (block) = 1;
      set_block_for_group (block);
    }

  /* Free this binding structure.  */
  current_binding_level = level->chain;
  level->chain = free_binding_level;
  free_binding_level = level;
}

/* Exit a binding level and discard the associated BLOCK.  */

void
gnat_zaplevel (void)
{
  struct gnat_binding_level *level = current_binding_level;
  tree block = level->block;

  BLOCK_CHAIN (block) = free_block_chain;
  free_block_chain = block;

  /* Free this binding structure.  */
  current_binding_level = level->chain;
  level->chain = free_binding_level;
  free_binding_level = level;
}
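
/* How the entry points above are meant to be paired, sketched for
   illustration: a scope is opened with gnat_pushlevel, declarations are
   added to it via gnat_pushdecl below, and the scope is closed either with
   gnat_poplevel, which keeps the BLOCK, or with gnat_zaplevel, which throws
   it away:

     gnat_pushlevel ();
     ... gnat_pushdecl (decl, gnat_node); ...
     gnat_poplevel ();   -- or gnat_zaplevel () to discard the BLOCK

   set_current_block_context ties the outermost BLOCK of a function to its
   FUNCTION_DECL.  */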

/* Record DECL as belonging to the current lexical scope and use GNAT_NODE
   for location information and flag propagation.  */

void
gnat_pushdecl (tree decl, Node_Id gnat_node)
{
  /* If DECL is public external or at top level, it has global context.  */
  if ((TREE_PUBLIC (decl) && DECL_EXTERNAL (decl)) || global_bindings_p ())
    {
      if (!global_context)
        global_context = build_translation_unit_decl (NULL_TREE);
      DECL_CONTEXT (decl) = global_context;
    }
  else
    {
      DECL_CONTEXT (decl) = current_function_decl;

      /* Functions imported in another function are not really nested.
         For really nested functions mark them initially as needing
         a static chain for uses of that flag before unnesting;
         lower_nested_functions will then recompute it.  */
      if (TREE_CODE (decl) == FUNCTION_DECL && !TREE_PUBLIC (decl))
        DECL_STATIC_CHAIN (decl) = 1;
    }

  TREE_NO_WARNING (decl) = (No (gnat_node) || Warnings_Off (gnat_node));

  /* Set the location of DECL and emit a declaration for it.  */
  if (Present (gnat_node))
    Sloc_to_locus (Sloc (gnat_node), &DECL_SOURCE_LOCATION (decl));

  add_decl_expr (decl, gnat_node);

  /* Put the declaration on the list.  The list of declarations is in reverse
     order.  The list will be reversed later.  Put global declarations in the
     globals list and local ones in the current block.  But skip TYPE_DECLs
     for UNCONSTRAINED_ARRAY_TYPE in both cases, as they will cause trouble
     with the debugger and aren't needed anyway.  */
  if (!(TREE_CODE (decl) == TYPE_DECL
        && TREE_CODE (TREE_TYPE (decl)) == UNCONSTRAINED_ARRAY_TYPE))
    {
      if (global_bindings_p ())
        {
          VEC_safe_push (tree, gc, global_decls, decl);

          if (TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl))
            VEC_safe_push (tree, gc, builtin_decls, decl);
        }
      else if (!DECL_EXTERNAL (decl))
        {
          DECL_CHAIN (decl) = BLOCK_VARS (current_binding_level->block);
          BLOCK_VARS (current_binding_level->block) = decl;
        }
    }

  /* For the declaration of a type, set its name if it either is not already
     set or if the previous type name was not derived from a source name.
     We'd rather have the type named with a real name and all the pointer
     types to the same object have the same POINTER_TYPE node.  Code in the
     equivalent function of c-decl.c makes a copy of the type node here, but
     that may cause us trouble with incomplete types.  We make an exception
     for fat pointer types because the compiler automatically builds them
     for unconstrained array types and the debugger uses them to represent
     both these and pointers to these.  */
  if (TREE_CODE (decl) == TYPE_DECL && DECL_NAME (decl))
    {
      tree t = TREE_TYPE (decl);

      if (!(TYPE_NAME (t) && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL))
        {
          /* Array and pointer types aren't "tagged" types so we force the
             type to be associated with its typedef in the DWARF back-end,
             in order to make sure that the latter is always preserved.  */
          if (!DECL_ARTIFICIAL (decl)
              && (TREE_CODE (t) == ARRAY_TYPE
                  || TREE_CODE (t) == POINTER_TYPE))
            {
              tree tt = build_distinct_type_copy (t);
              if (TREE_CODE (t) == POINTER_TYPE)
                TYPE_NEXT_PTR_TO (t) = tt;
              TYPE_NAME (tt) = DECL_NAME (decl);
              TYPE_STUB_DECL (tt) = TYPE_STUB_DECL (t);
              DECL_ORIGINAL_TYPE (decl) = tt;
            }
        }
      else if (TYPE_IS_FAT_POINTER_P (t))
        {
          /* We need a variant for the placeholder machinery to work.  */
          tree tt = build_variant_type_copy (t);
          TYPE_NAME (tt) = decl;
          TREE_USED (tt) = TREE_USED (t);
          TREE_TYPE (decl) = tt;
          if (DECL_ORIGINAL_TYPE (TYPE_NAME (t)))
            DECL_ORIGINAL_TYPE (decl) = DECL_ORIGINAL_TYPE (TYPE_NAME (t));
          else
            DECL_ORIGINAL_TYPE (decl) = t;
          DECL_ARTIFICIAL (decl) = 0;
          t = NULL_TREE;
        }
      else if (DECL_ARTIFICIAL (TYPE_NAME (t)) && !DECL_ARTIFICIAL (decl))
        ;
      else
        t = NULL_TREE;

      /* Propagate the name to all the anonymous variants.  This is needed
         for the type qualifiers machinery to work properly.  */
      if (t)
        for (t = TYPE_MAIN_VARIANT (t); t; t = TYPE_NEXT_VARIANT (t))
          if (!(TYPE_NAME (t) && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL))
            TYPE_NAME (t) = decl;
    }
}

/* Record TYPE as a builtin type for Ada.  NAME is the name of the type.
   ARTIFICIAL_P is true if it's a type that was generated by the compiler.  */

void
record_builtin_type (const char *name, tree type, bool artificial_p)
{
  tree type_decl = build_decl (input_location,
                               TYPE_DECL, get_identifier (name), type);
  DECL_ARTIFICIAL (type_decl) = artificial_p;
  TYPE_ARTIFICIAL (type) = artificial_p;
  gnat_pushdecl (type_decl, Empty);

  if (debug_hooks->type_decl)
    debug_hooks->type_decl (type_decl, false);
}

/* Given a record type RECORD_TYPE and a list of FIELD_DECL nodes FIELD_LIST,
   finish constructing the record type as a fat pointer type.  */

void
finish_fat_pointer_type (tree record_type, tree field_list)
{
  /* Make sure we can put it into a register.  */
  TYPE_ALIGN (record_type) = MIN (BIGGEST_ALIGNMENT, 2 * POINTER_SIZE);

  /* Show what it really is.  */
  TYPE_FAT_POINTER_P (record_type) = 1;

  /* Do not emit debug info for it since the types of its fields may still be
     incomplete at this point.  */
  finish_record_type (record_type, field_list, 0, false);

  /* Force type_contains_placeholder_p to return true on it.  Although the
     PLACEHOLDER_EXPRs are referenced only indirectly, this isn't a pointer
     type but the representation of the unconstrained array.  */
  TYPE_CONTAINS_PLACEHOLDER_INTERNAL (record_type) = 2;
}

/* Given a record type RECORD_TYPE and a list of FIELD_DECL nodes FIELD_LIST,
   finish constructing the record or union type.  If REP_LEVEL is zero, this
   record has no representation clause and so will be entirely laid out here.
   If REP_LEVEL is one, this record has a representation clause and has been
   laid out already; only set the sizes and alignment.  If REP_LEVEL is two,
   this record is derived from a parent record and thus inherits its layout;
   only make a pass on the fields to finalize them.  DEBUG_INFO_P is true if
   we need to write debug information about this type.  */

void
finish_record_type (tree record_type, tree field_list, int rep_level,
                    bool debug_info_p)
{
  enum tree_code code = TREE_CODE (record_type);
  tree name = TYPE_NAME (record_type);
  tree ada_size = bitsize_zero_node;
  tree size = bitsize_zero_node;
  bool had_size = TYPE_SIZE (record_type) != 0;
  bool had_size_unit = TYPE_SIZE_UNIT (record_type) != 0;
  bool had_align = TYPE_ALIGN (record_type) != 0;
  tree field;

  TYPE_FIELDS (record_type) = field_list;

  /* Always attach the TYPE_STUB_DECL for a record type.  It is required to
     generate debug info and have a parallel type.  */
  if (name && TREE_CODE (name) == TYPE_DECL)
    name = DECL_NAME (name);
  TYPE_STUB_DECL (record_type) = create_type_stub_decl (name, record_type);

  /* Globally initialize the record first.  If this is a rep'ed record,
     that just means some initializations; otherwise, layout the record.  */
  if (rep_level > 0)
    {
      TYPE_ALIGN (record_type) = MAX (BITS_PER_UNIT, TYPE_ALIGN (record_type));

      if (!had_size_unit)
        TYPE_SIZE_UNIT (record_type) = size_zero_node;

      if (!had_size)
        TYPE_SIZE (record_type) = bitsize_zero_node;

      /* For all-repped records with a size specified, lay the QUAL_UNION_TYPE
         out just like a UNION_TYPE, since the size will be fixed.  */
      else if (code == QUAL_UNION_TYPE)
        code = UNION_TYPE;
    }
  else
    {
      /* Ensure there isn't a size already set.  There can be in an error
         case where there is a rep clause but all fields have errors and
         no longer have a position.  */
      TYPE_SIZE (record_type) = 0;
      layout_type (record_type);
    }

  /* At this point, the position and size of each field is known.  It was
     either set before entry by a rep clause, or by laying out the type above.

     We now run a pass over the fields (in reverse order for QUAL_UNION_TYPEs)
     to compute the Ada size; the GCC size and alignment (for rep'ed records
     that are not padding types); and the mode (for rep'ed records).  We also
     clear the DECL_BIT_FIELD indication for the cases we know have not been
     handled yet, and adjust DECL_NONADDRESSABLE_P accordingly.  */

  if (code == QUAL_UNION_TYPE)
    field_list = nreverse (field_list);

  for (field = field_list; field; field = DECL_CHAIN (field))
    {
      tree type = TREE_TYPE (field);
      tree pos = bit_position (field);
      tree this_size = DECL_SIZE (field);
      tree this_ada_size;

      if (RECORD_OR_UNION_TYPE_P (type)
          && !TYPE_FAT_POINTER_P (type)
          && !TYPE_CONTAINS_TEMPLATE_P (type)
          && TYPE_ADA_SIZE (type))
        this_ada_size = TYPE_ADA_SIZE (type);
      else
        this_ada_size = this_size;

      /* Clear DECL_BIT_FIELD for the cases layout_decl does not handle.  */
      if (DECL_BIT_FIELD (field)
          && operand_equal_p (this_size, TYPE_SIZE (type), 0))
        {
          unsigned int align = TYPE_ALIGN (type);

          /* In the general case, type alignment is required.  */
          if (value_factor_p (pos, align))
            {
              /* The enclosing record type must be sufficiently aligned.
                 Otherwise, if no alignment was specified for it and it
                 has been laid out already, bump its alignment to the
                 desired one if this is compatible with its size.  */
              if (TYPE_ALIGN (record_type) >= align)
                {
                  DECL_ALIGN (field) = MAX (DECL_ALIGN (field), align);
                  DECL_BIT_FIELD (field) = 0;
                }
              else if (!had_align
                       && rep_level == 0
                       && value_factor_p (TYPE_SIZE (record_type), align))
                {
                  TYPE_ALIGN (record_type) = align;
                  DECL_ALIGN (field) = MAX (DECL_ALIGN (field), align);
                  DECL_BIT_FIELD (field) = 0;
                }
            }

          /* In the non-strict alignment case, only byte alignment is
             required.  */
          if (!STRICT_ALIGNMENT
              && DECL_BIT_FIELD (field)
              && value_factor_p (pos, BITS_PER_UNIT))
            DECL_BIT_FIELD (field) = 0;
        }

      /* If we still have DECL_BIT_FIELD set at this point, we know that the
         field is technically not addressable.  Except that it can actually
         be addressed if it is BLKmode and happens to be properly aligned.  */
      if (DECL_BIT_FIELD (field)
          && !(DECL_MODE (field) == BLKmode
               && value_factor_p (pos, BITS_PER_UNIT)))
        DECL_NONADDRESSABLE_P (field) = 1;

      /* A type must be as aligned as its most aligned field that is not
         a bit-field.  But this is already enforced by layout_type.  */
      if (rep_level > 0 && !DECL_BIT_FIELD (field))
        TYPE_ALIGN (record_type)
          = MAX (TYPE_ALIGN (record_type), DECL_ALIGN (field));

      switch (code)
        {
        case UNION_TYPE:
          ada_size = size_binop (MAX_EXPR, ada_size, this_ada_size);
          size = size_binop (MAX_EXPR, size, this_size);
          break;

        case QUAL_UNION_TYPE:
          ada_size
            = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
                           this_ada_size, ada_size);
          size = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
                              this_size, size);
          break;

        case RECORD_TYPE:
          /* Since we know here that all fields are sorted in order of
             increasing bit position, the size of the record is one
             higher than the ending bit of the last field processed
             unless we have a rep clause, since in that case we might
             have a field outside a QUAL_UNION_TYPE that has a higher ending
             position.  So use a MAX in that case.  Also, if this field is a
             QUAL_UNION_TYPE, we need to take into account the previous size in
             the case of empty variants.  */
          ada_size
            = merge_sizes (ada_size, pos, this_ada_size,
                           TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0);
          size
            = merge_sizes (size, pos, this_size,
                           TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0);
          break;

        default:
          gcc_unreachable ();
        }
    }

  if (code == QUAL_UNION_TYPE)
    nreverse (field_list);

  if (rep_level < 2)
    {
      /* If this is a padding record, we never want to make the size smaller
         than what was specified in it, if any.  */
      if (TYPE_IS_PADDING_P (record_type) && TYPE_SIZE (record_type))
        size = TYPE_SIZE (record_type);

      /* Now set any of the values we've just computed that apply.  */
      if (!TYPE_FAT_POINTER_P (record_type)
          && !TYPE_CONTAINS_TEMPLATE_P (record_type))
        SET_TYPE_ADA_SIZE (record_type, ada_size);

      if (rep_level > 0)
        {
          tree size_unit = had_size_unit
                           ? TYPE_SIZE_UNIT (record_type)
                           : convert (sizetype,
                                      size_binop (CEIL_DIV_EXPR, size,
                                                  bitsize_unit_node));
          unsigned int align = TYPE_ALIGN (record_type);

          TYPE_SIZE (record_type) = variable_size (round_up (size, align));
          TYPE_SIZE_UNIT (record_type)
            = variable_size (round_up (size_unit, align / BITS_PER_UNIT));

          compute_record_mode (record_type);
        }
    }

  if (debug_info_p)
    rest_of_record_type_compilation (record_type);
}

/* Wrap up compilation of RECORD_TYPE, i.e. output all the debug information
   associated with it.  It need not be invoked directly in most cases since
   finish_record_type takes care of doing so, but this can be necessary if
   a parallel type is to be attached to the record type.  */

void
rest_of_record_type_compilation (tree record_type)
{
  tree field_list = TYPE_FIELDS (record_type);
  tree field;
  enum tree_code code = TREE_CODE (record_type);
  bool var_size = false;

  for (field = field_list; field; field = DECL_CHAIN (field))
    {
      /* We need to make an XVE/XVU record if any field has variable size,
         whether or not the record does.  For example, if we have a union,
         it may be that all fields, rounded up to the alignment, have the
         same size, in which case we'll use that size.  But the debug
         output routines (except Dwarf2) won't be able to output the fields,
         so we need to make the special record.  */
      if (TREE_CODE (DECL_SIZE (field)) != INTEGER_CST
          /* If a field has a non-constant qualifier, the record will have
             variable size too.  */
          || (code == QUAL_UNION_TYPE
              && TREE_CODE (DECL_QUALIFIER (field)) != INTEGER_CST))
        {
          var_size = true;
          break;
        }
    }

  /* If this record is of variable size, rename it so that the
     debugger knows it is and make a new, parallel, record
     that tells the debugger how the record is laid out.  See
     exp_dbug.ads.  But don't do this for records that are padding
     since they confuse GDB.  */
  if (var_size && !TYPE_IS_PADDING_P (record_type))
    {
      tree new_record_type
        = make_node (TREE_CODE (record_type) == QUAL_UNION_TYPE
                     ? UNION_TYPE : TREE_CODE (record_type));
      tree orig_name = TYPE_NAME (record_type), new_name;
      tree last_pos = bitsize_zero_node;
      tree old_field, prev_old_field = NULL_TREE;

      if (TREE_CODE (orig_name) == TYPE_DECL)
        orig_name = DECL_NAME (orig_name);

      new_name
        = concat_name (orig_name, TREE_CODE (record_type) == QUAL_UNION_TYPE
                                  ? "XVU" : "XVE");
      TYPE_NAME (new_record_type) = new_name;
      TYPE_ALIGN (new_record_type) = BIGGEST_ALIGNMENT;
      TYPE_STUB_DECL (new_record_type)
        = create_type_stub_decl (new_name, new_record_type);
      DECL_IGNORED_P (TYPE_STUB_DECL (new_record_type))
        = DECL_IGNORED_P (TYPE_STUB_DECL (record_type));
      TYPE_SIZE (new_record_type) = size_int (TYPE_ALIGN (record_type));
      TYPE_SIZE_UNIT (new_record_type)
        = size_int (TYPE_ALIGN (record_type) / BITS_PER_UNIT);

      /* Now scan all the fields, replacing each field with a new
         field corresponding to the new encoding.  */
      for (old_field = TYPE_FIELDS (record_type); old_field;
           old_field = DECL_CHAIN (old_field))
        {
          tree field_type = TREE_TYPE (old_field);
          tree field_name = DECL_NAME (old_field);
          tree new_field;
          tree curpos = bit_position (old_field);
          bool var = false;
          unsigned int align = 0;
          tree pos;

          /* See how the position was modified from the last position.

             There are two basic cases we support: a value was added
             to the last position or the last position was rounded to
             a boundary and then something was added.  Check for the
             first case first.  If not, see if there is any evidence
             of rounding.  If so, round the last position and try
             again.

             If this is a union, the position can be taken as zero.  */

          /* Some computations depend on the shape of the position expression,
             so strip conversions to make sure it's exposed.  */
          curpos = remove_conversions (curpos, true);

          if (TREE_CODE (new_record_type) == UNION_TYPE)
            pos = bitsize_zero_node, align = 0;
          else
            pos = compute_related_constant (curpos, last_pos);

          if (!pos && TREE_CODE (curpos) == MULT_EXPR
              && host_integerp (TREE_OPERAND (curpos, 1), 1))
            {
              tree offset = TREE_OPERAND (curpos, 0);
              align = tree_low_cst (TREE_OPERAND (curpos, 1), 1);

              /* An offset which is a bitwise AND with a negative power of 2
                 means an alignment corresponding to this power of 2.  Note
                 that, as sizetype is sign-extended but nonetheless unsigned,
                 we don't directly use tree_int_cst_sgn.  */
              offset = remove_conversions (offset, true);
              if (TREE_CODE (offset) == BIT_AND_EXPR
                  && host_integerp (TREE_OPERAND (offset, 1), 0)
                  && TREE_INT_CST_HIGH (TREE_OPERAND (offset, 1)) < 0)
                {
                  unsigned int pow
                    = - tree_low_cst (TREE_OPERAND (offset, 1), 0);
                  if (exact_log2 (pow) > 0)
                    align *= pow;
                }

              pos = compute_related_constant (curpos,
                                              round_up (last_pos, align));
            }
          else if (!pos && TREE_CODE (curpos) == PLUS_EXPR
                   && TREE_CODE (TREE_OPERAND (curpos, 1)) == INTEGER_CST
                   && TREE_CODE (TREE_OPERAND (curpos, 0)) == MULT_EXPR
                   && host_integerp (TREE_OPERAND
                                     (TREE_OPERAND (curpos, 0), 1),
                                     1))
            {
              align
                = tree_low_cst
                (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1), 1);
              pos = compute_related_constant (curpos,
                                              round_up (last_pos, align));
            }
          else if (potential_alignment_gap (prev_old_field, old_field,
                                            pos))
            {
              align = TYPE_ALIGN (field_type);
              pos = compute_related_constant (curpos,
                                              round_up (last_pos, align));
            }

          /* If we can't compute a position, set it to zero.

             ??? We really should abort here, but it's too much work
             to get this correct for all cases.  */

          if (!pos)
            pos = bitsize_zero_node;

          /* See if this type is variable-sized and make a pointer type
             and indicate the indirection if so.  Beware that the debug
             back-end may adjust the position computed above according
             to the alignment of the field type, i.e. the pointer type
             in this case, if we don't preventively counter that.  */
          if (TREE_CODE (DECL_SIZE (old_field)) != INTEGER_CST)
            {
              field_type = build_pointer_type (field_type);
              if (align != 0 && TYPE_ALIGN (field_type) > align)
                {
                  field_type = copy_node (field_type);
                  TYPE_ALIGN (field_type) = align;
                }
              var = true;
            }

          /* Make a new field name, if necessary.  */
          if (var || align != 0)
            {
              char suffix[16];

              if (align != 0)
                sprintf (suffix, "XV%c%u", var ? 'L' : 'A',
                         align / BITS_PER_UNIT);
              else
                strcpy (suffix, "XVL");

              field_name = concat_name (field_name, suffix);
            }

          new_field
            = create_field_decl (field_name, field_type, new_record_type,
                                 DECL_SIZE (old_field), pos, 0, 0);
          DECL_CHAIN (new_field) = TYPE_FIELDS (new_record_type);
          TYPE_FIELDS (new_record_type) = new_field;

          /* If old_field is a QUAL_UNION_TYPE, take its size as being
             zero.  The only time it's not the last field of the record
             is when there are other components at fixed positions after
             it (meaning there was a rep clause for every field) and we
             want to be able to encode them.  */
          last_pos = size_binop (PLUS_EXPR, bit_position (old_field),
                                 (TREE_CODE (TREE_TYPE (old_field))
                                  == QUAL_UNION_TYPE)
                                 ? bitsize_zero_node
                                 : DECL_SIZE (old_field));
          prev_old_field = old_field;
        }

      TYPE_FIELDS (new_record_type)
        = nreverse (TYPE_FIELDS (new_record_type));

      /* We used to explicitly invoke rest_of_type_decl_compilation on the
         parallel type for the sake of STABS.  We don't do it any more, so
         as to ensure that the parallel type be processed after the type
         by the debug back-end and, thus, prevent it from interfering with
         the processing of a recursive type.  */
      add_parallel_type (TYPE_STUB_DECL (record_type), new_record_type);
    }

  rest_of_type_decl_compilation (TYPE_STUB_DECL (record_type));
}

/* Append PARALLEL_TYPE on the chain of parallel types for decl.  */

void
add_parallel_type (tree decl, tree parallel_type)
{
  tree d = decl;

  while (DECL_PARALLEL_TYPE (d))
    d = TYPE_STUB_DECL (DECL_PARALLEL_TYPE (d));

  SET_DECL_PARALLEL_TYPE (d, parallel_type);
}

/* Utility function of above to merge LAST_SIZE, the previous size of a record
   with FIRST_BIT and SIZE that describe a field.  SPECIAL is true if this
   represents a QUAL_UNION_TYPE in which case we must look for COND_EXPRs and
   replace a value of zero with the old size.  If HAS_REP is true, we take the
   MAX of the end position of this field with LAST_SIZE.  In all other cases,
   we use FIRST_BIT plus SIZE.  Return an expression for the size.  */

static tree
merge_sizes (tree last_size, tree first_bit, tree size, bool special,
             bool has_rep)
{
  tree type = TREE_TYPE (last_size);
  tree new_size;

  if (!special || TREE_CODE (size) != COND_EXPR)
    {
      new_size = size_binop (PLUS_EXPR, first_bit, size);
      if (has_rep)
        new_size = size_binop (MAX_EXPR, last_size, new_size);
    }

  else
    new_size = fold_build3 (COND_EXPR, type, TREE_OPERAND (size, 0),
                            integer_zerop (TREE_OPERAND (size, 1))
                            ? last_size : merge_sizes (last_size, first_bit,
                                                       TREE_OPERAND (size, 1),
                                                       1, has_rep),
                            integer_zerop (TREE_OPERAND (size, 2))
                            ? last_size : merge_sizes (last_size, first_bit,
                                                       TREE_OPERAND (size, 2),
                                                       1, has_rep));

  /* We don't need any NON_LVALUE_EXPRs and they can confuse us (especially
     when fed through substitute_in_expr) into thinking that a constant
     size is not constant.  */
  while (TREE_CODE (new_size) == NON_LVALUE_EXPR)
    new_size = TREE_OPERAND (new_size, 0);

  return new_size;
}
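
/* A small worked example, for illustration only: for a field of a plain
   RECORD_TYPE at FIRST_BIT = 64 with SIZE = 32 and a previous LAST_SIZE of
   64 bits, the non-rep'ed case yields 64 + 32 = 96; with HAS_REP true the
   result is MAX (64, 96) = 96, which only differs from the plain sum when a
   rep clause placed an earlier field beyond the end of this one.  */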

/* Utility function of above to see if OP0 and OP1, both of SIZETYPE, are
   related by the addition of a constant.  Return that constant if so.  */

static tree
compute_related_constant (tree op0, tree op1)
{
  tree op0_var, op1_var;
  tree op0_con = split_plus (op0, &op0_var);
  tree op1_con = split_plus (op1, &op1_var);
  tree result = size_binop (MINUS_EXPR, op0_con, op1_con);

  if (operand_equal_p (op0_var, op1_var, 0))
    return result;
  else if (operand_equal_p (op0, size_binop (PLUS_EXPR, op1_var, result), 0))
    return result;
  else
    return 0;
}

/* Utility function of above to split a tree OP which may be a sum, into a
   constant part, which is returned, and a variable part, which is stored
   in *PVAR.  *PVAR may be bitsize_zero_node.  All operations must be of
   bitsizetype.  */

static tree
split_plus (tree in, tree *pvar)
{
  /* Strip conversions in order to ease the tree traversal and maximize the
     potential for constant or plus/minus discovery.  We need to be careful
     to always return and set *pvar to bitsizetype trees, but it's worth
     the effort.  */
  in = remove_conversions (in, false);

  *pvar = convert (bitsizetype, in);

  if (TREE_CODE (in) == INTEGER_CST)
    {
      *pvar = bitsize_zero_node;
      return convert (bitsizetype, in);
    }
  else if (TREE_CODE (in) == PLUS_EXPR || TREE_CODE (in) == MINUS_EXPR)
    {
      tree lhs_var, rhs_var;
      tree lhs_con = split_plus (TREE_OPERAND (in, 0), &lhs_var);
      tree rhs_con = split_plus (TREE_OPERAND (in, 1), &rhs_var);

      if (lhs_var == TREE_OPERAND (in, 0)
          && rhs_var == TREE_OPERAND (in, 1))
        return bitsize_zero_node;

      *pvar = size_binop (TREE_CODE (in), lhs_var, rhs_var);
      return size_binop (TREE_CODE (in), lhs_con, rhs_con);
    }
  else
    return bitsize_zero_node;
}
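
/* Worked example, for illustration only: if OP0 is N + 96 and OP1 is N + 32
   (both bitsizetype expressions), split_plus returns constant parts 96 and
   32 with the same variable part N, so compute_related_constant returns the
   constant 64.  If the variable parts cannot be matched up, it returns 0 to
   signal that the two positions are not related by a constant.  */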

/* Return a FUNCTION_TYPE node.  RETURN_TYPE is the type returned by the
   subprogram.  If it is VOID_TYPE, then we are dealing with a procedure,
   otherwise we are dealing with a function.  PARAM_DECL_LIST is a list of
   PARM_DECL nodes that are the subprogram parameters.  CICO_LIST is the
   copy-in/copy-out list to be stored into the TYPE_CICO_LIST field.
   RETURN_UNCONSTRAINED_P is true if the function returns an unconstrained
   object.  RETURN_BY_DIRECT_REF_P is true if the function returns by direct
   reference.  RETURN_BY_INVISI_REF_P is true if the function returns by
   invisible reference.  */

tree
create_subprog_type (tree return_type, tree param_decl_list, tree cico_list,
                     bool return_unconstrained_p, bool return_by_direct_ref_p,
                     bool return_by_invisi_ref_p)
{
  /* A list of the data type nodes of the subprogram formal parameters.
     This list is generated by traversing the input list of PARM_DECL
     nodes.  */
  VEC(tree,gc) *param_type_list = NULL;
  tree t, type;

  for (t = param_decl_list; t; t = DECL_CHAIN (t))
    VEC_safe_push (tree, gc, param_type_list, TREE_TYPE (t));

  type = build_function_type_vec (return_type, param_type_list);

  /* TYPE may have been shared since GCC hashes types.  If it has a different
     CICO_LIST, make a copy.  Likewise for the various flags.  */
  if (!fntype_same_flags_p (type, cico_list, return_unconstrained_p,
                            return_by_direct_ref_p, return_by_invisi_ref_p))
    {
      type = copy_type (type);
      TYPE_CI_CO_LIST (type) = cico_list;
      TYPE_RETURN_UNCONSTRAINED_P (type) = return_unconstrained_p;
      TYPE_RETURN_BY_DIRECT_REF_P (type) = return_by_direct_ref_p;
      TREE_ADDRESSABLE (type) = return_by_invisi_ref_p;
    }

  return type;
}

/* Return a copy of TYPE but safe to modify in any way.  */

tree
copy_type (tree type)
{
  tree new_type = copy_node (type);

  /* Unshare the language-specific data.  */
  if (TYPE_LANG_SPECIFIC (type))
    {
      TYPE_LANG_SPECIFIC (new_type) = NULL;
      SET_TYPE_LANG_SPECIFIC (new_type, GET_TYPE_LANG_SPECIFIC (type));
    }

  /* And the contents of the language-specific slot if needed.  */
  if ((INTEGRAL_TYPE_P (type) || TREE_CODE (type) == REAL_TYPE)
      && TYPE_RM_VALUES (type))
    {
      TYPE_RM_VALUES (new_type) = NULL_TREE;
      SET_TYPE_RM_SIZE (new_type, TYPE_RM_SIZE (type));
      SET_TYPE_RM_MIN_VALUE (new_type, TYPE_RM_MIN_VALUE (type));
      SET_TYPE_RM_MAX_VALUE (new_type, TYPE_RM_MAX_VALUE (type));
    }

  /* copy_node clears this field instead of copying it, because it is
     aliased with TREE_CHAIN.  */
  TYPE_STUB_DECL (new_type) = TYPE_STUB_DECL (type);

  TYPE_POINTER_TO (new_type) = 0;
  TYPE_REFERENCE_TO (new_type) = 0;
  TYPE_MAIN_VARIANT (new_type) = new_type;
  TYPE_NEXT_VARIANT (new_type) = 0;

  return new_type;
}

/* Return a subtype of sizetype with range MIN to MAX and whose
   TYPE_INDEX_TYPE is INDEX.  GNAT_NODE is used for the position
   of the associated TYPE_DECL.  */

tree
create_index_type (tree min, tree max, tree index, Node_Id gnat_node)
{
  /* First build a type for the desired range.  */
  tree type = build_nonshared_range_type (sizetype, min, max);

  /* Then set the index type.  */
  SET_TYPE_INDEX_TYPE (type, index);
  create_type_decl (NULL_TREE, type, NULL, true, false, gnat_node);

  return type;
}

/* Return a subtype of TYPE with range MIN to MAX.  If TYPE is NULL,
   sizetype is used.  */

tree
create_range_type (tree type, tree min, tree max)
{
  tree range_type;

  if (type == NULL_TREE)
    type = sizetype;

  /* First build a type with the base range.  */
  range_type = build_nonshared_range_type (type, TYPE_MIN_VALUE (type),
                                                 TYPE_MAX_VALUE (type));

  /* Then set the actual range.  */
  SET_TYPE_RM_MIN_VALUE (range_type, convert (type, min));
  SET_TYPE_RM_MAX_VALUE (range_type, convert (type, max));

  return range_type;
}
1296
 
1297
/* Return a TYPE_DECL node suitable for the TYPE_STUB_DECL field of a type.
1298
   TYPE_NAME gives the name of the type and TYPE is a ..._TYPE node giving
1299
   its data type.  */
1300
 
1301
tree
1302
create_type_stub_decl (tree type_name, tree type)
1303
{
1304
  /* Using a named TYPE_DECL ensures that a type name marker is emitted in
1305
     STABS while setting DECL_ARTIFICIAL ensures that no DW_TAG_typedef is
1306
     emitted in DWARF.  */
1307
  tree type_decl = build_decl (input_location,
1308
                               TYPE_DECL, type_name, type);
1309
  DECL_ARTIFICIAL (type_decl) = 1;
1310
  TYPE_ARTIFICIAL (type) = 1;
1311
  return type_decl;
1312
}
1313
 
1314
/* Return a TYPE_DECL node.  TYPE_NAME gives the name of the type and TYPE
1315
   is a ..._TYPE node giving its data type.  ARTIFICIAL_P is true if this
1316
   is a declaration that was generated by the compiler.  DEBUG_INFO_P is
1317
   true if we need to write debug information about this type.  GNAT_NODE
1318
   is used for the position of the decl.  */
1319
 
1320
tree
1321
create_type_decl (tree type_name, tree type, struct attrib *attr_list,
1322
                  bool artificial_p, bool debug_info_p, Node_Id gnat_node)
1323
{
1324
  enum tree_code code = TREE_CODE (type);
1325
  bool named = TYPE_NAME (type) && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL;
1326
  tree type_decl;
1327
 
1328
  /* Only the builtin TYPE_STUB_DECL should be used for dummy types.  */
1329
  gcc_assert (!TYPE_IS_DUMMY_P (type));
1330
 
1331
  /* If the type hasn't been named yet, we're naming it; preserve an existing
1332
     TYPE_STUB_DECL that has been attached to it for some purpose.  */
1333
  if (!named && TYPE_STUB_DECL (type))
1334
    {
1335
      type_decl = TYPE_STUB_DECL (type);
1336
      DECL_NAME (type_decl) = type_name;
1337
    }
1338
  else
1339
    type_decl = build_decl (input_location,
1340
                            TYPE_DECL, type_name, type);
1341
 
1342
  DECL_ARTIFICIAL (type_decl) = artificial_p;
1343
  TYPE_ARTIFICIAL (type) = artificial_p;
1344
 
1345
  /* Add this decl to the current binding level.  */
1346
  gnat_pushdecl (type_decl, gnat_node);
1347
 
1348
  process_attributes (type_decl, attr_list);
1349
 
1350
  /* If we're naming the type, equate the TYPE_STUB_DECL to the name.
1351
     This causes the name to be also viewed as a "tag" by the debug
1352
     back-end, with the advantage that no DW_TAG_typedef is emitted
1353
     for artificial "tagged" types in DWARF.  */
1354
  if (!named)
1355
    TYPE_STUB_DECL (type) = type_decl;
1356
 
1357
  /* Pass the type declaration to the debug back-end unless this is an
1358
     UNCONSTRAINED_ARRAY_TYPE that the back-end does not support, or a
1359
     type for which debugging information was not requested, or else an
1360
     ENUMERAL_TYPE or RECORD_TYPE (except for fat pointers) which are
1361
     handled separately.  And do not pass dummy types either.  */
1362
  if (code == UNCONSTRAINED_ARRAY_TYPE || !debug_info_p)
1363
    DECL_IGNORED_P (type_decl) = 1;
1364
  else if (code != ENUMERAL_TYPE
1365
           && (code != RECORD_TYPE || TYPE_FAT_POINTER_P (type))
1366
           && !((code == POINTER_TYPE || code == REFERENCE_TYPE)
1367
                && TYPE_IS_DUMMY_P (TREE_TYPE (type)))
1368
           && !(code == RECORD_TYPE
1369
                && TYPE_IS_DUMMY_P
1370
                   (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (type))))))
1371
    rest_of_type_decl_compilation (type_decl);
1372
 
1373
  return type_decl;
1374
}
1375
 
1376
/* Return a VAR_DECL or CONST_DECL node.
1377
 
1378
   VAR_NAME gives the name of the variable.  ASM_NAME is its assembler name
1379
   (if provided).  TYPE is its data type (a GCC ..._TYPE node).  VAR_INIT is
1380
   the GCC tree for an optional initial expression; NULL_TREE if none.
1381
 
1382
   CONST_FLAG is true if this variable is constant, in which case we might
1383
   return a CONST_DECL node unless CONST_DECL_ALLOWED_P is false.
1384
 
1385
   PUBLIC_FLAG is true if this is for a reference to a public entity or for a
1386
   definition to be made visible outside of the current compilation unit, for
1387
   instance variable definitions in a package specification.
1388
 
1389
   EXTERN_FLAG is true when processing an external variable declaration (as
1390
   opposed to a definition: no storage is to be allocated for the variable).
1391
 
1392
   STATIC_FLAG is only relevant when not at top level.  In that case
1393
   it indicates whether to always allocate storage for the variable.
1394
 
1395
   GNAT_NODE is used for the position of the decl.  */
1396
 
1397
tree
1398
create_var_decl_1 (tree var_name, tree asm_name, tree type, tree var_init,
1399
                   bool const_flag, bool public_flag, bool extern_flag,
1400
                   bool static_flag, bool const_decl_allowed_p,
1401
                   struct attrib *attr_list, Node_Id gnat_node)
1402
{
1403
  /* Whether the initializer is a constant initializer.  At the global level
1404
     or for an external object or an object to be allocated in static memory,
1405
     we check that it is a valid constant expression for use in initializing
1406
     a static variable; otherwise, we only check that it is constant.  */
1407
  bool init_const
1408
    = (var_init != 0
1409
       && gnat_types_compatible_p (type, TREE_TYPE (var_init))
1410
       && (global_bindings_p () || extern_flag || static_flag
1411
           ? initializer_constant_valid_p (var_init, TREE_TYPE (var_init)) != 0
1412
           : TREE_CONSTANT (var_init)));
1413
 
1414
  /* Whether we will make TREE_CONSTANT the DECL we produce here, in which
1415
     case the initializer may be used in lieu of the DECL node (as done in
1416
     Identifier_to_gnu).  This is useful to avoid the need for elaboration
1417
     code when an identifier for which such a decl is made is in turn used as
1418
     an initializer.  We used to rely on CONST vs VAR_DECL for this purpose,
1419
     but extra constraints apply to this choice (see below) and are not
1420
     relevant to the distinction we wish to make. */
1421
  bool constant_p = const_flag && init_const;
1422
 
1423
  /* The actual DECL node.  CONST_DECL was initially intended for enumerals
1424
     and may be used for scalars in general but not for aggregates.  */
1425
  tree var_decl
1426
    = build_decl (input_location,
1427
                  (constant_p && const_decl_allowed_p
1428
                   && !AGGREGATE_TYPE_P (type)) ? CONST_DECL : VAR_DECL,
1429
                  var_name, type);
1430
 
1431
  /* If this is external, throw away any initializations (they will be done
1432
     elsewhere) unless this is a constant for which we would like to remain
1433
     able to get the initializer.  If we are defining a global here, leave a
1434
     constant initialization and save any variable elaborations for the
1435
     elaboration routine.  If we are just annotating types, throw away the
1436
     initialization if it isn't a constant.  */
1437
  if ((extern_flag && !constant_p)
1438
      || (type_annotate_only && var_init && !TREE_CONSTANT (var_init)))
1439
    var_init = NULL_TREE;
1440
 
1441
  /* At the global level, an initializer requiring code to be generated
1442
     produces elaboration statements.  Check that such statements are allowed,
1443
     that is, not violating a No_Elaboration_Code restriction.  */
1444
  if (global_bindings_p () && var_init != 0 && !init_const)
1445
    Check_Elaboration_Code_Allowed (gnat_node);
1446
 
1447
  DECL_INITIAL  (var_decl) = var_init;
1448
  TREE_READONLY (var_decl) = const_flag;
1449
  DECL_EXTERNAL (var_decl) = extern_flag;
1450
  TREE_PUBLIC   (var_decl) = public_flag || extern_flag;
1451
  TREE_CONSTANT (var_decl) = constant_p;
1452
  TREE_THIS_VOLATILE (var_decl) = TREE_SIDE_EFFECTS (var_decl)
1453
    = TYPE_VOLATILE (type);
1454
 
1455
  /* Ada doesn't feature Fortran-like COMMON variables so we shouldn't
1456
     try to fiddle with DECL_COMMON.  However, on platforms that don't
1457
     support global BSS sections, uninitialized global variables would
1458
     go in DATA instead, thus increasing the size of the executable.  */
1459
  if (!flag_no_common
1460
      && TREE_CODE (var_decl) == VAR_DECL
1461
      && TREE_PUBLIC (var_decl)
1462
      && !have_global_bss_p ())
1463
    DECL_COMMON (var_decl) = 1;
1464
 
1465
  /* At the global binding level, we need to allocate static storage for the
1466
     variable if it isn't external.  Otherwise, we allocate automatic storage
1467
     unless requested not to.  */
1468
  TREE_STATIC (var_decl)
1469
    = !extern_flag && (static_flag || global_bindings_p ());
1470
 
1471
  /* For an external constant whose initializer is not absolute, do not emit
1472
     debug info.  In DWARF this would mean a global relocation in a read-only
1473
     section which runs afoul of the PE-COFF run-time relocation mechanism.  */
1474
  if (extern_flag
1475
      && constant_p
1476
      && var_init
1477
      && initializer_constant_valid_p (var_init, TREE_TYPE (var_init))
1478
           != null_pointer_node)
1479
    DECL_IGNORED_P (var_decl) = 1;
1480
 
1481
  /* Add this decl to the current binding level.  */
1482
  gnat_pushdecl (var_decl, gnat_node);
1483
 
1484
  if (TREE_SIDE_EFFECTS (var_decl))
1485
    TREE_ADDRESSABLE (var_decl) = 1;
1486
 
1487
  if (TREE_CODE (var_decl) == VAR_DECL)
1488
    {
1489
      if (asm_name)
1490
        SET_DECL_ASSEMBLER_NAME (var_decl, asm_name);
1491
      process_attributes (var_decl, attr_list);
1492
      if (global_bindings_p ())
1493
        rest_of_decl_compilation (var_decl, true, 0);
1494
    }
1495
  else
1496
    expand_decl (var_decl);
1497
 
1498
  return var_decl;
1499
}
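/* A hypothetical usage sketch, not part of the original source; all names
   and values are made up and assume the usual gigi environment.  It creates
   a static library-level constant of integer_type_node.  */
#if 0
tree example_var
  = create_var_decl_1 (get_identifier ("example_constant"), NULL_TREE,
                       integer_type_node,
                       build_int_cst (integer_type_node, 42),
                       true,   /* const_flag */
                       false,  /* public_flag */
                       false,  /* extern_flag */
                       true,   /* static_flag */
                       true,   /* const_decl_allowed_p */
                       NULL, Empty);
#endif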
1500
 
1501
/* Return true if TYPE, an aggregate type, contains (or is) an array.  */
1502
 
1503
static bool
1504
aggregate_type_contains_array_p (tree type)
1505
{
1506
  switch (TREE_CODE (type))
1507
    {
1508
    case RECORD_TYPE:
1509
    case UNION_TYPE:
1510
    case QUAL_UNION_TYPE:
1511
      {
1512
        tree field;
1513
        for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
1514
          if (AGGREGATE_TYPE_P (TREE_TYPE (field))
1515
              && aggregate_type_contains_array_p (TREE_TYPE (field)))
1516
            return true;
1517
        return false;
1518
      }
1519
 
1520
    case ARRAY_TYPE:
1521
      return true;
1522
 
1523
    default:
1524
      gcc_unreachable ();
1525
    }
1526
}
1527
 
1528
/* Return a FIELD_DECL node.  FIELD_NAME is the field's name, FIELD_TYPE is
1529
   its type and RECORD_TYPE is the type of the enclosing record.  If SIZE is
1530
   nonzero, it is the specified size of the field.  If POS is nonzero, it is
1531
   the bit position.  PACKED is 1 if the enclosing record is packed, -1 if it
1532
   has Component_Alignment of Storage_Unit.  If ADDRESSABLE is nonzero, it
1533
   means we are allowed to take the address of the field; if it is negative,
1534
   we should not make a bitfield, which is used by make_aligning_type.  */
1535
 
1536
tree
1537
create_field_decl (tree field_name, tree field_type, tree record_type,
1538
                   tree size, tree pos, int packed, int addressable)
1539
{
1540
  tree field_decl = build_decl (input_location,
1541
                                FIELD_DECL, field_name, field_type);
1542
 
1543
  DECL_CONTEXT (field_decl) = record_type;
1544
  TREE_READONLY (field_decl) = TYPE_READONLY (field_type);
1545
 
1546
  /* If FIELD_TYPE is BLKmode, we must ensure this is aligned to at least a
1547
     byte boundary since GCC cannot handle less-aligned BLKmode bitfields.
1548
     Likewise for an aggregate without specified position that contains an
1549
     array, because in this case slices of variable length of this array
1550
     must be handled by GCC and variable-sized objects need to be aligned
1551
     to at least a byte boundary.  */
1552
  if (packed && (TYPE_MODE (field_type) == BLKmode
1553
                 || (!pos
1554
                     && AGGREGATE_TYPE_P (field_type)
1555
                     && aggregate_type_contains_array_p (field_type))))
1556
    DECL_ALIGN (field_decl) = BITS_PER_UNIT;
1557
 
1558
  /* If a size is specified, use it.  Otherwise, if the record type is packed,
1559
     compute a size to use, which may differ from the object's natural size.
1560
     We always set a size in this case to trigger the checks for bitfield
1561
     creation below, which is typically required when no position has been
1562
     specified.  */
1563
  if (size)
1564
    size = convert (bitsizetype, size);
1565
  else if (packed == 1)
1566
    {
1567
      size = rm_size (field_type);
1568
      if (TYPE_MODE (field_type) == BLKmode)
1569
        size = round_up (size, BITS_PER_UNIT);
1570
    }
1571
 
1572
  /* If ADDRESSABLE allows it, we make a bitfield when a size is specified,
1573
     for two reasons: first, if the size differs from the natural size;
1574
     second, if the alignment is insufficient.  There are a number of
1575
     ways the latter can be true.
1576
 
1577
     We never make a bitfield if the type of the field has a nonconstant size,
1578
     because no such entity requiring bitfield operations should reach here.
1579
 
1580
     We do *preventively* make a bitfield when there might be the need for it
1581
     but we don't have all the necessary information to decide, as is the case
1582
     of a field with no specified position in a packed record.
1583
 
1584
     We also don't look at STRICT_ALIGNMENT here, and rely on later processing
1585
     in layout_decl or finish_record_type to clear the bit_field indication if
1586
     it is in fact not needed.  */
1587
  if (addressable >= 0
1588
      && size
1589
      && TREE_CODE (size) == INTEGER_CST
1590
      && TREE_CODE (TYPE_SIZE (field_type)) == INTEGER_CST
1591
      && (!tree_int_cst_equal (size, TYPE_SIZE (field_type))
1592
          || (pos && !value_factor_p (pos, TYPE_ALIGN (field_type)))
1593
          || packed
1594
          || (TYPE_ALIGN (record_type) != 0
1595
              && TYPE_ALIGN (record_type) < TYPE_ALIGN (field_type))))
1596
    {
1597
      DECL_BIT_FIELD (field_decl) = 1;
1598
      DECL_SIZE (field_decl) = size;
1599
      if (!packed && !pos)
1600
        {
1601
          if (TYPE_ALIGN (record_type) != 0
1602
              && TYPE_ALIGN (record_type) < TYPE_ALIGN (field_type))
1603
            DECL_ALIGN (field_decl) = TYPE_ALIGN (record_type);
1604
          else
1605
            DECL_ALIGN (field_decl) = TYPE_ALIGN (field_type);
1606
        }
1607
    }
1608
 
1609
  DECL_PACKED (field_decl) = pos ? DECL_BIT_FIELD (field_decl) : packed;
1610
 
1611
  /* Bump the alignment if need be, either for bitfield/packing purposes or
1612
     to satisfy the type requirements if no such consideration applies.  When
1613
     we get the alignment from the type, indicate if this is from an explicit
1614
     user request, which prevents stor-layout from lowering it later on.  */
1615
  {
1616
    unsigned int bit_align
1617
      = (DECL_BIT_FIELD (field_decl) ? 1
1618
         : packed && TYPE_MODE (field_type) != BLKmode ? BITS_PER_UNIT : 0);
1619
 
1620
    if (bit_align > DECL_ALIGN (field_decl))
1621
      DECL_ALIGN (field_decl) = bit_align;
1622
    else if (!bit_align && TYPE_ALIGN (field_type) > DECL_ALIGN (field_decl))
1623
      {
1624
        DECL_ALIGN (field_decl) = TYPE_ALIGN (field_type);
1625
        DECL_USER_ALIGN (field_decl) = TYPE_USER_ALIGN (field_type);
1626
      }
1627
  }
1628
 
1629
  if (pos)
1630
    {
1631
      /* We need to pass in the alignment the DECL is known to have.
1632
         This is the lowest-order bit set in POS, but no more than
1633
         the alignment of the record, if one is specified.  Note
1634
         that an alignment of 0 is taken as infinite.  */
1635
      unsigned int known_align;
1636
 
1637
      if (host_integerp (pos, 1))
1638
        known_align = tree_low_cst (pos, 1) & - tree_low_cst (pos, 1);
1639
      else
1640
        known_align = BITS_PER_UNIT;
1641
 
1642
      if (TYPE_ALIGN (record_type)
1643
          && (known_align == 0 || known_align > TYPE_ALIGN (record_type)))
1644
        known_align = TYPE_ALIGN (record_type);
1645
 
1646
      layout_decl (field_decl, known_align);
1647
      SET_DECL_OFFSET_ALIGN (field_decl,
1648
                             host_integerp (pos, 1) ? BIGGEST_ALIGNMENT
1649
                             : BITS_PER_UNIT);
1650
      pos_from_bit (&DECL_FIELD_OFFSET (field_decl),
1651
                    &DECL_FIELD_BIT_OFFSET (field_decl),
1652
                    DECL_OFFSET_ALIGN (field_decl), pos);
1653
    }
1654
 
1655
  /* In addition to what our caller says, claim the field is addressable if we
1656
     know that its type is not suitable.
1657
 
1658
     The field may also be "technically" nonaddressable, meaning that even if
1659
     we attempt to take the field's address we will actually get the address
1660
     of a copy.  This is the case for true bitfields, but the DECL_BIT_FIELD
1661
     value we have at this point is not accurate enough, so we don't account
1662
     for this here and let finish_record_type decide.  */
1663
  if (!addressable && !type_for_nonaliased_component_p (field_type))
1664
    addressable = 1;
1665
 
1666
  DECL_NONADDRESSABLE_P (field_decl) = !addressable;
1667
 
1668
  return field_decl;
1669
}
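/* A hypothetical usage sketch, not part of the original source; the record
   type and field name are made up.  It adds an 8-bit field to a packed
   record, leaving the bitfield decision to the checks above.  */
#if 0
tree example_field
  = create_field_decl (get_identifier ("example_flag"),
                       gnat_type_for_size (8, 1),  /* 8-bit unsigned type */
                       example_record_type,        /* enclosing RECORD_TYPE */
                       bitsize_int (8),            /* explicit size in bits */
                       NULL_TREE,                  /* no explicit position */
                       1,                          /* packed */
                       0);                         /* addressable */
#endif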
1670
 
1671
/* Return a PARM_DECL node.  PARAM_NAME is the name of the parameter and
1672
   PARAM_TYPE is its type.  READONLY is true if the parameter is readonly
1673
   (either an In parameter or an address of a pass-by-ref parameter).  */
1674
 
1675
tree
1676
create_param_decl (tree param_name, tree param_type, bool readonly)
1677
{
1678
  tree param_decl = build_decl (input_location,
1679
                                PARM_DECL, param_name, param_type);
1680
 
1681
  /* Honor TARGET_PROMOTE_PROTOTYPES like the C compiler, as not doing so
1682
     can lead to various ABI violations.  */
1683
  if (targetm.calls.promote_prototypes (NULL_TREE)
1684
      && INTEGRAL_TYPE_P (param_type)
1685
      && TYPE_PRECISION (param_type) < TYPE_PRECISION (integer_type_node))
1686
    {
1687
      /* We have to be careful about biased types here.  Make a subtype
1688
         of integer_type_node with the proper biasing.  */
1689
      if (TREE_CODE (param_type) == INTEGER_TYPE
1690
          && TYPE_BIASED_REPRESENTATION_P (param_type))
1691
        {
1692
          tree subtype
1693
            = make_unsigned_type (TYPE_PRECISION (integer_type_node));
1694
          TREE_TYPE (subtype) = integer_type_node;
1695
          TYPE_BIASED_REPRESENTATION_P (subtype) = 1;
1696
          SET_TYPE_RM_MIN_VALUE (subtype, TYPE_MIN_VALUE (param_type));
1697
          SET_TYPE_RM_MAX_VALUE (subtype, TYPE_MAX_VALUE (param_type));
1698
          param_type = subtype;
1699
        }
1700
      else
1701
        param_type = integer_type_node;
1702
    }
1703
 
1704
  DECL_ARG_TYPE (param_decl) = param_type;
1705
  TREE_READONLY (param_decl) = readonly;
1706
  return param_decl;
1707
}
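/* A hypothetical usage sketch, not part of the original source; the name is
   made up.  For an 8-bit unsigned In parameter, the promotion logic above
   widens DECL_ARG_TYPE to integer_type_node on targets that request it.  */
#if 0
tree example_param
  = create_param_decl (get_identifier ("example_arg"),
                       gnat_type_for_size (8, 1), true /* readonly */);
#endif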
1708
 
1709
/* Given a DECL and ATTR_LIST, process the listed attributes.  */
1710
 
1711
static void
1712
process_attributes (tree decl, struct attrib *attr_list)
1713
{
1714
  for (; attr_list; attr_list = attr_list->next)
1715
    switch (attr_list->type)
1716
      {
1717
      case ATTR_MACHINE_ATTRIBUTE:
1718
        input_location = DECL_SOURCE_LOCATION (decl);
1719
        decl_attributes (&decl, tree_cons (attr_list->name, attr_list->args,
1720
                                           NULL_TREE),
1721
                         ATTR_FLAG_TYPE_IN_PLACE);
1722
        break;
1723
 
1724
      case ATTR_LINK_ALIAS:
1725
        if (! DECL_EXTERNAL (decl))
1726
          {
1727
            TREE_STATIC (decl) = 1;
1728
            assemble_alias (decl, attr_list->name);
1729
          }
1730
        break;
1731
 
1732
      case ATTR_WEAK_EXTERNAL:
1733
        if (SUPPORTS_WEAK)
1734
          declare_weak (decl);
1735
        else
1736
          post_error ("?weak declarations not supported on this target",
1737
                      attr_list->error_point);
1738
        break;
1739
 
1740
      case ATTR_LINK_SECTION:
1741
        if (targetm_common.have_named_sections)
1742
          {
1743
            DECL_SECTION_NAME (decl)
1744
              = build_string (IDENTIFIER_LENGTH (attr_list->name),
1745
                              IDENTIFIER_POINTER (attr_list->name));
1746
            DECL_COMMON (decl) = 0;
1747
          }
1748
        else
1749
          post_error ("?section attributes are not supported for this target",
1750
                      attr_list->error_point);
1751
        break;
1752
 
1753
      case ATTR_LINK_CONSTRUCTOR:
1754
        DECL_STATIC_CONSTRUCTOR (decl) = 1;
1755
        TREE_USED (decl) = 1;
1756
        break;
1757
 
1758
      case ATTR_LINK_DESTRUCTOR:
1759
        DECL_STATIC_DESTRUCTOR (decl) = 1;
1760
        TREE_USED (decl) = 1;
1761
        break;
1762
 
1763
      case ATTR_THREAD_LOCAL_STORAGE:
1764
        DECL_TLS_MODEL (decl) = decl_default_tls_model (decl);
1765
        DECL_COMMON (decl) = 0;
1766
        break;
1767
      }
1768
}
1769
 
1770
/* Record DECL as a global renaming pointer.  */
1771
 
1772
void
1773
record_global_renaming_pointer (tree decl)
1774
{
1775
  gcc_assert (!DECL_LOOP_PARM_P (decl) && DECL_RENAMED_OBJECT (decl));
1776
  VEC_safe_push (tree, gc, global_renaming_pointers, decl);
1777
}
1778
 
1779
/* Invalidate the global renaming pointers.   */
1780
 
1781
void
1782
invalidate_global_renaming_pointers (void)
1783
{
1784
  unsigned int i;
1785
  tree iter;
1786
 
1787
  FOR_EACH_VEC_ELT (tree, global_renaming_pointers, i, iter)
1788
    SET_DECL_RENAMED_OBJECT (iter, NULL_TREE);
1789
 
1790
  VEC_free (tree, gc, global_renaming_pointers);
1791
}
1792
 
1793
/* Return true if VALUE is known to be a multiple of FACTOR, which must be
1794
   a power of 2. */
1795
 
1796
bool
1797
value_factor_p (tree value, HOST_WIDE_INT factor)
1798
{
1799
  if (host_integerp (value, 1))
1800
    return tree_low_cst (value, 1) % factor == 0;
1801
 
1802
  if (TREE_CODE (value) == MULT_EXPR)
1803
    return (value_factor_p (TREE_OPERAND (value, 0), factor)
1804
            || value_factor_p (TREE_OPERAND (value, 1), factor));
1805
 
1806
  return false;
1807
}
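/* A hypothetical usage sketch, not part of the original source; POS stands
   for some bit-position tree.  It tests whether the position is known to
   fall on a byte boundary.  */
#if 0
bool byte_aligned = value_factor_p (pos, BITS_PER_UNIT);
#endif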
1808
 
1809
/* Given two consecutive field decls PREV_FIELD and CURR_FIELD, return true
1810
   unless we can prove these two fields are laid out in such a way that no gap
1811
   exists between the end of PREV_FIELD and the beginning of CURR_FIELD.  OFFSET
1812
   is the distance in bits between the end of PREV_FIELD and the starting
1813
   position of CURR_FIELD.  It is ignored if null.  */
1814
 
1815
static bool
1816
potential_alignment_gap (tree prev_field, tree curr_field, tree offset)
1817
{
1818
  /* If this is the first field of the record, there cannot be any gap */
1819
  if (!prev_field)
1820
    return false;
1821
 
1822
  /* If the previous field is a union type, then return False: The only
1823
     time when such a field is not the last field of the record is when
1824
     there are other components at fixed positions after it (meaning there
1825
     was a rep clause for every field), in which case we don't want the
1826
     alignment constraint to override them. */
1827
  if (TREE_CODE (TREE_TYPE (prev_field)) == QUAL_UNION_TYPE)
1828
    return false;
1829
 
1830
  /* If the distance between the end of prev_field and the beginning of
1831
     curr_field is constant, then there is a gap if the value of this
1832
     constant is not null. */
1833
  if (offset && host_integerp (offset, 1))
1834
    return !integer_zerop (offset);
1835
 
1836
  /* If the size and position of the previous field are constant,
1837
     then check the sum of this size and position. There will be a gap
1838
     iff it is not multiple of the current field alignment. */
1839
  if (host_integerp (DECL_SIZE (prev_field), 1)
1840
      && host_integerp (bit_position (prev_field), 1))
1841
    return ((tree_low_cst (bit_position (prev_field), 1)
1842
             + tree_low_cst (DECL_SIZE (prev_field), 1))
1843
            % DECL_ALIGN (curr_field) != 0);
1844
 
1845
  /* If both the position and size of the previous field are multiples
1846
     of the current field alignment, there cannot be any gap. */
1847
  if (value_factor_p (bit_position (prev_field), DECL_ALIGN (curr_field))
1848
      && value_factor_p (DECL_SIZE (prev_field), DECL_ALIGN (curr_field)))
1849
    return false;
1850
 
1851
  /* Fallback: return that there may be a potential gap.  */
1852
  return true;
1853
}
1854
 
1855
/* Return a LABEL_DECL with LABEL_NAME.  GNAT_NODE is used for the position
1856
   of the decl.  */
1857
 
1858
tree
1859
create_label_decl (tree label_name, Node_Id gnat_node)
1860
{
1861
  tree label_decl
1862
    = build_decl (input_location, LABEL_DECL, label_name, void_type_node);
1863
 
1864
  DECL_MODE (label_decl) = VOIDmode;
1865
 
1866
  /* Add this decl to the current binding level.  */
1867
  gnat_pushdecl (label_decl, gnat_node);
1868
 
1869
  return label_decl;
1870
}
1871
 
1872
/* Return a FUNCTION_DECL node.  SUBPROG_NAME is the name of the subprogram,
1873
   ASM_NAME is its assembler name, SUBPROG_TYPE is its type (a FUNCTION_TYPE
1874
   node), PARAM_DECL_LIST is the list of the subprogram arguments (a list of
1875
   PARM_DECL nodes chained through the DECL_CHAIN field).
1876
 
1877
   INLINE_FLAG, PUBLIC_FLAG, EXTERN_FLAG, ARTIFICIAL_FLAG and ATTR_LIST are
1878
   used to set the appropriate fields in the FUNCTION_DECL.  GNAT_NODE is
1879
   used for the position of the decl.  */
1880
 
1881
tree
1882
create_subprog_decl (tree subprog_name, tree asm_name, tree subprog_type,
1883
                     tree param_decl_list, bool inline_flag, bool public_flag,
1884
                     bool extern_flag, bool artificial_flag,
1885
                     struct attrib *attr_list, Node_Id gnat_node)
1886
{
1887
  tree subprog_decl = build_decl (input_location, FUNCTION_DECL, subprog_name,
1888
                                  subprog_type);
1889
  tree result_decl = build_decl (input_location, RESULT_DECL, NULL_TREE,
1890
                                 TREE_TYPE (subprog_type));
1891
  DECL_ARGUMENTS (subprog_decl) = param_decl_list;
1892
 
1893
  /* If this is a non-inline function nested inside an inlined external
1894
     function, we cannot honor both requests without cloning the nested
1895
     function in the current unit since it is private to the other unit.
1896
     We could inline the nested function as well but it's probably better
1897
     to err on the side of too little inlining.  */
1898
  if (!inline_flag
1899
      && !public_flag
1900
      && current_function_decl
1901
      && DECL_DECLARED_INLINE_P (current_function_decl)
1902
      && DECL_EXTERNAL (current_function_decl))
1903
    DECL_DECLARED_INLINE_P (current_function_decl) = 0;
1904
 
1905
  DECL_ARTIFICIAL (subprog_decl) = artificial_flag;
1906
  DECL_EXTERNAL (subprog_decl) = extern_flag;
1907
  DECL_DECLARED_INLINE_P (subprog_decl) = inline_flag;
1908
  DECL_NO_INLINE_WARNING_P (subprog_decl) = inline_flag && artificial_flag;
1909
 
1910
  TREE_PUBLIC (subprog_decl) = public_flag;
1911
  TREE_READONLY (subprog_decl) = TYPE_READONLY (subprog_type);
1912
  TREE_THIS_VOLATILE (subprog_decl) = TYPE_VOLATILE (subprog_type);
1913
  TREE_SIDE_EFFECTS (subprog_decl) = TYPE_VOLATILE (subprog_type);
1914
 
1915
  DECL_ARTIFICIAL (result_decl) = 1;
1916
  DECL_IGNORED_P (result_decl) = 1;
1917
  DECL_BY_REFERENCE (result_decl) = TREE_ADDRESSABLE (subprog_type);
1918
  DECL_RESULT (subprog_decl) = result_decl;
1919
 
1920
  if (asm_name)
1921
    {
1922
      SET_DECL_ASSEMBLER_NAME (subprog_decl, asm_name);
1923
 
1924
      /* The expand_main_function circuitry expects "main_identifier_node" to
1925
         designate the DECL_NAME of the 'main' entry point, in turn expected
1926
         to be declared as the "main" function literally by default.  Ada
1927
         program entry points are typically declared with a different name
1928
         within the binder generated file, exported as 'main' to satisfy the
1929
         system expectations.  Force main_identifier_node in this case.  */
1930
      if (asm_name == main_identifier_node)
1931
        DECL_NAME (subprog_decl) = main_identifier_node;
1932
    }
1933
 
1934
  /* Add this decl to the current binding level.  */
1935
  gnat_pushdecl (subprog_decl, gnat_node);
1936
 
1937
  process_attributes (subprog_decl, attr_list);
1938
 
1939
  /* Output the assembler code and/or RTL for the declaration.  */
1940
  rest_of_decl_compilation (subprog_decl, global_bindings_p (), 0);
1941
 
1942
  return subprog_decl;
1943
}
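/* A hypothetical usage sketch, not part of the original source; the names
   are made up.  It declares an imported, parameterless procedure using a
   FUNCTION_TYPE built with the standard build_function_type_list.  */
#if 0
tree example_fntype = build_function_type_list (void_type_node, NULL_TREE);
tree example_subprog
  = create_subprog_decl (get_identifier ("example_proc"),
                         get_identifier ("example_proc"),
                         example_fntype, NULL_TREE,
                         false,  /* inline_flag */
                         true,   /* public_flag */
                         true,   /* extern_flag */
                         false,  /* artificial_flag */
                         NULL, Empty);
#endif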
1944
 
1945
/* Set up the framework for generating code for SUBPROG_DECL, a subprogram
1946
   body.  This routine needs to be invoked before processing the declarations
1947
   appearing in the subprogram.  */
1948
 
1949
void
1950
begin_subprog_body (tree subprog_decl)
1951
{
1952
  tree param_decl;
1953
 
1954
  announce_function (subprog_decl);
1955
 
1956
  /* This function is being defined.  */
1957
  TREE_STATIC (subprog_decl) = 1;
1958
 
1959
  current_function_decl = subprog_decl;
1960
 
1961
  /* Enter a new binding level and show that all the parameters belong to
1962
     this function.  */
1963
  gnat_pushlevel ();
1964
 
1965
  for (param_decl = DECL_ARGUMENTS (subprog_decl); param_decl;
1966
       param_decl = DECL_CHAIN (param_decl))
1967
    DECL_CONTEXT (param_decl) = subprog_decl;
1968
 
1969
  make_decl_rtl (subprog_decl);
1970
}
1971
 
1972
/* Finish translating the current subprogram and set its BODY.  */
1973
 
1974
void
1975
end_subprog_body (tree body)
1976
{
1977
  tree fndecl = current_function_decl;
1978
 
1979
  /* Attach the BLOCK for this level to the function and pop the level.  */
1980
  BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
1981
  DECL_INITIAL (fndecl) = current_binding_level->block;
1982
  gnat_poplevel ();
1983
 
1984
  /* Mark the RESULT_DECL as being in this subprogram. */
1985
  DECL_CONTEXT (DECL_RESULT (fndecl)) = fndecl;
1986
 
1987
  /* The body should be a BIND_EXPR whose BLOCK is the top-level one.  */
1988
  if (TREE_CODE (body) == BIND_EXPR)
1989
    {
1990
      BLOCK_SUPERCONTEXT (BIND_EXPR_BLOCK (body)) = fndecl;
1991
      DECL_INITIAL (fndecl) = BIND_EXPR_BLOCK (body);
1992
    }
1993
 
1994
  DECL_SAVED_TREE (fndecl) = body;
1995
 
1996
  current_function_decl = decl_function_context (fndecl);
1997
}
1998
 
1999
/* Wrap up compilation of SUBPROG_DECL, a subprogram body.  */
2000
 
2001
void
2002
rest_of_subprog_body_compilation (tree subprog_decl)
2003
{
2004
  /* We cannot track the location of errors past this point.  */
2005
  error_gnat_node = Empty;
2006
 
2007
  /* If we're only annotating types, don't actually compile this function.  */
2008
  if (type_annotate_only)
2009
    return;
2010
 
2011
  /* Dump functions before gimplification.  */
2012
  dump_function (TDI_original, subprog_decl);
2013
 
2014
  /* ??? This special handling of nested functions is probably obsolete.  */
2015
  if (!decl_function_context (subprog_decl))
2016
    cgraph_finalize_function (subprog_decl, false);
2017
  else
2018
    /* Register this function with cgraph just far enough to get it
2019
       added to our parent's nested function list.  */
2020
    (void) cgraph_get_create_node (subprog_decl);
2021
}
2022
 
2023
tree
2024
gnat_builtin_function (tree decl)
2025
{
2026
  gnat_pushdecl (decl, Empty);
2027
  return decl;
2028
}
2029
 
2030
/* Return an integer type with the number of bits of precision given by
2031
   PRECISION.  UNSIGNEDP is nonzero if the type is unsigned; otherwise
2032
   it is a signed type.  */
2033
 
2034
tree
2035
gnat_type_for_size (unsigned precision, int unsignedp)
2036
{
2037
  tree t;
2038
  char type_name[20];
2039
 
2040
  if (precision <= 2 * MAX_BITS_PER_WORD
2041
      && signed_and_unsigned_types[precision][unsignedp])
2042
    return signed_and_unsigned_types[precision][unsignedp];
2043
 
2044
  if (unsignedp)
2045
    t = make_unsigned_type (precision);
2046
  else
2047
    t = make_signed_type (precision);
2048
 
2049
  if (precision <= 2 * MAX_BITS_PER_WORD)
2050
    signed_and_unsigned_types[precision][unsignedp] = t;
2051
 
2052
  if (!TYPE_NAME (t))
2053
    {
2054
      sprintf (type_name, "%sSIGNED_%d", unsignedp ? "UN" : "", precision);
2055
      TYPE_NAME (t) = get_identifier (type_name);
2056
    }
2057
 
2058
  return t;
2059
}
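/* A hypothetical usage sketch, not part of the original source: the cache
   above means repeated requests for the same precision and signedness
   return the same node.  */
#if 0
tree u16 = gnat_type_for_size (16, 1);  /* UNSIGNED_16 */
tree s32 = gnat_type_for_size (32, 0);  /* SIGNED_32 */
#endif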
2060
 
2061
/* Likewise for floating-point types.  */
2062
 
2063
static tree
2064
float_type_for_precision (int precision, enum machine_mode mode)
2065
{
2066
  tree t;
2067
  char type_name[20];
2068
 
2069
  if (float_types[(int) mode])
2070
    return float_types[(int) mode];
2071
 
2072
  float_types[(int) mode] = t = make_node (REAL_TYPE);
2073
  TYPE_PRECISION (t) = precision;
2074
  layout_type (t);
2075
 
2076
  gcc_assert (TYPE_MODE (t) == mode);
2077
  if (!TYPE_NAME (t))
2078
    {
2079
      sprintf (type_name, "FLOAT_%d", precision);
2080
      TYPE_NAME (t) = get_identifier (type_name);
2081
    }
2082
 
2083
  return t;
2084
}
2085
 
2086
/* Return a data type that has machine mode MODE.  UNSIGNEDP selects
2087
   an unsigned type; otherwise a signed type is returned.  */
2088
 
2089
tree
2090
gnat_type_for_mode (enum machine_mode mode, int unsignedp)
2091
{
2092
  if (mode == BLKmode)
2093
    return NULL_TREE;
2094
 
2095
  if (mode == VOIDmode)
2096
    return void_type_node;
2097
 
2098
  if (COMPLEX_MODE_P (mode))
2099
    return NULL_TREE;
2100
 
2101
  if (SCALAR_FLOAT_MODE_P (mode))
2102
    return float_type_for_precision (GET_MODE_PRECISION (mode), mode);
2103
 
2104
  if (SCALAR_INT_MODE_P (mode))
2105
    return gnat_type_for_size (GET_MODE_BITSIZE (mode), unsignedp);
2106
 
2107
  if (VECTOR_MODE_P (mode))
2108
    {
2109
      enum machine_mode inner_mode = GET_MODE_INNER (mode);
2110
      tree inner_type = gnat_type_for_mode (inner_mode, unsignedp);
2111
      if (inner_type)
2112
        return build_vector_type_for_mode (inner_type, mode);
2113
    }
2114
 
2115
  return NULL_TREE;
2116
}
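/* A hypothetical usage sketch, not part of the original source, assuming a
   target that provides SImode and SFmode: integer modes map through
   gnat_type_for_size and scalar float modes through
   float_type_for_precision.  */
#if 0
tree t_int32 = gnat_type_for_mode (SImode, 0);
tree t_float = gnat_type_for_mode (SFmode, 0);
#endif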
2117
 
2118
/* Return the unsigned version of a TYPE_NODE, a scalar type.  */
2119
 
2120
tree
2121
gnat_unsigned_type (tree type_node)
2122
{
2123
  tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 1);
2124
 
2125
  if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
2126
    {
2127
      type = copy_node (type);
2128
      TREE_TYPE (type) = type_node;
2129
    }
2130
  else if (TREE_TYPE (type_node)
2131
           && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
2132
           && TYPE_MODULAR_P (TREE_TYPE (type_node)))
2133
    {
2134
      type = copy_node (type);
2135
      TREE_TYPE (type) = TREE_TYPE (type_node);
2136
    }
2137
 
2138
  return type;
2139
}
2140
 
2141
/* Return the signed version of a TYPE_NODE, a scalar type.  */
2142
 
2143
tree
2144
gnat_signed_type (tree type_node)
2145
{
2146
  tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 0);
2147
 
2148
  if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
2149
    {
2150
      type = copy_node (type);
2151
      TREE_TYPE (type) = type_node;
2152
    }
2153
  else if (TREE_TYPE (type_node)
2154
           && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
2155
           && TYPE_MODULAR_P (TREE_TYPE (type_node)))
2156
    {
2157
      type = copy_node (type);
2158
      TREE_TYPE (type) = TREE_TYPE (type_node);
2159
    }
2160
 
2161
  return type;
2162
}
2163
 
2164
/* Return 1 if the types T1 and T2 are compatible, i.e. if they can be
2165
   transparently converted to each other.  */
2166
 
2167
int
2168
gnat_types_compatible_p (tree t1, tree t2)
2169
{
2170
  enum tree_code code;
2171
 
2172
  /* This is the default criterion.  */
2173
  if (TYPE_MAIN_VARIANT (t1) == TYPE_MAIN_VARIANT (t2))
2174
    return 1;
2175
 
2176
  /* We only check structural equivalence here.  */
2177
  if ((code = TREE_CODE (t1)) != TREE_CODE (t2))
2178
    return 0;
2179
 
2180
  /* Vector types are also compatible if they have the same number of subparts
2181
     and the same form of (scalar) element type.  */
2182
  if (code == VECTOR_TYPE
2183
      && TYPE_VECTOR_SUBPARTS (t1) == TYPE_VECTOR_SUBPARTS (t2)
2184
      && TREE_CODE (TREE_TYPE (t1)) == TREE_CODE (TREE_TYPE (t2))
2185
      && TYPE_PRECISION (TREE_TYPE (t1)) == TYPE_PRECISION (TREE_TYPE (t2)))
2186
    return 1;
2187
 
2188
  /* Array types are also compatible if they are constrained and have the same
2189
     domain(s) and the same component type.  */
2190
  if (code == ARRAY_TYPE
2191
      && (TYPE_DOMAIN (t1) == TYPE_DOMAIN (t2)
2192
          || (TYPE_DOMAIN (t1)
2193
              && TYPE_DOMAIN (t2)
2194
              && tree_int_cst_equal (TYPE_MIN_VALUE (TYPE_DOMAIN (t1)),
2195
                                     TYPE_MIN_VALUE (TYPE_DOMAIN (t2)))
2196
              && tree_int_cst_equal (TYPE_MAX_VALUE (TYPE_DOMAIN (t1)),
2197
                                     TYPE_MAX_VALUE (TYPE_DOMAIN (t2)))))
2198
      && (TREE_TYPE (t1) == TREE_TYPE (t2)
2199
          || (TREE_CODE (TREE_TYPE (t1)) == ARRAY_TYPE
2200
              && gnat_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2)))))
2201
    return 1;
2202
 
2203
  /* Padding record types are also compatible if they pad the same
2204
     type and have the same constant size.  */
2205
  if (code == RECORD_TYPE
2206
      && TYPE_PADDING_P (t1) && TYPE_PADDING_P (t2)
2207
      && TREE_TYPE (TYPE_FIELDS (t1)) == TREE_TYPE (TYPE_FIELDS (t2))
2208
      && tree_int_cst_equal (TYPE_SIZE (t1), TYPE_SIZE (t2)))
2209
    return 1;
2210
 
2211
  return 0;
2212
}
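/* A hypothetical usage sketch, not part of the original source; T1 and T2
   stand for two constrained array types with identical domains and
   component types, which compare as compatible even when they are distinct
   nodes.  */
#if 0
bool interchangeable = (gnat_types_compatible_p (t1, t2) != 0);
#endif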
2213
 
2214
/* Return true if EXPR is a useless type conversion.  */
2215
 
2216
bool
2217
gnat_useless_type_conversion (tree expr)
2218
{
2219
  if (CONVERT_EXPR_P (expr)
2220
      || TREE_CODE (expr) == VIEW_CONVERT_EXPR
2221
      || TREE_CODE (expr) == NON_LVALUE_EXPR)
2222
    return gnat_types_compatible_p (TREE_TYPE (expr),
2223
                                    TREE_TYPE (TREE_OPERAND (expr, 0)));
2224
 
2225
  return false;
2226
}
2227
 
2228
/* Return true if T, a FUNCTION_TYPE, has the specified list of flags.  */
2229
 
2230
bool
2231
fntype_same_flags_p (const_tree t, tree cico_list, bool return_unconstrained_p,
2232
                     bool return_by_direct_ref_p, bool return_by_invisi_ref_p)
2233
{
2234
  return TYPE_CI_CO_LIST (t) == cico_list
2235
         && TYPE_RETURN_UNCONSTRAINED_P (t) == return_unconstrained_p
2236
         && TYPE_RETURN_BY_DIRECT_REF_P (t) == return_by_direct_ref_p
2237
         && TREE_ADDRESSABLE (t) == return_by_invisi_ref_p;
2238
}
2239
 
2240
/* EXP is an expression for the size of an object.  If this size contains
2241
   discriminant references, replace them with the maximum (if MAX_P) or
2242
   minimum (if !MAX_P) possible value of the discriminant.  */
2243
 
2244
tree
2245
max_size (tree exp, bool max_p)
2246
{
2247
  enum tree_code code = TREE_CODE (exp);
2248
  tree type = TREE_TYPE (exp);
2249
 
2250
  switch (TREE_CODE_CLASS (code))
2251
    {
2252
    case tcc_declaration:
2253
    case tcc_constant:
2254
      return exp;
2255
 
2256
    case tcc_vl_exp:
2257
      if (code == CALL_EXPR)
2258
        {
2259
          tree t, *argarray;
2260
          int n, i;
2261
 
2262
          t = maybe_inline_call_in_expr (exp);
2263
          if (t)
2264
            return max_size (t, max_p);
2265
 
2266
          n = call_expr_nargs (exp);
2267
          gcc_assert (n > 0);
2268
          argarray = XALLOCAVEC (tree, n);
2269
          for (i = 0; i < n; i++)
2270
            argarray[i] = max_size (CALL_EXPR_ARG (exp, i), max_p);
2271
          return build_call_array (type, CALL_EXPR_FN (exp), n, argarray);
2272
        }
2273
      break;
2274
 
2275
    case tcc_reference:
2276
      /* If this contains a PLACEHOLDER_EXPR, it is the thing we want to
2277
         modify.  Otherwise, we treat it like a variable.  */
2278
      if (!CONTAINS_PLACEHOLDER_P (exp))
2279
        return exp;
2280
 
2281
      type = TREE_TYPE (TREE_OPERAND (exp, 1));
2282
      return
2283
        max_size (max_p ? TYPE_MAX_VALUE (type) : TYPE_MIN_VALUE (type), true);
2284
 
2285
    case tcc_comparison:
2286
      return max_p ? size_one_node : size_zero_node;
2287
 
2288
    case tcc_unary:
2289
    case tcc_binary:
2290
    case tcc_expression:
2291
      switch (TREE_CODE_LENGTH (code))
2292
        {
2293
        case 1:
2294
          if (code == SAVE_EXPR)
2295
            return exp;
2296
          else if (code == NON_LVALUE_EXPR)
2297
            return max_size (TREE_OPERAND (exp, 0), max_p);
2298
          else
2299
            return
2300
              fold_build1 (code, type,
2301
                           max_size (TREE_OPERAND (exp, 0),
2302
                                     code == NEGATE_EXPR ? !max_p : max_p));
2303
 
2304
        case 2:
2305
          if (code == COMPOUND_EXPR)
2306
            return max_size (TREE_OPERAND (exp, 1), max_p);
2307
 
2308
          {
2309
            tree lhs = max_size (TREE_OPERAND (exp, 0), max_p);
2310
            tree rhs = max_size (TREE_OPERAND (exp, 1),
2311
                                 code == MINUS_EXPR ? !max_p : max_p);
2312
 
2313
            /* Special-case wanting the maximum value of a MIN_EXPR.
2314
               In that case, if one side overflows, return the other.
2315
               sizetype is signed, but we know sizes are non-negative.
2316
               Likewise, handle a MINUS_EXPR or PLUS_EXPR with the LHS
2317
               overflowing and the RHS a variable.  */
2318
            if (max_p
2319
                && code == MIN_EXPR
2320
                && TREE_CODE (rhs) == INTEGER_CST
2321
                && TREE_OVERFLOW (rhs))
2322
              return lhs;
2323
            else if (max_p
2324
                     && code == MIN_EXPR
2325
                     && TREE_CODE (lhs) == INTEGER_CST
2326
                     && TREE_OVERFLOW (lhs))
2327
              return rhs;
2328
            else if ((code == MINUS_EXPR || code == PLUS_EXPR)
2329
                     && TREE_CODE (lhs) == INTEGER_CST
2330
                     && TREE_OVERFLOW (lhs)
2331
                     && !TREE_CONSTANT (rhs))
2332
              return lhs;
2333
            else
2334
              return fold_build2 (code, type, lhs, rhs);
2335
          }
2336
 
2337
        case 3:
2338
          if (code == COND_EXPR)
2339
            return fold_build2 (max_p ? MAX_EXPR : MIN_EXPR, type,
2340
                                max_size (TREE_OPERAND (exp, 1), max_p),
2341
                                max_size (TREE_OPERAND (exp, 2), max_p));
2342
        }
2343
 
2344
      /* Other tree classes cannot happen.  */
2345
    default:
2346
      break;
2347
    }
2348
 
2349
  gcc_unreachable ();
2350
}
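/* A hypothetical usage sketch, not part of the original source; SIZE_EXPR
   stands for a size expression that may reference discriminants through
   PLACEHOLDER_EXPRs.  */
#if 0
tree upper_bound = max_size (size_expr, true);   /* maximum possible size */
tree lower_bound = max_size (size_expr, false);  /* minimum possible size */
#endif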
2351
 
2352
/* Build a template of type TEMPLATE_TYPE from the array bounds of ARRAY_TYPE.
2353
   EXPR is an expression that we can use to locate any PLACEHOLDER_EXPRs.
2354
   Return a constructor for the template.  */
2355
 
2356
tree
2357
build_template (tree template_type, tree array_type, tree expr)
2358
{
2359
  VEC(constructor_elt,gc) *template_elts = NULL;
2360
  tree bound_list = NULL_TREE;
2361
  tree field;
2362
 
2363
  while (TREE_CODE (array_type) == RECORD_TYPE
2364
         && (TYPE_PADDING_P (array_type)
2365
             || TYPE_JUSTIFIED_MODULAR_P (array_type)))
2366
    array_type = TREE_TYPE (TYPE_FIELDS (array_type));
2367
 
2368
  if (TREE_CODE (array_type) == ARRAY_TYPE
2369
      || (TREE_CODE (array_type) == INTEGER_TYPE
2370
          && TYPE_HAS_ACTUAL_BOUNDS_P (array_type)))
2371
    bound_list = TYPE_ACTUAL_BOUNDS (array_type);
2372
 
2373
  /* First make the list for a CONSTRUCTOR for the template.  Go down the
2374
     field list of the template instead of the type chain because this
2375
     array might be an Ada array of arrays and we can't tell where the
2376
     nested arrays stop being the underlying object.  */
2377
 
2378
  for (field = TYPE_FIELDS (template_type); field;
2379
       (bound_list
2380
        ? (bound_list = TREE_CHAIN (bound_list))
2381
        : (array_type = TREE_TYPE (array_type))),
2382
       field = DECL_CHAIN (DECL_CHAIN (field)))
2383
    {
2384
      tree bounds, min, max;
2385
 
2386
      /* If we have a bound list, get the bounds from there.  Likewise
2387
         for an ARRAY_TYPE.  Otherwise, if expr is a PARM_DECL with
2388
         DECL_BY_COMPONENT_PTR_P, use the bounds of the field in the template.
2389
         This will give us a maximum range.  */
2390
      if (bound_list)
2391
        bounds = TREE_VALUE (bound_list);
2392
      else if (TREE_CODE (array_type) == ARRAY_TYPE)
2393
        bounds = TYPE_INDEX_TYPE (TYPE_DOMAIN (array_type));
2394
      else if (expr && TREE_CODE (expr) == PARM_DECL
2395
               && DECL_BY_COMPONENT_PTR_P (expr))
2396
        bounds = TREE_TYPE (field);
2397
      else
2398
        gcc_unreachable ();
2399
 
2400
      min = convert (TREE_TYPE (field), TYPE_MIN_VALUE (bounds));
2401
      max = convert (TREE_TYPE (DECL_CHAIN (field)), TYPE_MAX_VALUE (bounds));
2402
 
2403
      /* If either MIN or MAX involve a PLACEHOLDER_EXPR, we must
2404
         substitute it from OBJECT.  */
2405
      min = SUBSTITUTE_PLACEHOLDER_IN_EXPR (min, expr);
2406
      max = SUBSTITUTE_PLACEHOLDER_IN_EXPR (max, expr);
2407
 
2408
      CONSTRUCTOR_APPEND_ELT (template_elts, field, min);
2409
      CONSTRUCTOR_APPEND_ELT (template_elts, DECL_CHAIN (field), max);
2410
    }
2411
 
2412
  return gnat_build_constructor (template_type, template_elts);
2413
}
2414
 
2415
/* Helper routine to make a descriptor field.  FIELD_LIST is the list of decls
2416
   being built; the new decl is chained on to the front of the list.  */
2417
 
2418
static tree
2419
make_descriptor_field (const char *name, tree type, tree rec_type,
2420
                       tree initial, tree field_list)
2421
{
2422
  tree field
2423
    = create_field_decl (get_identifier (name), type, rec_type, NULL_TREE,
2424
                         NULL_TREE, 0, 0);
2425
 
2426
  DECL_INITIAL (field) = initial;
2427
  DECL_CHAIN (field) = field_list;
2428
  return field;
2429
}
2430
 
2431
/* Build a 32-bit VMS descriptor from a Mechanism_Type, which must specify a
2432
   descriptor type, and the GCC type of an object.  Each FIELD_DECL in the
2433
   type contains in its DECL_INITIAL the expression to use when a constructor
2434
   is made for the type.  GNAT_ENTITY is an entity used to print out an error
2435
   message if the mechanism cannot be applied to an object of that type and
2436
   also for the name.  */
2437
 
2438
tree
2439
build_vms_descriptor32 (tree type, Mechanism_Type mech, Entity_Id gnat_entity)
2440
{
2441
  tree record_type = make_node (RECORD_TYPE);
2442
  tree pointer32_type, pointer64_type;
2443
  tree field_list = NULL_TREE;
2444
  int klass, ndim, i, dtype = 0;
2445
  tree inner_type, tem;
2446
  tree *idx_arr;
2447
 
2448
  /* If TYPE is an unconstrained array, use the underlying array type.  */
2449
  if (TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
2450
    type = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type))));
2451
 
2452
  /* If this is an array, compute the number of dimensions in the array,
2453
     get the index types, and point to the inner type.  */
2454
  if (TREE_CODE (type) != ARRAY_TYPE)
2455
    ndim = 0;
2456
  else
2457
    for (ndim = 1, inner_type = type;
2458
         TREE_CODE (TREE_TYPE (inner_type)) == ARRAY_TYPE
2459
         && TYPE_MULTI_ARRAY_P (TREE_TYPE (inner_type));
2460
         ndim++, inner_type = TREE_TYPE (inner_type))
2461
      ;
2462
 
2463
  idx_arr = XALLOCAVEC (tree, ndim);
2464
 
2465
  if (mech != By_Descriptor_NCA && mech != By_Short_Descriptor_NCA
2466
      && TREE_CODE (type) == ARRAY_TYPE && TYPE_CONVENTION_FORTRAN_P (type))
2467
    for (i = ndim - 1, inner_type = type;
2468
         i >= 0;
2469
         i--, inner_type = TREE_TYPE (inner_type))
2470
      idx_arr[i] = TYPE_DOMAIN (inner_type);
2471
  else
2472
    for (i = 0, inner_type = type;
2473
         i < ndim;
2474
         i++, inner_type = TREE_TYPE (inner_type))
2475
      idx_arr[i] = TYPE_DOMAIN (inner_type);
2476
 
2477
  /* Now get the DTYPE value.  */
2478
  switch (TREE_CODE (type))
2479
    {
2480
    case INTEGER_TYPE:
2481
    case ENUMERAL_TYPE:
2482
    case BOOLEAN_TYPE:
2483
      if (TYPE_VAX_FLOATING_POINT_P (type))
2484
        switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
2485
          {
2486
          case 6:
2487
            dtype = 10;
2488
            break;
2489
          case 9:
2490
            dtype = 11;
2491
            break;
2492
          case 15:
2493
            dtype = 27;
2494
            break;
2495
          }
2496
      else
2497
        switch (GET_MODE_BITSIZE (TYPE_MODE (type)))
2498
          {
2499
          case 8:
2500
            dtype = TYPE_UNSIGNED (type) ? 2 : 6;
2501
            break;
2502
          case 16:
2503
            dtype = TYPE_UNSIGNED (type) ? 3 : 7;
2504
            break;
2505
          case 32:
2506
            dtype = TYPE_UNSIGNED (type) ? 4 : 8;
2507
            break;
2508
          case 64:
2509
            dtype = TYPE_UNSIGNED (type) ? 5 : 9;
2510
            break;
2511
          case 128:
2512
            dtype = TYPE_UNSIGNED (type) ? 25 : 26;
2513
            break;
2514
          }
2515
      break;
2516
 
2517
    case REAL_TYPE:
2518
      dtype = GET_MODE_BITSIZE (TYPE_MODE (type)) == 32 ? 52 : 53;
2519
      break;
2520
 
2521
    case COMPLEX_TYPE:
2522
      if (TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE
2523
          && TYPE_VAX_FLOATING_POINT_P (type))
2524
        switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
2525
          {
2526
          case 6:
2527
            dtype = 12;
2528
            break;
2529
          case 9:
2530
            dtype = 13;
2531
            break;
2532
          case 15:
2533
            dtype = 29;
2534
          }
2535
      else
2536
        dtype = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) == 32 ? 54 : 55;
2537
      break;
2538
 
2539
    case ARRAY_TYPE:
2540
      dtype = 14;
2541
      break;
2542
 
2543
    default:
2544
      break;
2545
    }
2546
 
2547
  /* Get the CLASS value.  */
2548
  switch (mech)
2549
    {
2550
    case By_Descriptor_A:
2551
    case By_Short_Descriptor_A:
2552
      klass = 4;
2553
      break;
2554
    case By_Descriptor_NCA:
2555
    case By_Short_Descriptor_NCA:
2556
      klass = 10;
2557
      break;
2558
    case By_Descriptor_SB:
2559
    case By_Short_Descriptor_SB:
2560
      klass = 15;
2561
      break;
2562
    case By_Descriptor:
2563
    case By_Short_Descriptor:
2564
    case By_Descriptor_S:
2565
    case By_Short_Descriptor_S:
2566
    default:
2567
      klass = 1;
2568
      break;
2569
    }
2570
 
2571
  /* Make the type for a descriptor for VMS.  The first four fields are the
2572
     same for all types.  */
2573
  field_list
2574
    = make_descriptor_field ("LENGTH", gnat_type_for_size (16, 1), record_type,
2575
                             size_in_bytes ((mech == By_Descriptor_A
2576
                                             || mech == By_Short_Descriptor_A)
2577
                                            ? inner_type : type),
2578
                             field_list);
2579
  field_list
2580
    = make_descriptor_field ("DTYPE", gnat_type_for_size (8, 1), record_type,
2581
                             size_int (dtype), field_list);
2582
  field_list
2583
    = make_descriptor_field ("CLASS", gnat_type_for_size (8, 1), record_type,
2584
                             size_int (klass), field_list);
2585
 
2586
  pointer32_type = build_pointer_type_for_mode (type, SImode, false);
2587
  pointer64_type = build_pointer_type_for_mode (type, DImode, false);
2588
 
2589
  /* Ensure that only 32-bit pointers are passed in 32-bit descriptors.  Note
2590
     that we cannot build a template call to the CE routine as it would get a
2591
     wrong source location; instead we use a second placeholder for it.  */
2592
  tem = build_unary_op (ADDR_EXPR, pointer64_type,
2593
                        build0 (PLACEHOLDER_EXPR, type));
2594
  tem = build3 (COND_EXPR, pointer32_type,
2595
                Pmode != SImode
2596
                ? build_binary_op (GE_EXPR, boolean_type_node, tem,
2597
                                   build_int_cstu (pointer64_type, 0x80000000))
2598
                : boolean_false_node,
2599
                build0 (PLACEHOLDER_EXPR, void_type_node),
2600
                convert (pointer32_type, tem));
2601
 
2602
  field_list
2603
    = make_descriptor_field ("POINTER", pointer32_type, record_type, tem,
2604
                             field_list);
2605
 
2606
  switch (mech)
2607
    {
2608
    case By_Descriptor:
2609
    case By_Short_Descriptor:
2610
    case By_Descriptor_S:
2611
    case By_Short_Descriptor_S:
2612
      break;
2613
 
2614
    case By_Descriptor_SB:
2615
    case By_Short_Descriptor_SB:
2616
      field_list
2617
        = make_descriptor_field ("SB_L1", gnat_type_for_size (32, 1),
2618
                                 record_type,
2619
                                 (TREE_CODE (type) == ARRAY_TYPE
2620
                                  ? TYPE_MIN_VALUE (TYPE_DOMAIN (type))
2621
                                  : size_zero_node),
2622
                                 field_list);
2623
      field_list
2624
        = make_descriptor_field ("SB_U1", gnat_type_for_size (32, 1),
2625
                                 record_type,
2626
                                 (TREE_CODE (type) == ARRAY_TYPE
2627
                                  ? TYPE_MAX_VALUE (TYPE_DOMAIN (type))
2628
                                  : size_zero_node),
2629
                                 field_list);
2630
      break;
2631
 
2632
    case By_Descriptor_A:
2633
    case By_Short_Descriptor_A:
2634
    case By_Descriptor_NCA:
2635
    case By_Short_Descriptor_NCA:
2636
      field_list
2637
        = make_descriptor_field ("SCALE", gnat_type_for_size (8, 1),
2638
                                 record_type, size_zero_node, field_list);
2639
 
2640
      field_list
2641
        = make_descriptor_field ("DIGITS", gnat_type_for_size (8, 1),
2642
                                 record_type, size_zero_node, field_list);
2643
 
2644
      field_list
2645
        = make_descriptor_field ("AFLAGS", gnat_type_for_size (8, 1),
2646
                                 record_type,
2647
                                 size_int ((mech == By_Descriptor_NCA
2648
                                            || mech == By_Short_Descriptor_NCA)
2649
                                           ? 0
2650
                                           /* Set FL_COLUMN, FL_COEFF, and
2651
                                              FL_BOUNDS.  */
2652
                                           : (TREE_CODE (type) == ARRAY_TYPE
2653
                                              && TYPE_CONVENTION_FORTRAN_P
2654
                                                 (type)
2655
                                             ? 224 : 192)),
2656
                                 field_list);
2657
 
2658
      field_list
2659
        = make_descriptor_field ("DIMCT", gnat_type_for_size (8, 1),
2660
                                 record_type, size_int (ndim), field_list);
2661
 
2662
      field_list
2663
        = make_descriptor_field ("ARSIZE", gnat_type_for_size (32, 1),
2664
                                 record_type, size_in_bytes (type),
2665
                                 field_list);
2666
 
2667
      /* Now build a pointer to the 0,0,0... element.  */
2668
      tem = build0 (PLACEHOLDER_EXPR, type);
2669
      for (i = 0, inner_type = type; i < ndim;
2670
           i++, inner_type = TREE_TYPE (inner_type))
2671
        tem = build4 (ARRAY_REF, TREE_TYPE (inner_type), tem,
2672
                      convert (TYPE_DOMAIN (inner_type), size_zero_node),
2673
                      NULL_TREE, NULL_TREE);
2674
 
2675
      field_list
2676
        = make_descriptor_field ("A0", pointer32_type, record_type,
2677
                                 build1 (ADDR_EXPR, pointer32_type, tem),
2678
                                 field_list);
2679
 
2680
      /* Next come the addressing coefficients.  */
2681
      tem = size_one_node;
2682
      for (i = 0; i < ndim; i++)
2683
        {
2684
          char fname[3];
2685
          tree idx_length
2686
            = size_binop (MULT_EXPR, tem,
2687
                          size_binop (PLUS_EXPR,
2688
                                      size_binop (MINUS_EXPR,
2689
                                                  TYPE_MAX_VALUE (idx_arr[i]),
2690
                                                  TYPE_MIN_VALUE (idx_arr[i])),
2691
                                      size_int (1)));
2692
 
2693
          fname[0] = ((mech == By_Descriptor_NCA ||
2694
                       mech == By_Short_Descriptor_NCA) ? 'S' : 'M');
2695
          fname[1] = '0' + i, fname[2] = 0;
2696
          field_list
2697
            = make_descriptor_field (fname, gnat_type_for_size (32, 1),
2698
                                     record_type, idx_length, field_list);
2699
 
2700
          if (mech == By_Descriptor_NCA || mech == By_Short_Descriptor_NCA)
2701
            tem = idx_length;
2702
        }
2703
 
2704
      /* Finally here are the bounds.  */
2705
      for (i = 0; i < ndim; i++)
2706
        {
2707
          char fname[3];
2708
 
2709
          fname[0] = 'L', fname[1] = '0' + i, fname[2] = 0;
2710
          field_list
2711
            = make_descriptor_field (fname, gnat_type_for_size (32, 1),
2712
                                     record_type, TYPE_MIN_VALUE (idx_arr[i]),
2713
                                     field_list);
2714
 
2715
          fname[0] = 'U';
2716
          field_list
2717
            = make_descriptor_field (fname, gnat_type_for_size (32, 1),
2718
                                     record_type, TYPE_MAX_VALUE (idx_arr[i]),
2719
                                     field_list);
2720
        }
2721
      break;
2722
 
2723
    default:
2724
      post_error ("unsupported descriptor type for &", gnat_entity);
2725
    }
2726
 
2727
  TYPE_NAME (record_type) = create_concat_name (gnat_entity, "DESC");
2728
  finish_record_type (record_type, nreverse (field_list), 0, false);
2729
  return record_type;
2730
}
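/* Illustrative sketch, not part of the build: the AFLAGS values used above
   decompose into three flag bits.  The names below are hypothetical
   placeholders matching the FL_COLUMN/FL_COEFF/FL_BOUNDS wording of the
   comments above, not definitions taken from a VMS header.  */
#if 0
#define FL_COLUMN  0x20   /* column-major (Fortran convention) layout  */
#define FL_COEFF   0x40   /* addressing coefficients are present       */
#define FL_BOUNDS  0x80   /* bounds are present                        */
/* 192 == FL_COEFF | FL_BOUNDS;  224 == FL_COLUMN | FL_COEFF | FL_BOUNDS.  */
#endif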
2731
 
2732
/* Build a 64-bit VMS descriptor from a Mechanism_Type, which must specify a
2733
   descriptor type, and the GCC type of an object.  Each FIELD_DECL in the
2734
   type contains in its DECL_INITIAL the expression to use when a constructor
2735
   is made for the type.  GNAT_ENTITY is an entity used to print out an error
2736
   message if the mechanism cannot be applied to an object of that type and
2737
   also for the name.  */
2738
 
2739
tree
2740
build_vms_descriptor (tree type, Mechanism_Type mech, Entity_Id gnat_entity)
2741
{
2742
  tree record_type = make_node (RECORD_TYPE);
2743
  tree pointer64_type;
2744
  tree field_list = NULL_TREE;
2745
  int klass, ndim, i, dtype = 0;
2746
  tree inner_type, tem;
2747
  tree *idx_arr;
2748
 
2749
  /* If TYPE is an unconstrained array, use the underlying array type.  */
2750
  if (TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
2751
    type = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type))));
2752
 
2753
  /* If this is an array, compute the number of dimensions in the array,
2754
     get the index types, and point to the inner type.  */
2755
  if (TREE_CODE (type) != ARRAY_TYPE)
2756
    ndim = 0;
2757
  else
2758
    for (ndim = 1, inner_type = type;
2759
         TREE_CODE (TREE_TYPE (inner_type)) == ARRAY_TYPE
2760
         && TYPE_MULTI_ARRAY_P (TREE_TYPE (inner_type));
2761
         ndim++, inner_type = TREE_TYPE (inner_type))
2762
      ;
2763
 
2764
  idx_arr = XALLOCAVEC (tree, ndim);
2765
 
2766
  if (mech != By_Descriptor_NCA
2767
      && TREE_CODE (type) == ARRAY_TYPE && TYPE_CONVENTION_FORTRAN_P (type))
2768
    for (i = ndim - 1, inner_type = type;
2769
         i >= 0;
2770
         i--, inner_type = TREE_TYPE (inner_type))
2771
      idx_arr[i] = TYPE_DOMAIN (inner_type);
2772
  else
2773
    for (i = 0, inner_type = type;
2774
         i < ndim;
2775
         i++, inner_type = TREE_TYPE (inner_type))
2776
      idx_arr[i] = TYPE_DOMAIN (inner_type);
2777
 
2778
  /* Now get the DTYPE value.  */
2779
  switch (TREE_CODE (type))
2780
    {
2781
    case INTEGER_TYPE:
2782
    case ENUMERAL_TYPE:
2783
    case BOOLEAN_TYPE:
2784
      if (TYPE_VAX_FLOATING_POINT_P (type))
2785
        switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
2786
          {
2787
          case 6:
2788
            dtype = 10;
2789
            break;
2790
          case 9:
2791
            dtype = 11;
2792
            break;
2793
          case 15:
2794
            dtype = 27;
2795
            break;
2796
          }
2797
      else
2798
        switch (GET_MODE_BITSIZE (TYPE_MODE (type)))
2799
          {
2800
          case 8:
2801
            dtype = TYPE_UNSIGNED (type) ? 2 : 6;
2802
            break;
2803
          case 16:
2804
            dtype = TYPE_UNSIGNED (type) ? 3 : 7;
2805
            break;
2806
          case 32:
2807
            dtype = TYPE_UNSIGNED (type) ? 4 : 8;
2808
            break;
2809
          case 64:
2810
            dtype = TYPE_UNSIGNED (type) ? 5 : 9;
2811
            break;
2812
          case 128:
2813
            dtype = TYPE_UNSIGNED (type) ? 25 : 26;
2814
            break;
2815
          }
2816
      break;
2817
 
2818
    case REAL_TYPE:
2819
      dtype = GET_MODE_BITSIZE (TYPE_MODE (type)) == 32 ? 52 : 53;
2820
      break;
2821
 
2822
    case COMPLEX_TYPE:
2823
      if (TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE
2824
          && TYPE_VAX_FLOATING_POINT_P (type))
2825
        switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
2826
          {
2827
          case 6:
2828
            dtype = 12;
2829
            break;
2830
          case 9:
2831
            dtype = 13;
2832
            break;
2833
          case 15:
2834
            dtype = 29;
2835
          }
2836
      else
2837
        dtype = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) == 32 ? 54: 55;
2838
      break;
2839
 
2840
    case ARRAY_TYPE:
2841
      dtype = 14;
2842
      break;
2843
 
2844
    default:
2845
      break;
2846
    }
2847
 
2848
  /* Get the CLASS value.  */
2849
  switch (mech)
2850
    {
2851
    case By_Descriptor_A:
2852
      klass = 4;
2853
      break;
2854
    case By_Descriptor_NCA:
2855
      klass = 10;
2856
      break;
2857
    case By_Descriptor_SB:
2858
      klass = 15;
2859
      break;
2860
    case By_Descriptor:
2861
    case By_Descriptor_S:
2862
    default:
2863
      klass = 1;
2864
      break;
2865
    }
2866
 
2867
  /* Make the type for a 64-bit descriptor for VMS.  The first six fields
2868
     are the same for all types.  */
2869
  field_list
2870
    = make_descriptor_field ("MBO", gnat_type_for_size (16, 1),
2871
                             record_type, size_int (1), field_list);
2872
  field_list
2873
    = make_descriptor_field ("DTYPE", gnat_type_for_size (8, 1),
2874
                             record_type, size_int (dtype), field_list);
2875
  field_list
2876
    = make_descriptor_field ("CLASS", gnat_type_for_size (8, 1),
2877
                             record_type, size_int (klass), field_list);
2878
  field_list
2879
    = make_descriptor_field ("MBMO", gnat_type_for_size (32, 1),
2880
                             record_type, ssize_int (-1), field_list);
2881
  field_list
2882
    = make_descriptor_field ("LENGTH", gnat_type_for_size (64, 1),
2883
                             record_type,
2884
                             size_in_bytes (mech == By_Descriptor_A
2885
                                            ? inner_type : type),
2886
                             field_list);
2887
 
2888
  pointer64_type = build_pointer_type_for_mode (type, DImode, false);
2889
 
2890
  field_list
2891
    = make_descriptor_field ("POINTER", pointer64_type, record_type,
2892
                             build_unary_op (ADDR_EXPR, pointer64_type,
2893
                                             build0 (PLACEHOLDER_EXPR, type)),
2894
                             field_list);
2895
 
2896
  switch (mech)
2897
    {
2898
    case By_Descriptor:
2899
    case By_Descriptor_S:
2900
      break;
2901
 
2902
    case By_Descriptor_SB:
2903
      field_list
2904
        = make_descriptor_field ("SB_L1", gnat_type_for_size (64, 1),
2905
                                 record_type,
2906
                                 (TREE_CODE (type) == ARRAY_TYPE
2907
                                  ? TYPE_MIN_VALUE (TYPE_DOMAIN (type))
2908
                                  : size_zero_node),
2909
                                 field_list);
2910
      field_list
2911
        = make_descriptor_field ("SB_U1", gnat_type_for_size (64, 1),
2912
                                 record_type,
2913
                                 (TREE_CODE (type) == ARRAY_TYPE
2914
                                  ? TYPE_MAX_VALUE (TYPE_DOMAIN (type))
2915
                                  : size_zero_node),
2916
                                 field_list);
2917
      break;
2918
 
2919
    case By_Descriptor_A:
2920
    case By_Descriptor_NCA:
2921
      field_list
2922
        = make_descriptor_field ("SCALE", gnat_type_for_size (8, 1),
2923
                                 record_type, size_zero_node, field_list);
2924
 
2925
      field_list
2926
        = make_descriptor_field ("DIGITS", gnat_type_for_size (8, 1),
2927
                                 record_type, size_zero_node, field_list);
2928
 
2929
      dtype = (mech == By_Descriptor_NCA
2930
               ? 0
2931
               /* Set FL_COLUMN, FL_COEFF, and
2932
                  FL_BOUNDS.  */
2933
               : (TREE_CODE (type) == ARRAY_TYPE
2934
                  && TYPE_CONVENTION_FORTRAN_P (type)
2935
                  ? 224 : 192));
2936
      field_list
2937
        = make_descriptor_field ("AFLAGS", gnat_type_for_size (8, 1),
2938
                                 record_type, size_int (dtype),
2939
                                 field_list);
2940
 
2941
      field_list
2942
        = make_descriptor_field ("DIMCT", gnat_type_for_size (8, 1),
2943
                                 record_type, size_int (ndim), field_list);
2944
 
2945
      field_list
2946
        = make_descriptor_field ("MBZ", gnat_type_for_size (32, 1),
2947
                                 record_type, size_int (0), field_list);
2948
      field_list
2949
        = make_descriptor_field ("ARSIZE", gnat_type_for_size (64, 1),
2950
                                 record_type, size_in_bytes (type),
2951
                                 field_list);
2952
 
2953
      /* Now build a pointer to the 0,0,0... element.  */
2954
      tem = build0 (PLACEHOLDER_EXPR, type);
2955
      for (i = 0, inner_type = type; i < ndim;
2956
           i++, inner_type = TREE_TYPE (inner_type))
2957
        tem = build4 (ARRAY_REF, TREE_TYPE (inner_type), tem,
2958
                      convert (TYPE_DOMAIN (inner_type), size_zero_node),
2959
                      NULL_TREE, NULL_TREE);
2960
 
2961
      field_list
2962
        = make_descriptor_field ("A0", pointer64_type, record_type,
2963
                                 build1 (ADDR_EXPR, pointer64_type, tem),
2964
                                 field_list);
2965
 
2966
      /* Next come the addressing coefficients.  */
2967
      tem = size_one_node;
2968
      for (i = 0; i < ndim; i++)
2969
        {
2970
          char fname[3];
2971
          tree idx_length
2972
            = size_binop (MULT_EXPR, tem,
2973
                          size_binop (PLUS_EXPR,
2974
                                      size_binop (MINUS_EXPR,
2975
                                                  TYPE_MAX_VALUE (idx_arr[i]),
2976
                                                  TYPE_MIN_VALUE (idx_arr[i])),
2977
                                      size_int (1)));
2978
 
2979
          fname[0] = (mech == By_Descriptor_NCA ? 'S' : 'M');
2980
          fname[1] = '0' + i, fname[2] = 0;
2981
          field_list
2982
            = make_descriptor_field (fname, gnat_type_for_size (64, 1),
2983
                                     record_type, idx_length, field_list);
2984
 
2985
          if (mech == By_Descriptor_NCA)
2986
            tem = idx_length;
2987
        }
2988
 
2989
      /* Finally here are the bounds.  */
2990
      for (i = 0; i < ndim; i++)
2991
        {
2992
          char fname[3];
2993
 
2994
          fname[0] = 'L', fname[1] = '0' + i, fname[2] = 0;
2995
          field_list
2996
            = make_descriptor_field (fname, gnat_type_for_size (64, 1),
2997
                                     record_type,
2998
                                     TYPE_MIN_VALUE (idx_arr[i]), field_list);
2999
 
3000
          fname[0] = 'U';
3001
          field_list
3002
            = make_descriptor_field (fname, gnat_type_for_size (64, 1),
3003
                                     record_type,
3004
                                     TYPE_MAX_VALUE (idx_arr[i]), field_list);
3005
        }
3006
      break;
3007
 
3008
    default:
3009
      post_error ("unsupported descriptor type for &", gnat_entity);
3010
    }
3011
 
3012
  TYPE_NAME (record_type) = create_concat_name (gnat_entity, "DESC64");
3013
  finish_record_type (record_type, nreverse (field_list), 0, false);
3014
  return record_type;
3015
}
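/* Illustrative sketch, not part of the build: an approximate C view of the
   64-bit class A/NCA descriptor assembled field by field above, for an
   NDIM-dimensional array.  Field widths follow the gnat_type_for_size calls
   above; alignment, packing and exact signedness are not modeled, and the
   struct name is hypothetical.  */
#if 0
#include <stdint.h>
#define NDIM 2                  /* example dimension count */
struct gnat_vms_desc64 {
  uint16_t mbo;                 /* MBO, always 1                        */
  uint8_t  dtype;               /* DTYPE, element data type code        */
  uint8_t  klass;               /* CLASS: 1=S, 4=A, 10=NCA, 15=SB       */
  int32_t  mbmo;                /* MBMO, always -1                      */
  uint64_t length;              /* LENGTH, element or object size       */
  uint64_t pointer;             /* POINTER, address of the object       */
  uint8_t  scale, digits;       /* SCALE and DIGITS, both 0 here        */
  uint8_t  aflags;              /* AFLAGS: 0, 192 or 224                */
  uint8_t  dimct;               /* DIMCT, number of dimensions          */
  uint32_t mbz;                 /* MBZ, always 0                        */
  uint64_t arsize;              /* ARSIZE, total size in bytes          */
  uint64_t a0;                  /* A0, address of the 0,0,... element   */
  uint64_t m[NDIM];             /* M1..Mn (or S1..Sn) coefficients      */
  struct { int64_t l, u; } bounds[NDIM];  /* L1/U1 .. Ln/Un             */
};
#endif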
3016
 
3017
/* Fill in a VMS descriptor of GNU_TYPE for GNU_EXPR and return the result.
3018
   GNAT_ACTUAL is the actual parameter for which the descriptor is built.  */
3019
 
3020
tree
3021
fill_vms_descriptor (tree gnu_type, tree gnu_expr, Node_Id gnat_actual)
3022
{
3023
  VEC(constructor_elt,gc) *v = NULL;
3024
  tree field;
3025
 
3026
  gnu_expr = maybe_unconstrained_array (gnu_expr);
3027
  gnu_expr = gnat_protect_expr (gnu_expr);
3028
  gnat_mark_addressable (gnu_expr);
3029
 
3030
  /* We may need to substitute both GNU_EXPR and a CALL_EXPR to the raise CE
3031
     routine in case we have a 32-bit descriptor.  */
3032
  gnu_expr = build2 (COMPOUND_EXPR, void_type_node,
3033
                     build_call_raise (CE_Range_Check_Failed, gnat_actual,
3034
                                       N_Raise_Constraint_Error),
3035
                     gnu_expr);
3036
 
3037
  for (field = TYPE_FIELDS (gnu_type); field; field = DECL_CHAIN (field))
3038
    {
3039
      tree value
3040
        = convert (TREE_TYPE (field),
3041
                   SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_INITIAL (field),
3042
                                                   gnu_expr));
3043
      CONSTRUCTOR_APPEND_ELT (v, field, value);
3044
    }
3045
 
3046
  return gnat_build_constructor (gnu_type, v);
3047
}
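/* Illustrative sketch, not part of the build: a plain-C analogue of what
   fill_vms_descriptor does.  Each FIELD_DECL of the descriptor type carries
   in DECL_INITIAL a formula over a placeholder object; filling the
   descriptor substitutes the real object into every formula.  All names
   below are hypothetical.  */
#if 0
struct example_obj  { char data[10]; };
struct example_desc { unsigned long length; void *pointer; };

static struct example_desc
fill_example_desc (struct example_obj *obj)
{
  struct example_desc d;
  d.length  = sizeof (*obj);   /* formula "size of <placeholder>"     */
  d.pointer = obj;             /* formula "address of <placeholder>"  */
  return d;
}
#endif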
3048
 
3049
/* Convert GNU_EXPR, a pointer to a 64-bit VMS descriptor, to GNU_TYPE, a
3050
   regular pointer or fat pointer type.  GNAT_SUBPROG is the subprogram to
3051
   which the VMS descriptor is passed.  */
3052
 
3053
static tree
3054
convert_vms_descriptor64 (tree gnu_type, tree gnu_expr, Entity_Id gnat_subprog)
3055
{
3056
  tree desc_type = TREE_TYPE (TREE_TYPE (gnu_expr));
3057
  tree desc = build1 (INDIRECT_REF, desc_type, gnu_expr);
3058
  /* The CLASS field is the 3rd field in the descriptor.  */
3059
  tree klass = DECL_CHAIN (DECL_CHAIN (TYPE_FIELDS (desc_type)));
3060
  /* The POINTER field is the 6th field in the descriptor.  */
3061
  tree pointer = DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (klass)));
3062
 
3063
  /* Retrieve the value of the POINTER field.  */
3064
  tree gnu_expr64
3065
    = build3 (COMPONENT_REF, TREE_TYPE (pointer), desc, pointer, NULL_TREE);
3066
 
3067
  if (POINTER_TYPE_P (gnu_type))
3068
    return convert (gnu_type, gnu_expr64);
3069
 
3070
  else if (TYPE_IS_FAT_POINTER_P (gnu_type))
3071
    {
3072
      tree p_array_type = TREE_TYPE (TYPE_FIELDS (gnu_type));
3073
      tree p_bounds_type = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (gnu_type)));
3074
      tree template_type = TREE_TYPE (p_bounds_type);
3075
      tree min_field = TYPE_FIELDS (template_type);
3076
      tree max_field = DECL_CHAIN (TYPE_FIELDS (template_type));
3077
      tree template_tree, template_addr, aflags, dimct, t, u;
3078
      /* See the head comment of build_vms_descriptor.  */
3079
      int iklass = TREE_INT_CST_LOW (DECL_INITIAL (klass));
3080
      tree lfield, ufield;
3081
      VEC(constructor_elt,gc) *v;
3082
 
3083
      /* Convert POINTER to the pointer-to-array type.  */
3084
      gnu_expr64 = convert (p_array_type, gnu_expr64);
3085
 
3086
      switch (iklass)
3087
        {
3088
        case 1:  /* Class S  */
3089
        case 15: /* Class SB */
3090
          /* Build {1, LENGTH} template; LENGTH64 is the 5th field.  */
3091
          v = VEC_alloc (constructor_elt, gc, 2);
3092
          t = DECL_CHAIN (DECL_CHAIN (klass));
3093
          t = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3094
          CONSTRUCTOR_APPEND_ELT (v, min_field,
3095
                                  convert (TREE_TYPE (min_field),
3096
                                           integer_one_node));
3097
          CONSTRUCTOR_APPEND_ELT (v, max_field,
3098
                                  convert (TREE_TYPE (max_field), t));
3099
          template_tree = gnat_build_constructor (template_type, v);
3100
          template_addr = build_unary_op (ADDR_EXPR, NULL_TREE, template_tree);
3101
 
3102
          /* For class S, we are done.  */
3103
          if (iklass == 1)
3104
            break;
3105
 
3106
          /* Test that we really have a SB descriptor, like DEC Ada.  */
3107
          t = build3 (COMPONENT_REF, TREE_TYPE (klass), desc, klass, NULL);
3108
          u = convert (TREE_TYPE (klass), DECL_INITIAL (klass));
3109
          u = build_binary_op (EQ_EXPR, boolean_type_node, t, u);
3110
          /* If so, there is already a template in the descriptor and
3111
             it is located right after the POINTER field.  The fields are
3112
             64-bit so they must be repacked.  */
3113
          t = DECL_CHAIN (pointer);
3114
          lfield = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3115
          lfield = convert (TREE_TYPE (TYPE_FIELDS (template_type)), lfield);
3116
 
3117
          t = DECL_CHAIN (t);
3118
          ufield = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3119
          ufield = convert
3120
           (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (template_type))), ufield);
3121
 
3122
          /* Build the template in the form of a constructor. */
3123
          v = VEC_alloc (constructor_elt, gc, 2);
3124
          CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (template_type), lfield);
3125
          CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (template_type)),
3126
                                  ufield);
3127
          template_tree = gnat_build_constructor (template_type, v);
3128
 
3129
          /* Otherwise use the {1, LENGTH} template we built above.  */
3130
          template_addr = build3 (COND_EXPR, p_bounds_type, u,
3131
                                  build_unary_op (ADDR_EXPR, p_bounds_type,
3132
                                                 template_tree),
3133
                                  template_addr);
3134
          break;
3135
 
3136
        case 4:  /* Class A */
3137
          /* The AFLAGS field is the 3rd field after the pointer in the
3138
             descriptor.  */
3139
          t = DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (pointer)));
3140
          aflags = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3141
          /* The DIMCT field is the next field in the descriptor after
3142
             aflags.  */
3143
          t = DECL_CHAIN (t);
3144
          dimct = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3145
          /* Raise CONSTRAINT_ERROR if there is more than one dimension,
3146
             or if FL_COEFF or FL_BOUNDS is not set.  */
3147
          u = build_int_cst (TREE_TYPE (aflags), 192);
3148
          u = build_binary_op (TRUTH_OR_EXPR, boolean_type_node,
3149
                               build_binary_op (NE_EXPR, boolean_type_node,
3150
                                                dimct,
3151
                                                convert (TREE_TYPE (dimct),
3152
                                                         size_one_node)),
3153
                               build_binary_op (NE_EXPR, boolean_type_node,
3154
                                                build2 (BIT_AND_EXPR,
3155
                                                        TREE_TYPE (aflags),
3156
                                                        aflags, u),
3157
                                                u));
3158
          /* There is already a template in the descriptor and it is located
3159
             in block 3.  The fields are 64-bit so they must be repacked.  */
3160
          t = DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (DECL_CHAIN
3161
              (t)))));
3162
          lfield = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3163
          lfield = convert (TREE_TYPE (TYPE_FIELDS (template_type)), lfield);
3164
 
3165
          t = DECL_CHAIN (t);
3166
          ufield = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3167
          ufield = convert
3168
           (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (template_type))), ufield);
3169
 
3170
          /* Build the template in the form of a constructor. */
3171
          v = VEC_alloc (constructor_elt, gc, 2);
3172
          CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (template_type), lfield);
3173
          CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (template_type)),
3174
                                  ufield);
3175
          template_tree = gnat_build_constructor (template_type, v);
3176
          template_tree = build3 (COND_EXPR, template_type, u,
3177
                            build_call_raise (CE_Length_Check_Failed, Empty,
3178
                                              N_Raise_Constraint_Error),
3179
                            template_tree);
3180
          template_addr
3181
            = build_unary_op (ADDR_EXPR, p_bounds_type, template_tree);
3182
          break;
3183
 
3184
        case 10: /* Class NCA */
3185
        default:
3186
          post_error ("unsupported descriptor type for &", gnat_subprog);
3187
          template_addr = integer_zero_node;
3188
          break;
3189
        }
3190
 
3191
      /* Build the fat pointer in the form of a constructor.  */
3192
      v = VEC_alloc (constructor_elt, gc, 2);
3193
      CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (gnu_type), gnu_expr64);
3194
      CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (gnu_type)),
3195
                              template_addr);
3196
      return gnat_build_constructor (gnu_type, v);
3197
    }
3198
 
3199
  else
3200
    gcc_unreachable ();
3201
}
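/* Illustrative sketch, not part of the build: for a class S descriptor the
   bounds template built above is simply {1, LENGTH}; for class A it is
   repacked from the trailing L/U pairs after checking DIMCT and AFLAGS.
   The gnat_vms_desc64 type below refers to the hypothetical layout sketch
   given after build_vms_descriptor.  */
#if 0
struct s_bounds { long long low, high; };

static struct s_bounds
class_s_template (const struct gnat_vms_desc64 *d)
{
  struct s_bounds b = { 1, (long long) d->length };
  return b;
}
#endif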
3202
 
3203
/* Convert GNU_EXPR, a pointer to a 32-bit VMS descriptor, to GNU_TYPE, a
3204
   regular pointer or fat pointer type.  GNAT_SUBPROG is the subprogram to
3205
   which the VMS descriptor is passed.  */
3206
 
3207
static tree
3208
convert_vms_descriptor32 (tree gnu_type, tree gnu_expr, Entity_Id gnat_subprog)
3209
{
3210
  tree desc_type = TREE_TYPE (TREE_TYPE (gnu_expr));
3211
  tree desc = build1 (INDIRECT_REF, desc_type, gnu_expr);
3212
  /* The CLASS field is the 3rd field in the descriptor.  */
3213
  tree klass = DECL_CHAIN (DECL_CHAIN (TYPE_FIELDS (desc_type)));
3214
  /* The POINTER field is the 4th field in the descriptor.  */
3215
  tree pointer = DECL_CHAIN (klass);
3216
 
3217
  /* Retrieve the value of the POINTER field.  */
3218
  tree gnu_expr32
3219
    = build3 (COMPONENT_REF, TREE_TYPE (pointer), desc, pointer, NULL_TREE);
3220
 
3221
  if (POINTER_TYPE_P (gnu_type))
3222
    return convert (gnu_type, gnu_expr32);
3223
 
3224
  else if (TYPE_IS_FAT_POINTER_P (gnu_type))
3225
    {
3226
      tree p_array_type = TREE_TYPE (TYPE_FIELDS (gnu_type));
3227
      tree p_bounds_type = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (gnu_type)));
3228
      tree template_type = TREE_TYPE (p_bounds_type);
3229
      tree min_field = TYPE_FIELDS (template_type);
3230
      tree max_field = DECL_CHAIN (TYPE_FIELDS (template_type));
3231
      tree template_tree, template_addr, aflags, dimct, t, u;
3232
      /* See the head comment of build_vms_descriptor.  */
3233
      int iklass = TREE_INT_CST_LOW (DECL_INITIAL (klass));
3234
      VEC(constructor_elt,gc) *v;
3235
 
3236
      /* Convert POINTER to the pointer-to-array type.  */
3237
      gnu_expr32 = convert (p_array_type, gnu_expr32);
3238
 
3239
      switch (iklass)
3240
        {
3241
        case 1:  /* Class S  */
3242
        case 15: /* Class SB */
3243
          /* Build {1, LENGTH} template; LENGTH is the 1st field.  */
3244
          v = VEC_alloc (constructor_elt, gc, 2);
3245
          t = TYPE_FIELDS (desc_type);
3246
          t = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3247
          CONSTRUCTOR_APPEND_ELT (v, min_field,
3248
                                  convert (TREE_TYPE (min_field),
3249
                                           integer_one_node));
3250
          CONSTRUCTOR_APPEND_ELT (v, max_field,
3251
                                  convert (TREE_TYPE (max_field), t));
3252
          template_tree = gnat_build_constructor (template_type, v);
3253
          template_addr = build_unary_op (ADDR_EXPR, NULL_TREE, template_tree);
3254
 
3255
          /* For class S, we are done.  */
3256
          if (iklass == 1)
3257
            break;
3258
 
3259
          /* Test that we really have a SB descriptor, like DEC Ada.  */
3260
          t = build3 (COMPONENT_REF, TREE_TYPE (klass), desc, klass, NULL);
3261
          u = convert (TREE_TYPE (klass), DECL_INITIAL (klass));
3262
          u = build_binary_op (EQ_EXPR, boolean_type_node, t, u);
3263
          /* If so, there is already a template in the descriptor and
3264
             it is located right after the POINTER field.  */
3265
          t = DECL_CHAIN (pointer);
3266
          template_tree
3267
            = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3268
          /* Otherwise use the {1, LENGTH} template we built above.  */
3269
          template_addr = build3 (COND_EXPR, p_bounds_type, u,
3270
                                  build_unary_op (ADDR_EXPR, p_bounds_type,
3271
                                                 template_tree),
3272
                                  template_addr);
3273
          break;
3274
 
3275
        case 4:  /* Class A */
3276
          /* The AFLAGS field is the 7th field in the descriptor.  */
3277
          t = DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (pointer)));
3278
          aflags = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3279
          /* The DIMCT field is the 8th field in the descriptor.  */
3280
          t = DECL_CHAIN (t);
3281
          dimct = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3282
          /* Raise CONSTRAINT_ERROR if there is more than one dimension,
3283
             or if FL_COEFF or FL_BOUNDS is not set.  */
3284
          u = build_int_cst (TREE_TYPE (aflags), 192);
3285
          u = build_binary_op (TRUTH_OR_EXPR, boolean_type_node,
3286
                               build_binary_op (NE_EXPR, boolean_type_node,
3287
                                                dimct,
3288
                                                convert (TREE_TYPE (dimct),
3289
                                                         size_one_node)),
3290
                               build_binary_op (NE_EXPR, boolean_type_node,
3291
                                                build2 (BIT_AND_EXPR,
3292
                                                        TREE_TYPE (aflags),
3293
                                                        aflags, u),
3294
                                                u));
3295
          /* There is already a template in the descriptor and it is
3296
             located at the start of block 3 (12th field).  */
3297
          t = DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (t))));
3298
          template_tree
3299
            = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3300
          template_tree = build3 (COND_EXPR, TREE_TYPE (t), u,
3301
                            build_call_raise (CE_Length_Check_Failed, Empty,
3302
                                              N_Raise_Constraint_Error),
3303
                            template_tree);
3304
          template_addr
3305
            = build_unary_op (ADDR_EXPR, p_bounds_type, template_tree);
3306
          break;
3307
 
3308
        case 10: /* Class NCA */
3309
        default:
3310
          post_error ("unsupported descriptor type for &", gnat_subprog);
3311
          template_addr = integer_zero_node;
3312
          break;
3313
        }
3314
 
3315
      /* Build the fat pointer in the form of a constructor.  */
3316
      v = VEC_alloc (constructor_elt, gc, 2);
3317
      CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (gnu_type), gnu_expr32);
3318
      CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (gnu_type)),
3319
                              template_addr);
3320
 
3321
      return gnat_build_constructor (gnu_type, v);
3322
    }
3323
 
3324
  else
3325
    gcc_unreachable ();
3326
}
3327
 
3328
/* Convert GNU_EXPR, a pointer to a VMS descriptor, to GNU_TYPE, a regular
3329
   pointer or fat pointer type.  GNU_EXPR_ALT_TYPE is the alternate (32-bit)
3330
   pointer type of GNU_EXPR.  BY_REF is true if the result is to be used by
3331
   reference.  GNAT_SUBPROG is the subprogram to which the VMS descriptor is
3332
   passed.  */
3333
 
3334
tree
3335
convert_vms_descriptor (tree gnu_type, tree gnu_expr, tree gnu_expr_alt_type,
3336
                        bool by_ref, Entity_Id gnat_subprog)
3337
{
3338
  tree desc_type = TREE_TYPE (TREE_TYPE (gnu_expr));
3339
  tree desc = build1 (INDIRECT_REF, desc_type, gnu_expr);
3340
  tree mbo = TYPE_FIELDS (desc_type);
3341
  const char *mbostr = IDENTIFIER_POINTER (DECL_NAME (mbo));
3342
  tree mbmo = DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (mbo)));
3343
  tree real_type, is64bit, gnu_expr32, gnu_expr64;
3344
 
3345
  if (by_ref)
3346
    real_type = TREE_TYPE (gnu_type);
3347
  else
3348
    real_type = gnu_type;
3349
 
3350
  /* If the field name is not MBO, the descriptor must be 32-bit with no
3351
     alternate.  Otherwise the primary is 64-bit and the alternate 32-bit.  */
3352
  if (strcmp (mbostr, "MBO") != 0)
3353
    {
3354
      tree ret = convert_vms_descriptor32 (real_type, gnu_expr, gnat_subprog);
3355
      if (by_ref)
3356
        ret = build_unary_op (ADDR_EXPR, gnu_type, ret);
3357
      return ret;
3358
    }
3359
 
3360
  /* Build the test for 64-bit descriptor.  */
3361
  mbo = build3 (COMPONENT_REF, TREE_TYPE (mbo), desc, mbo, NULL_TREE);
3362
  mbmo = build3 (COMPONENT_REF, TREE_TYPE (mbmo), desc, mbmo, NULL_TREE);
3363
  is64bit
3364
    = build_binary_op (TRUTH_ANDIF_EXPR, boolean_type_node,
3365
                       build_binary_op (EQ_EXPR, boolean_type_node,
3366
                                        convert (integer_type_node, mbo),
3367
                                        integer_one_node),
3368
                       build_binary_op (EQ_EXPR, boolean_type_node,
3369
                                        convert (integer_type_node, mbmo),
3370
                                        integer_minus_one_node));
3371
 
3372
  /* Build the 2 possible end results.  */
3373
  gnu_expr64 = convert_vms_descriptor64 (real_type, gnu_expr, gnat_subprog);
3374
  if (by_ref)
3375
    gnu_expr64 = build_unary_op (ADDR_EXPR, gnu_type, gnu_expr64);
3376
  gnu_expr = fold_convert (gnu_expr_alt_type, gnu_expr);
3377
  gnu_expr32 = convert_vms_descriptor32 (real_type, gnu_expr, gnat_subprog);
3378
  if (by_ref)
3379
    gnu_expr32 = build_unary_op (ADDR_EXPR, gnu_type, gnu_expr32);
3380
 
3381
  return build3 (COND_EXPR, gnu_type, is64bit, gnu_expr64, gnu_expr32);
3382
}
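/* Illustrative sketch, not part of the build: the IS64BIT test built just
   above, expressed as plain C against the hypothetical gnat_vms_desc64
   layout sketched earlier.  A 64-bit descriptor is recognized by MBO == 1
   together with MBMO == -1; anything else is handled as the 32-bit form.  */
#if 0
static int
vms_descriptor_is_64bit (const struct gnat_vms_desc64 *d)
{
  return d->mbo == 1 && d->mbmo == -1;
}
#endif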
3383
 
3384
/* Build a type to be used to represent an aliased object whose nominal type
3385
   is an unconstrained array.  This consists of a RECORD_TYPE containing a
3386
   field of TEMPLATE_TYPE and a field of OBJECT_TYPE, which is an ARRAY_TYPE.
3387
   If ARRAY_TYPE is that of an unconstrained array, this is used to represent
3388
   an arbitrary unconstrained object.  Use NAME as the name of the record.
3389
   DEBUG_INFO_P is true if we need to write debug information for the type.  */
3390
 
3391
tree
3392
build_unc_object_type (tree template_type, tree object_type, tree name,
3393
                       bool debug_info_p)
3394
{
3395
  tree type = make_node (RECORD_TYPE);
3396
  tree template_field
3397
    = create_field_decl (get_identifier ("BOUNDS"), template_type, type,
3398
                         NULL_TREE, NULL_TREE, 0, 1);
3399
  tree array_field
3400
    = create_field_decl (get_identifier ("ARRAY"), object_type, type,
3401
                         NULL_TREE, NULL_TREE, 0, 1);
3402
 
3403
  TYPE_NAME (type) = name;
3404
  TYPE_CONTAINS_TEMPLATE_P (type) = 1;
3405
  DECL_CHAIN (template_field) = array_field;
3406
  finish_record_type (type, template_field, 0, true);
3407
 
3408
  /* Declare it now since it will never be declared otherwise.  This is
3409
     necessary to ensure that its subtrees are properly marked.  */
3410
  create_type_decl (name, type, NULL, true, debug_info_p, Empty);
3411
 
3412
  return type;
3413
}
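/* Illustrative sketch, not part of the build: the record built above has the
   shape of the C struct below, shown for a one-dimensional array of
   integers.  The field names BOUNDS and ARRAY come from the code above; the
   element and bound types are hypothetical examples.  */
#if 0
struct unc_bounds { int low, high; };   /* template: one pair per dimension */
struct unc_object {
  struct unc_bounds BOUNDS;             /* the bounds template first */
  int ARRAY[];                          /* then the array data       */
};
#endif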
3414
 
3415
/* Same, taking a thin or fat pointer type instead of a template type. */
3416
 
3417
tree
3418
build_unc_object_type_from_ptr (tree thin_fat_ptr_type, tree object_type,
3419
                                tree name, bool debug_info_p)
3420
{
3421
  tree template_type;
3422
 
3423
  gcc_assert (TYPE_IS_FAT_OR_THIN_POINTER_P (thin_fat_ptr_type));
3424
 
3425
  template_type
3426
    = (TYPE_IS_FAT_POINTER_P (thin_fat_ptr_type)
3427
       ? TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (thin_fat_ptr_type))))
3428
       : TREE_TYPE (TYPE_FIELDS (TREE_TYPE (thin_fat_ptr_type))));
3429
 
3430
  return
3431
    build_unc_object_type (template_type, object_type, name, debug_info_p);
3432
}
3433
 
3434
/* Shift the component offsets within an unconstrained object TYPE to make it
3435
   suitable for use as a designated type for thin pointers.  */
3436
 
3437
void
3438
shift_unc_components_for_thin_pointers (tree type)
3439
{
3440
  /* Thin pointer values designate the ARRAY data of an unconstrained object,
3441
     allocated past the BOUNDS template.  The designated type is adjusted to
3442
     have ARRAY at position zero and the template at a negative offset, so
3443
     that COMPONENT_REFs on (*thin_ptr) designate the proper location.  */
3444
 
3445
  tree bounds_field = TYPE_FIELDS (type);
3446
  tree array_field  = DECL_CHAIN (TYPE_FIELDS (type));
3447
 
3448
  DECL_FIELD_OFFSET (bounds_field)
3449
    = size_binop (MINUS_EXPR, size_zero_node, byte_position (array_field));
3450
 
3451
  DECL_FIELD_OFFSET (array_field) = size_zero_node;
3452
  DECL_FIELD_BIT_OFFSET (array_field) = bitsize_zero_node;
3453
}
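/* Illustrative sketch, not part of the build: after the shift above, a thin
   pointer designates the ARRAY data itself and the BOUNDS template sits at a
   negative offset just in front of it.  Using the hypothetical unc_object
   sketch above, and ignoring padding, the two components are reached as
   follows.  */
#if 0
static void
access_thin_object (void *thin_ptr)
{
  int *data = (int *) thin_ptr;                       /* offset 0        */
  struct unc_bounds *bounds                           /* negative offset */
    = (struct unc_bounds *) ((char *) thin_ptr - sizeof (struct unc_bounds));
  (void) data; (void) bounds;
}
#endif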
3454
 
3455
/* Update anything previously pointing to OLD_TYPE to point to NEW_TYPE.
3456
   In the normal case this is just two adjustments, but we have more to
3457
   do if NEW_TYPE is an UNCONSTRAINED_ARRAY_TYPE.  */
3458
 
3459
void
3460
update_pointer_to (tree old_type, tree new_type)
3461
{
3462
  tree ptr = TYPE_POINTER_TO (old_type);
3463
  tree ref = TYPE_REFERENCE_TO (old_type);
3464
  tree t;
3465
 
3466
  /* If this is the main variant, process all the other variants first.  */
3467
  if (TYPE_MAIN_VARIANT (old_type) == old_type)
3468
    for (t = TYPE_NEXT_VARIANT (old_type); t; t = TYPE_NEXT_VARIANT (t))
3469
      update_pointer_to (t, new_type);
3470
 
3471
  /* If no pointers and no references, we are done.  */
3472
  if (!ptr && !ref)
3473
    return;
3474
 
3475
  /* Merge the old type qualifiers in the new type.
3476
 
3477
     Each old variant has qualifiers for specific reasons, and the new
3478
     designated type as well.  Each set of qualifiers represents useful
3479
     information grabbed at some point, and merging the two simply unifies
3480
     these inputs into the final type description.
3481
 
3482
     Consider for instance a volatile type frozen after an access to constant
3483
     type designating it; after the designated type's freeze, we get here with
3484
     a volatile NEW_TYPE and a dummy OLD_TYPE with a readonly variant, created
3485
     when the access type was processed.  We will make a volatile and readonly
3486
     designated type, because that's what it really is.
3487
 
3488
     We might also get here for a non-dummy OLD_TYPE variant with different
3489
     qualifiers than those of NEW_TYPE, for instance in some cases of pointers
3490
     to private record type elaboration (see the comments around the call to
3491
     this routine in gnat_to_gnu_entity <E_Access_Type>).  We have to merge
3492
     the qualifiers in those cases too, to avoid accidentally discarding the
3493
     initial set, and will often end up with OLD_TYPE == NEW_TYPE then.  */
3494
  new_type
3495
    = build_qualified_type (new_type,
3496
                            TYPE_QUALS (old_type) | TYPE_QUALS (new_type));
3497
 
3498
  /* If old type and new type are identical, there is nothing to do.  */
3499
  if (old_type == new_type)
3500
    return;
3501
 
3502
  /* Otherwise, first handle the simple case.  */
3503
  if (TREE_CODE (new_type) != UNCONSTRAINED_ARRAY_TYPE)
3504
    {
3505
      tree new_ptr, new_ref;
3506
 
3507
      /* If pointer or reference already points to new type, nothing to do.
3508
         This can happen as update_pointer_to can be invoked multiple times
3509
         on the same pair of types because of the type variants.  */
3510
      if ((ptr && TREE_TYPE (ptr) == new_type)
3511
          || (ref && TREE_TYPE (ref) == new_type))
3512
        return;
3513
 
3514
      /* Chain PTR and its variants at the end.  */
3515
      new_ptr = TYPE_POINTER_TO (new_type);
3516
      if (new_ptr)
3517
        {
3518
          while (TYPE_NEXT_PTR_TO (new_ptr))
3519
            new_ptr = TYPE_NEXT_PTR_TO (new_ptr);
3520
          TYPE_NEXT_PTR_TO (new_ptr) = ptr;
3521
        }
3522
      else
3523
        TYPE_POINTER_TO (new_type) = ptr;
3524
 
3525
      /* Now adjust them.  */
3526
      for (; ptr; ptr = TYPE_NEXT_PTR_TO (ptr))
3527
        for (t = TYPE_MAIN_VARIANT (ptr); t; t = TYPE_NEXT_VARIANT (t))
3528
          {
3529
            TREE_TYPE (t) = new_type;
3530
            if (TYPE_NULL_BOUNDS (t))
3531
              TREE_TYPE (TREE_OPERAND (TYPE_NULL_BOUNDS (t), 0)) = new_type;
3532
          }
3533
 
3534
      /* If we have adjusted named types, finalize them.  This is necessary
3535
         since we had forced a DWARF typedef for them in gnat_pushdecl.  */
3536
      for (ptr = TYPE_POINTER_TO (old_type); ptr; ptr = TYPE_NEXT_PTR_TO (ptr))
3537
        if (TYPE_NAME (ptr) && TREE_CODE (TYPE_NAME (ptr)) == TYPE_DECL)
3538
          rest_of_type_decl_compilation (TYPE_NAME (ptr));
3539
 
3540
      /* Chain REF and its variants at the end.  */
3541
      new_ref = TYPE_REFERENCE_TO (new_type);
3542
      if (new_ref)
3543
        {
3544
          while (TYPE_NEXT_REF_TO (new_ref))
3545
            new_ref = TYPE_NEXT_REF_TO (new_ref);
3546
          TYPE_NEXT_REF_TO (new_ref) = ref;
3547
        }
3548
      else
3549
        TYPE_REFERENCE_TO (new_type) = ref;
3550
 
3551
      /* Now adjust them.  */
3552
      for (; ref; ref = TYPE_NEXT_REF_TO (ref))
3553
        for (t = TYPE_MAIN_VARIANT (ref); t; t = TYPE_NEXT_VARIANT (t))
3554
          TREE_TYPE (t) = new_type;
3555
 
3556
      TYPE_POINTER_TO (old_type) = NULL_TREE;
3557
      TYPE_REFERENCE_TO (old_type) = NULL_TREE;
3558
    }
3559
 
3560
  /* Now deal with the unconstrained array case.  In this case the pointer
3561
     is actually a record where both fields are pointers to dummy nodes.
3562
     Turn them into pointers to the correct types using update_pointer_to.
3563
     Likewise for the pointer to the object record (thin pointer).  */
3564
  else
3565
    {
3566
      tree new_ptr = TYPE_POINTER_TO (new_type);
3567
 
3568
      gcc_assert (TYPE_IS_FAT_POINTER_P (ptr));
3569
 
3570
      /* If PTR already points to NEW_TYPE, nothing to do.  This can happen
3571
         since update_pointer_to can be invoked multiple times on the same
3572
         pair of types because of the type variants.  */
3573
      if (TYPE_UNCONSTRAINED_ARRAY (ptr) == new_type)
3574
        return;
3575
 
3576
      update_pointer_to
3577
        (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))),
3578
         TREE_TYPE (TREE_TYPE (TYPE_FIELDS (new_ptr))));
3579
 
3580
      update_pointer_to
3581
        (TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (ptr)))),
3582
         TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (new_ptr)))));
3583
 
3584
      update_pointer_to (TYPE_OBJECT_RECORD_TYPE (old_type),
3585
                         TYPE_OBJECT_RECORD_TYPE (new_type));
3586
 
3587
      TYPE_POINTER_TO (old_type) = NULL_TREE;
3588
    }
3589
}
3590
 
3591
/* Convert EXPR, a pointer to a constrained array, into a pointer to an
3592
   unconstrained one.  This involves making or finding a template.  */
3593
 
3594
static tree
3595
convert_to_fat_pointer (tree type, tree expr)
3596
{
3597
  tree template_type = TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type))));
3598
  tree p_array_type = TREE_TYPE (TYPE_FIELDS (type));
3599
  tree etype = TREE_TYPE (expr);
3600
  tree template_tree;
3601
  VEC(constructor_elt,gc) *v = VEC_alloc (constructor_elt, gc, 2);
3602
 
3603
  /* If EXPR is null, make a fat pointer that contains a null pointer to the
3604
     array (compare_fat_pointers ensures that this is the full discriminant)
3605
     and a valid pointer to the bounds.  This latter property is necessary
3606
     since the compiler can hoist the load of the bounds done through it.  */
3607
  if (integer_zerop (expr))
3608
    {
3609
      tree ptr_template_type = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
3610
      tree null_bounds, t;
3611
 
3612
      if (TYPE_NULL_BOUNDS (ptr_template_type))
3613
        null_bounds = TYPE_NULL_BOUNDS (ptr_template_type);
3614
      else
3615
        {
3616
          /* The template type can still be dummy at this point so we build an
3617
             empty constructor.  The middle-end will fill it in with zeros.  */
3618
          t = build_constructor (template_type, NULL);
3619
          TREE_CONSTANT (t) = TREE_STATIC (t) = 1;
3620
          null_bounds = build_unary_op (ADDR_EXPR, NULL_TREE, t);
3621
          SET_TYPE_NULL_BOUNDS (ptr_template_type, null_bounds);
3622
        }
3623
 
3624
      CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
3625
                              fold_convert (p_array_type, null_pointer_node));
3626
      CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (type)), null_bounds);
3627
      t = build_constructor (type, v);
3628
      /* Do not set TREE_CONSTANT so as to force T to static memory.  */
3629
      TREE_CONSTANT (t) = 0;
3630
      TREE_STATIC (t) = 1;
3631
 
3632
      return t;
3633
    }
3634
 
3635
  /* If EXPR is a thin pointer, make template and data from the record.  */
3636
  else if (TYPE_IS_THIN_POINTER_P (etype))
3637
    {
3638
      tree fields = TYPE_FIELDS (TREE_TYPE (etype));
3639
 
3640
      expr = gnat_protect_expr (expr);
3641
      if (TREE_CODE (expr) == ADDR_EXPR)
3642
        expr = TREE_OPERAND (expr, 0);
3643
      else
3644
        expr = build1 (INDIRECT_REF, TREE_TYPE (etype), expr);
3645
 
3646
      template_tree = build_component_ref (expr, NULL_TREE, fields, false);
3647
      expr = build_unary_op (ADDR_EXPR, NULL_TREE,
3648
                             build_component_ref (expr, NULL_TREE,
3649
                                                  DECL_CHAIN (fields), false));
3650
    }
3651
 
3652
  /* Otherwise, build the constructor for the template.  */
3653
  else
3654
    template_tree = build_template (template_type, TREE_TYPE (etype), expr);
3655
 
3656
  /* The final result is a constructor for the fat pointer.
3657
 
3658
     If EXPR is an argument of a foreign convention subprogram, the type it
3659
     points to is directly the component type.  In this case, the expression
3660
     type may not match the corresponding FIELD_DECL type at this point, so we
3661
     call "convert" here to fix that up if necessary.  This type consistency is
3662
     required, for instance because it ensures that possible later folding of
3663
     COMPONENT_REFs against this constructor always yields something of the
3664
     same type as the initial reference.
3665
 
3666
     Note that the call to "build_template" above is still fine because it
3667
     will only refer to the provided TEMPLATE_TYPE in this case.  */
3668
  CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
3669
                          convert (p_array_type, expr));
3670
  CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (type)),
3671
                          build_unary_op (ADDR_EXPR, NULL_TREE,
3672
                                          template_tree));
3673
  return gnat_build_constructor (type, v);
3674
}
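/* Illustrative sketch, not part of the build: the constructor returned above
   fills a two-field record of the shape below.  Note the null case handled
   above: the data pointer is null but the bounds pointer still designates a
   (zero-filled) template.  Names are hypothetical; unc_bounds is the sketch
   used earlier.  */
#if 0
struct fat_pointer {
  void              *p_array;    /* array data, NULL for a null fat pointer */
  struct unc_bounds *p_bounds;   /* bounds template, never NULL             */
};
#endif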
3675
 
3676
/* Convert to a thin pointer type, TYPE.  The only thing we know how to convert
3677
   is something that is a fat pointer, so convert to it first if EXPR
3678
   is not already a fat pointer.  */
3679
 
3680
static tree
3681
convert_to_thin_pointer (tree type, tree expr)
3682
{
3683
  if (!TYPE_IS_FAT_POINTER_P (TREE_TYPE (expr)))
3684
    expr
3685
      = convert_to_fat_pointer
3686
        (TREE_TYPE (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type))), expr);
3687
 
3688
  /* We get the pointer to the data and use a NOP_EXPR to make it the
3689
     proper GCC type.  */
3690
  expr = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (TREE_TYPE (expr)),
3691
                              false);
3692
  expr = build1 (NOP_EXPR, type, expr);
3693
 
3694
  return expr;
3695
}
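/* Illustrative sketch, not part of the build: with the fat_pointer sketch
   above, the conversion done by convert_to_thin_pointer amounts to taking
   the data component and retyping it; the NOP_EXPR only changes the GCC
   type.  */
#if 0
static void *
to_thin_pointer (struct fat_pointer fat)
{
  return fat.p_array;
}
#endif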
3696
 
3697
/* Create an expression whose value is that of EXPR,
3698
   converted to type TYPE.  The TREE_TYPE of the value
3699
   is always TYPE.  This function implements all reasonable
3700
   conversions; callers should filter out those that are
3701
   not permitted by the language being compiled.  */
3702
 
3703
tree
3704
convert (tree type, tree expr)
3705
{
3706
  tree etype = TREE_TYPE (expr);
3707
  enum tree_code ecode = TREE_CODE (etype);
3708
  enum tree_code code = TREE_CODE (type);
3709
 
3710
  /* If the expression is already of the right type, we are done.  */
3711
  if (etype == type)
3712
    return expr;
3713
 
3714
  /* If both input and output have padding and are of variable size, do this
3715
     as an unchecked conversion.  Likewise if one is a mere variant of the
3716
     other, so we avoid a pointless unpad/repad sequence.  */
3717
  else if (code == RECORD_TYPE && ecode == RECORD_TYPE
3718
           && TYPE_PADDING_P (type) && TYPE_PADDING_P (etype)
3719
           && (!TREE_CONSTANT (TYPE_SIZE (type))
3720
               || !TREE_CONSTANT (TYPE_SIZE (etype))
3721
               || gnat_types_compatible_p (type, etype)
3722
               || TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type)))
3723
                  == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (etype)))))
3724
    ;
3725
 
3726
  /* If the output type has padding, convert to the inner type and make a
3727
     constructor to build the record, unless a variable size is involved.  */
3728
  else if (code == RECORD_TYPE && TYPE_PADDING_P (type))
3729
    {
3730
      VEC(constructor_elt,gc) *v;
3731
 
3732
      /* If we previously converted from another type and our type is
3733
         of variable size, remove the conversion to avoid the need for
3734
         variable-sized temporaries.  Likewise for a conversion between
3735
         original and packable version.  */
3736
      if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
3737
          && (!TREE_CONSTANT (TYPE_SIZE (type))
3738
              || (ecode == RECORD_TYPE
3739
                  && TYPE_NAME (etype)
3740
                     == TYPE_NAME (TREE_TYPE (TREE_OPERAND (expr, 0))))))
3741
        expr = TREE_OPERAND (expr, 0);
3742
 
3743
      /* If we are just removing the padding from expr, convert the original
3744
         object if we have variable size in order to avoid the need for some
3745
         variable-sized temporaries.  Likewise if the padding is a variant
3746
         of the other, so we avoid a pointless unpad/repad sequence.  */
3747
      if (TREE_CODE (expr) == COMPONENT_REF
3748
          && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
3749
          && (!TREE_CONSTANT (TYPE_SIZE (type))
3750
              || gnat_types_compatible_p (type,
3751
                                          TREE_TYPE (TREE_OPERAND (expr, 0)))
3752
              || (ecode == RECORD_TYPE
3753
                  && TYPE_NAME (etype)
3754
                     == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type))))))
3755
        return convert (type, TREE_OPERAND (expr, 0));
3756
 
3757
      /* If the inner type is of self-referential size and the expression type
3758
         is a record, do this as an unchecked conversion.  But first pad the
3759
         expression if possible to have the same size on both sides.  */
3760
      if (ecode == RECORD_TYPE
3761
          && CONTAINS_PLACEHOLDER_P (DECL_SIZE (TYPE_FIELDS (type))))
3762
        {
3763
          if (TREE_CODE (TYPE_SIZE (etype)) == INTEGER_CST)
3764
            expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0, Empty,
3765
                                            false, false, false, true),
3766
                            expr);
3767
          return unchecked_convert (type, expr, false);
3768
        }
3769
 
3770
      /* If we are converting between array types with variable size, do the
3771
         final conversion as an unchecked conversion, again to avoid the need
3772
         for some variable-sized temporaries.  If valid, this conversion is
3773
         very likely purely technical and without real effects.  */
3774
      if (ecode == ARRAY_TYPE
3775
          && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type))) == ARRAY_TYPE
3776
          && !TREE_CONSTANT (TYPE_SIZE (etype))
3777
          && !TREE_CONSTANT (TYPE_SIZE (type)))
3778
        return unchecked_convert (type,
3779
                                  convert (TREE_TYPE (TYPE_FIELDS (type)),
3780
                                           expr),
3781
                                  false);
3782
 
3783
      v = VEC_alloc (constructor_elt, gc, 1);
3784
      CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
3785
                              convert (TREE_TYPE (TYPE_FIELDS (type)), expr));
3786
      return gnat_build_constructor (type, v);
3787
    }
3788
 
3789
  /* If the input type has padding, remove it and convert to the output type.
3790
     The conditions ordering is arranged to ensure that the output type is not
3791
     a padding type here, as it is not clear whether the conversion would
3792
     always be correct if this was to happen.  */
3793
  else if (ecode == RECORD_TYPE && TYPE_PADDING_P (etype))
3794
    {
3795
      tree unpadded;
3796
 
3797
      /* If we have just converted to this padded type, just get the
3798
         inner expression.  */
3799
      if (TREE_CODE (expr) == CONSTRUCTOR
3800
          && !VEC_empty (constructor_elt, CONSTRUCTOR_ELTS (expr))
3801
          && VEC_index (constructor_elt, CONSTRUCTOR_ELTS (expr), 0)->index
3802
             == TYPE_FIELDS (etype))
3803
        unpadded
3804
          = VEC_index (constructor_elt, CONSTRUCTOR_ELTS (expr), 0)->value;
3805
 
3806
      /* Otherwise, build an explicit component reference.  */
3807
      else
3808
        unpadded
3809
          = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (etype), false);
3810
 
3811
      return convert (type, unpadded);
3812
    }
3813
 
3814
  /* If the input is a biased type, adjust first.  */
3815
  if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
3816
    return convert (type, fold_build2 (PLUS_EXPR, TREE_TYPE (etype),
3817
                                       fold_convert (TREE_TYPE (etype),
3818
                                                     expr),
3819
                                       TYPE_MIN_VALUE (etype)));
3820
 
3821
  /* If the input is a justified modular type, we need to extract the actual
3822
     object before converting it to any other type with the exceptions of an
3823
     unconstrained array or of a mere type variant.  It is useful to avoid the
3824
     extraction and conversion in the type variant case because it could end
3825
     up replacing a VAR_DECL expr by a constructor and we might be about to
3826
     take the address of the result.  */
3827
  if (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)
3828
      && code != UNCONSTRAINED_ARRAY_TYPE
3829
      && TYPE_MAIN_VARIANT (type) != TYPE_MAIN_VARIANT (etype))
3830
    return convert (type, build_component_ref (expr, NULL_TREE,
3831
                                               TYPE_FIELDS (etype), false));
3832
 
3833
  /* If converting to a type that contains a template, convert to the data
3834
     type and then build the template. */
3835
  if (code == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (type))
3836
    {
3837
      tree obj_type = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
3838
      VEC(constructor_elt,gc) *v = VEC_alloc (constructor_elt, gc, 2);
3839
 
3840
      /* If the source already has a template, get a reference to the
3841
         associated array only, as we are going to rebuild a template
3842
         for the target type anyway.  */
3843
      expr = maybe_unconstrained_array (expr);
3844
 
3845
      CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
3846
                              build_template (TREE_TYPE (TYPE_FIELDS (type)),
3847
                                              obj_type, NULL_TREE));
3848
      CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (type)),
3849
                              convert (obj_type, expr));
3850
      return gnat_build_constructor (type, v);
3851
    }
3852
 
3853
  /* There are some cases of expressions that we process specially.  */
3854
  switch (TREE_CODE (expr))
3855
    {
3856
    case ERROR_MARK:
3857
      return expr;
3858
 
3859
    case NULL_EXPR:
3860
      /* Just set its type here.  For TRANSFORM_EXPR, we will do the actual
3861
         conversion in gnat_expand_expr.  NULL_EXPR does not represent
3862
         an actual value, so no conversion is needed.  */
3863
      expr = copy_node (expr);
3864
      TREE_TYPE (expr) = type;
3865
      return expr;
3866
 
3867
    case STRING_CST:
3868
      /* If we are converting a STRING_CST to another constrained array type,
3869
         just make a new one in the proper type.  */
3870
      if (code == ecode && AGGREGATE_TYPE_P (etype)
3871
          && !(TREE_CODE (TYPE_SIZE (etype)) == INTEGER_CST
3872
               && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST))
3873
        {
3874
          expr = copy_node (expr);
3875
          TREE_TYPE (expr) = type;
3876
          return expr;
3877
        }
3878
      break;
3879
 
3880
    case VECTOR_CST:
3881
      /* If we are converting a VECTOR_CST to a mere variant type, just make
3882
         a new one in the proper type.  */
3883
      if (code == ecode && gnat_types_compatible_p (type, etype))
3884
        {
3885
          expr = copy_node (expr);
3886
          TREE_TYPE (expr) = type;
3887
          return expr;
3888
        }
3889
 
3890
    case CONSTRUCTOR:
3891
      /* If we are converting a CONSTRUCTOR to a mere variant type, just make
3892
         a new one in the proper type.  */
3893
      if (code == ecode && gnat_types_compatible_p (type, etype))
3894
        {
3895
          expr = copy_node (expr);
3896
          TREE_TYPE (expr) = type;
3897
          return expr;
3898
        }
3899
 
3900
      /* Likewise for a conversion between original and packable version, or
3901
         conversion between types of the same size and with the same list of
3902
         fields, but we have to work harder to preserve type consistency.  */
3903
      if (code == ecode
3904
          && code == RECORD_TYPE
3905
          && (TYPE_NAME (type) == TYPE_NAME (etype)
3906
              || tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (etype))))
3907
 
3908
        {
3909
          VEC(constructor_elt,gc) *e = CONSTRUCTOR_ELTS (expr);
3910
          unsigned HOST_WIDE_INT len = VEC_length (constructor_elt, e);
3911
          VEC(constructor_elt,gc) *v = VEC_alloc (constructor_elt, gc, len);
3912
          tree efield = TYPE_FIELDS (etype), field = TYPE_FIELDS (type);
3913
          unsigned HOST_WIDE_INT idx;
3914
          tree index, value;
3915
 
3916
          /* Whether we need to clear TREE_CONSTANT et al. on the output
3917
             constructor when we convert in place.  */
3918
          bool clear_constant = false;
3919
 
3920
          FOR_EACH_CONSTRUCTOR_ELT(e, idx, index, value)
3921
            {
3922
              constructor_elt *elt;
3923
              /* We expect only simple constructors.  */
3924
              if (!SAME_FIELD_P (index, efield))
3925
                break;
3926
              /* The field must be the same.  */
3927
              if (!SAME_FIELD_P (efield, field))
3928
                break;
3929
              elt = VEC_quick_push (constructor_elt, v, NULL);
3930
              elt->index = field;
3931
              elt->value = convert (TREE_TYPE (field), value);
3932
 
3933
              /* If packing has made this field a bitfield and the input
3934
                 value couldn't be emitted statically any more, we need to
3935
                 clear TREE_CONSTANT on our output.  */
3936
              if (!clear_constant
3937
                  && TREE_CONSTANT (expr)
3938
                  && !CONSTRUCTOR_BITFIELD_P (efield)
3939
                  && CONSTRUCTOR_BITFIELD_P (field)
3940
                  && !initializer_constant_valid_for_bitfield_p (value))
3941
                clear_constant = true;
3942
 
3943
              efield = DECL_CHAIN (efield);
3944
              field = DECL_CHAIN (field);
3945
            }
3946
 
3947
          /* If we have been able to match and convert all the input fields
3948
             to their output type, convert in place now.  We'll fall back to a
3949
             view conversion downstream otherwise.  */
3950
          if (idx == len)
3951
            {
3952
              expr = copy_node (expr);
3953
              TREE_TYPE (expr) = type;
3954
              CONSTRUCTOR_ELTS (expr) = v;
3955
              if (clear_constant)
3956
                TREE_CONSTANT (expr) = TREE_STATIC (expr) = 0;
3957
              return expr;
3958
            }
3959
        }
3960
 
3961
      /* Likewise for a conversion between array type and vector type with a
3962
         compatible representative array.  */
3963
      else if (code == VECTOR_TYPE
3964
               && ecode == ARRAY_TYPE
3965
               && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
3966
                                           etype))
3967
        {
3968
          VEC(constructor_elt,gc) *e = CONSTRUCTOR_ELTS (expr);
3969
          unsigned HOST_WIDE_INT len = VEC_length (constructor_elt, e);
3970
          VEC(constructor_elt,gc) *v;
3971
          unsigned HOST_WIDE_INT ix;
3972
          tree value;
3973
 
3974
          /* Build a VECTOR_CST from a *constant* array constructor.  */
3975
          if (TREE_CONSTANT (expr))
3976
            {
3977
              bool constant_p = true;
3978
 
3979
              /* Iterate through elements and check if all constructor
3980
                 elements are *_CSTs.  */
3981
              FOR_EACH_CONSTRUCTOR_VALUE (e, ix, value)
3982
                if (!CONSTANT_CLASS_P (value))
3983
                  {
3984
                    constant_p = false;
3985
                    break;
3986
                  }
3987
 
3988
              if (constant_p)
3989
                return build_vector_from_ctor (type,
3990
                                               CONSTRUCTOR_ELTS (expr));
3991
            }
3992
 
3993
          /* Otherwise, build a regular vector constructor.  */
3994
          v = VEC_alloc (constructor_elt, gc, len);
3995
          FOR_EACH_CONSTRUCTOR_VALUE (e, ix, value)
3996
            {
3997
              constructor_elt *elt = VEC_quick_push (constructor_elt, v, NULL);
3998
              elt->index = NULL_TREE;
3999
              elt->value = value;
4000
            }
4001
          expr = copy_node (expr);
4002
          TREE_TYPE (expr) = type;
4003
          CONSTRUCTOR_ELTS (expr) = v;
4004
          return expr;
4005
        }
4006
      break;
4007
 
4008
    case UNCONSTRAINED_ARRAY_REF:
4009
      /* First retrieve the underlying array.  */
4010
      expr = maybe_unconstrained_array (expr);
4011
      etype = TREE_TYPE (expr);
4012
      ecode = TREE_CODE (etype);
4013
      break;
4014
 
4015
    case VIEW_CONVERT_EXPR:
4016
      {
4017
        /* GCC 4.x is very sensitive to type consistency overall, and view
4018
           conversions are thus very frequent.  Even though just "convert"ing
4019
           the inner operand to the output type is fine in most cases, it
4020
           might expose unexpected input/output type mismatches in special
4021
           circumstances, so we avoid such recursive calls when we can.  */
4022
        tree op0 = TREE_OPERAND (expr, 0);
4023
 
4024
        /* If we are converting back to the original type, we can just
4025
           lift the input conversion.  This is a common occurrence with
4026
           switches back-and-forth amongst type variants.  */
4027
        if (type == TREE_TYPE (op0))
4028
          return op0;
4029
 
4030
        /* Otherwise, if we're converting between two aggregate or vector
4031
           types, we might be allowed to substitute the VIEW_CONVERT_EXPR
4032
           target type in place or to just convert the inner expression.  */
4033
        if ((AGGREGATE_TYPE_P (type) && AGGREGATE_TYPE_P (etype))
4034
            || (VECTOR_TYPE_P (type) && VECTOR_TYPE_P (etype)))
4035
          {
4036
            /* If we are converting between mere variants, we can just
4037
               substitute the VIEW_CONVERT_EXPR in place.  */
4038
            if (gnat_types_compatible_p (type, etype))
4039
              return build1 (VIEW_CONVERT_EXPR, type, op0);
4040
 
4041
            /* Otherwise, we may just bypass the input view conversion unless
4042
               one of the types is a fat pointer, which is handled by
4043
               specialized code below that relies on exact type matching.  */
4044
            else if (!TYPE_IS_FAT_POINTER_P (type)
4045
                     && !TYPE_IS_FAT_POINTER_P (etype))
4046
              return convert (type, op0);
4047
          }
4048
 
4049
        break;
4050
      }
4051
 
4052
    default:
4053
      break;
4054
    }
4055
 
4056
  /* Check for converting to a pointer to an unconstrained array.  */
4057
  if (TYPE_IS_FAT_POINTER_P (type) && !TYPE_IS_FAT_POINTER_P (etype))
4058
    return convert_to_fat_pointer (type, expr);
4059
 
4060
  /* If we are converting between two aggregate or vector types that are mere
4061
     variants, just make a VIEW_CONVERT_EXPR.  Likewise when we are converting
4062
     to a vector type from its representative array type.  */
4063
  else if ((code == ecode
4064
            && (AGGREGATE_TYPE_P (type) || VECTOR_TYPE_P (type))
4065
            && gnat_types_compatible_p (type, etype))
4066
           || (code == VECTOR_TYPE
4067
               && ecode == ARRAY_TYPE
4068
               && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
4069
                                           etype)))
4070
    return build1 (VIEW_CONVERT_EXPR, type, expr);
4071
 
4072
  /* If we are converting between tagged types, try to upcast properly.  */
4073
  else if (ecode == RECORD_TYPE && code == RECORD_TYPE
4074
           && TYPE_ALIGN_OK (etype) && TYPE_ALIGN_OK (type))
4075
    {
4076
      tree child_etype = etype;
4077
      do {
4078
        tree field = TYPE_FIELDS (child_etype);
4079
        if (DECL_NAME (field) == parent_name_id && TREE_TYPE (field) == type)
4080
          return build_component_ref (expr, NULL_TREE, field, false);
4081
        child_etype = TREE_TYPE (field);
4082
      } while (TREE_CODE (child_etype) == RECORD_TYPE);
4083
    }
4084
 
4085
  /* If we are converting from a smaller form of record type back to it, just
4086
     make a VIEW_CONVERT_EXPR.  But first pad the expression to have the same
4087
     size on both sides.  */
4088
  else if (ecode == RECORD_TYPE && code == RECORD_TYPE
4089
           && smaller_form_type_p (etype, type))
4090
    {
4091
      expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0, Empty,
4092
                                      false, false, false, true),
4093
                      expr);
4094
      return build1 (VIEW_CONVERT_EXPR, type, expr);
4095
    }
4096
 
4097
  /* In all other cases of related types, make a NOP_EXPR.  */
4098
  else if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype))
4099
    return fold_convert (type, expr);
4100
 
4101
  switch (code)
4102
    {
4103
    case VOID_TYPE:
4104
      return fold_build1 (CONVERT_EXPR, type, expr);
4105
 
4106
    case INTEGER_TYPE:
4107
      if (TYPE_HAS_ACTUAL_BOUNDS_P (type)
4108
          && (ecode == ARRAY_TYPE || ecode == UNCONSTRAINED_ARRAY_TYPE
4109
              || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))))
4110
        return unchecked_convert (type, expr, false);
4111
      else if (TYPE_BIASED_REPRESENTATION_P (type))
4112
        return fold_convert (type,
4113
                             fold_build2 (MINUS_EXPR, TREE_TYPE (type),
4114
                                          convert (TREE_TYPE (type), expr),
4115
                                          TYPE_MIN_VALUE (type)));
4116
 
4117
      /* ... fall through ... */
4118
 
4119
    case ENUMERAL_TYPE:
4120
    case BOOLEAN_TYPE:
4121
      /* If we are converting an additive expression to an integer type
4122
         with lower precision, be wary of the optimization that can be
4123
         applied by convert_to_integer.  There are 2 problematic cases:
4124
           - if the first operand was originally of a biased type,
4125
             because we could be recursively called to convert it
4126
             to an intermediate type and thus rematerialize the
4127
             additive operator endlessly,
4128
           - if the expression contains a placeholder, because an
4129
             intermediate conversion that changes the sign could
4130
             be inserted and thus introduce an artificial overflow
4131
             at compile time when the placeholder is substituted.  */
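      /* Added illustration, not part of the original sources: a size clause
         too small for the unbiased range makes GNAT use a biased
         representation, which is the first problematic case above.

           type Small is range 100 .. 115;
           for Small'Size use 4;  --  stores the value minus 100 in 4 bits

         An addition whose first operand is of such a biased type must not
         be narrowed through an intermediate biased type, lest we recurse
         endlessly rematerializing the addition.  */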
4132
      if (code == INTEGER_TYPE
4133
          && ecode == INTEGER_TYPE
4134
          && TYPE_PRECISION (type) < TYPE_PRECISION (etype)
4135
          && (TREE_CODE (expr) == PLUS_EXPR || TREE_CODE (expr) == MINUS_EXPR))
4136
        {
4137
          tree op0 = get_unwidened (TREE_OPERAND (expr, 0), type);
4138
 
4139
          if ((TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
4140
               && TYPE_BIASED_REPRESENTATION_P (TREE_TYPE (op0)))
4141
              || CONTAINS_PLACEHOLDER_P (expr))
4142
            return build1 (NOP_EXPR, type, expr);
4143
        }
4144
 
4145
      return fold (convert_to_integer (type, expr));
4146
 
4147
    case POINTER_TYPE:
4148
    case REFERENCE_TYPE:
4149
      /* If converting between two pointers to records denoting
4150
         both a template and a type, adjust if needed to account
4151
         for any differing offsets, since one might be negative.  */
4152
      if (TYPE_IS_THIN_POINTER_P (etype) && TYPE_IS_THIN_POINTER_P (type))
4153
        {
4154
          tree bit_diff
4155
            = size_diffop (bit_position (TYPE_FIELDS (TREE_TYPE (etype))),
4156
                           bit_position (TYPE_FIELDS (TREE_TYPE (type))));
4157
          tree byte_diff
4158
            = size_binop (CEIL_DIV_EXPR, bit_diff, sbitsize_unit_node);
4159
          expr = build1 (NOP_EXPR, type, expr);
4160
          TREE_CONSTANT (expr) = TREE_CONSTANT (TREE_OPERAND (expr, 0));
4161
          if (integer_zerop (byte_diff))
4162
            return expr;
4163
 
4164
          return build_binary_op (POINTER_PLUS_EXPR, type, expr,
4165
                                  fold (convert (sizetype, byte_diff)));
4166
        }
4167
 
4168
      /* If converting to a thin pointer, handle specially.  */
4169
      if (TYPE_IS_THIN_POINTER_P (type)
4170
          && TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type)))
4171
        return convert_to_thin_pointer (type, expr);
4172
 
4173
      /* If converting a fat pointer to a normal pointer, get the pointer to the
4174
         array and then convert it.  */
4175
      else if (TYPE_IS_FAT_POINTER_P (etype))
4176
        expr
4177
          = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (etype), false);
4178
 
4179
      return fold (convert_to_pointer (type, expr));
4180
 
4181
    case REAL_TYPE:
4182
      return fold (convert_to_real (type, expr));
4183
 
4184
    case RECORD_TYPE:
4185
      if (TYPE_JUSTIFIED_MODULAR_P (type) && !AGGREGATE_TYPE_P (etype))
4186
        {
4187
          VEC(constructor_elt,gc) *v = VEC_alloc (constructor_elt, gc, 1);
4188
 
4189
          CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
4190
                                  convert (TREE_TYPE (TYPE_FIELDS (type)),
4191
                                           expr));
4192
          return gnat_build_constructor (type, v);
4193
        }
4194
 
4195
      /* ... fall through ... */
4196
 
4197
    case ARRAY_TYPE:
4198
      /* In these cases, assume the front-end has validated the conversion.
4199
         If the conversion is valid, it will be a bit-wise conversion, so
4200
         it can be viewed as an unchecked conversion.  */
4201
      return unchecked_convert (type, expr, false);
4202
 
4203
    case UNION_TYPE:
4204
      /* This is either a conversion between a tagged type and some
4205
         subtype, which we have to mark as a UNION_TYPE because of
4206
         overlapping fields or a conversion of an Unchecked_Union.  */
4207
      return unchecked_convert (type, expr, false);
4208
 
4209
    case UNCONSTRAINED_ARRAY_TYPE:
4210
      /* If the input is a VECTOR_TYPE, convert to the representative
4211
         array type first.  */
4212
      if (ecode == VECTOR_TYPE)
4213
        {
4214
          expr = convert (TYPE_REPRESENTATIVE_ARRAY (etype), expr);
4215
          etype = TREE_TYPE (expr);
4216
          ecode = TREE_CODE (etype);
4217
        }
4218
 
4219
      /* If EXPR is a constrained array, take its address, convert it to a
4220
         fat pointer, and then dereference it.  Likewise if EXPR is a
4221
         record containing both a template and a constrained array.
4222
         Note that a record representing a justified modular type
4223
         always represents a packed constrained array.  */
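      /* Added illustration (hypothetical Ada fragment, not from the original
         sources) of a conversion taking this path: a constrained array
         object used where an unconstrained view is expected.

           type Int_Array is array (Positive range <>) of Integer;
           procedure Process (A : Int_Array);

           Buffer : Int_Array (1 .. 8);
           ...
           Process (Buffer);

         The address of Buffer is combined with its bounds into a fat
         pointer, which is then dereferenced to yield the unconstrained
         view expected by Process.  */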
4224
      if (ecode == ARRAY_TYPE
4225
          || (ecode == INTEGER_TYPE && TYPE_HAS_ACTUAL_BOUNDS_P (etype))
4226
          || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))
4227
          || (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)))
4228
        return
4229
          build_unary_op
4230
            (INDIRECT_REF, NULL_TREE,
4231
             convert_to_fat_pointer (TREE_TYPE (type),
4232
                                     build_unary_op (ADDR_EXPR,
4233
                                                     NULL_TREE, expr)));
4234
 
4235
      /* Do something very similar for converting one unconstrained
4236
         array to another.  */
4237
      else if (ecode == UNCONSTRAINED_ARRAY_TYPE)
4238
        return
4239
          build_unary_op (INDIRECT_REF, NULL_TREE,
4240
                          convert (TREE_TYPE (type),
4241
                                   build_unary_op (ADDR_EXPR,
4242
                                                   NULL_TREE, expr)));
4243
      else
4244
        gcc_unreachable ();
4245
 
4246
    case COMPLEX_TYPE:
4247
      return fold (convert_to_complex (type, expr));
4248
 
4249
    default:
4250
      gcc_unreachable ();
4251
    }
4252
}
4253
 
4254
/* Create an expression whose value is that of EXPR converted to the common
4255
   index type, which is sizetype.  EXPR is supposed to be in the base type
4256
   of the GNAT index type.  Calling it is equivalent to doing
4257
 
4258
     convert (sizetype, expr)
4259
 
4260
   but we try to distribute the type conversion with the knowledge that EXPR
4261
   cannot overflow in its type.  This is a best-effort approach and we fall
4262
   back to the above expression as soon as difficulties are encountered.
4263
 
4264
   This is necessary to overcome issues that arise when the GNAT base index
4265
   type and the GCC common index type (sizetype) don't have the same size,
4266
   which is quite frequent on 64-bit architectures.  In this case, and if
4267
   the GNAT base index type is signed but the iteration type of the loop has
4268
   been forced to unsigned, the loop scalar evolution engine cannot compute
4269
   a simple evolution for the general induction variables associated with the
4270
   array indices, because it will preserve the wrap-around semantics in the
4271
   unsigned type of their "inner" part.  As a result, many loop optimizations
4272
   are blocked.
4273
 
4274
   The solution is to use a special (basic) induction variable that is at
4275
   least as large as sizetype, and to express the aforementioned general
4276
   induction variables in terms of this induction variable, eliminating
4277
   the problematic intermediate truncation to the GNAT base index type.
4278
   This is possible as long as the original expression doesn't overflow
4279
   and if the middle-end hasn't introduced artificial overflows in the
4280
   course of the various simplifications it can make to the expression.  */
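/* Added illustration, not part of the original sources: on a 64-bit target
   whose GNAT base index type is a signed 32-bit type, a loop such as

     for J in A'Range loop
       A (J) := 0;
     end loop;

   would otherwise index A through a truncation of J to the 32-bit index
   type; expressing the index computation directly in sizetype, as done
   below, lets the scalar evolution engine recognize a simple affine
   induction and unblocks the loop optimizations mentioned above.  */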
4281
 
4282
tree
4283
convert_to_index_type (tree expr)
4284
{
4285
  enum tree_code code = TREE_CODE (expr);
4286
  tree type = TREE_TYPE (expr);
4287
 
4288
  /* If the type is unsigned, overflow is allowed, so we cannot be sure that
4289
     EXPR doesn't overflow.  Keep it simple if optimization is disabled.  */
4290
  if (TYPE_UNSIGNED (type) || !optimize)
4291
    return convert (sizetype, expr);
4292
 
4293
  switch (code)
4294
    {
4295
    case VAR_DECL:
4296
      /* The main effect of the function: replace a loop parameter with its
4297
         associated special induction variable.  */
4298
      if (DECL_LOOP_PARM_P (expr) && DECL_INDUCTION_VAR (expr))
4299
        expr = DECL_INDUCTION_VAR (expr);
4300
      break;
4301
 
4302
    CASE_CONVERT:
4303
      {
4304
        tree otype = TREE_TYPE (TREE_OPERAND (expr, 0));
4305
        /* Bail out as soon as we suspect some sort of type frobbing.  */
4306
        if (TYPE_PRECISION (type) != TYPE_PRECISION (otype)
4307
            || TYPE_UNSIGNED (type) != TYPE_UNSIGNED (otype))
4308
          break;
4309
      }
4310
 
4311
      /* ... fall through ... */
4312
 
4313
    case NON_LVALUE_EXPR:
4314
      return fold_build1 (code, sizetype,
4315
                          convert_to_index_type (TREE_OPERAND (expr, 0)));
4316
 
4317
    case PLUS_EXPR:
4318
    case MINUS_EXPR:
4319
    case MULT_EXPR:
4320
      return fold_build2 (code, sizetype,
4321
                          convert_to_index_type (TREE_OPERAND (expr, 0)),
4322
                          convert_to_index_type (TREE_OPERAND (expr, 1)));
4323
 
4324
    case COMPOUND_EXPR:
4325
      return fold_build2 (code, sizetype, TREE_OPERAND (expr, 0),
4326
                          convert_to_index_type (TREE_OPERAND (expr, 1)));
4327
 
4328
    case COND_EXPR:
4329
      return fold_build3 (code, sizetype, TREE_OPERAND (expr, 0),
4330
                          convert_to_index_type (TREE_OPERAND (expr, 1)),
4331
                          convert_to_index_type (TREE_OPERAND (expr, 2)));
4332
 
4333
    default:
4334
      break;
4335
    }
4336
 
4337
  return convert (sizetype, expr);
4338
}
4339
 
4340
/* Remove all conversions that are done in EXP.  This includes converting
4341
   from a padded type or to a justified modular type.  If TRUE_ADDRESS
4342
   is true, always return the address of the containing object even if
4343
   the address is not bit-aligned.  */
4344
 
4345
tree
4346
remove_conversions (tree exp, bool true_address)
4347
{
4348
  switch (TREE_CODE (exp))
4349
    {
4350
    case CONSTRUCTOR:
4351
      if (true_address
4352
          && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
4353
          && TYPE_JUSTIFIED_MODULAR_P (TREE_TYPE (exp)))
4354
        return
4355
          remove_conversions (VEC_index (constructor_elt,
4356
                                         CONSTRUCTOR_ELTS (exp), 0)->value,
4357
                              true);
4358
      break;
4359
 
4360
    case COMPONENT_REF:
4361
      if (TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
4362
        return remove_conversions (TREE_OPERAND (exp, 0), true_address);
4363
      break;
4364
 
4365
    CASE_CONVERT:
4366
    case VIEW_CONVERT_EXPR:
4367
    case NON_LVALUE_EXPR:
4368
      return remove_conversions (TREE_OPERAND (exp, 0), true_address);
4369
 
4370
    default:
4371
      break;
4372
    }
4373
 
4374
  return exp;
4375
}
4376
 
4377
/* If EXP's type is an UNCONSTRAINED_ARRAY_TYPE, return an expression that
4378
   refers to the underlying array.  If it has TYPE_CONTAINS_TEMPLATE_P,
4379
   likewise return an expression pointing to the underlying array.  */
4380
 
4381
tree
4382
maybe_unconstrained_array (tree exp)
4383
{
4384
  enum tree_code code = TREE_CODE (exp);
4385
  tree type = TREE_TYPE (exp);
4386
 
4387
  switch (TREE_CODE (type))
4388
    {
4389
    case UNCONSTRAINED_ARRAY_TYPE:
4390
      if (code == UNCONSTRAINED_ARRAY_REF)
4391
        {
4392
          const bool read_only = TREE_READONLY (exp);
4393
          const bool no_trap = TREE_THIS_NOTRAP (exp);
4394
 
4395
          exp = TREE_OPERAND (exp, 0);
4396
          type = TREE_TYPE (exp);
4397
 
4398
          if (TREE_CODE (exp) == COND_EXPR)
4399
            {
4400
              tree op1
4401
                = build_unary_op (INDIRECT_REF, NULL_TREE,
4402
                                  build_component_ref (TREE_OPERAND (exp, 1),
4403
                                                       NULL_TREE,
4404
                                                       TYPE_FIELDS (type),
4405
                                                       false));
4406
              tree op2
4407
                = build_unary_op (INDIRECT_REF, NULL_TREE,
4408
                                  build_component_ref (TREE_OPERAND (exp, 2),
4409
                                                       NULL_TREE,
4410
                                                       TYPE_FIELDS (type),
4411
                                                       false));
4412
 
4413
              exp = build3 (COND_EXPR,
4414
                            TREE_TYPE (TREE_TYPE (TYPE_FIELDS (type))),
4415
                            TREE_OPERAND (exp, 0), op1, op2);
4416
            }
4417
          else
4418
            {
4419
              exp = build_unary_op (INDIRECT_REF, NULL_TREE,
4420
                                    build_component_ref (exp, NULL_TREE,
4421
                                                         TYPE_FIELDS (type),
4422
                                                         false));
4423
              TREE_READONLY (exp) = read_only;
4424
              TREE_THIS_NOTRAP (exp) = no_trap;
4425
            }
4426
        }
4427
 
4428
      else if (code == NULL_EXPR)
4429
        exp = build1 (NULL_EXPR,
4430
                      TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type)))),
4431
                      TREE_OPERAND (exp, 0));
4432
      break;
4433
 
4434
    case RECORD_TYPE:
4435
      /* If this is a padded type and it contains a template, convert to the
4436
         unpadded type first.  */
4437
      if (TYPE_PADDING_P (type)
4438
          && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type))) == RECORD_TYPE
4439
          && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (TYPE_FIELDS (type))))
4440
        {
4441
          exp = convert (TREE_TYPE (TYPE_FIELDS (type)), exp);
4442
          type = TREE_TYPE (exp);
4443
        }
4444
 
4445
      if (TYPE_CONTAINS_TEMPLATE_P (type))
4446
        {
4447
          exp = build_component_ref (exp, NULL_TREE,
4448
                                     DECL_CHAIN (TYPE_FIELDS (type)),
4449
                                     false);
4450
          type = TREE_TYPE (exp);
4451
 
4452
          /* If the array type is padded, convert to the unpadded type.  */
4453
          if (TYPE_IS_PADDING_P (type))
4454
            exp = convert (TREE_TYPE (TYPE_FIELDS (type)), exp);
4455
        }
4456
      break;
4457
 
4458
    default:
4459
      break;
4460
    }
4461
 
4462
  return exp;
4463
}
4464
 
4465
/* If EXP's type is a VECTOR_TYPE, return EXP converted to the associated
4466
   TYPE_REPRESENTATIVE_ARRAY.  */
4467
 
4468
tree
4469
maybe_vector_array (tree exp)
4470
{
4471
  tree etype = TREE_TYPE (exp);
4472
 
4473
  if (VECTOR_TYPE_P (etype))
4474
    exp = convert (TYPE_REPRESENTATIVE_ARRAY (etype), exp);
4475
 
4476
  return exp;
4477
}
4478
 
4479
/* Return true if EXPR is an expression that can be folded as an operand
4480
   of a VIEW_CONVERT_EXPR.  See ada-tree.h for a complete rationale.  */
4481
 
4482
static bool
4483
can_fold_for_view_convert_p (tree expr)
4484
{
4485
  tree t1, t2;
4486
 
4487
  /* The folder will fold NOP_EXPRs between integral types with the same
4488
     precision (in the middle-end's sense).  We cannot allow it if the
4489
     types don't have the same precision in the Ada sense as well.  */
4490
  if (TREE_CODE (expr) != NOP_EXPR)
4491
    return true;
4492
 
4493
  t1 = TREE_TYPE (expr);
4494
  t2 = TREE_TYPE (TREE_OPERAND (expr, 0));
4495
 
4496
  /* Defer to the folder for non-integral conversions.  */
4497
  if (!(INTEGRAL_TYPE_P (t1) && INTEGRAL_TYPE_P (t2)))
4498
    return true;
4499
 
4500
  /* Only fold conversions that preserve both precisions.  */
4501
  if (TYPE_PRECISION (t1) == TYPE_PRECISION (t2)
4502
      && operand_equal_p (rm_size (t1), rm_size (t2), 0))
4503
    return true;
4504
 
4505
  return false;
4506
}
4507
 
4508
/* Return an expression that does an unchecked conversion of EXPR to TYPE.
4509
   If NOTRUNC_P is true, truncation operations should be suppressed.
4510
 
4511
   Special care is required with (source or target) integral types whose
4512
   precision is not equal to their size, to make sure we fetch or assign
4513
   the value bits whose location might depend on the endianness, e.g.
4514
 
4515
     Rmsize : constant := 8;
4516
     subtype Int is Integer range 0 .. 2 ** Rmsize - 1;
4517
 
4518
     type Bit_Array is array (1 .. Rmsize) of Boolean;
4519
     pragma Pack (Bit_Array);
4520
 
4521
     function To_Bit_Array is new Unchecked_Conversion (Int, Bit_Array);
4522
 
4523
     Value : Int := 2#1000_0001#;
4524
     Vbits : Bit_Array := To_Bit_Array (Value);
4525
 
4526
   we expect the 8 bits at Vbits'Address to always contain Value, while
4527
   their original location depends on the endianness, at Value'Address
4528
   on a little-endian architecture but not on a big-endian one.  */
4529
 
4530
tree
4531
unchecked_convert (tree type, tree expr, bool notrunc_p)
4532
{
4533
  tree etype = TREE_TYPE (expr);
4534
  enum tree_code ecode = TREE_CODE (etype);
4535
  enum tree_code code = TREE_CODE (type);
4536
  int c;
4537
 
4538
  /* If the expression is already of the right type, we are done.  */
4539
  if (etype == type)
4540
    return expr;
4541
 
4542
  /* If both types are integral, just do a normal conversion.
4543
     Likewise for a conversion to an unconstrained array.  */
4544
  if ((((INTEGRAL_TYPE_P (type)
4545
         && !(code == INTEGER_TYPE && TYPE_VAX_FLOATING_POINT_P (type)))
4546
        || (POINTER_TYPE_P (type) && ! TYPE_IS_THIN_POINTER_P (type))
4547
        || (code == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (type)))
4548
       && ((INTEGRAL_TYPE_P (etype)
4549
            && !(ecode == INTEGER_TYPE && TYPE_VAX_FLOATING_POINT_P (etype)))
4550
           || (POINTER_TYPE_P (etype) && !TYPE_IS_THIN_POINTER_P (etype))
4551
           || (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype))))
4552
      || code == UNCONSTRAINED_ARRAY_TYPE)
4553
    {
4554
      if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
4555
        {
4556
          tree ntype = copy_type (etype);
4557
          TYPE_BIASED_REPRESENTATION_P (ntype) = 0;
4558
          TYPE_MAIN_VARIANT (ntype) = ntype;
4559
          expr = build1 (NOP_EXPR, ntype, expr);
4560
        }
4561
 
4562
      if (code == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (type))
4563
        {
4564
          tree rtype = copy_type (type);
4565
          TYPE_BIASED_REPRESENTATION_P (rtype) = 0;
4566
          TYPE_MAIN_VARIANT (rtype) = rtype;
4567
          expr = convert (rtype, expr);
4568
          expr = build1 (NOP_EXPR, type, expr);
4569
        }
4570
      else
4571
        expr = convert (type, expr);
4572
    }
4573
 
4574
  /* If we are converting to an integral type whose precision is not equal
4575
     to its size, first unchecked convert to a record type that contains a
4576
     field of the given precision.  Then extract the field.  */
4577
  else if (INTEGRAL_TYPE_P (type)
4578
           && TYPE_RM_SIZE (type)
4579
           && 0 != compare_tree_int (TYPE_RM_SIZE (type),
4580
                                     GET_MODE_BITSIZE (TYPE_MODE (type))))
4581
    {
4582
      tree rec_type = make_node (RECORD_TYPE);
4583
      unsigned HOST_WIDE_INT prec = TREE_INT_CST_LOW (TYPE_RM_SIZE (type));
4584
      tree field_type, field;
4585
 
4586
      if (TYPE_UNSIGNED (type))
4587
        field_type = make_unsigned_type (prec);
4588
      else
4589
        field_type = make_signed_type (prec);
4590
      SET_TYPE_RM_SIZE (field_type, TYPE_RM_SIZE (type));
4591
 
4592
      field = create_field_decl (get_identifier ("OBJ"), field_type, rec_type,
4593
                                 NULL_TREE, NULL_TREE, 1, 0);
4594
 
4595
      TYPE_FIELDS (rec_type) = field;
4596
      layout_type (rec_type);
4597
 
4598
      expr = unchecked_convert (rec_type, expr, notrunc_p);
4599
      expr = build_component_ref (expr, NULL_TREE, field, false);
4600
      expr = fold_build1 (NOP_EXPR, type, expr);
4601
    }
4602
 
4603
  /* Similarly if we are converting from an integral type whose precision is
4604
     not equal to its size, first copy into a field of the given precision
4605
     and unchecked convert the record type.  */
4606
  else if (INTEGRAL_TYPE_P (etype)
4607
           && TYPE_RM_SIZE (etype)
4608
           && 0 != compare_tree_int (TYPE_RM_SIZE (etype),
4609
                                     GET_MODE_BITSIZE (TYPE_MODE (etype))))
4610
    {
4611
      tree rec_type = make_node (RECORD_TYPE);
4612
      unsigned HOST_WIDE_INT prec = TREE_INT_CST_LOW (TYPE_RM_SIZE (etype));
4613
      VEC(constructor_elt,gc) *v = VEC_alloc (constructor_elt, gc, 1);
4614
      tree field_type, field;
4615
 
4616
      if (TYPE_UNSIGNED (etype))
4617
        field_type = make_unsigned_type (prec);
4618
      else
4619
        field_type = make_signed_type (prec);
4620
      SET_TYPE_RM_SIZE (field_type, TYPE_RM_SIZE (etype));
4621
 
4622
      field = create_field_decl (get_identifier ("OBJ"), field_type, rec_type,
4623
                                 NULL_TREE, NULL_TREE, 1, 0);
4624
 
4625
      TYPE_FIELDS (rec_type) = field;
4626
      layout_type (rec_type);
4627
 
4628
      expr = fold_build1 (NOP_EXPR, field_type, expr);
4629
      CONSTRUCTOR_APPEND_ELT (v, field, expr);
4630
      expr = gnat_build_constructor (rec_type, v);
4631
      expr = unchecked_convert (type, expr, notrunc_p);
4632
    }
4633
 
4634
  /* If we are converting from a scalar type to a type with a different size,
4635
     we need to pad to have the same size on both sides.
4636
 
4637
     ??? We cannot do it unconditionally because unchecked conversions are
4638
     used liberally by the front-end to implement polymorphism, e.g. in:
4639
 
4640
       S191s : constant ada__tags__addr_ptr := ada__tags__addr_ptr!(S190s);
4641
       return p___size__4 (p__object!(S191s.all));
4642
 
4643
     so we skip all expressions that are references.  */
4644
  else if (!REFERENCE_CLASS_P (expr)
4645
           && !AGGREGATE_TYPE_P (etype)
4646
           && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
4647
           && (c = tree_int_cst_compare (TYPE_SIZE (etype), TYPE_SIZE (type))))
4648
    {
4649
      if (c < 0)
4650
        {
4651
          expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0, Empty,
4652
                                          false, false, false, true),
4653
                          expr);
4654
          expr = unchecked_convert (type, expr, notrunc_p);
4655
        }
4656
      else
4657
        {
4658
          tree rec_type = maybe_pad_type (type, TYPE_SIZE (etype), 0, Empty,
4659
                                          false, false, false, true);
4660
          expr = unchecked_convert (rec_type, expr, notrunc_p);
4661
          expr = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (rec_type),
4662
                                      false);
4663
        }
4664
    }
4665
 
4666
  /* We have a special case when we are converting between two unconstrained
4667
     array types.  In that case, take the address, convert the fat pointer
4668
     types, and dereference.  */
4669
  else if (ecode == code && code == UNCONSTRAINED_ARRAY_TYPE)
4670
    expr = build_unary_op (INDIRECT_REF, NULL_TREE,
4671
                           build1 (VIEW_CONVERT_EXPR, TREE_TYPE (type),
4672
                                   build_unary_op (ADDR_EXPR, NULL_TREE,
4673
                                                   expr)));
4674
 
4675
  /* Another special case is when we are converting to a vector type from its
4676
     representative array type; this is a regular conversion.  */
4677
  else if (code == VECTOR_TYPE
4678
           && ecode == ARRAY_TYPE
4679
           && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
4680
                                       etype))
4681
    expr = convert (type, expr);
4682
 
4683
  else
4684
    {
4685
      expr = maybe_unconstrained_array (expr);
4686
      etype = TREE_TYPE (expr);
4687
      ecode = TREE_CODE (etype);
4688
      if (can_fold_for_view_convert_p (expr))
4689
        expr = fold_build1 (VIEW_CONVERT_EXPR, type, expr);
4690
      else
4691
        expr = build1 (VIEW_CONVERT_EXPR, type, expr);
4692
    }
4693
 
4694
  /* If the result is an integral type whose precision is not equal to its
4695
     size, sign- or zero-extend the result.  We need not do this if the input
4696
     is an integral type of the same precision and signedness or if the output
4697
     is a biased type or if both the input and output are unsigned.  */
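  /* Added worked example, not part of the original sources: for a signed
     target type with an RM size of 5 bits carried in an 8-bit mode, the
     shift count is 8 - 5 = 3 and the result is computed as

       (type) (((base_type) expr << 3) >> 3)

     so that the arithmetic right shift propagates bit 4, the sign bit of
     the 5-bit value, into the upper 3 bits of the mode.  */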
4698
  if (!notrunc_p
4699
      && INTEGRAL_TYPE_P (type) && TYPE_RM_SIZE (type)
4700
      && !(code == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (type))
4701
      && 0 != compare_tree_int (TYPE_RM_SIZE (type),
4702
                                GET_MODE_BITSIZE (TYPE_MODE (type)))
4703
      && !(INTEGRAL_TYPE_P (etype)
4704
           && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (etype)
4705
           && operand_equal_p (TYPE_RM_SIZE (type),
4706
                               (TYPE_RM_SIZE (etype) != 0
4707
                                ? TYPE_RM_SIZE (etype) : TYPE_SIZE (etype)),
4708
                               0))
4709
      && !(TYPE_UNSIGNED (type) && TYPE_UNSIGNED (etype)))
4710
    {
4711
      tree base_type
4712
        = gnat_type_for_mode (TYPE_MODE (type), TYPE_UNSIGNED (type));
4713
      tree shift_expr
4714
        = convert (base_type,
4715
                   size_binop (MINUS_EXPR,
4716
                               bitsize_int
4717
                               (GET_MODE_BITSIZE (TYPE_MODE (type))),
4718
                               TYPE_RM_SIZE (type)));
4719
      expr
4720
        = convert (type,
4721
                   build_binary_op (RSHIFT_EXPR, base_type,
4722
                                    build_binary_op (LSHIFT_EXPR, base_type,
4723
                                                     convert (base_type, expr),
4724
                                                     shift_expr),
4725
                                    shift_expr));
4726
    }
4727
 
4728
  /* An unchecked conversion should never raise Constraint_Error.  The code
4729
     below assumes that GCC's conversion routines overflow the same way that
4730
     the underlying hardware does.  This is probably true.  In the rare case
4731
     when it is false, we can rely on the fact that such conversions are
4732
     erroneous anyway.  */
4733
  if (TREE_CODE (expr) == INTEGER_CST)
4734
    TREE_OVERFLOW (expr) = 0;
4735
 
4736
  /* If the sizes of the types differ and this is a VIEW_CONVERT_EXPR,
4737
     mark the result as no longer constant.  */
4738
  if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
4739
      && !operand_equal_p (TYPE_SIZE_UNIT (type), TYPE_SIZE_UNIT (etype),
4740
                           OEP_ONLY_CONST))
4741
    TREE_CONSTANT (expr) = 0;
4742
 
4743
  return expr;
4744
}
4745
 
4746
/* Return the appropriate GCC tree code for the specified GNAT_TYPE,
4747
   the latter being a record type as predicated by Is_Record_Type.  */
4748
 
4749
enum tree_code
4750
tree_code_for_record_type (Entity_Id gnat_type)
4751
{
4752
  Node_Id component_list, component;
4753
 
4754
  /* Return UNION_TYPE if it's an Unchecked_Union whose non-discriminant
4755
     fields are all in the variant part.  Otherwise, return RECORD_TYPE.  */
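  /* Added illustration (hypothetical Ada declaration, not from the original
     sources) of a type for which UNION_TYPE is returned:

       type Int_Or_Float (Is_Float : Boolean := False) is record
         case Is_Float is
           when False => I : Integer;
           when True  => F : Float;
         end case;
       end record;
       pragma Unchecked_Union (Int_Or_Float);

     All non-discriminant components are in the variant part, so the loop
     below finds no E_Component outside of it.  */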
4756
  if (!Is_Unchecked_Union (gnat_type))
4757
    return RECORD_TYPE;
4758
 
4759
  gnat_type = Implementation_Base_Type (gnat_type);
4760
  component_list
4761
    = Component_List (Type_Definition (Declaration_Node (gnat_type)));
4762
 
4763
  for (component = First_Non_Pragma (Component_Items (component_list));
4764
       Present (component);
4765
       component = Next_Non_Pragma (component))
4766
    if (Ekind (Defining_Entity (component)) == E_Component)
4767
      return RECORD_TYPE;
4768
 
4769
  return UNION_TYPE;
4770
}
4771
 
4772
/* Return true if GNAT_TYPE is a "double" floating-point type, i.e. whose
4773
   size is equal to 64 bits, or an array of such a type.  Set ALIGN_CLAUSE
4774
   according to the presence of an alignment clause on the type or, if it
4775
   is an array, on the component type.  */
4776
 
4777
bool
4778
is_double_float_or_array (Entity_Id gnat_type, bool *align_clause)
4779
{
4780
  gnat_type = Underlying_Type (gnat_type);
4781
 
4782
  *align_clause = Present (Alignment_Clause (gnat_type));
4783
 
4784
  if (Is_Array_Type (gnat_type))
4785
    {
4786
      gnat_type = Underlying_Type (Component_Type (gnat_type));
4787
      if (Present (Alignment_Clause (gnat_type)))
4788
        *align_clause = true;
4789
    }
4790
 
4791
  if (!Is_Floating_Point_Type (gnat_type))
4792
    return false;
4793
 
4794
  if (UI_To_Int (Esize (gnat_type)) != 64)
4795
    return false;
4796
 
4797
  return true;
4798
}
4799
 
4800
/* Return true if GNAT_TYPE is a "double" or larger scalar type, i.e. whose
4801
   size is greater or equal to 64 bits, or an array of such a type.  Set
4802
   ALIGN_CLAUSE according to the presence of an alignment clause on the
4803
   type or, if it is an array, on the component type.  */
4804
 
4805
bool
4806
is_double_scalar_or_array (Entity_Id gnat_type, bool *align_clause)
4807
{
4808
  gnat_type = Underlying_Type (gnat_type);
4809
 
4810
  *align_clause = Present (Alignment_Clause (gnat_type));
4811
 
4812
  if (Is_Array_Type (gnat_type))
4813
    {
4814
      gnat_type = Underlying_Type (Component_Type (gnat_type));
4815
      if (Present (Alignment_Clause (gnat_type)))
4816
        *align_clause = true;
4817
    }
4818
 
4819
  if (!Is_Scalar_Type (gnat_type))
4820
    return false;
4821
 
4822
  if (UI_To_Int (Esize (gnat_type)) < 64)
4823
    return false;
4824
 
4825
  return true;
4826
}
4827
 
4828
/* Return true if GNU_TYPE is suitable as the type of a non-aliased
4829
   component of an aggregate type.  */
4830
 
4831
bool
4832
type_for_nonaliased_component_p (tree gnu_type)
4833
{
4834
  /* If the type is passed by reference, we may have pointers to the
4835
     component, so it cannot be made non-aliased.  */
4836
  if (must_pass_by_ref (gnu_type) || default_pass_by_ref (gnu_type))
4837
    return false;
4838
 
4839
  /* We used to say that any component of aggregate type is aliased
4840
     because the front-end may take 'Reference of it.  The front-end
4841
     has been enhanced in the meantime so as to use a renaming instead
4842
     in most cases, but the back-end can probably take the address of
4843
     such a component too, so we go for the conservative stance.
4844
 
4845
     For instance, we might need the address of any array type, even
4846
     if normally passed by copy, to construct a fat pointer if the
4847
     component is used as an actual for an unconstrained formal.
4848
 
4849
     Likewise for record types: even if a specific record subtype is
4850
     passed by copy, the parent type might be passed by ref (e.g. if
4851
     it's of variable size) and we might take the address of a child
4852
     component to pass to a parent formal.  We have no way to check
4853
     for such conditions here.  */
4854
  if (AGGREGATE_TYPE_P (gnu_type))
4855
    return false;
4856
 
4857
  return true;
4858
}
4859
 
4860
/* Return true if TYPE is a smaller form of ORIG_TYPE.  */
4861
 
4862
bool
4863
smaller_form_type_p (tree type, tree orig_type)
4864
{
4865
  tree size, osize;
4866
 
4867
  /* We're not interested in variants here.  */
4868
  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig_type))
4869
    return false;
4870
 
4871
  /* Like a variant, a packable version keeps the original TYPE_NAME.  */
4872
  if (TYPE_NAME (type) != TYPE_NAME (orig_type))
4873
    return false;
4874
 
4875
  size = TYPE_SIZE (type);
4876
  osize = TYPE_SIZE (orig_type);
4877
 
4878
  if (!(TREE_CODE (size) == INTEGER_CST && TREE_CODE (osize) == INTEGER_CST))
4879
    return false;
4880
 
4881
  return tree_int_cst_lt (size, osize) != 0;
4882
}
4883
 
4884
/* Perform final processing on global variables.  */
4885
 
4886
static GTY (()) tree dummy_global;
4887
 
4888
void
4889
gnat_write_global_declarations (void)
4890
{
4891
  unsigned int i;
4892
  tree iter;
4893
 
4894
  /* If we have declared types as used at the global level, insert them in
4895
     the global hash table.  We use a dummy variable for this purpose.  */
4896
  if (!VEC_empty (tree, types_used_by_cur_var_decl))
4897
    {
4898
      struct varpool_node *node;
4899
      dummy_global
4900
        = build_decl (BUILTINS_LOCATION, VAR_DECL, NULL_TREE, void_type_node);
4901
      TREE_STATIC (dummy_global) = 1;
4902
      TREE_ASM_WRITTEN (dummy_global) = 1;
4903
      node = varpool_node (dummy_global);
4904
      node->force_output = 1;
4905
      varpool_mark_needed_node (node);
4906
 
4907
      while (!VEC_empty (tree, types_used_by_cur_var_decl))
4908
        {
4909
          tree t = VEC_pop (tree, types_used_by_cur_var_decl);
4910
          types_used_by_var_decl_insert (t, dummy_global);
4911
        }
4912
    }
4913
 
4914
  /* Output debug information for all global type declarations first.  This
4915
     ensures that global types whose compilation hasn't been finalized yet,
4916
     for example pointers to Taft amendment types, have their compilation
4917
     finalized in the right context.  */
4918
  FOR_EACH_VEC_ELT (tree, global_decls, i, iter)
4919
    if (TREE_CODE (iter) == TYPE_DECL)
4920
      debug_hooks->global_decl (iter);
4921
 
4922
  /* Proceed to optimize and emit assembly.
4923
     FIXME: shouldn't be the front end's responsibility to call this.  */
4924
  cgraph_finalize_compilation_unit ();
4925
 
4926
  /* After cgraph has had a chance to emit everything that's going to
4927
     be emitted, output debug information for the rest of globals.  */
4928
  if (!seen_error ())
4929
    {
4930
      timevar_push (TV_SYMOUT);
4931
      FOR_EACH_VEC_ELT (tree, global_decls, i, iter)
4932
        if (TREE_CODE (iter) != TYPE_DECL)
4933
          debug_hooks->global_decl (iter);
4934
      timevar_pop (TV_SYMOUT);
4935
    }
4936
}
4937
 
4938
/* ************************************************************************
4939
 * *                           GCC builtins support                       *
4940
 * ************************************************************************ */
4941
 
4942
/* The general scheme is fairly simple:
4943
 
4944
   For each builtin function/type to be declared, gnat_install_builtins calls
4945
   internal facilities which eventually get to gnat_push_decl, which in turn
4946
   tracks the so-declared builtin function decls in the 'builtin_decls' global
4947
   data structure.  When an Intrinsic subprogram declaration is processed, we
4948
   search this global data structure to retrieve the associated BUILT_IN DECL
4949
   node.  */
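/* Added illustration, not part of the original sources: an Ada declaration
   such as

     function Sqrt (X : Long_Float) return Long_Float;
     pragma Import (Intrinsic, Sqrt, "sqrt");

   would typically be resolved by looking up the external name among the
   declared builtins (see builtin_decl_for below) and reusing the
   BUILT_IN DECL recorded there.  */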
4950
 
4951
/* Search the chain of currently available builtin declarations for a node
4952
   corresponding to function NAME (an IDENTIFIER_NODE).  Return the first node
4953
   found, if any, or NULL_TREE otherwise.  */
4954
tree
4955
builtin_decl_for (tree name)
4956
{
4957
  unsigned i;
4958
  tree decl;
4959
 
4960
  FOR_EACH_VEC_ELT (tree, builtin_decls, i, decl)
4961
    if (DECL_NAME (decl) == name)
4962
      return decl;
4963
 
4964
  return NULL_TREE;
4965
}
4966
 
4967
/* The code below eventually exposes gnat_install_builtins, which declares
4968
   the builtin types and functions we might need, either internally or as
4969
   user accessible facilities.
4970
 
4971
   ??? This is a first implementation shot, still in rough shape.  It is
4972
   heavily inspired by the "C" family implementation, with chunks copied
4973
   verbatim from there.
4974
 
4975
   Two obvious TODO candidates are
4976
   o Use a more efficient name/decl mapping scheme
4977
   o Devise a middle-end infrastructure to avoid having to copy
4978
     pieces between front-ends.  */
4979
 
4980
/* ----------------------------------------------------------------------- *
4981
 *                         BUILTIN ELEMENTARY TYPES                        *
4982
 * ----------------------------------------------------------------------- */
4983
 
4984
/* Standard data types to be used in builtin argument declarations.  */
4985
 
4986
enum c_tree_index
4987
{
4988
    CTI_SIGNED_SIZE_TYPE, /* For format checking only.  */
4989
    CTI_STRING_TYPE,
4990
    CTI_CONST_STRING_TYPE,
4991
 
4992
    CTI_MAX
4993
};
4994
 
4995
static tree c_global_trees[CTI_MAX];
4996
 
4997
#define signed_size_type_node   c_global_trees[CTI_SIGNED_SIZE_TYPE]
4998
#define string_type_node        c_global_trees[CTI_STRING_TYPE]
4999
#define const_string_type_node  c_global_trees[CTI_CONST_STRING_TYPE]
5000
 
5001
/* ??? In addition to some attribute handlers, we currently don't support a
5002
   (small) number of builtin-types, which in turn inhibits support for a
5003
   number of builtin functions.  */
5004
#define wint_type_node    void_type_node
5005
#define intmax_type_node  void_type_node
5006
#define uintmax_type_node void_type_node
5007
 
5008
/* Build the void_list_node (void_type_node having been created).  */
5009
 
5010
static tree
5011
build_void_list_node (void)
5012
{
5013
  tree t = build_tree_list (NULL_TREE, void_type_node);
5014
  return t;
5015
}
5016
 
5017
/* Used to help initialize the builtin-types.def table.  When a type of
5018
   the correct size doesn't exist, use error_mark_node instead of NULL.
5019
   The latter results in segfaults even when a decl using the type doesn't
5020
   get invoked.  */
5021
 
5022
static tree
5023
builtin_type_for_size (int size, bool unsignedp)
5024
{
5025
  tree type = gnat_type_for_size (size, unsignedp);
5026
  return type ? type : error_mark_node;
5027
}
5028
 
5029
/* Build/push the elementary type decls that builtin functions/types
5030
   will need.  */
5031
 
5032
static void
5033
install_builtin_elementary_types (void)
5034
{
5035
  signed_size_type_node = gnat_signed_type (size_type_node);
5036
  pid_type_node = integer_type_node;
5037
  void_list_node = build_void_list_node ();
5038
 
5039
  string_type_node = build_pointer_type (char_type_node);
5040
  const_string_type_node
5041
    = build_pointer_type (build_qualified_type
5042
                          (char_type_node, TYPE_QUAL_CONST));
5043
}
5044
 
5045
/* ----------------------------------------------------------------------- *
5046
 *                          BUILTIN FUNCTION TYPES                         *
5047
 * ----------------------------------------------------------------------- */
5048
 
5049
/* Now, builtin function types per se.  */
5050
 
5051
enum c_builtin_type
5052
{
5053
#define DEF_PRIMITIVE_TYPE(NAME, VALUE) NAME,
5054
#define DEF_FUNCTION_TYPE_0(NAME, RETURN) NAME,
5055
#define DEF_FUNCTION_TYPE_1(NAME, RETURN, ARG1) NAME,
5056
#define DEF_FUNCTION_TYPE_2(NAME, RETURN, ARG1, ARG2) NAME,
5057
#define DEF_FUNCTION_TYPE_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
5058
#define DEF_FUNCTION_TYPE_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
5059
#define DEF_FUNCTION_TYPE_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) NAME,
5060
#define DEF_FUNCTION_TYPE_6(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6) NAME,
5061
#define DEF_FUNCTION_TYPE_7(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7) NAME,
5062
#define DEF_FUNCTION_TYPE_VAR_0(NAME, RETURN) NAME,
5063
#define DEF_FUNCTION_TYPE_VAR_1(NAME, RETURN, ARG1) NAME,
5064
#define DEF_FUNCTION_TYPE_VAR_2(NAME, RETURN, ARG1, ARG2) NAME,
5065
#define DEF_FUNCTION_TYPE_VAR_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
5066
#define DEF_FUNCTION_TYPE_VAR_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
5067
#define DEF_FUNCTION_TYPE_VAR_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG6) \
5068
  NAME,
5069
#define DEF_POINTER_TYPE(NAME, TYPE) NAME,
5070
#include "builtin-types.def"
5071
#undef DEF_PRIMITIVE_TYPE
5072
#undef DEF_FUNCTION_TYPE_0
5073
#undef DEF_FUNCTION_TYPE_1
5074
#undef DEF_FUNCTION_TYPE_2
5075
#undef DEF_FUNCTION_TYPE_3
5076
#undef DEF_FUNCTION_TYPE_4
5077
#undef DEF_FUNCTION_TYPE_5
5078
#undef DEF_FUNCTION_TYPE_6
5079
#undef DEF_FUNCTION_TYPE_7
5080
#undef DEF_FUNCTION_TYPE_VAR_0
5081
#undef DEF_FUNCTION_TYPE_VAR_1
5082
#undef DEF_FUNCTION_TYPE_VAR_2
5083
#undef DEF_FUNCTION_TYPE_VAR_3
5084
#undef DEF_FUNCTION_TYPE_VAR_4
5085
#undef DEF_FUNCTION_TYPE_VAR_5
5086
#undef DEF_POINTER_TYPE
5087
  BT_LAST
5088
};
5089
 
5090
typedef enum c_builtin_type builtin_type;
5091
 
5092
/* A temporary array used in communication with def_fn_type.  */
5093
static GTY(()) tree builtin_types[(int) BT_LAST + 1];
5094
 
5095
/* A helper function for install_builtin_types.  Build function type
5096
   for DEF with return type RET and N arguments.  If VAR is true, then the
5097
   function should be variadic after those N arguments.
5098
 
5099
   Takes special care not to ICE if any of the types involved are
5100
   error_mark_node, which indicates that said type is not in fact available
5101
   (see builtin_type_for_size), in which case the function type as a whole
5102
   should be error_mark_node.  */
5103
 
5104
static void
5105
def_fn_type (builtin_type def, builtin_type ret, bool var, int n, ...)
5106
{
5107
  tree t;
5108
  tree *args = XALLOCAVEC (tree, n);
5109
  va_list list;
5110
  int i;
5111
 
5112
  va_start (list, n);
5113
  for (i = 0; i < n; ++i)
5114
    {
5115
      builtin_type a = (builtin_type) va_arg (list, int);
5116
      t = builtin_types[a];
5117
      if (t == error_mark_node)
5118
        goto egress;
5119
      args[i] = t;
5120
    }
5121
 
5122
  t = builtin_types[ret];
5123
  if (t == error_mark_node)
5124
    goto egress;
5125
  if (var)
5126
    t = build_varargs_function_type_array (t, n, args);
5127
  else
5128
    t = build_function_type_array (t, n, args);
5129
 
5130
 egress:
5131
  builtin_types[def] = t;
5132
  va_end (list);
5133
}
5134
 
5135
/* Build the builtin function types and install them in the builtin_types
5136
   array for later use in builtin function decls.  */
5137
 
5138
static void
5139
install_builtin_function_types (void)
5140
{
5141
  tree va_list_ref_type_node;
5142
  tree va_list_arg_type_node;
5143
 
5144
  if (TREE_CODE (va_list_type_node) == ARRAY_TYPE)
5145
    {
5146
      va_list_arg_type_node = va_list_ref_type_node =
5147
        build_pointer_type (TREE_TYPE (va_list_type_node));
5148
    }
5149
  else
5150
    {
5151
      va_list_arg_type_node = va_list_type_node;
5152
      va_list_ref_type_node = build_reference_type (va_list_type_node);
5153
    }
5154
 
5155
#define DEF_PRIMITIVE_TYPE(ENUM, VALUE) \
5156
  builtin_types[ENUM] = VALUE;
5157
#define DEF_FUNCTION_TYPE_0(ENUM, RETURN) \
5158
  def_fn_type (ENUM, RETURN, 0, 0);
5159
#define DEF_FUNCTION_TYPE_1(ENUM, RETURN, ARG1) \
5160
  def_fn_type (ENUM, RETURN, 0, 1, ARG1);
5161
#define DEF_FUNCTION_TYPE_2(ENUM, RETURN, ARG1, ARG2) \
5162
  def_fn_type (ENUM, RETURN, 0, 2, ARG1, ARG2);
5163
#define DEF_FUNCTION_TYPE_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
5164
  def_fn_type (ENUM, RETURN, 0, 3, ARG1, ARG2, ARG3);
5165
#define DEF_FUNCTION_TYPE_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
5166
  def_fn_type (ENUM, RETURN, 0, 4, ARG1, ARG2, ARG3, ARG4);
5167
#define DEF_FUNCTION_TYPE_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
5168
  def_fn_type (ENUM, RETURN, 0, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
5169
#define DEF_FUNCTION_TYPE_6(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5170
                            ARG6)                                       \
5171
  def_fn_type (ENUM, RETURN, 0, 6, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6);
5172
#define DEF_FUNCTION_TYPE_7(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5173
                            ARG6, ARG7)                                 \
5174
  def_fn_type (ENUM, RETURN, 0, 7, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7);
5175
#define DEF_FUNCTION_TYPE_VAR_0(ENUM, RETURN) \
5176
  def_fn_type (ENUM, RETURN, 1, 0);
5177
#define DEF_FUNCTION_TYPE_VAR_1(ENUM, RETURN, ARG1) \
5178
  def_fn_type (ENUM, RETURN, 1, 1, ARG1);
5179
#define DEF_FUNCTION_TYPE_VAR_2(ENUM, RETURN, ARG1, ARG2) \
5180
  def_fn_type (ENUM, RETURN, 1, 2, ARG1, ARG2);
5181
#define DEF_FUNCTION_TYPE_VAR_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
5182
  def_fn_type (ENUM, RETURN, 1, 3, ARG1, ARG2, ARG3);
5183
#define DEF_FUNCTION_TYPE_VAR_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
5184
  def_fn_type (ENUM, RETURN, 1, 4, ARG1, ARG2, ARG3, ARG4);
5185
#define DEF_FUNCTION_TYPE_VAR_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
5186
  def_fn_type (ENUM, RETURN, 1, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
5187
#define DEF_POINTER_TYPE(ENUM, TYPE) \
5188
  builtin_types[(int) ENUM] = build_pointer_type (builtin_types[(int) TYPE]);
5189
 
5190
#include "builtin-types.def"
5191
 
5192
#undef DEF_PRIMITIVE_TYPE
5193
#undef DEF_FUNCTION_TYPE_1
5194
#undef DEF_FUNCTION_TYPE_2
5195
#undef DEF_FUNCTION_TYPE_3
5196
#undef DEF_FUNCTION_TYPE_4
5197
#undef DEF_FUNCTION_TYPE_5
5198
#undef DEF_FUNCTION_TYPE_6
5199
#undef DEF_FUNCTION_TYPE_VAR_0
5200
#undef DEF_FUNCTION_TYPE_VAR_1
5201
#undef DEF_FUNCTION_TYPE_VAR_2
5202
#undef DEF_FUNCTION_TYPE_VAR_3
5203
#undef DEF_FUNCTION_TYPE_VAR_4
5204
#undef DEF_FUNCTION_TYPE_VAR_5
5205
#undef DEF_POINTER_TYPE
5206
  builtin_types[(int) BT_LAST] = NULL_TREE;
5207
}
5208
 
5209
/* ----------------------------------------------------------------------- *
5210
 *                            BUILTIN ATTRIBUTES                           *
5211
 * ----------------------------------------------------------------------- */
5212
 
5213
enum built_in_attribute
5214
{
5215
#define DEF_ATTR_NULL_TREE(ENUM) ENUM,
5216
#define DEF_ATTR_INT(ENUM, VALUE) ENUM,
5217
#define DEF_ATTR_IDENT(ENUM, STRING) ENUM,
5218
#define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN) ENUM,
5219
#include "builtin-attrs.def"
5220
#undef DEF_ATTR_NULL_TREE
5221
#undef DEF_ATTR_INT
5222
#undef DEF_ATTR_IDENT
5223
#undef DEF_ATTR_TREE_LIST
5224
  ATTR_LAST
5225
};
5226
 
5227
static GTY(()) tree built_in_attributes[(int) ATTR_LAST];
5228
 
5229
static void
5230
install_builtin_attributes (void)
5231
{
5232
  /* Fill in the built_in_attributes array.  */
5233
#define DEF_ATTR_NULL_TREE(ENUM)                                \
5234
  built_in_attributes[(int) ENUM] = NULL_TREE;
5235
#define DEF_ATTR_INT(ENUM, VALUE)                               \
5236
  built_in_attributes[(int) ENUM] = build_int_cst (NULL_TREE, VALUE);
5237
#define DEF_ATTR_IDENT(ENUM, STRING)                            \
5238
  built_in_attributes[(int) ENUM] = get_identifier (STRING);
5239
#define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN) \
5240
  built_in_attributes[(int) ENUM]                       \
5241
    = tree_cons (built_in_attributes[(int) PURPOSE],    \
5242
                 built_in_attributes[(int) VALUE],      \
5243
                 built_in_attributes[(int) CHAIN]);
5244
#include "builtin-attrs.def"
5245
#undef DEF_ATTR_NULL_TREE
5246
#undef DEF_ATTR_INT
5247
#undef DEF_ATTR_IDENT
5248
#undef DEF_ATTR_TREE_LIST
5249
}
5250
 
5251
/* Handle a "const" attribute; arguments as in
5252
   struct attribute_spec.handler.  */
5253
 
5254
static tree
5255
handle_const_attribute (tree *node, tree ARG_UNUSED (name),
5256
                        tree ARG_UNUSED (args), int ARG_UNUSED (flags),
5257
                        bool *no_add_attrs)
5258
{
5259
  if (TREE_CODE (*node) == FUNCTION_DECL)
5260
    TREE_READONLY (*node) = 1;
5261
  else
5262
    *no_add_attrs = true;
5263
 
5264
  return NULL_TREE;
5265
}

/* Handle a "nothrow" attribute; arguments as in
   struct attribute_spec.handler.  */

static tree
handle_nothrow_attribute (tree *node, tree ARG_UNUSED (name),
                          tree ARG_UNUSED (args), int ARG_UNUSED (flags),
                          bool *no_add_attrs)
{
  if (TREE_CODE (*node) == FUNCTION_DECL)
    TREE_NOTHROW (*node) = 1;
  else
    *no_add_attrs = true;

  return NULL_TREE;
}

/* Handle a "pure" attribute; arguments as in
   struct attribute_spec.handler.  */

static tree
handle_pure_attribute (tree *node, tree name, tree ARG_UNUSED (args),
                       int ARG_UNUSED (flags), bool *no_add_attrs)
{
  if (TREE_CODE (*node) == FUNCTION_DECL)
    DECL_PURE_P (*node) = 1;
  /* ??? TODO: Support types.  */
  else
    {
      warning (OPT_Wattributes, "%qs attribute ignored",
               IDENTIFIER_POINTER (name));
      *no_add_attrs = true;
    }

  return NULL_TREE;
}
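
/* For example, at the C level (illustrative name):

     extern size_t my_strlen (const char *) __attribute__ ((pure));

   DECL_PURE_P records the weaker "pure" property: no side effects, but the
   result may depend on global memory as well as on the arguments, unlike
   "const" above.  */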

/* Handle a "no vops" attribute; arguments as in
   struct attribute_spec.handler.  */

static tree
handle_novops_attribute (tree *node, tree ARG_UNUSED (name),
                         tree ARG_UNUSED (args), int ARG_UNUSED (flags),
                         bool *ARG_UNUSED (no_add_attrs))
{
  gcc_assert (TREE_CODE (*node) == FUNCTION_DECL);
  DECL_IS_NOVOPS (*node) = 1;
  return NULL_TREE;
}

/* Helper for nonnull attribute handling; fetch the operand number
   from the attribute argument list.  */

static bool
get_nonnull_operand (tree arg_num_expr, unsigned HOST_WIDE_INT *valp)
{
  /* Verify the arg number is a constant.  */
  if (TREE_CODE (arg_num_expr) != INTEGER_CST
      || TREE_INT_CST_HIGH (arg_num_expr) != 0)
    return false;

  *valp = TREE_INT_CST_LOW (arg_num_expr);
  return true;
}

/* Handle the "nonnull" attribute.  */
static tree
handle_nonnull_attribute (tree *node, tree ARG_UNUSED (name),
                          tree args, int ARG_UNUSED (flags),
                          bool *no_add_attrs)
{
  tree type = *node;
  unsigned HOST_WIDE_INT attr_arg_num;

  /* If no arguments are specified, all pointer arguments should be
     non-null.  Verify a full prototype is given so that the arguments
     will have the correct types when we actually check them later.  */
  if (!args)
    {
      if (!prototype_p (type))
        {
          error ("nonnull attribute without arguments on a non-prototype");
          *no_add_attrs = true;
        }
      return NULL_TREE;
    }

  /* Argument list specified.  Verify that each argument number references
     a pointer argument.  */
  for (attr_arg_num = 1; args; attr_arg_num++, args = TREE_CHAIN (args))
    {
      unsigned HOST_WIDE_INT arg_num = 0, ck_num;

      if (!get_nonnull_operand (TREE_VALUE (args), &arg_num))
        {
          error ("nonnull argument has invalid operand number (argument %lu)",
                 (unsigned long) attr_arg_num);
          *no_add_attrs = true;
          return NULL_TREE;
        }

      if (prototype_p (type))
        {
          function_args_iterator iter;
          tree argument;

          function_args_iter_init (&iter, type);
          for (ck_num = 1; ; ck_num++, function_args_iter_next (&iter))
            {
              argument = function_args_iter_cond (&iter);
              if (!argument || ck_num == arg_num)
                break;
            }

          if (!argument
              || TREE_CODE (argument) == VOID_TYPE)
            {
              error ("nonnull argument with out-of-range operand number "
                     "(argument %lu, operand %lu)",
                     (unsigned long) attr_arg_num, (unsigned long) arg_num);
              *no_add_attrs = true;
              return NULL_TREE;
            }

          if (TREE_CODE (argument) != POINTER_TYPE)
            {
              error ("nonnull argument references non-pointer operand "
                     "(argument %lu, operand %lu)",
                     (unsigned long) attr_arg_num, (unsigned long) arg_num);
              *no_add_attrs = true;
              return NULL_TREE;
            }
        }
    }

  return NULL_TREE;
}
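
/* For example, at the C level:

     extern void *memcpy (void *, const void *, size_t)
       __attribute__ ((nonnull (1, 2)));

   An empty argument list, __attribute__ ((nonnull)), marks every pointer
   parameter of a prototyped function as required to be non-null.  */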

/* Handle a "sentinel" attribute.  */

static tree
handle_sentinel_attribute (tree *node, tree name, tree args,
                           int ARG_UNUSED (flags), bool *no_add_attrs)
{
  if (!prototype_p (*node))
    {
      warning (OPT_Wattributes,
               "%qs attribute requires prototypes with named arguments",
               IDENTIFIER_POINTER (name));
      *no_add_attrs = true;
    }
  else
    {
      if (!stdarg_p (*node))
        {
          warning (OPT_Wattributes,
                   "%qs attribute only applies to variadic functions",
                   IDENTIFIER_POINTER (name));
          *no_add_attrs = true;
        }
    }

  if (args)
    {
      tree position = TREE_VALUE (args);

      if (TREE_CODE (position) != INTEGER_CST)
        {
          warning (0, "requested position is not an integer constant");
          *no_add_attrs = true;
        }
      else
        {
          if (tree_int_cst_lt (position, integer_zero_node))
            {
              warning (0, "requested position is less than zero");
              *no_add_attrs = true;
            }
        }
    }

  return NULL_TREE;
}
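
/* For example, at the C level:

     extern void execl (const char *, const char *, ...)
       __attribute__ ((sentinel));

   The optional integer argument gives the position of the required null
   pointer, counted backwards from the end of the argument list.  */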

/* Handle a "noreturn" attribute; arguments as in
   struct attribute_spec.handler.  */

static tree
handle_noreturn_attribute (tree *node, tree name, tree ARG_UNUSED (args),
                           int ARG_UNUSED (flags), bool *no_add_attrs)
{
  tree type = TREE_TYPE (*node);

  /* See FIXME comment in c_common_attribute_table.  */
  if (TREE_CODE (*node) == FUNCTION_DECL)
    TREE_THIS_VOLATILE (*node) = 1;
  else if (TREE_CODE (type) == POINTER_TYPE
           && TREE_CODE (TREE_TYPE (type)) == FUNCTION_TYPE)
    TREE_TYPE (*node)
      = build_pointer_type
        (build_type_variant (TREE_TYPE (type),
                             TYPE_READONLY (TREE_TYPE (type)), 1));
  else
    {
      warning (OPT_Wattributes, "%qs attribute ignored",
               IDENTIFIER_POINTER (name));
      *no_add_attrs = true;
    }

  return NULL_TREE;
}
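
/* For example, at the C level:

     extern void abort (void) __attribute__ ((noreturn));

   On a FUNCTION_DECL the property is recorded with TREE_THIS_VOLATILE; on a
   pointer to function it is encoded by giving the pointed-to function type
   the "volatile" qualifier, as done above.  */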

/* Handle a "leaf" attribute; arguments as in
   struct attribute_spec.handler.  */

static tree
handle_leaf_attribute (tree *node, tree name,
                       tree ARG_UNUSED (args),
                       int ARG_UNUSED (flags), bool *no_add_attrs)
{
  if (TREE_CODE (*node) != FUNCTION_DECL)
    {
      warning (OPT_Wattributes, "%qE attribute ignored", name);
      *no_add_attrs = true;
    }
  if (!TREE_PUBLIC (*node))
    {
      warning (OPT_Wattributes, "%qE attribute has no effect", name);
      *no_add_attrs = true;
    }

  return NULL_TREE;
}
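
/* For example, at the C level (illustrative name):

     extern int ext_compare (const void *, const void *) __attribute__ ((leaf));

   "leaf" asserts that the external function does not re-enter the current
   compilation unit, e.g. through a callback or longjmp, which lets
   interprocedural analyses treat the call as more transparent.  Hence the
   warning above when it is applied to a non-public function.  */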

/* Handle a "malloc" attribute; arguments as in
   struct attribute_spec.handler.  */

static tree
handle_malloc_attribute (tree *node, tree name, tree ARG_UNUSED (args),
                         int ARG_UNUSED (flags), bool *no_add_attrs)
{
  if (TREE_CODE (*node) == FUNCTION_DECL
      && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (*node))))
    DECL_IS_MALLOC (*node) = 1;
  else
    {
      warning (OPT_Wattributes, "%qs attribute ignored",
               IDENTIFIER_POINTER (name));
      *no_add_attrs = true;
    }

  return NULL_TREE;
}
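
/* For example, at the C level (illustrative name):

     extern void *xmalloc (size_t) __attribute__ ((malloc));

   DECL_IS_MALLOC lets alias analysis assume the returned pointer aliases no
   other live object, hence the check above that the function does return a
   pointer.  */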
5519
 
5520
/* Fake handler for attributes we don't properly support.  */
5521
 
5522
tree
5523
fake_attribute_handler (tree * ARG_UNUSED (node),
5524
                        tree ARG_UNUSED (name),
5525
                        tree ARG_UNUSED (args),
5526
                        int  ARG_UNUSED (flags),
5527
                        bool * ARG_UNUSED (no_add_attrs))
5528
{
5529
  return NULL_TREE;
5530
}
5531
 
5532
/* Handle a "type_generic" attribute.  */
5533
 
5534
static tree
5535
handle_type_generic_attribute (tree *node, tree ARG_UNUSED (name),
5536
                               tree ARG_UNUSED (args), int ARG_UNUSED (flags),
5537
                               bool * ARG_UNUSED (no_add_attrs))
5538
{
5539
  /* Ensure we have a function type.  */
5540
  gcc_assert (TREE_CODE (*node) == FUNCTION_TYPE);
5541
 
5542
  /* Ensure we have a variadic function.  */
5543
  gcc_assert (!prototype_p (*node) || stdarg_p (*node));
5544
 
5545
  return NULL_TREE;
5546
}
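
/* "type_generic" is used for builtins such as __builtin_fpclassify whose
   arguments are not checked against a fixed prototype; such builtins are
   declared with a variadic function type, which is what the asserts above
   verify.  */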

/* Handle a "vector_size" attribute; arguments as in
   struct attribute_spec.handler.  */

static tree
handle_vector_size_attribute (tree *node, tree name, tree args,
                              int ARG_UNUSED (flags),
                              bool *no_add_attrs)
{
  unsigned HOST_WIDE_INT vecsize, nunits;
  enum machine_mode orig_mode;
  tree type = *node, new_type, size;

  *no_add_attrs = true;

  size = TREE_VALUE (args);

  if (!host_integerp (size, 1))
    {
      warning (OPT_Wattributes, "%qs attribute ignored",
               IDENTIFIER_POINTER (name));
      return NULL_TREE;
    }

  /* Get the vector size (in bytes).  */
  vecsize = tree_low_cst (size, 1);

  /* We need to provide for vector pointers, vector arrays, and
     functions returning vectors.  For example:

       __attribute__((vector_size(16))) short *foo;

     In this case, the mode is SI, but the type being modified is
     HI, so we need to look further.  */

  while (POINTER_TYPE_P (type)
         || TREE_CODE (type) == FUNCTION_TYPE
         || TREE_CODE (type) == ARRAY_TYPE)
    type = TREE_TYPE (type);

  /* Get the mode of the type being modified.  */
  orig_mode = TYPE_MODE (type);

  if ((!INTEGRAL_TYPE_P (type)
       && !SCALAR_FLOAT_TYPE_P (type)
       && !FIXED_POINT_TYPE_P (type))
      || (!SCALAR_FLOAT_MODE_P (orig_mode)
          && GET_MODE_CLASS (orig_mode) != MODE_INT
          && !ALL_SCALAR_FIXED_POINT_MODE_P (orig_mode))
      || !host_integerp (TYPE_SIZE_UNIT (type), 1)
      || TREE_CODE (type) == BOOLEAN_TYPE)
    {
      error ("invalid vector type for attribute %qs",
             IDENTIFIER_POINTER (name));
      return NULL_TREE;
    }

  if (vecsize % tree_low_cst (TYPE_SIZE_UNIT (type), 1))
    {
      error ("vector size not an integral multiple of component size");
      return NULL;
    }

  if (vecsize == 0)
    {
      error ("zero vector size");
      return NULL;
    }

  /* Calculate how many units fit in the vector.  */
  nunits = vecsize / tree_low_cst (TYPE_SIZE_UNIT (type), 1);
  if (nunits & (nunits - 1))
    {
      error ("number of components of the vector not a power of two");
      return NULL_TREE;
    }

  new_type = build_vector_type (type, nunits);

  /* Build back pointers if needed.  */
  *node = reconstruct_complex_type (*node, new_type);

  return NULL_TREE;
}
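
/* The canonical use of this attribute at the C level is:

     typedef int v4si __attribute__ ((vector_size (16)));

   With 4-byte ints this gives nunits = 16 / 4 = 4, a power of two, and
   therefore a four-element integer vector type.  */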
5631
 
5632
/* Handle a "vector_type" attribute; arguments as in
5633
   struct attribute_spec.handler.  */
5634
 
5635
static tree
5636
handle_vector_type_attribute (tree *node, tree name, tree ARG_UNUSED (args),
5637
                              int ARG_UNUSED (flags),
5638
                              bool *no_add_attrs)
5639
{
5640
  /* Vector representative type and size.  */
5641
  tree rep_type = *node;
5642
  tree rep_size = TYPE_SIZE_UNIT (rep_type);
5643
  tree rep_name;
5644
 
5645
  /* Vector size in bytes and number of units.  */
5646
  unsigned HOST_WIDE_INT vec_bytes, vec_units;
5647
 
5648
  /* Vector element type and mode.  */
5649
  tree elem_type;
5650
  enum machine_mode elem_mode;
5651
 
5652
  *no_add_attrs = true;
5653
 
5654
  /* Get the representative array type, possibly nested within a
5655
     padding record e.g. for alignment purposes.  */
5656
 
5657
  if (TYPE_IS_PADDING_P (rep_type))
5658
    rep_type = TREE_TYPE (TYPE_FIELDS (rep_type));
5659
 
5660
  if (TREE_CODE (rep_type) != ARRAY_TYPE)
5661
    {
5662
      error ("attribute %qs applies to array types only",
5663
             IDENTIFIER_POINTER (name));
5664
      return NULL_TREE;
5665
    }
5666
 
5667
  /* Silently punt on variable sizes.  We can't make vector types for them,
5668
     need to ignore them on front-end generated subtypes of unconstrained
5669
     bases, and this attribute is for binding implementors, not end-users, so
5670
     we should never get there from legitimate explicit uses.  */
5671
 
5672
  if (!host_integerp (rep_size, 1))
5673
    return NULL_TREE;
5674
 
5675
  /* Get the element type/mode and check this is something we know
5676
     how to make vectors of.  */
5677
 
5678
  elem_type = TREE_TYPE (rep_type);
5679
  elem_mode = TYPE_MODE (elem_type);
5680
 
5681
  if ((!INTEGRAL_TYPE_P (elem_type)
5682
       && !SCALAR_FLOAT_TYPE_P (elem_type)
5683
       && !FIXED_POINT_TYPE_P (elem_type))
5684
      || (!SCALAR_FLOAT_MODE_P (elem_mode)
5685
          && GET_MODE_CLASS (elem_mode) != MODE_INT
5686
          && !ALL_SCALAR_FIXED_POINT_MODE_P (elem_mode))
5687
      || !host_integerp (TYPE_SIZE_UNIT (elem_type), 1))
5688
    {
5689
      error ("invalid element type for attribute %qs",
5690
             IDENTIFIER_POINTER (name));
5691
      return NULL_TREE;
5692
    }
5693
 
5694
  /* Sanity check the vector size and element type consistency.  */
5695
 
5696
  vec_bytes = tree_low_cst (rep_size, 1);
5697
 
5698
  if (vec_bytes % tree_low_cst (TYPE_SIZE_UNIT (elem_type), 1))
5699
    {
5700
      error ("vector size not an integral multiple of component size");
5701
      return NULL;
5702
    }
5703
 
5704
  if (vec_bytes == 0)
5705
    {
5706
      error ("zero vector size");
5707
      return NULL;
5708
    }
5709
 
5710
  vec_units = vec_bytes / tree_low_cst (TYPE_SIZE_UNIT (elem_type), 1);
5711
  if (vec_units & (vec_units - 1))
5712
    {
5713
      error ("number of components of the vector not a power of two");
5714
      return NULL_TREE;
5715
    }
5716
 
5717
  /* Build the vector type and replace.  */
5718
 
5719
  *node = build_vector_type (elem_type, vec_units);
5720
  rep_name = TYPE_NAME (rep_type);
5721
  if (TREE_CODE (rep_name) == TYPE_DECL)
5722
    rep_name = DECL_NAME (rep_name);
5723
  TYPE_NAME (*node) = rep_name;
5724
  TYPE_REPRESENTATIVE_ARRAY (*node) = rep_type;
5725
 
5726
  return NULL_TREE;
5727
}
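
/* This attribute is meant for Ada binding code rather than end users; the
   expected use (names purely illustrative) is a machine-attribute pragma on a
   constrained array type, along the lines of:

     type V4SI is array (1 .. 4) of Integer;
     pragma Machine_Attribute (V4SI, "vector_type");

   which rewrites the representative array type into a true vector type.  */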

/* ----------------------------------------------------------------------- *
 *                              BUILTIN FUNCTIONS                          *
 * ----------------------------------------------------------------------- */

/* Worker for DEF_BUILTIN.  Possibly define a builtin function with one or two
   names.  Does not declare a non-__builtin_ function if flag_no_builtin, or
   if nonansi_p and flag_no_nonansi_builtin.  */

static void
def_builtin_1 (enum built_in_function fncode,
               const char *name,
               enum built_in_class fnclass,
               tree fntype, tree libtype,
               bool both_p, bool fallback_p,
               bool nonansi_p ATTRIBUTE_UNUSED,
               tree fnattrs, bool implicit_p)
{
  tree decl;
  const char *libname;

  /* Preserve an already installed decl.  It was most likely set up in advance
     (e.g. as part of the internal builtins) for specific reasons.  */
  if (builtin_decl_explicit (fncode) != NULL_TREE)
    return;

  gcc_assert ((!both_p && !fallback_p)
              || !strncmp (name, "__builtin_",
                           strlen ("__builtin_")));

  libname = name + strlen ("__builtin_");
  decl = add_builtin_function (name, fntype, fncode, fnclass,
                               (fallback_p ? libname : NULL),
                               fnattrs);
  if (both_p)
    /* ??? This is normally further controlled by command-line options
       like -fno-builtin, but we don't have them for Ada.  */
    add_builtin_function (libname, libtype, fncode, fnclass,
                          NULL, fnattrs);

  set_builtin_decl (fncode, decl, implicit_p);
}
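
/* For instance, the builtins.def entry for memcpy reaches this routine with
   BOTH_P set, so both "__builtin_memcpy" and plain "memcpy" get declared,
   the library name also serving as the fallback used when the builtin cannot
   be expanded inline.  */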

static int flag_isoc94 = 0;
static int flag_isoc99 = 0;

/* Install what the common builtins.def offers.  */

static void
install_builtin_functions (void)
{
#define DEF_BUILTIN(ENUM, NAME, CLASS, TYPE, LIBTYPE, BOTH_P, FALLBACK_P, \
                    NONANSI_P, ATTRS, IMPLICIT, COND)                   \
  if (NAME && COND)                                                     \
    def_builtin_1 (ENUM, NAME, CLASS,                                   \
                   builtin_types[(int) TYPE],                           \
                   builtin_types[(int) LIBTYPE],                        \
                   BOTH_P, FALLBACK_P, NONANSI_P,                       \
                   built_in_attributes[(int) ATTRS], IMPLICIT);
#include "builtins.def"
#undef DEF_BUILTIN
}
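
/* The flag_isoc94/flag_isoc99 variables above exist only because the
   expansion of builtins.def refers to them; Ada has no notion of C dialect
   levels, so they simply stay at 0.  */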

/* ----------------------------------------------------------------------- *
 *                              BUILTIN FUNCTIONS                          *
 * ----------------------------------------------------------------------- */

/* Install the builtin functions we might need.  */

void
gnat_install_builtins (void)
{
  install_builtin_elementary_types ();
  install_builtin_function_types ();
  install_builtin_attributes ();

  /* Install builtins used by generic middle-end pieces first.  Some of these
     know about internal specificities and control attributes accordingly, for
     instance __builtin_alloca vs no-throw and -fstack-check.  We will ignore
     the generic definition from builtins.def.  */
  build_common_builtin_nodes ();

  /* Now, install the target specific builtins, such as the AltiVec family on
     ppc, and the common set as exposed by builtins.def.  */
  targetm.init_builtins ();
  install_builtin_functions ();
}

#include "gt-ada-utils.h"
#include "gtype-ada.h"
