OpenCores Subversion repository openrisc_2011-10-31
https://opencores.org/ocsvn/openrisc_2011-10-31/openrisc_2011-10-31/trunk
openrisc/tags/gnu-src/gcc-4.5.1/gcc-4.5.1-or32-1.0rc2/gcc/ada/gcc-interface/utils.c (rev 281, jeremybenn)

/****************************************************************************
 *                                                                          *
 *                         GNAT COMPILER COMPONENTS                         *
 *                                                                          *
 *                                U T I L S                                 *
 *                                                                          *
 *                          C Implementation File                           *
 *                                                                          *
 *          Copyright (C) 1992-2010, Free Software Foundation, Inc.         *
 *                                                                          *
 * GNAT is free software;  you can  redistribute it  and/or modify it under *
 * terms of the  GNU General Public License as published  by the Free Soft- *
 * ware  Foundation;  either version 3,  or (at your option) any later ver- *
 * sion.  GNAT is distributed in the hope that it will be useful, but WITH- *
 * OUT ANY WARRANTY;  without even the  implied warranty of MERCHANTABILITY *
 * or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License *
 * for  more details.  You should have received a copy of the GNU General   *
 * Public License along with GCC; see the file COPYING3.  If not see        *
 * <http://www.gnu.org/licenses/>.                                          *
 *                                                                          *
 * GNAT was originally developed  by the GNAT team at  New York University. *
 * Extensive contributions were provided by Ada Core Technologies Inc.      *
 *                                                                          *
 ****************************************************************************/

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "flags.h"
#include "toplev.h"
#include "rtl.h"
#include "output.h"
#include "ggc.h"
#include "debug.h"
#include "convert.h"
#include "target.h"
#include "function.h"
#include "langhooks.h"
#include "pointer-set.h"
#include "cgraph.h"
#include "tree-dump.h"
#include "tree-inline.h"
#include "tree-iterator.h"
#include "gimple.h"

#include "ada.h"
#include "types.h"
#include "atree.h"
#include "elists.h"
#include "namet.h"
#include "nlists.h"
#include "stringt.h"
#include "uintp.h"
#include "fe.h"
#include "sinfo.h"
#include "einfo.h"
#include "ada-tree.h"
#include "gigi.h"

#ifndef MAX_BITS_PER_WORD
#define MAX_BITS_PER_WORD  BITS_PER_WORD
#endif

/* If nonzero, pretend we are allocating at global level.  */
int force_global;

/* The default alignment of "double" floating-point types, i.e. floating
   point types whose size is equal to 64 bits, or 0 if this alignment is
   not specifically capped.  */
int double_float_alignment;

/* The default alignment of "double" or larger scalar types, i.e. scalar
   types whose size is greater or equal to 64 bits, or 0 if this alignment
   is not specifically capped.  */
int double_scalar_alignment;

/* Tree nodes for the various types and decls we create.  */
tree gnat_std_decls[(int) ADT_LAST];

/* Functions to call for each of the possible raise reasons.  */
tree gnat_raise_decls[(int) LAST_REASON_CODE + 1];

/* Forward declarations for handlers of attributes.  */
static tree handle_const_attribute (tree *, tree, tree, int, bool *);
static tree handle_nothrow_attribute (tree *, tree, tree, int, bool *);
static tree handle_pure_attribute (tree *, tree, tree, int, bool *);
static tree handle_novops_attribute (tree *, tree, tree, int, bool *);
static tree handle_nonnull_attribute (tree *, tree, tree, int, bool *);
static tree handle_sentinel_attribute (tree *, tree, tree, int, bool *);
static tree handle_noreturn_attribute (tree *, tree, tree, int, bool *);
static tree handle_malloc_attribute (tree *, tree, tree, int, bool *);
static tree handle_type_generic_attribute (tree *, tree, tree, int, bool *);
static tree handle_vector_size_attribute (tree *, tree, tree, int, bool *);
static tree handle_vector_type_attribute (tree *, tree, tree, int, bool *);

/* Fake handler for attributes we don't properly support, typically because
   they'd require dragging in a lot of the common C front-end circuitry.  */
static tree fake_attribute_handler      (tree *, tree, tree, int, bool *);

/* Table of machine-independent internal attributes for Ada.  We support
   this minimal set of attributes to accommodate the needs of builtins.  */
const struct attribute_spec gnat_internal_attribute_table[] =
{
  /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
  { "const",        0, 0,  true,  false, false, handle_const_attribute   },
  { "nothrow",      0, 0,  true,  false, false, handle_nothrow_attribute },
  { "pure",         0, 0,  true,  false, false, handle_pure_attribute },
  { "no vops",      0, 0,  true,  false, false, handle_novops_attribute },
  { "nonnull",      0, -1, false, true,  true,  handle_nonnull_attribute },
  { "sentinel",     0, 1,  false, true,  true,  handle_sentinel_attribute },
  { "noreturn",     0, 0,  true,  false, false, handle_noreturn_attribute },
  { "malloc",       0, 0,  true,  false, false, handle_malloc_attribute },
  { "type generic", 0, 0,  false, true, true, handle_type_generic_attribute },

  { "vector_size",  1, 1,  false, true, false,  handle_vector_size_attribute },
  { "vector_type",  0, 0,  false, true, false,  handle_vector_type_attribute },
  { "may_alias",    0, 0, false, true, false, NULL },

  /* ??? format and format_arg are heavy and not supported, which actually
     prevents support for stdio builtins, which we however declare as part
     of the common builtins.def contents.  */
  { "format",     3, 3,  false, true,  true,  fake_attribute_handler },
  { "format_arg", 1, 1,  false, true,  true,  fake_attribute_handler },

  { NULL,         0, 0, false, false, false, NULL }
};

/* Associates a GNAT tree node with a GCC tree node.  It is used in
   `save_gnu_tree', `get_gnu_tree' and `present_gnu_tree'.  See the
   documentation of `save_gnu_tree' for more info.  */
static GTY((length ("max_gnat_nodes"))) tree *associate_gnat_to_gnu;

#define GET_GNU_TREE(GNAT_ENTITY)       \
  associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id]

#define SET_GNU_TREE(GNAT_ENTITY,VAL)   \
  associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] = (VAL)

#define PRESENT_GNU_TREE(GNAT_ENTITY)   \
  (associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)

/* Associates a GNAT entity with a GCC tree node used as a dummy, if any.  */
static GTY((length ("max_gnat_nodes"))) tree *dummy_node_table;

#define GET_DUMMY_NODE(GNAT_ENTITY)     \
  dummy_node_table[(GNAT_ENTITY) - First_Node_Id]

#define SET_DUMMY_NODE(GNAT_ENTITY,VAL) \
  dummy_node_table[(GNAT_ENTITY) - First_Node_Id] = (VAL)

#define PRESENT_DUMMY_NODE(GNAT_ENTITY) \
  (dummy_node_table[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)

/* This variable keeps a table of types for each precision so that we only
   allocate each of them once.  Signed and unsigned types are kept separate.

   Note that these types are only used when fold-const requests something
   special.  Perhaps we should NOT share these types; we'll see how it
   goes later.  */
static GTY(()) tree signed_and_unsigned_types[2 * MAX_BITS_PER_WORD + 1][2];

/* Likewise for float types, but record these by mode.  */
static GTY(()) tree float_types[NUM_MACHINE_MODES];

/* For each binding contour we allocate a binding_level structure to indicate
   the binding depth.  */

struct GTY((chain_next ("%h.chain"))) gnat_binding_level {
  /* The binding level containing this one (the enclosing binding level). */
  struct gnat_binding_level *chain;
  /* The BLOCK node for this level.  */
  tree block;
  /* If nonzero, the setjmp buffer that needs to be updated for any
     variable-sized definition within this context.  */
  tree jmpbuf_decl;
};

/* The binding level currently in effect.  */
static GTY(()) struct gnat_binding_level *current_binding_level;

/* A chain of gnat_binding_level structures awaiting reuse.  */
static GTY((deletable)) struct gnat_binding_level *free_binding_level;

/* An array of global declarations.  */
static GTY(()) VEC(tree,gc) *global_decls;

/* An array of builtin function declarations.  */
static GTY(()) VEC(tree,gc) *builtin_decls;

/* An array of global renaming pointers.  */
static GTY(()) VEC(tree,gc) *global_renaming_pointers;

/* A chain of unused BLOCK nodes.  */
static GTY((deletable)) tree free_block_chain;

static tree merge_sizes (tree, tree, tree, bool, bool);
static tree compute_related_constant (tree, tree);
static tree split_plus (tree, tree *);
static tree float_type_for_precision (int, enum machine_mode);
static tree convert_to_fat_pointer (tree, tree);
static tree convert_to_thin_pointer (tree, tree);
static tree make_descriptor_field (const char *, tree, tree, tree);
static bool potential_alignment_gap (tree, tree, tree);

/* Initialize the association of GNAT nodes to GCC trees.  */

void
init_gnat_to_gnu (void)
{
  associate_gnat_to_gnu
    = (tree *) ggc_alloc_cleared (max_gnat_nodes * sizeof (tree));
}

/* GNAT_ENTITY is a GNAT tree node for an entity.  GNU_DECL is the GCC tree
   which is to be associated with GNAT_ENTITY.  Such a GCC tree node is
   always a ..._DECL node.  If NO_CHECK is true, the latter check is
   suppressed.

   If GNU_DECL is zero, a previous association is to be reset.  */

void
save_gnu_tree (Entity_Id gnat_entity, tree gnu_decl, bool no_check)
{
  /* Check that GNAT_ENTITY is not already defined and that it is being set
     to something which is a decl.  Raise gigi 401 if not.  Usually, this
     means GNAT_ENTITY is defined twice, but occasionally it is due to some
     Gigi problem.  */
  gcc_assert (!(gnu_decl
                && (PRESENT_GNU_TREE (gnat_entity)
                    || (!no_check && !DECL_P (gnu_decl)))));

  SET_GNU_TREE (gnat_entity, gnu_decl);
}

/* GNAT_ENTITY is a GNAT tree node for a defining identifier.
   Return the ..._DECL node that was associated with it.  If there is no tree
   node associated with GNAT_ENTITY, abort.

   In some cases, such as delayed elaboration or expressions that need to
   be elaborated only once, GNAT_ENTITY is really not an entity.  */

tree
get_gnu_tree (Entity_Id gnat_entity)
{
  gcc_assert (PRESENT_GNU_TREE (gnat_entity));
  return GET_GNU_TREE (gnat_entity);
}

/* Return nonzero if a GCC tree has been associated with GNAT_ENTITY.  */

bool
present_gnu_tree (Entity_Id gnat_entity)
{
  return PRESENT_GNU_TREE (gnat_entity);
}

/* Initialize the association of GNAT nodes to GCC trees as dummies.  */

void
init_dummy_type (void)
{
  dummy_node_table
    = (tree *) ggc_alloc_cleared (max_gnat_nodes * sizeof (tree));
}

/* Make a dummy type corresponding to GNAT_TYPE.  */

tree
make_dummy_type (Entity_Id gnat_type)
{
  Entity_Id gnat_underlying = Gigi_Equivalent_Type (gnat_type);
  tree gnu_type;

  /* If there is an equivalent type, get its underlying type.  */
  if (Present (gnat_underlying))
    gnat_underlying = Underlying_Type (gnat_underlying);

  /* If there was no equivalent type (can only happen when just annotating
     types) or underlying type, go back to the original type.  */
  if (No (gnat_underlying))
    gnat_underlying = gnat_type;

  /* If there is already a dummy type, use that one.  Else make one.  */
  if (PRESENT_DUMMY_NODE (gnat_underlying))
    return GET_DUMMY_NODE (gnat_underlying);

  /* If this is a record, make a RECORD_TYPE or UNION_TYPE; else make
     an ENUMERAL_TYPE.  */
  gnu_type = make_node (Is_Record_Type (gnat_underlying)
                        ? tree_code_for_record_type (gnat_underlying)
                        : ENUMERAL_TYPE);
  TYPE_NAME (gnu_type) = get_entity_name (gnat_type);
  TYPE_DUMMY_P (gnu_type) = 1;
  TYPE_STUB_DECL (gnu_type)
    = create_type_stub_decl (TYPE_NAME (gnu_type), gnu_type);
  if (AGGREGATE_TYPE_P (gnu_type))
    TYPE_BY_REFERENCE_P (gnu_type) = Is_By_Reference_Type (gnat_type);

  SET_DUMMY_NODE (gnat_underlying, gnu_type);

  return gnu_type;
}

/* Return nonzero if we are currently in the global binding level.  */

int
global_bindings_p (void)
{
  return ((force_global || !current_function_decl) ? -1 : 0);
}
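
/* Editorial note (not in the original source): global_bindings_p returns -1
   rather than 1 in the global case; callers are only expected to test the
   result for zero versus nonzero, as the comment above says.  */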

/* Enter a new binding level.  */

void
gnat_pushlevel (void)
{
  struct gnat_binding_level *newlevel = NULL;

  /* Reuse a struct for this binding level, if there is one.  */
  if (free_binding_level)
    {
      newlevel = free_binding_level;
      free_binding_level = free_binding_level->chain;
    }
  else
    newlevel
      = (struct gnat_binding_level *)
        ggc_alloc (sizeof (struct gnat_binding_level));

  /* Use a free BLOCK, if any; otherwise, allocate one.  */
  if (free_block_chain)
    {
      newlevel->block = free_block_chain;
      free_block_chain = BLOCK_CHAIN (free_block_chain);
      BLOCK_CHAIN (newlevel->block) = NULL_TREE;
    }
  else
    newlevel->block = make_node (BLOCK);

  /* Point the BLOCK we just made to its parent.  */
  if (current_binding_level)
    BLOCK_SUPERCONTEXT (newlevel->block) = current_binding_level->block;

  BLOCK_VARS (newlevel->block) = BLOCK_SUBBLOCKS (newlevel->block) = NULL_TREE;
  TREE_USED (newlevel->block) = 1;

  /* Add this level to the front of the chain (stack) of levels that are
     active.  */
  newlevel->chain = current_binding_level;
  newlevel->jmpbuf_decl = NULL_TREE;
  current_binding_level = newlevel;
}

/* Set SUPERCONTEXT of the BLOCK for the current binding level to FNDECL
   and point FNDECL to this BLOCK.  */

void
set_current_block_context (tree fndecl)
{
  BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
  DECL_INITIAL (fndecl) = current_binding_level->block;
}

/* Set the jmpbuf_decl for the current binding level to DECL.  */

void
set_block_jmpbuf_decl (tree decl)
{
  current_binding_level->jmpbuf_decl = decl;
}

/* Get the jmpbuf_decl, if any, for the current binding level.  */

tree
get_block_jmpbuf_decl (void)
{
  return current_binding_level->jmpbuf_decl;
}

/* Exit a binding level.  Set any BLOCK into the current code group.  */

void
gnat_poplevel (void)
{
  struct gnat_binding_level *level = current_binding_level;
  tree block = level->block;

  BLOCK_VARS (block) = nreverse (BLOCK_VARS (block));
  BLOCK_SUBBLOCKS (block) = nreverse (BLOCK_SUBBLOCKS (block));

  /* If this is a function-level BLOCK, don't do anything.  Otherwise, if
     there are no variables, free the block and merge its subblocks into
     those of its parent block; otherwise, add it to its parent's list of
     subblocks.  */
  if (TREE_CODE (BLOCK_SUPERCONTEXT (block)) == FUNCTION_DECL)
    ;
  else if (BLOCK_VARS (block) == NULL_TREE)
    {
      BLOCK_SUBBLOCKS (level->chain->block)
        = chainon (BLOCK_SUBBLOCKS (block),
                   BLOCK_SUBBLOCKS (level->chain->block));
      BLOCK_CHAIN (block) = free_block_chain;
      free_block_chain = block;
    }
  else
    {
      BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (level->chain->block);
      BLOCK_SUBBLOCKS (level->chain->block) = block;
      TREE_USED (block) = 1;
      set_block_for_group (block);
    }

  /* Free this binding structure.  */
  current_binding_level = level->chain;
  level->chain = free_binding_level;
  free_binding_level = level;
}


/* Records a ..._DECL node DECL as belonging to the current lexical scope
   and uses GNAT_NODE for location information and propagating flags.  */

void
gnat_pushdecl (tree decl, Node_Id gnat_node)
{
  /* If this decl is public external or at top level, there is no context.
     But a PARM_DECL always goes in the level of its function.  */
  if (TREE_CODE (decl) != PARM_DECL
      && ((DECL_EXTERNAL (decl) && TREE_PUBLIC (decl))
          || global_bindings_p ()))
    DECL_CONTEXT (decl) = 0;
  else
    {
      DECL_CONTEXT (decl) = current_function_decl;

      /* Functions imported in another function are not really nested.
         For really nested functions mark them initially as needing
         a static chain for uses of that flag before unnesting;
         lower_nested_functions will then recompute it.  */
      if (TREE_CODE (decl) == FUNCTION_DECL && !TREE_PUBLIC (decl))
        DECL_STATIC_CHAIN (decl) = 1;
    }

  TREE_NO_WARNING (decl) = (gnat_node == Empty || Warnings_Off (gnat_node));

  /* Set the location of DECL and emit a declaration for it.  */
  if (Present (gnat_node))
    Sloc_to_locus (Sloc (gnat_node), &DECL_SOURCE_LOCATION (decl));
  add_decl_expr (decl, gnat_node);

  /* Put the declaration on the list.  The list of declarations is in reverse
     order.  The list will be reversed later.  Put global variables in the
     globals list and builtin functions in a dedicated list to speed up
     further lookups.  Don't put TYPE_DECLs for UNCONSTRAINED_ARRAY_TYPE into
     the list, as they will cause trouble with the debugger and aren't needed
     anyway.  */
  if (TREE_CODE (decl) != TYPE_DECL
      || TREE_CODE (TREE_TYPE (decl)) != UNCONSTRAINED_ARRAY_TYPE)
    {
      if (global_bindings_p ())
        {
          VEC_safe_push (tree, gc, global_decls, decl);

          if (TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl))
            VEC_safe_push (tree, gc, builtin_decls, decl);
        }
      else
        {
          TREE_CHAIN (decl) = BLOCK_VARS (current_binding_level->block);
          BLOCK_VARS (current_binding_level->block) = decl;
        }
    }

  /* For the declaration of a type, set its name either if it is not already
     set or if the previous type name was not derived from a source name.
     We'd rather have the type named with a real name and all the pointer
     types to the same object have the same POINTER_TYPE node.  Code in the
     equivalent function of c-decl.c makes a copy of the type node here, but
     that may cause us trouble with incomplete types.  We make an exception
     for fat pointer types because the compiler automatically builds them
     for unconstrained array types and the debugger uses them to represent
     both these and pointers to these.  */
  if (TREE_CODE (decl) == TYPE_DECL && DECL_NAME (decl))
    {
      tree t = TREE_TYPE (decl);

      if (!(TYPE_NAME (t) && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL))
        ;
      else if (TYPE_IS_FAT_POINTER_P (t))
        {
          tree tt = build_variant_type_copy (t);
          TYPE_NAME (tt) = decl;
          TREE_USED (tt) = TREE_USED (t);
          TREE_TYPE (decl) = tt;
          if (DECL_ORIGINAL_TYPE (TYPE_NAME (t)))
            DECL_ORIGINAL_TYPE (decl) = DECL_ORIGINAL_TYPE (TYPE_NAME (t));
          else
            DECL_ORIGINAL_TYPE (decl) = t;
          t = NULL_TREE;
          DECL_ARTIFICIAL (decl) = 0;
        }
      else if (DECL_ARTIFICIAL (TYPE_NAME (t)) && !DECL_ARTIFICIAL (decl))
        ;
      else
        t = NULL_TREE;

      /* Propagate the name to all the variants.  This is needed for
         the type qualifiers machinery to work properly.  */
      if (t)
        for (t = TYPE_MAIN_VARIANT (t); t; t = TYPE_NEXT_VARIANT (t))
          TYPE_NAME (t) = decl;
    }
}

/* Do little here.  Set up the standard declarations later after the
   front end has been run.  */

void
gnat_init_decl_processing (void)
{
  /* Make the binding_level structure for global names.  */
  current_function_decl = 0;
  current_binding_level = 0;
  free_binding_level = 0;
  gnat_pushlevel ();

  build_common_tree_nodes (true, true);

  /* In Ada, we use a signed type for SIZETYPE.  Use the signed type
     corresponding to the width of Pmode.  In most cases when ptr_mode
     and Pmode differ, C will use the width of ptr_mode for SIZETYPE.
     But we get far better code using the width of Pmode.  */
  size_type_node = gnat_type_for_mode (Pmode, 0);
  set_sizetype (size_type_node);

  /* In Ada, we use an unsigned 8-bit type for the default boolean type.  */
  boolean_type_node = make_unsigned_type (8);
  TREE_SET_CODE (boolean_type_node, BOOLEAN_TYPE);
  SET_TYPE_RM_MAX_VALUE (boolean_type_node,
                         build_int_cst (boolean_type_node, 1));
  SET_TYPE_RM_SIZE (boolean_type_node, bitsize_int (1));

  build_common_tree_nodes_2 (0);
  boolean_true_node = TYPE_MAX_VALUE (boolean_type_node);

  ptr_void_type_node = build_pointer_type (void_type_node);
}

/* Record TYPE as a builtin type for Ada.  NAME is the name of the type.  */

void
record_builtin_type (const char *name, tree type)
{
  tree type_decl = build_decl (input_location,
                               TYPE_DECL, get_identifier (name), type);

  gnat_pushdecl (type_decl, Empty);

  if (debug_hooks->type_decl)
    debug_hooks->type_decl (type_decl, false);
}
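
/* Illustration (editorial, hypothetical call not taken from this file):
   record_builtin_type ("boolean", boolean_type_node) would attach a
   TYPE_DECL named "boolean" at the global binding level and, if the debug
   back-end provides a type_decl hook, announce the declaration to it.  */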

/* Given a record type RECORD_TYPE and a list of FIELD_DECL nodes FIELD_LIST,
   finish constructing the record or union type.  If REP_LEVEL is zero, this
   record has no representation clause and so will be entirely laid out here.
   If REP_LEVEL is one, this record has a representation clause and has been
   laid out already; only set the sizes and alignment.  If REP_LEVEL is two,
   this record is derived from a parent record and thus inherits its layout;
   only make a pass on the fields to finalize them.  DEBUG_INFO_P is true if
   we need to write debug information about this type.  */

void
finish_record_type (tree record_type, tree field_list, int rep_level,
                    bool debug_info_p)
{
  enum tree_code code = TREE_CODE (record_type);
  tree name = TYPE_NAME (record_type);
  tree ada_size = bitsize_zero_node;
  tree size = bitsize_zero_node;
  bool had_size = TYPE_SIZE (record_type) != 0;
  bool had_size_unit = TYPE_SIZE_UNIT (record_type) != 0;
  bool had_align = TYPE_ALIGN (record_type) != 0;
  tree field;

  TYPE_FIELDS (record_type) = field_list;

  /* Always attach the TYPE_STUB_DECL for a record type.  It is required to
     generate debug info and have a parallel type.  */
  if (name && TREE_CODE (name) == TYPE_DECL)
    name = DECL_NAME (name);
  TYPE_STUB_DECL (record_type) = create_type_stub_decl (name, record_type);

  /* Globally initialize the record first.  If this is a rep'ed record,
     that just means some initializations; otherwise, layout the record.  */
  if (rep_level > 0)
    {
      TYPE_ALIGN (record_type) = MAX (BITS_PER_UNIT, TYPE_ALIGN (record_type));
      SET_TYPE_MODE (record_type, BLKmode);

      if (!had_size_unit)
        TYPE_SIZE_UNIT (record_type) = size_zero_node;
      if (!had_size)
        TYPE_SIZE (record_type) = bitsize_zero_node;

      /* For all-repped records with a size specified, lay the QUAL_UNION_TYPE
         out just like a UNION_TYPE, since the size will be fixed.  */
      else if (code == QUAL_UNION_TYPE)
        code = UNION_TYPE;
    }
  else
    {
      /* Ensure there isn't a size already set.  There can be in an error
         case where there is a rep clause but all fields have errors and
         no longer have a position.  */
      TYPE_SIZE (record_type) = 0;
      layout_type (record_type);
    }

  /* At this point, the position and size of each field is known.  It was
     either set before entry by a rep clause, or by laying out the type above.

     We now run a pass over the fields (in reverse order for QUAL_UNION_TYPEs)
     to compute the Ada size; the GCC size and alignment (for rep'ed records
     that are not padding types); and the mode (for rep'ed records).  We also
     clear the DECL_BIT_FIELD indication for the cases we know have not been
     handled yet, and adjust DECL_NONADDRESSABLE_P accordingly.  */

  if (code == QUAL_UNION_TYPE)
    field_list = nreverse (field_list);

  for (field = field_list; field; field = TREE_CHAIN (field))
    {
      tree type = TREE_TYPE (field);
      tree pos = bit_position (field);
      tree this_size = DECL_SIZE (field);
      tree this_ada_size;

      if ((TREE_CODE (type) == RECORD_TYPE
           || TREE_CODE (type) == UNION_TYPE
           || TREE_CODE (type) == QUAL_UNION_TYPE)
          && !TYPE_FAT_POINTER_P (type)
          && !TYPE_CONTAINS_TEMPLATE_P (type)
          && TYPE_ADA_SIZE (type))
        this_ada_size = TYPE_ADA_SIZE (type);
      else
        this_ada_size = this_size;

      /* Clear DECL_BIT_FIELD for the cases layout_decl does not handle.  */
      if (DECL_BIT_FIELD (field)
          && operand_equal_p (this_size, TYPE_SIZE (type), 0))
        {
          unsigned int align = TYPE_ALIGN (type);

          /* In the general case, type alignment is required.  */
          if (value_factor_p (pos, align))
            {
              /* The enclosing record type must be sufficiently aligned.
                 Otherwise, if no alignment was specified for it and it
                 has been laid out already, bump its alignment to the
                 desired one if this is compatible with its size.  */
              if (TYPE_ALIGN (record_type) >= align)
                {
                  DECL_ALIGN (field) = MAX (DECL_ALIGN (field), align);
                  DECL_BIT_FIELD (field) = 0;
                }
              else if (!had_align
                       && rep_level == 0
                       && value_factor_p (TYPE_SIZE (record_type), align))
                {
                  TYPE_ALIGN (record_type) = align;
                  DECL_ALIGN (field) = MAX (DECL_ALIGN (field), align);
                  DECL_BIT_FIELD (field) = 0;
                }
            }

          /* In the non-strict alignment case, only byte alignment is
             required.  */
          if (!STRICT_ALIGNMENT
              && DECL_BIT_FIELD (field)
              && value_factor_p (pos, BITS_PER_UNIT))
            DECL_BIT_FIELD (field) = 0;
        }

      /* If we still have DECL_BIT_FIELD set at this point, we know that the
         field is technically not addressable.  Except that it can actually
         be addressed if it is BLKmode and happens to be properly aligned.  */
      if (DECL_BIT_FIELD (field)
          && !(DECL_MODE (field) == BLKmode
               && value_factor_p (pos, BITS_PER_UNIT)))
        DECL_NONADDRESSABLE_P (field) = 1;

      /* A type must be as aligned as its most aligned field that is not
         a bit-field.  But this is already enforced by layout_type.  */
      if (rep_level > 0 && !DECL_BIT_FIELD (field))
        TYPE_ALIGN (record_type)
          = MAX (TYPE_ALIGN (record_type), DECL_ALIGN (field));

      switch (code)
        {
        case UNION_TYPE:
          ada_size = size_binop (MAX_EXPR, ada_size, this_ada_size);
          size = size_binop (MAX_EXPR, size, this_size);
          break;

        case QUAL_UNION_TYPE:
          ada_size
            = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
                           this_ada_size, ada_size);
          size = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
                              this_size, size);
          break;

        case RECORD_TYPE:
          /* Since we know here that all fields are sorted in order of
             increasing bit position, the size of the record is one
             higher than the ending bit of the last field processed
             unless we have a rep clause, since in that case we might
             have a field outside a QUAL_UNION_TYPE that has a higher ending
             position.  So use a MAX in that case.  Also, if this field is a
             QUAL_UNION_TYPE, we need to take into account the previous size in
             the case of empty variants.  */
          ada_size
            = merge_sizes (ada_size, pos, this_ada_size,
                           TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0);
          size
            = merge_sizes (size, pos, this_size,
                           TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0);
          break;

        default:
          gcc_unreachable ();
        }
    }

  if (code == QUAL_UNION_TYPE)
    nreverse (field_list);

  if (rep_level < 2)
    {
      /* If this is a padding record, we never want to make the size smaller
         than what was specified in it, if any.  */
      if (TYPE_IS_PADDING_P (record_type) && TYPE_SIZE (record_type))
        size = TYPE_SIZE (record_type);

      /* Now set any of the values we've just computed that apply.  */
      if (!TYPE_FAT_POINTER_P (record_type)
          && !TYPE_CONTAINS_TEMPLATE_P (record_type))
        SET_TYPE_ADA_SIZE (record_type, ada_size);

      if (rep_level > 0)
        {
          tree size_unit = had_size_unit
                           ? TYPE_SIZE_UNIT (record_type)
                           : convert (sizetype,
                                      size_binop (CEIL_DIV_EXPR, size,
                                                  bitsize_unit_node));
          unsigned int align = TYPE_ALIGN (record_type);

          TYPE_SIZE (record_type) = variable_size (round_up (size, align));
          TYPE_SIZE_UNIT (record_type)
            = variable_size (round_up (size_unit, align / BITS_PER_UNIT));

          compute_record_mode (record_type);
        }
    }

  if (debug_info_p)
    rest_of_record_type_compilation (record_type);
}

/* Wrap up compilation of RECORD_TYPE, i.e. output all the debug information
   associated with it.  It need not be invoked directly in most cases since
   finish_record_type takes care of doing so, but this can be necessary if
   a parallel type is to be attached to the record type.  */

void
rest_of_record_type_compilation (tree record_type)
{
  tree field_list = TYPE_FIELDS (record_type);
  tree field;
  enum tree_code code = TREE_CODE (record_type);
  bool var_size = false;

  for (field = field_list; field; field = TREE_CHAIN (field))
    {
      /* We need to make an XVE/XVU record if any field has variable size,
         whether or not the record does.  For example, if we have a union,
         it may be that all fields, rounded up to the alignment, have the
         same size, in which case we'll use that size.  But the debug
         output routines (except Dwarf2) won't be able to output the fields,
         so we need to make the special record.  */
      if (TREE_CODE (DECL_SIZE (field)) != INTEGER_CST
          /* If a field has a non-constant qualifier, the record will have
             variable size too.  */
          || (code == QUAL_UNION_TYPE
              && TREE_CODE (DECL_QUALIFIER (field)) != INTEGER_CST))
        {
          var_size = true;
          break;
        }
    }

  /* If this record is of variable size, rename it so that the
     debugger knows it is and make a new, parallel, record
     that tells the debugger how the record is laid out.  See
     exp_dbug.ads.  But don't do this for records that are padding
     since they confuse GDB.  */
  if (var_size && !TYPE_IS_PADDING_P (record_type))
    {
      tree new_record_type
        = make_node (TREE_CODE (record_type) == QUAL_UNION_TYPE
                     ? UNION_TYPE : TREE_CODE (record_type));
      tree orig_name = TYPE_NAME (record_type), new_name;
      tree last_pos = bitsize_zero_node;
      tree old_field, prev_old_field = NULL_TREE;

      if (TREE_CODE (orig_name) == TYPE_DECL)
        orig_name = DECL_NAME (orig_name);

      new_name
        = concat_name (orig_name, TREE_CODE (record_type) == QUAL_UNION_TYPE
                                  ? "XVU" : "XVE");
      TYPE_NAME (new_record_type) = new_name;
      TYPE_ALIGN (new_record_type) = BIGGEST_ALIGNMENT;
      TYPE_STUB_DECL (new_record_type)
        = create_type_stub_decl (new_name, new_record_type);
      DECL_IGNORED_P (TYPE_STUB_DECL (new_record_type))
        = DECL_IGNORED_P (TYPE_STUB_DECL (record_type));
      TYPE_SIZE (new_record_type) = size_int (TYPE_ALIGN (record_type));
      TYPE_SIZE_UNIT (new_record_type)
        = size_int (TYPE_ALIGN (record_type) / BITS_PER_UNIT);

      add_parallel_type (TYPE_STUB_DECL (record_type), new_record_type);

      /* Now scan all the fields, replacing each field with a new
         field corresponding to the new encoding.  */
      for (old_field = TYPE_FIELDS (record_type); old_field;
           old_field = TREE_CHAIN (old_field))
        {
          tree field_type = TREE_TYPE (old_field);
          tree field_name = DECL_NAME (old_field);
          tree new_field;
          tree curpos = bit_position (old_field);
          bool var = false;
          unsigned int align = 0;
          tree pos;

          /* See how the position was modified from the last position.

          There are two basic cases we support: a value was added
          to the last position or the last position was rounded to
          a boundary and then something was added.  Check for the
          first case first.  If not, see if there is any evidence
          of rounding.  If so, round the last position and try
          again.

          If this is a union, the position can be taken as zero. */

          /* Some computations depend on the shape of the position expression,
             so strip conversions to make sure it's exposed.  */
          curpos = remove_conversions (curpos, true);

          if (TREE_CODE (new_record_type) == UNION_TYPE)
            pos = bitsize_zero_node, align = 0;
          else
            pos = compute_related_constant (curpos, last_pos);

          if (!pos && TREE_CODE (curpos) == MULT_EXPR
              && host_integerp (TREE_OPERAND (curpos, 1), 1))
            {
              tree offset = TREE_OPERAND (curpos, 0);
              align = tree_low_cst (TREE_OPERAND (curpos, 1), 1);

              /* An offset which is a bitwise AND with a negative power of 2
                 means an alignment corresponding to this power of 2.  */
              offset = remove_conversions (offset, true);
              if (TREE_CODE (offset) == BIT_AND_EXPR
                  && host_integerp (TREE_OPERAND (offset, 1), 0)
                  && tree_int_cst_sgn (TREE_OPERAND (offset, 1)) < 0)
                {
                  unsigned int pow
                    = - tree_low_cst (TREE_OPERAND (offset, 1), 0);
                  if (exact_log2 (pow) > 0)
                    align *= pow;
                }

              pos = compute_related_constant (curpos,
                                              round_up (last_pos, align));
            }
          else if (!pos && TREE_CODE (curpos) == PLUS_EXPR
                   && TREE_CODE (TREE_OPERAND (curpos, 1)) == INTEGER_CST
                   && TREE_CODE (TREE_OPERAND (curpos, 0)) == MULT_EXPR
                   && host_integerp (TREE_OPERAND
                                     (TREE_OPERAND (curpos, 0), 1),
                                     1))
            {
              align
                = tree_low_cst
                (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1), 1);
              pos = compute_related_constant (curpos,
                                              round_up (last_pos, align));
            }
          else if (potential_alignment_gap (prev_old_field, old_field,
                                            pos))
            {
              align = TYPE_ALIGN (field_type);
              pos = compute_related_constant (curpos,
                                              round_up (last_pos, align));
            }

          /* If we can't compute a position, set it to zero.

          ??? We really should abort here, but it's too much work
          to get this correct for all cases.  */

          if (!pos)
            pos = bitsize_zero_node;

          /* See if this type is variable-sized and make a pointer type
             and indicate the indirection if so.  Beware that the debug
             back-end may adjust the position computed above according
             to the alignment of the field type, i.e. the pointer type
             in this case, if we don't preventively counter that.  */
          if (TREE_CODE (DECL_SIZE (old_field)) != INTEGER_CST)
            {
              field_type = build_pointer_type (field_type);
              if (align != 0 && TYPE_ALIGN (field_type) > align)
                {
                  field_type = copy_node (field_type);
                  TYPE_ALIGN (field_type) = align;
                }
              var = true;
            }

          /* Make a new field name, if necessary.  */
          if (var || align != 0)
            {
              char suffix[16];

              if (align != 0)
                sprintf (suffix, "XV%c%u", var ? 'L' : 'A',
                         align / BITS_PER_UNIT);
              else
                strcpy (suffix, "XVL");

              field_name = concat_name (field_name, suffix);
            }

          new_field = create_field_decl (field_name, field_type,
                                         new_record_type, 0,
                                         DECL_SIZE (old_field), pos, 0);
          TREE_CHAIN (new_field) = TYPE_FIELDS (new_record_type);
          TYPE_FIELDS (new_record_type) = new_field;

          /* If old_field is a QUAL_UNION_TYPE, take its size as being
             zero.  The only time it's not the last field of the record
             is when there are other components at fixed positions after
             it (meaning there was a rep clause for every field) and we
             want to be able to encode them.  */
          last_pos = size_binop (PLUS_EXPR, bit_position (old_field),
                                 (TREE_CODE (TREE_TYPE (old_field))
                                  == QUAL_UNION_TYPE)
                                 ? bitsize_zero_node
                                 : DECL_SIZE (old_field));
          prev_old_field = old_field;
        }

      TYPE_FIELDS (new_record_type)
        = nreverse (TYPE_FIELDS (new_record_type));

      rest_of_type_decl_compilation (TYPE_STUB_DECL (new_record_type));
    }

  rest_of_type_decl_compilation (TYPE_STUB_DECL (record_type));
}

/* Append PARALLEL_TYPE on the chain of parallel types for DECL.  */

void
add_parallel_type (tree decl, tree parallel_type)
{
  tree d = decl;

  while (DECL_PARALLEL_TYPE (d))
    d = TYPE_STUB_DECL (DECL_PARALLEL_TYPE (d));

  SET_DECL_PARALLEL_TYPE (d, parallel_type);
}

/* Return the parallel type associated with a type, if any.  */

tree
get_parallel_type (tree type)
{
  if (TYPE_STUB_DECL (type))
    return DECL_PARALLEL_TYPE (TYPE_STUB_DECL (type));
  else
    return NULL_TREE;
}

/* Utility function of above to merge LAST_SIZE, the previous size of a
   record, with FIRST_BIT and SIZE that describe a field.  SPECIAL is true
   if this represents a QUAL_UNION_TYPE in which case we must look for
   COND_EXPRs and replace a value of zero with the old size.  If HAS_REP is
   true, we take the MAX of the end position of this field with LAST_SIZE.
   In all other cases, we use FIRST_BIT plus SIZE.  Return an expression
   for the size.  */

static tree
merge_sizes (tree last_size, tree first_bit, tree size, bool special,
             bool has_rep)
{
  tree type = TREE_TYPE (last_size);
  tree new_size;

  if (!special || TREE_CODE (size) != COND_EXPR)
    {
      new_size = size_binop (PLUS_EXPR, first_bit, size);
      if (has_rep)
        new_size = size_binop (MAX_EXPR, last_size, new_size);
    }

  else
    new_size = fold_build3 (COND_EXPR, type, TREE_OPERAND (size, 0),
                            integer_zerop (TREE_OPERAND (size, 1))
                            ? last_size : merge_sizes (last_size, first_bit,
                                                       TREE_OPERAND (size, 1),
                                                       1, has_rep),
                            integer_zerop (TREE_OPERAND (size, 2))
                            ? last_size : merge_sizes (last_size, first_bit,
                                                       TREE_OPERAND (size, 2),
                                                       1, has_rep));

  /* We don't need any NON_LVALUE_EXPRs and they can confuse us (especially
     when fed through substitute_in_expr) into thinking that a constant
     size is not constant.  */
  while (TREE_CODE (new_size) == NON_LVALUE_EXPR)
    new_size = TREE_OPERAND (new_size, 0);

  return new_size;
}
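
/* Worked example (editorial, not in the original source): for a plain
   RECORD_TYPE without rep clause, a field at FIRST_BIT 32 with SIZE 16
   produces a new running size of 48 bits; with HAS_REP, the result is the
   MAX of that sum and LAST_SIZE, so a field placed earlier by a rep clause
   can never shrink the accumulated size.  */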

/* Utility function of above to see if OP0 and OP1, both of SIZETYPE, are
   related by the addition of a constant.  Return that constant if so.  */

static tree
compute_related_constant (tree op0, tree op1)
{
  tree op0_var, op1_var;
  tree op0_con = split_plus (op0, &op0_var);
  tree op1_con = split_plus (op1, &op1_var);
  tree result = size_binop (MINUS_EXPR, op0_con, op1_con);

  if (operand_equal_p (op0_var, op1_var, 0))
    return result;
  else if (operand_equal_p (op0, size_binop (PLUS_EXPR, op1_var, result), 0))
    return result;
  else
    return 0;
}
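
/* Worked example (editorial): if OP0 is V + 12 and OP1 is V + 4, the
   variable parts match and the constant 8 is returned; if the variable
   parts differ and cannot be reconciled, 0 (NULL_TREE) is returned.  */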

/* Utility function of above to split a tree OP which may be a sum, into a
   constant part, which is returned, and a variable part, which is stored
   in *PVAR.  *PVAR may be bitsize_zero_node.  All operations must be of
   bitsizetype.  */

static tree
split_plus (tree in, tree *pvar)
{
  /* Strip NOPS in order to ease the tree traversal and maximize the
     potential for constant or plus/minus discovery. We need to be careful
     to always return and set *pvar to bitsizetype trees, but it's worth
     the effort.  */
  STRIP_NOPS (in);

  *pvar = convert (bitsizetype, in);

  if (TREE_CODE (in) == INTEGER_CST)
    {
      *pvar = bitsize_zero_node;
      return convert (bitsizetype, in);
    }
  else if (TREE_CODE (in) == PLUS_EXPR || TREE_CODE (in) == MINUS_EXPR)
    {
      tree lhs_var, rhs_var;
      tree lhs_con = split_plus (TREE_OPERAND (in, 0), &lhs_var);
      tree rhs_con = split_plus (TREE_OPERAND (in, 1), &rhs_var);

      if (lhs_var == TREE_OPERAND (in, 0)
          && rhs_var == TREE_OPERAND (in, 1))
        return bitsize_zero_node;

      *pvar = size_binop (TREE_CODE (in), lhs_var, rhs_var);
      return size_binop (TREE_CODE (in), lhs_con, rhs_con);
    }
  else
    return bitsize_zero_node;
}
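
/* Worked example (editorial): for IN = N + 4 in bitsizetype, the constant 4
   is returned and *PVAR is set to the variable part N (folded from N + 0);
   for a plain INTEGER_CST the whole value is returned and *PVAR is set to
   bitsize_zero_node.  */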

/* Return a FUNCTION_TYPE node.  RETURN_TYPE is the type returned by the
   subprogram.  If it is void_type_node, then we are dealing with a
   procedure, otherwise we are dealing with a function.  PARAM_DECL_LIST is
   a list of PARM_DECL nodes that are the subprogram arguments.  CICO_LIST
   is the copy-in/copy-out list to be stored into TYPE_CICO_LIST.
   RETURNS_UNCONSTRAINED is true if the function returns an unconstrained
   object.  RETURNS_BY_REF is true if the function returns by reference.
   RETURNS_BY_TARGET_PTR is true if the function is to be passed (as its
   first parameter) the address of the place to copy its result.  */

tree
create_subprog_type (tree return_type, tree param_decl_list, tree cico_list,
                     bool returns_unconstrained, bool returns_by_ref,
                     bool returns_by_target_ptr)
{
  /* A chain of TREE_LIST nodes whose TREE_VALUEs are the data type nodes of
     the subprogram formal parameters.  This list is generated by traversing
     the input list of PARM_DECL nodes.  */
  tree param_type_list = NULL;
  tree param_decl;
  tree type;

  for (param_decl = param_decl_list; param_decl;
       param_decl = TREE_CHAIN (param_decl))
    param_type_list = tree_cons (NULL_TREE, TREE_TYPE (param_decl),
                                 param_type_list);

  /* The list of the function parameter types has to be terminated by the void
     type to signal to the back-end that we are not dealing with a variable
     parameter subprogram, but that the subprogram has a fixed number of
     parameters.  */
  param_type_list = tree_cons (NULL_TREE, void_type_node, param_type_list);

  /* The list of argument types has been created in reverse order, so
     nreverse it.  */
  param_type_list = nreverse (param_type_list);

  type = build_function_type (return_type, param_type_list);

  /* TYPE may have been shared since GCC hashes types.  If it has a CICO_LIST
     or the new type should, make a copy of TYPE.  Likewise for
     RETURNS_UNCONSTRAINED and RETURNS_BY_REF.  */
  if (TYPE_CI_CO_LIST (type) || cico_list
      || TYPE_RETURNS_UNCONSTRAINED_P (type) != returns_unconstrained
      || TYPE_RETURNS_BY_REF_P (type) != returns_by_ref
      || TYPE_RETURNS_BY_TARGET_PTR_P (type) != returns_by_target_ptr)
    type = copy_type (type);

  TYPE_CI_CO_LIST (type) = cico_list;
  TYPE_RETURNS_UNCONSTRAINED_P (type) = returns_unconstrained;
  TYPE_RETURNS_BY_REF_P (type) = returns_by_ref;
  TYPE_RETURNS_BY_TARGET_PTR_P (type) = returns_by_target_ptr;
  return type;
}
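
/* Illustration (editorial): for a subprogram with two parameters of types
   T1 and T2, the loop above conses the types in reverse, void_type_node is
   then pushed as the terminator, and nreverse yields the final list
   (T1, T2, void); a procedure is requested by passing void_type_node as
   RETURN_TYPE.  */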

/* Return a copy of TYPE but safe to modify in any way.  */

tree
copy_type (tree type)
{
  tree new_type = copy_node (type);

  /* Unshare the language-specific data.  */
  if (TYPE_LANG_SPECIFIC (type))
    {
      TYPE_LANG_SPECIFIC (new_type) = NULL;
      SET_TYPE_LANG_SPECIFIC (new_type, GET_TYPE_LANG_SPECIFIC (type));
    }

  /* And the contents of the language-specific slot if needed.  */
  if ((INTEGRAL_TYPE_P (type) || TREE_CODE (type) == REAL_TYPE)
      && TYPE_RM_VALUES (type))
    {
      TYPE_RM_VALUES (new_type) = NULL_TREE;
      SET_TYPE_RM_SIZE (new_type, TYPE_RM_SIZE (type));
      SET_TYPE_RM_MIN_VALUE (new_type, TYPE_RM_MIN_VALUE (type));
      SET_TYPE_RM_MAX_VALUE (new_type, TYPE_RM_MAX_VALUE (type));
    }

  /* copy_node clears this field instead of copying it, because it is
     aliased with TREE_CHAIN.  */
  TYPE_STUB_DECL (new_type) = TYPE_STUB_DECL (type);

  TYPE_POINTER_TO (new_type) = 0;
  TYPE_REFERENCE_TO (new_type) = 0;
  TYPE_MAIN_VARIANT (new_type) = new_type;
  TYPE_NEXT_VARIANT (new_type) = 0;

  return new_type;
}

/* Return a subtype of sizetype with range MIN to MAX and whose
   TYPE_INDEX_TYPE is INDEX.  GNAT_NODE is used for the position
   of the associated TYPE_DECL.  */

tree
create_index_type (tree min, tree max, tree index, Node_Id gnat_node)
{
  /* First build a type for the desired range.  */
  tree type = build_index_2_type (min, max);

  /* If this type has the TYPE_INDEX_TYPE we want, return it.  */
  if (TYPE_INDEX_TYPE (type) == index)
    return type;

  /* Otherwise, if TYPE_INDEX_TYPE is set, make a copy.  Note that we have
     no way of sharing these types, but that's only a small hole.  */
  if (TYPE_INDEX_TYPE (type))
    type = copy_type (type);

  SET_TYPE_INDEX_TYPE (type, index);
  create_type_decl (NULL_TREE, type, NULL, true, false, gnat_node);

  return type;
}

/* Return a subtype of TYPE with range MIN to MAX.  If TYPE is NULL,
   sizetype is used.  */

tree
create_range_type (tree type, tree min, tree max)
{
  tree range_type;

  if (type == NULL_TREE)
    type = sizetype;

  /* First build a type with the base range.  */
  range_type
    = build_range_type (type, TYPE_MIN_VALUE (type), TYPE_MAX_VALUE (type));

  min = convert (type, min);
  max = convert (type, max);

  /* If this type has the TYPE_RM_{MIN,MAX}_VALUE we want, return it.  */
  if (TYPE_RM_MIN_VALUE (range_type)
      && TYPE_RM_MAX_VALUE (range_type)
      && operand_equal_p (TYPE_RM_MIN_VALUE (range_type), min, 0)
      && operand_equal_p (TYPE_RM_MAX_VALUE (range_type), max, 0))
    return range_type;

  /* Otherwise, if TYPE_RM_{MIN,MAX}_VALUE is set, make a copy.  */
  if (TYPE_RM_MIN_VALUE (range_type) || TYPE_RM_MAX_VALUE (range_type))
    range_type = copy_type (range_type);

  /* Then set the actual range.  */
  SET_TYPE_RM_MIN_VALUE (range_type, min);
  SET_TYPE_RM_MAX_VALUE (range_type, max);

  return range_type;
}
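
/* Illustration (editorial): create_range_type (NULL_TREE, min, max) yields
   a subtype of sizetype whose TYPE_MIN_VALUE and TYPE_MAX_VALUE keep
   sizetype's base bounds, while TYPE_RM_MIN_VALUE and TYPE_RM_MAX_VALUE
   carry the requested MIN and MAX; only the RM bounds are narrowed.  */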
1249
 
1250
/* Return a TYPE_DECL node suitable for the TYPE_STUB_DECL field of a type.
1251
   TYPE_NAME gives the name of the type and TYPE is a ..._TYPE node giving
1252
   its data type.  */
1253
 
1254
tree
1255
create_type_stub_decl (tree type_name, tree type)
1256
{
1257
  /* Using a named TYPE_DECL ensures that a type name marker is emitted in
1258
     STABS while setting DECL_ARTIFICIAL ensures that no DW_TAG_typedef is
1259
     emitted in DWARF.  */
1260
  tree type_decl = build_decl (input_location,
1261
                               TYPE_DECL, type_name, type);
1262
  DECL_ARTIFICIAL (type_decl) = 1;
1263
  return type_decl;
1264
}
1265
 
1266
/* Return a TYPE_DECL node.  TYPE_NAME gives the name of the type and TYPE
1267
   is a ..._TYPE node giving its data type.  ARTIFICIAL_P is true if this
1268
   is a declaration that was generated by the compiler.  DEBUG_INFO_P is
1269
   true if we need to write debug information about this type.  GNAT_NODE
1270
   is used for the position of the decl.  */
1271
 
1272
tree
1273
create_type_decl (tree type_name, tree type, struct attrib *attr_list,
1274
                  bool artificial_p, bool debug_info_p, Node_Id gnat_node)
1275
{
1276
  enum tree_code code = TREE_CODE (type);
1277
  bool named = TYPE_NAME (type) && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL;
1278
  tree type_decl;
1279
 
1280
  /* Only the builtin TYPE_STUB_DECL should be used for dummy types.  */
1281
  gcc_assert (!TYPE_IS_DUMMY_P (type));
1282
 
1283
  /* If the type hasn't been named yet, we're naming it; preserve an existing
1284
     TYPE_STUB_DECL that has been attached to it for some purpose.  */
1285
  if (!named && TYPE_STUB_DECL (type))
1286
    {
1287
      type_decl = TYPE_STUB_DECL (type);
1288
      DECL_NAME (type_decl) = type_name;
1289
    }
1290
  else
1291
    type_decl = build_decl (input_location,
1292
                            TYPE_DECL, type_name, type);
1293
 
1294
  DECL_ARTIFICIAL (type_decl) = artificial_p;
1295
  gnat_pushdecl (type_decl, gnat_node);
1296
  process_attributes (type_decl, attr_list);
1297
 
1298
  /* If we're naming the type, equate the TYPE_STUB_DECL to the name.
1299
     This causes the name to be also viewed as a "tag" by the debug
1300
     back-end, with the advantage that no DW_TAG_typedef is emitted
1301
     for artificial "tagged" types in DWARF.  */
1302
  if (!named)
1303
    TYPE_STUB_DECL (type) = type_decl;
1304
 
1305
  /* Pass the type declaration to the debug back-end unless this is an
1306
     UNCONSTRAINED_ARRAY_TYPE that the back-end does not support, or a
1307
     type for which debugging information was not requested, or else an
1308
     ENUMERAL_TYPE or RECORD_TYPE (except for fat pointers) which are
1309
     handled separately.  And do not pass dummy types either.  */
1310
  if (code == UNCONSTRAINED_ARRAY_TYPE || !debug_info_p)
1311
    DECL_IGNORED_P (type_decl) = 1;
1312
  else if (code != ENUMERAL_TYPE
1313
           && (code != RECORD_TYPE || TYPE_FAT_POINTER_P (type))
1314
           && !((code == POINTER_TYPE || code == REFERENCE_TYPE)
1315
                && TYPE_IS_DUMMY_P (TREE_TYPE (type)))
1316
           && !(code == RECORD_TYPE
1317
                && TYPE_IS_DUMMY_P
1318
                   (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (type))))))
1319
    rest_of_type_decl_compilation (type_decl);
1320
 
1321
  return type_decl;
1322
}
1323
 
1324
/* Return a VAR_DECL or CONST_DECL node.
1325
 
1326
   VAR_NAME gives the name of the variable.  ASM_NAME is its assembler name
1327
   (if provided).  TYPE is its data type (a GCC ..._TYPE node).  VAR_INIT is
1328
   the GCC tree for an optional initial expression; NULL_TREE if none.
1329
 
1330
   CONST_FLAG is true if this variable is constant, in which case we might
1331
   return a CONST_DECL node unless CONST_DECL_ALLOWED_P is false.
1332
 
1333
   PUBLIC_FLAG is true if this is for a reference to a public entity or for a
1334
   definition to be made visible outside of the current compilation unit, for
1335
   instance variable definitions in a package specification.
1336
 
1337
   EXTERN_FLAG is true when processing an external variable declaration (as
1338
   opposed to a definition: no storage is to be allocated for the variable).
1339
 
1340
   STATIC_FLAG is only relevant when not at top level.  In that case
1341
   it indicates whether to always allocate storage to the variable.
1342
 
1343
   GNAT_NODE is used for the position of the decl.  */
1344
 
1345
tree
1346
create_var_decl_1 (tree var_name, tree asm_name, tree type, tree var_init,
1347
                   bool const_flag, bool public_flag, bool extern_flag,
1348
                   bool static_flag, bool const_decl_allowed_p,
1349
                   struct attrib *attr_list, Node_Id gnat_node)
1350
{
1351
  bool init_const
1352
    = (var_init != 0
1353
       && gnat_types_compatible_p (type, TREE_TYPE (var_init))
1354
       && (global_bindings_p () || static_flag
1355
           ? initializer_constant_valid_p (var_init, TREE_TYPE (var_init)) != 0
1356
           : TREE_CONSTANT (var_init)));
1357
 
1358
  /* Whether we will make TREE_CONSTANT the DECL we produce here, in which
1359
     case the initializer may be used in lieu of the DECL node (as done in
1360
     Identifier_to_gnu).  This is useful to prevent the need of elaboration
1361
     code when an identifier for which such a decl is made is in turn used as
1362
     an initializer.  We used to rely on CONST vs VAR_DECL for this purpose,
1363
     but extra constraints apply to this choice (see below) and are not
1364
     relevant to the distinction we wish to make. */
1365
  bool constant_p = const_flag && init_const;
1366
 
1367
  /* The actual DECL node.  CONST_DECL was initially intended for enumerals
1368
     and may be used for scalars in general but not for aggregates.  */
1369
  tree var_decl
1370
    = build_decl (input_location,
1371
                  (constant_p && const_decl_allowed_p
1372
                   && !AGGREGATE_TYPE_P (type)) ? CONST_DECL : VAR_DECL,
1373
                  var_name, type);
1374
 
1375
  /* If this is external, throw away any initializations (they will be done
1376
     elsewhere) unless this is a constant for which we would like to remain
1377
     able to get the initializer.  If we are defining a global here, leave a
1378
     constant initialization and save any variable elaborations for the
1379
     elaboration routine.  If we are just annotating types, throw away the
1380
     initialization if it isn't a constant.  */
1381
  if ((extern_flag && !constant_p)
1382
      || (type_annotate_only && var_init && !TREE_CONSTANT (var_init)))
1383
    var_init = NULL_TREE;
1384
 
1385
  /* At the global level, an initializer requiring code to be generated
1386
     produces elaboration statements.  Check that such statements are allowed,
1387
     that is, not violating a No_Elaboration_Code restriction.  */
1388
  if (global_bindings_p () && var_init != 0 && !init_const)
1389
    Check_Elaboration_Code_Allowed (gnat_node);
1390
 
1391
  DECL_INITIAL  (var_decl) = var_init;
1392
  TREE_READONLY (var_decl) = const_flag;
1393
  DECL_EXTERNAL (var_decl) = extern_flag;
1394
  TREE_PUBLIC   (var_decl) = public_flag || extern_flag;
1395
  TREE_CONSTANT (var_decl) = constant_p;
1396
  TREE_THIS_VOLATILE (var_decl) = TREE_SIDE_EFFECTS (var_decl)
1397
    = TYPE_VOLATILE (type);
1398
 
1399
  /* Ada doesn't feature Fortran-like COMMON variables so we shouldn't
1400
     try to fiddle with DECL_COMMON.  However, on platforms that don't
1401
     support global BSS sections, uninitialized global variables would
1402
     go in DATA instead, thus increasing the size of the executable.  */
1403
  if (!flag_no_common
1404
      && TREE_CODE (var_decl) == VAR_DECL
1405
      && TREE_PUBLIC (var_decl)
1406
      && !have_global_bss_p ())
1407
    DECL_COMMON (var_decl) = 1;
1408
 
1409
  /* If it's public and not external, always allocate storage for it.
1410
     At the global binding level we need to allocate static storage for the
1411
     variable if and only if it's not external. If we are not at the top level
1412
     we allocate automatic storage unless requested not to.  */
1413
  TREE_STATIC (var_decl)
1414
    = !extern_flag && (public_flag || static_flag || global_bindings_p ());
1415
 
1416
  /* For an external constant whose initializer is not absolute, do not emit
1417
     debug info.  In DWARF this would mean a global relocation in a read-only
1418
     section which runs afoul of the PE-COFF runtime relocation mechanism.  */
1419
  if (extern_flag
1420
      && constant_p
1421
      && initializer_constant_valid_p (var_init, TREE_TYPE (var_init))
1422
           != null_pointer_node)
1423
    DECL_IGNORED_P (var_decl) = 1;
1424
 
1425
  if (TREE_CODE (var_decl) == VAR_DECL)
1426
    {
1427
      if (asm_name)
1428
        SET_DECL_ASSEMBLER_NAME (var_decl, asm_name);
1429
      process_attributes (var_decl, attr_list);
1430
    }
1431
 
1432
  /* Add this decl to the current binding level.  */
1433
  gnat_pushdecl (var_decl, gnat_node);
1434
 
1435
  if (TREE_SIDE_EFFECTS (var_decl))
1436
    TREE_ADDRESSABLE (var_decl) = 1;
1437
 
1438
  if (TREE_CODE (var_decl) != CONST_DECL)
1439
    {
1440
      if (global_bindings_p ())
1441
        rest_of_decl_compilation (var_decl, true, 0);
1442
    }
1443
  else
1444
    expand_decl (var_decl);
1445
 
1446
  return var_decl;
1447
}
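/* Illustrative sketch, not part of the original GNAT sources: the two key
   decisions made by create_var_decl_1 above, restated over plain booleans.
   The helper names var_needs_static_storage_p and var_can_be_const_decl_p
   are hypothetical, for exposition only.  */

#include <stdbool.h>

/* Mirror of: TREE_STATIC (var_decl)
     = !extern_flag && (public_flag || static_flag || global_bindings_p ()).  */
static bool
var_needs_static_storage_p (bool extern_flag, bool public_flag,
                            bool static_flag, bool at_global_level)
{
  return !extern_flag && (public_flag || static_flag || at_global_level);
}

/* Mirror of the CONST_DECL vs. VAR_DECL choice: a CONST_DECL is used only
   for a constant object with a constant initializer, when the caller allows
   it and the type is not an aggregate.  */
static bool
var_can_be_const_decl_p (bool const_flag, bool init_is_constant,
                         bool const_decl_allowed_p, bool is_aggregate_type)
{
  return const_flag && init_is_constant && const_decl_allowed_p
         && !is_aggregate_type;
}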
1448
 
1449
/* Return true if TYPE, an aggregate type, contains (or is) an array.  */
1450
 
1451
static bool
1452
aggregate_type_contains_array_p (tree type)
1453
{
1454
  switch (TREE_CODE (type))
1455
    {
1456
    case RECORD_TYPE:
1457
    case UNION_TYPE:
1458
    case QUAL_UNION_TYPE:
1459
      {
1460
        tree field;
1461
        for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
1462
          if (AGGREGATE_TYPE_P (TREE_TYPE (field))
1463
              && aggregate_type_contains_array_p (TREE_TYPE (field)))
1464
            return true;
1465
        return false;
1466
      }
1467
 
1468
    case ARRAY_TYPE:
1469
      return true;
1470
 
1471
    default:
1472
      gcc_unreachable ();
1473
    }
1474
}
1475
 
1476
/* Return a FIELD_DECL node.  FIELD_NAME is the field's name, FIELD_TYPE is
1477
   its type and RECORD_TYPE is the type of the enclosing record.  PACKED is
1478
   1 if the enclosing record is packed, -1 if it has Component_Alignment of
1479
   Storage_Unit.  If SIZE is nonzero, it is the specified size of the field.
1480
   If POS is nonzero, it is the bit position.  If ADDRESSABLE is nonzero, it
1481
   means we are allowed to take the address of the field; if it is negative,
1482
   we should not make a bitfield, which is used by make_aligning_type.  */
1483
 
1484
tree
1485
create_field_decl (tree field_name, tree field_type, tree record_type,
1486
                   int packed, tree size, tree pos, int addressable)
1487
{
1488
  tree field_decl = build_decl (input_location,
1489
                                FIELD_DECL, field_name, field_type);
1490
 
1491
  DECL_CONTEXT (field_decl) = record_type;
1492
  TREE_READONLY (field_decl) = TYPE_READONLY (field_type);
1493
 
1494
  /* If FIELD_TYPE is BLKmode, we must ensure this is aligned to at least a
1495
     byte boundary since GCC cannot handle less-aligned BLKmode bitfields.
1496
     Likewise for an aggregate without specified position that contains an
1497
     array, because in this case slices of variable length of this array
1498
     must be handled by GCC and variable-sized objects need to be aligned
1499
     to at least a byte boundary.  */
1500
  if (packed && (TYPE_MODE (field_type) == BLKmode
1501
                 || (!pos
1502
                     && AGGREGATE_TYPE_P (field_type)
1503
                     && aggregate_type_contains_array_p (field_type))))
1504
    DECL_ALIGN (field_decl) = BITS_PER_UNIT;
1505
 
1506
  /* If a size is specified, use it.  Otherwise, if the record type is packed
1507
     compute a size to use, which may differ from the object's natural size.
1508
     We always set a size in this case to trigger the checks for bitfield
1509
     creation below, which is typically required when no position has been
1510
     specified.  */
1511
  if (size)
1512
    size = convert (bitsizetype, size);
1513
  else if (packed == 1)
1514
    {
1515
      size = rm_size (field_type);
1516
      if (TYPE_MODE (field_type) == BLKmode)
1517
        size = round_up (size, BITS_PER_UNIT);
1518
    }
1519
 
1520
  /* If we may, according to ADDRESSABLE, make a bitfield if a size is
1521
     specified for two reasons: first if the size differs from the natural
1522
     size.  Second, if the alignment is insufficient.  There are a number of
1523
     ways the latter can be true.
1524
 
1525
     We never make a bitfield if the type of the field has a nonconstant size,
1526
     because no such entity requiring bitfield operations should reach here.
1527
 
1528
     We do *preventively* make a bitfield when there might be the need for it
1529
     but we don't have all the necessary information to decide, as is the case
1530
     of a field with no specified position in a packed record.
1531
 
1532
     We also don't look at STRICT_ALIGNMENT here, and rely on later processing
1533
     in layout_decl or finish_record_type to clear the bit_field indication if
1534
     it is in fact not needed.  */
1535
  if (addressable >= 0
1536
      && size
1537
      && TREE_CODE (size) == INTEGER_CST
1538
      && TREE_CODE (TYPE_SIZE (field_type)) == INTEGER_CST
1539
      && (!tree_int_cst_equal (size, TYPE_SIZE (field_type))
1540
          || (pos && !value_factor_p (pos, TYPE_ALIGN (field_type)))
1541
          || packed
1542
          || (TYPE_ALIGN (record_type) != 0
1543
              && TYPE_ALIGN (record_type) < TYPE_ALIGN (field_type))))
1544
    {
1545
      DECL_BIT_FIELD (field_decl) = 1;
1546
      DECL_SIZE (field_decl) = size;
1547
      if (!packed && !pos)
1548
        {
1549
          if (TYPE_ALIGN (record_type) != 0
1550
              && TYPE_ALIGN (record_type) < TYPE_ALIGN (field_type))
1551
            DECL_ALIGN (field_decl) = TYPE_ALIGN (record_type);
1552
          else
1553
            DECL_ALIGN (field_decl) = TYPE_ALIGN (field_type);
1554
        }
1555
    }
1556
 
1557
  DECL_PACKED (field_decl) = pos ? DECL_BIT_FIELD (field_decl) : packed;
1558
 
1559
  /* Bump the alignment if need be, either for bitfield/packing purposes or
1560
     to satisfy the type requirements if no such consideration applies.  When
1561
     we get the alignment from the type, indicate if this is from an explicit
1562
     user request, which prevents stor-layout from lowering it later on.  */
1563
  {
1564
    unsigned int bit_align
1565
      = (DECL_BIT_FIELD (field_decl) ? 1
1566
         : packed && TYPE_MODE (field_type) != BLKmode ? BITS_PER_UNIT : 0);
1567
 
1568
    if (bit_align > DECL_ALIGN (field_decl))
1569
      DECL_ALIGN (field_decl) = bit_align;
1570
    else if (!bit_align && TYPE_ALIGN (field_type) > DECL_ALIGN (field_decl))
1571
      {
1572
        DECL_ALIGN (field_decl) = TYPE_ALIGN (field_type);
1573
        DECL_USER_ALIGN (field_decl) = TYPE_USER_ALIGN (field_type);
1574
      }
1575
  }
1576
 
1577
  if (pos)
1578
    {
1579
      /* We need to pass in the alignment the DECL is known to have.
1580
         This is the lowest-order bit set in POS, but no more than
1581
         the alignment of the record, if one is specified.  Note
1582
         that an alignment of 0 is taken as infinite.  */
1583
      unsigned int known_align;
1584
 
1585
      if (host_integerp (pos, 1))
1586
        known_align = tree_low_cst (pos, 1) & - tree_low_cst (pos, 1);
1587
      else
1588
        known_align = BITS_PER_UNIT;
1589
 
1590
      if (TYPE_ALIGN (record_type)
1591
          && (known_align == 0 || known_align > TYPE_ALIGN (record_type)))
1592
        known_align = TYPE_ALIGN (record_type);
1593
 
1594
      layout_decl (field_decl, known_align);
1595
      SET_DECL_OFFSET_ALIGN (field_decl,
1596
                             host_integerp (pos, 1) ? BIGGEST_ALIGNMENT
1597
                             : BITS_PER_UNIT);
1598
      pos_from_bit (&DECL_FIELD_OFFSET (field_decl),
1599
                    &DECL_FIELD_BIT_OFFSET (field_decl),
1600
                    DECL_OFFSET_ALIGN (field_decl), pos);
1601
    }
1602
 
1603
  /* In addition to what our caller says, claim the field is addressable if we
1604
     know that its type is not suitable for a nonaliased component.
1605
 
1606
     The field may also be "technically" nonaddressable, meaning that even if
1607
     we attempt to take the field's address we will actually get the address
1608
     of a copy.  This is the case for true bitfields, but the DECL_BIT_FIELD
1609
     value we have at this point is not accurate enough, so we don't account
1610
     for this here and let finish_record_type decide.  */
1611
  if (!addressable && !type_for_nonaliased_component_p (field_type))
1612
    addressable = 1;
1613
 
1614
  DECL_NONADDRESSABLE_P (field_decl) = !addressable;
1615
 
1616
  return field_decl;
1617
}
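/* Illustrative sketch, not part of the original GNAT sources: the
   "lowest-order bit set in POS" computation used by create_field_decl above
   to derive the alignment known to hold at a constant bit position.  For a
   position that is a multiple of 2**k but not of 2**(k+1), pos & -pos yields
   2**k.  The name known_align_from_bit_pos is hypothetical.  */

static unsigned long
known_align_from_bit_pos (unsigned long pos)
{
  /* Examples: 24 -> 8, 12 -> 4, 64 -> 64.  A position of 0 yields 0 and is
     treated as infinitely aligned by the caller, as noted above.  */
  return pos & -pos;
}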
1618
 
1619
/* Return a PARM_DECL node.  PARAM_NAME is the name of the parameter and
1620
   PARAM_TYPE is its type.  READONLY is true if the parameter is readonly
1621
   (either an In parameter or an address of a pass-by-ref parameter).  */
1622
 
1623
tree
1624
create_param_decl (tree param_name, tree param_type, bool readonly)
1625
{
1626
  tree param_decl = build_decl (input_location,
1627
                                PARM_DECL, param_name, param_type);
1628
 
1629
  /* Honor TARGET_PROMOTE_PROTOTYPES like the C compiler, as not doing so
1630
     can lead to various ABI violations.  */
1631
  if (targetm.calls.promote_prototypes (NULL_TREE)
1632
      && INTEGRAL_TYPE_P (param_type)
1633
      && TYPE_PRECISION (param_type) < TYPE_PRECISION (integer_type_node))
1634
    {
1635
      /* We have to be careful about biased types here.  Make a subtype
1636
         of integer_type_node with the proper biasing.  */
1637
      if (TREE_CODE (param_type) == INTEGER_TYPE
1638
          && TYPE_BIASED_REPRESENTATION_P (param_type))
1639
        {
1640
          tree subtype
1641
            = make_unsigned_type (TYPE_PRECISION (integer_type_node));
1642
          TREE_TYPE (subtype) = integer_type_node;
1643
          TYPE_BIASED_REPRESENTATION_P (subtype) = 1;
1644
          SET_TYPE_RM_MIN_VALUE (subtype, TYPE_MIN_VALUE (param_type));
1645
          SET_TYPE_RM_MAX_VALUE (subtype, TYPE_MAX_VALUE (param_type));
1646
          param_type = subtype;
1647
        }
1648
      else
1649
        param_type = integer_type_node;
1650
    }
1651
 
1652
  DECL_ARG_TYPE (param_decl) = param_type;
1653
  TREE_READONLY (param_decl) = readonly;
1654
  return param_decl;
1655
}
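/* Illustrative sketch, not part of the original GNAT sources: what a biased
   representation means for the scalar parameters handled above.  An Ada
   subtype such as "range 100 .. 103" fits in 2 bits when each value is
   stored as an unsigned offset from the lower bound, and create_param_decl
   must preserve that bias when widening the parameter to the precision of
   integer_type_node.  encode_biased and decode_biased are hypothetical
   helpers for exposition only.  */

static unsigned
encode_biased (int value, int lower_bound)
{
  return (unsigned) (value - lower_bound);   /* 100 -> 0, 103 -> 3.  */
}

static int
decode_biased (unsigned stored, int lower_bound)
{
  return lower_bound + (int) stored;         /* 3 -> 103.  */
}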
1656
 
1657
/* Given a DECL and ATTR_LIST, process the listed attributes.  */
1658
 
1659
void
1660
process_attributes (tree decl, struct attrib *attr_list)
1661
{
1662
  for (; attr_list; attr_list = attr_list->next)
1663
    switch (attr_list->type)
1664
      {
1665
      case ATTR_MACHINE_ATTRIBUTE:
1666
        decl_attributes (&decl, tree_cons (attr_list->name, attr_list->args,
1667
                                           NULL_TREE),
1668
                         ATTR_FLAG_TYPE_IN_PLACE);
1669
        break;
1670
 
1671
      case ATTR_LINK_ALIAS:
1672
        if (! DECL_EXTERNAL (decl))
1673
          {
1674
            TREE_STATIC (decl) = 1;
1675
            assemble_alias (decl, attr_list->name);
1676
          }
1677
        break;
1678
 
1679
      case ATTR_WEAK_EXTERNAL:
1680
        if (SUPPORTS_WEAK)
1681
          declare_weak (decl);
1682
        else
1683
          post_error ("?weak declarations not supported on this target",
1684
                      attr_list->error_point);
1685
        break;
1686
 
1687
      case ATTR_LINK_SECTION:
1688
        if (targetm.have_named_sections)
1689
          {
1690
            DECL_SECTION_NAME (decl)
1691
              = build_string (IDENTIFIER_LENGTH (attr_list->name),
1692
                              IDENTIFIER_POINTER (attr_list->name));
1693
            DECL_COMMON (decl) = 0;
1694
          }
1695
        else
1696
          post_error ("?section attributes are not supported for this target",
1697
                      attr_list->error_point);
1698
        break;
1699
 
1700
      case ATTR_LINK_CONSTRUCTOR:
1701
        DECL_STATIC_CONSTRUCTOR (decl) = 1;
1702
        TREE_USED (decl) = 1;
1703
        break;
1704
 
1705
      case ATTR_LINK_DESTRUCTOR:
1706
        DECL_STATIC_DESTRUCTOR (decl) = 1;
1707
        TREE_USED (decl) = 1;
1708
        break;
1709
 
1710
      case ATTR_THREAD_LOCAL_STORAGE:
1711
        DECL_TLS_MODEL (decl) = decl_default_tls_model (decl);
1712
        DECL_COMMON (decl) = 0;
1713
        break;
1714
      }
1715
}
1716
 
1717
/* Record DECL as a global renaming pointer.  */
1718
 
1719
void
1720
record_global_renaming_pointer (tree decl)
1721
{
1722
  gcc_assert (DECL_RENAMED_OBJECT (decl));
1723
  VEC_safe_push (tree, gc, global_renaming_pointers, decl);
1724
}
1725
 
1726
/* Invalidate the global renaming pointers.   */
1727
 
1728
void
1729
invalidate_global_renaming_pointers (void)
1730
{
1731
  unsigned int i;
1732
  tree iter;
1733
 
1734
  for (i = 0; VEC_iterate(tree, global_renaming_pointers, i, iter); i++)
1735
    SET_DECL_RENAMED_OBJECT (iter, NULL_TREE);
1736
 
1737
  VEC_free (tree, gc, global_renaming_pointers);
1738
}
1739
 
1740
/* Return true if VALUE is known to be a multiple of FACTOR, which must be
1741
   a power of 2. */
1742
 
1743
bool
1744
value_factor_p (tree value, HOST_WIDE_INT factor)
1745
{
1746
  if (host_integerp (value, 1))
1747
    return tree_low_cst (value, 1) % factor == 0;
1748
 
1749
  if (TREE_CODE (value) == MULT_EXPR)
1750
    return (value_factor_p (TREE_OPERAND (value, 0), factor)
1751
            || value_factor_p (TREE_OPERAND (value, 1), factor));
1752
 
1753
  return false;
1754
}
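/* Illustrative sketch, not part of the original GNAT sources: the reasoning
   behind the MULT_EXPR case of value_factor_p above, over plain integers.
   A product is a multiple of FACTOR whenever either operand is, so checking
   the operands one at a time is sufficient (though not necessary).  The
   names below are hypothetical.  */

#include <stdbool.h>

static bool
is_multiple_p (long value, long factor)
{
  return value % factor == 0;
}

static bool
product_is_multiple_p (long a, long b, long factor)
{
  /* If either A or B is a multiple of FACTOR, then so is A * B.  */
  return is_multiple_p (a, factor) || is_multiple_p (b, factor);
}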
1755
 
1756
/* Given 2 consecutive field decls PREV_FIELD and CURR_FIELD, return true
1757
   unless we can prove these 2 fields are laid out in such a way that no gap
1758
   exists between the end of PREV_FIELD and the beginning of CURR_FIELD.  OFFSET
1759
   is the distance in bits between the end of PREV_FIELD and the starting
1760
   position of CURR_FIELD. It is ignored if null. */
1761
 
1762
static bool
1763
potential_alignment_gap (tree prev_field, tree curr_field, tree offset)
1764
{
1765
  /* If this is the first field of the record, there cannot be any gap.  */
1766
  if (!prev_field)
1767
    return false;
1768
 
1769
  /* If the previous field is a union type, then return False: The only
1770
     time when such a field is not the last field of the record is when
1771
     there are other components at fixed positions after it (meaning there
1772
     was a rep clause for every field), in which case we don't want the
1773
     alignment constraint to override them. */
1774
  if (TREE_CODE (TREE_TYPE (prev_field)) == QUAL_UNION_TYPE)
1775
    return false;
1776
 
1777
  /* If the distance between the end of prev_field and the beginning of
1778
     curr_field is constant, then there is a gap if the value of this
1779
     constant is not null. */
1780
  if (offset && host_integerp (offset, 1))
1781
    return !integer_zerop (offset);
1782
 
1783
  /* If the size and position of the previous field are constant,
1784
     then check the sum of this size and position. There will be a gap
1785
     iff it is not a multiple of the current field alignment. */
1786
  if (host_integerp (DECL_SIZE (prev_field), 1)
1787
      && host_integerp (bit_position (prev_field), 1))
1788
    return ((tree_low_cst (bit_position (prev_field), 1)
1789
             + tree_low_cst (DECL_SIZE (prev_field), 1))
1790
            % DECL_ALIGN (curr_field) != 0);
1791
 
1792
  /* If both the position and size of the previous field are multiples
1793
     of the current field alignment, there cannot be any gap. */
1794
  if (value_factor_p (bit_position (prev_field), DECL_ALIGN (curr_field))
1795
      && value_factor_p (DECL_SIZE (prev_field), DECL_ALIGN (curr_field)))
1796
    return false;
1797
 
1798
  /* Fallback: return that there may be a potential gap.  */
1799
  return true;
1800
}
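/* Illustrative sketch, not part of the original GNAT sources: the
   constant-size case of the gap test above, over plain integers.  No gap
   can be ruled out unless the bit just past PREV_FIELD falls on a multiple
   of CURR_FIELD's alignment.  has_alignment_gap_p is a hypothetical name.  */

#include <stdbool.h>

static bool
has_alignment_gap_p (unsigned long prev_bit_pos, unsigned long prev_bit_size,
                     unsigned long curr_align)
{
  /* Mirror of: (bit_position (prev) + DECL_SIZE (prev)) % DECL_ALIGN (curr).
     Example: a field at bit 0 of size 24 followed by a 32-bit-aligned field
     leaves an 8-bit gap, since (0 + 24) % 32 != 0.  */
  return (prev_bit_pos + prev_bit_size) % curr_align != 0;
}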
1801
 
1802
/* Returns a LABEL_DECL node for LABEL_NAME.  */
1803
 
1804
tree
1805
create_label_decl (tree label_name)
1806
{
1807
  tree label_decl = build_decl (input_location,
1808
                                LABEL_DECL, label_name, void_type_node);
1809
 
1810
  DECL_CONTEXT (label_decl)     = current_function_decl;
1811
  DECL_MODE (label_decl)        = VOIDmode;
1812
  DECL_SOURCE_LOCATION (label_decl) = input_location;
1813
 
1814
  return label_decl;
1815
}
1816
 
1817
/* Returns a FUNCTION_DECL node.  SUBPROG_NAME is the name of the subprogram,
1818
   ASM_NAME is its assembler name, SUBPROG_TYPE is its type (a FUNCTION_TYPE
1819
   node), PARAM_DECL_LIST is the list of the subprogram arguments (a list of
1820
   PARM_DECL nodes chained through the TREE_CHAIN field).
1821
 
1822
   INLINE_FLAG, PUBLIC_FLAG, EXTERN_FLAG, and ATTR_LIST are used to set the
1823
   appropriate fields in the FUNCTION_DECL.  GNAT_NODE gives the location.  */
1824
 
1825
tree
1826
create_subprog_decl (tree subprog_name, tree asm_name,
1827
                     tree subprog_type, tree param_decl_list, bool inline_flag,
1828
                     bool public_flag, bool extern_flag,
1829
                     struct attrib *attr_list, Node_Id gnat_node)
1830
{
1831
  tree return_type  = TREE_TYPE (subprog_type);
1832
  tree subprog_decl = build_decl (input_location,
1833
                                  FUNCTION_DECL, subprog_name, subprog_type);
1834
 
1835
  /* If this is a non-inline function nested inside an inlined external
1836
     function, we cannot honor both requests without cloning the nested
1837
     function in the current unit since it is private to the other unit.
1838
     We could inline the nested function as well but it's probably better
1839
     to err on the side of too little inlining.  */
1840
  if (!inline_flag
1841
      && current_function_decl
1842
      && DECL_DECLARED_INLINE_P (current_function_decl)
1843
      && DECL_EXTERNAL (current_function_decl))
1844
    DECL_DECLARED_INLINE_P (current_function_decl) = 0;
1845
 
1846
  DECL_EXTERNAL (subprog_decl)  = extern_flag;
1847
  TREE_PUBLIC (subprog_decl)    = public_flag;
1848
  TREE_STATIC (subprog_decl)    = 1;
1849
  TREE_READONLY (subprog_decl)  = TYPE_READONLY (subprog_type);
1850
  TREE_THIS_VOLATILE (subprog_decl) = TYPE_VOLATILE (subprog_type);
1851
  TREE_SIDE_EFFECTS (subprog_decl) = TYPE_VOLATILE (subprog_type);
1852
  DECL_DECLARED_INLINE_P (subprog_decl) = inline_flag;
1853
  DECL_ARGUMENTS (subprog_decl) = param_decl_list;
1854
  DECL_RESULT (subprog_decl)    = build_decl (input_location,
1855
                                              RESULT_DECL, 0, return_type);
1856
  DECL_ARTIFICIAL (DECL_RESULT (subprog_decl)) = 1;
1857
  DECL_IGNORED_P (DECL_RESULT (subprog_decl)) = 1;
1858
 
1859
  /* TREE_ADDRESSABLE is set on the result type to request the use of the
1860
     target by-reference return mechanism.  This is not supported all the
1861
     way down to RTL expansion with GCC 4, which ICEs on temporary creation
1862
     attempts with such a type and expects DECL_BY_REFERENCE to be set on
1863
     the RESULT_DECL instead - see gnat_genericize for more details.  */
1864
  if (TREE_ADDRESSABLE (TREE_TYPE (DECL_RESULT (subprog_decl))))
1865
    {
1866
      tree result_decl = DECL_RESULT (subprog_decl);
1867
 
1868
      TREE_ADDRESSABLE (TREE_TYPE (result_decl)) = 0;
1869
      DECL_BY_REFERENCE (result_decl) = 1;
1870
    }
1871
 
1872
  if (asm_name)
1873
    {
1874
      SET_DECL_ASSEMBLER_NAME (subprog_decl, asm_name);
1875
 
1876
      /* The expand_main_function circuitry expects "main_identifier_node" to
1877
         designate the DECL_NAME of the 'main' entry point, in turn expected
1878
         to be declared as the "main" function literally by default.  Ada
1879
         program entry points are typically declared with a different name
1880
         within the binder generated file, exported as 'main' to satisfy the
1881
         system expectations.  Force main_identifier_node in this case.  */
1882
      if (asm_name == main_identifier_node)
1883
        DECL_NAME (subprog_decl) = main_identifier_node;
1884
    }
1885
 
1886
  process_attributes (subprog_decl, attr_list);
1887
 
1888
  /* Add this decl to the current binding level.  */
1889
  gnat_pushdecl (subprog_decl, gnat_node);
1890
 
1891
  /* Output the assembler code and/or RTL for the declaration.  */
1892
  rest_of_decl_compilation (subprog_decl, global_bindings_p (), 0);
1893
 
1894
  return subprog_decl;
1895
}
1896
 
1897
/* Set up the framework for generating code for SUBPROG_DECL, a subprogram
1898
   body.  This routine needs to be invoked before processing the declarations
1899
   appearing in the subprogram.  */
1900
 
1901
void
1902
begin_subprog_body (tree subprog_decl)
1903
{
1904
  tree param_decl;
1905
 
1906
  current_function_decl = subprog_decl;
1907
  announce_function (subprog_decl);
1908
 
1909
  /* Enter a new binding level and show that all the parameters belong to
1910
     this function.  */
1911
  gnat_pushlevel ();
1912
  for (param_decl = DECL_ARGUMENTS (subprog_decl); param_decl;
1913
       param_decl = TREE_CHAIN (param_decl))
1914
    DECL_CONTEXT (param_decl) = subprog_decl;
1915
 
1916
  make_decl_rtl (subprog_decl);
1917
 
1918
  /* We handle pending sizes via the elaboration of types, so we don't need to
1919
     save them.  This causes them to be marked as part of the outer function
1920
     and then discarded.  */
1921
  get_pending_sizes ();
1922
}
1923
 
1924
 
1925
/* Helper for the genericization callback.  Return a dereference of VAL
1926
   if it is of a reference type.  */
1927
 
1928
static tree
1929
convert_from_reference (tree val)
1930
{
1931
  tree value_type, ref;
1932
 
1933
  if (TREE_CODE (TREE_TYPE (val)) != REFERENCE_TYPE)
1934
    return val;
1935
 
1936
  value_type =  TREE_TYPE (TREE_TYPE (val));
1937
  ref = build1 (INDIRECT_REF, value_type, val);
1938
 
1939
  /* See if what we reference is CONST or VOLATILE, which requires
1940
     looking into array types to get to the component type.  */
1941
 
1942
  while (TREE_CODE (value_type) == ARRAY_TYPE)
1943
    value_type = TREE_TYPE (value_type);
1944
 
1945
  TREE_READONLY (ref)
1946
    = (TYPE_QUALS (value_type) & TYPE_QUAL_CONST);
1947
  TREE_THIS_VOLATILE (ref)
1948
    = (TYPE_QUALS (value_type) & TYPE_QUAL_VOLATILE);
1949
 
1950
  TREE_SIDE_EFFECTS (ref)
1951
    = (TREE_THIS_VOLATILE (ref) || TREE_SIDE_EFFECTS (val));
1952
 
1953
  return ref;
1954
}
1955
 
1956
/* Helper for the genericization callback.  Returns true if T denotes
1957
   a RESULT_DECL with DECL_BY_REFERENCE set.  */
1958
 
1959
static inline bool
1960
is_byref_result (tree t)
1961
{
1962
  return (TREE_CODE (t) == RESULT_DECL && DECL_BY_REFERENCE (t));
1963
}
1964
 
1965
 
1966
/* Tree walking callback for gnat_genericize. Currently ...
1967
 
1968
   o Adjust references to the function's DECL_RESULT if it is marked
1969
     DECL_BY_REFERENCE and so has had its type turned into a reference
1970
     type at the end of the function compilation.  */
1971
 
1972
static tree
1973
gnat_genericize_r (tree *stmt_p, int *walk_subtrees, void *data)
1974
{
1975
  /* This implementation is modeled after what the C++ front-end is
1976
     doing, which forms the basis of the downstream passes' behavior.  */
1977
 
1978
  tree stmt = *stmt_p;
1979
  struct pointer_set_t *p_set = (struct pointer_set_t*) data;
1980
 
1981
  /* If we have a direct mention of the result decl, dereference.  */
1982
  if (is_byref_result (stmt))
1983
    {
1984
      *stmt_p = convert_from_reference (stmt);
1985
      *walk_subtrees = 0;
1986
      return NULL;
1987
    }
1988
 
1989
  /* Otherwise, no need to walk the same tree twice.  */
1990
  if (pointer_set_contains (p_set, stmt))
1991
    {
1992
      *walk_subtrees = 0;
1993
      return NULL_TREE;
1994
    }
1995
 
1996
  /* If we are taking the address of what now is a reference, just get the
1997
     reference value.  */
1998
  if (TREE_CODE (stmt) == ADDR_EXPR
1999
      && is_byref_result (TREE_OPERAND (stmt, 0)))
2000
    {
2001
      *stmt_p = convert (TREE_TYPE (stmt), TREE_OPERAND (stmt, 0));
2002
      *walk_subtrees = 0;
2003
    }
2004
 
2005
  /* Don't dereference a by-reference RESULT_DECL inside a RETURN_EXPR.  */
2006
  else if (TREE_CODE (stmt) == RETURN_EXPR
2007
           && TREE_OPERAND (stmt, 0)
2008
           && is_byref_result (TREE_OPERAND (stmt, 0)))
2009
    *walk_subtrees = 0;
2010
 
2011
  /* Don't look inside trees that cannot embed references of interest.  */
2012
  else if (IS_TYPE_OR_DECL_P (stmt))
2013
    *walk_subtrees = 0;
2014
 
2015
  pointer_set_insert (p_set, *stmt_p);
2016
 
2017
  return NULL;
2018
}
2019
 
2020
/* Perform lowering of Ada trees to GENERIC. In particular:
2021
 
2022
   o Turn a DECL_BY_REFERENCE RESULT_DECL into a real by-reference decl
2023
     and adjust all the references to this decl accordingly.  */
2024
 
2025
static void
2026
gnat_genericize (tree fndecl)
2027
{
2028
  /* Prior to GCC 4, an explicit By_Reference result mechanism for a function
2029
     was handled by simply setting TREE_ADDRESSABLE on the result type.
2030
     Everything required to actually pass by invisible ref using the target
2031
     mechanism (e.g. extra parameter) was handled at RTL expansion time.
2032
 
2033
     This doesn't work with GCC 4 any more for several reasons.  First, the
2034
     gimplification process might need the creation of temporaries of this
2035
     type, and the gimplifier ICEs on such attempts.  Second, the middle-end
2036
     now relies on a different attribute for such cases (DECL_BY_REFERENCE on
2037
     RESULT/PARM_DECLs), and expects the user invisible by-reference-ness to
2038
     be explicitly accounted for by the front-end in the function body.
2039
 
2040
     We achieve the complete transformation in two steps:
2041
 
2042
     1/ create_subprog_decl performs early attribute tweaks: it clears
2043
        TREE_ADDRESSABLE from the result type and sets DECL_BY_REFERENCE on
2044
        the result decl.  The former ensures that the bit isn't set in the GCC
2045
        tree saved for the function, so prevents ICEs on temporary creation.
2046
        The latter we use here to trigger the rest of the processing.
2047
 
2048
     2/ This function performs the type transformation on the result decl
2049
        and adjusts all the references to this decl from the function body
2050
        accordingly.
2051
 
2052
     Clearing TREE_ADDRESSABLE from the type differs from the C++ front-end
2053
     strategy, which escapes the gimplifier temporary creation issues by
2054
     creating its own temporaries using TARGET_EXPR nodes.  Our way relies
2055
     on simple specific support code in aggregate_value_p to look at the
2056
     target function result decl explicitly.  */
2057
 
2058
  struct pointer_set_t *p_set;
2059
  tree decl_result = DECL_RESULT (fndecl);
2060
 
2061
  if (!DECL_BY_REFERENCE (decl_result))
2062
    return;
2063
 
2064
  /* Make the DECL_RESULT explicitly by-reference and adjust all the
2065
     occurrences in the function body using the common tree-walking facility.
2066
     We want to see every occurrence of the result decl to adjust the
2067
     referencing tree, so need to use our own pointer set to control which
2068
     trees should be visited again or not.  */
2069
 
2070
  p_set = pointer_set_create ();
2071
 
2072
  TREE_TYPE (decl_result) = build_reference_type (TREE_TYPE (decl_result));
2073
  TREE_ADDRESSABLE (decl_result) = 0;
2074
  relayout_decl (decl_result);
2075
 
2076
  walk_tree (&DECL_SAVED_TREE (fndecl), gnat_genericize_r, p_set, NULL);
2077
 
2078
  pointer_set_destroy (p_set);
2079
}
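/* Illustrative sketch, not part of the original GNAT sources: the
   source-level effect of the by-reference transformation performed by
   gnat_genericize above, shown on plain C.  Once the result is returned
   through an invisible reference, every mention of the result in the body
   becomes a dereference, which is what gnat_genericize_r does by wrapping
   RESULT_DECL occurrences in an INDIRECT_REF.  big_t, make_big and
   make_big_byref are hypothetical names for exposition only.  */

typedef struct { char data[1024]; } big_t;

/* Before: the result object is used directly.  */
static big_t
make_big (char c)
{
  big_t result;
  result.data[0] = c;
  return result;
}

/* After (conceptually): the result is a reference and its uses are
   rewritten into dereferences.  */
static void
make_big_byref (big_t *result, char c)
{
  (*result).data[0] = c;
}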
2080
 
2081
/* Finish the definition of the current subprogram BODY and finalize it.  */
2082
 
2083
void
2084
end_subprog_body (tree body)
2085
{
2086
  tree fndecl = current_function_decl;
2087
 
2088
  /* Mark the BLOCK for this level as being for this function and pop the
2089
     level.  Since the vars in it are the parameters, clear them.  */
2090
  BLOCK_VARS (current_binding_level->block) = 0;
2091
  BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
2092
  DECL_INITIAL (fndecl) = current_binding_level->block;
2093
  gnat_poplevel ();
2094
 
2095
  /* We handle pending sizes via the elaboration of types, so we don't
2096
     need to save them.  */
2097
  get_pending_sizes ();
2098
 
2099
  /* Mark the RESULT_DECL as being in this subprogram. */
2100
  DECL_CONTEXT (DECL_RESULT (fndecl)) = fndecl;
2101
 
2102
  DECL_SAVED_TREE (fndecl) = body;
2103
 
2104
  current_function_decl = DECL_CONTEXT (fndecl);
2105
  set_cfun (NULL);
2106
 
2107
  /* We cannot track the location of errors past this point.  */
2108
  error_gnat_node = Empty;
2109
 
2110
  /* If we're only annotating types, don't actually compile this function.  */
2111
  if (type_annotate_only)
2112
    return;
2113
 
2114
  /* Perform the required pre-gimplification transformations on the tree.  */
2115
  gnat_genericize (fndecl);
2116
 
2117
  /* Dump functions before gimplification.  */
2118
  dump_function (TDI_original, fndecl);
2119
 
2120
  /* ??? This special handling of nested functions is probably obsolete.  */
2121
  if (!DECL_CONTEXT (fndecl))
2122
    cgraph_finalize_function (fndecl, false);
2123
  else
2124
    /* Register this function with cgraph just far enough to get it
2125
       added to our parent's nested function list.  */
2126
    (void) cgraph_node (fndecl);
2127
}
2128
 
2129
tree
2130
gnat_builtin_function (tree decl)
2131
{
2132
  gnat_pushdecl (decl, Empty);
2133
  return decl;
2134
}
2135
 
2136
/* Return an integer type with the number of bits of precision given by
2137
   PRECISION.  UNSIGNEDP is nonzero if the type is unsigned; otherwise
2138
   it is a signed type.  */
2139
 
2140
tree
2141
gnat_type_for_size (unsigned precision, int unsignedp)
2142
{
2143
  tree t;
2144
  char type_name[20];
2145
 
2146
  if (precision <= 2 * MAX_BITS_PER_WORD
2147
      && signed_and_unsigned_types[precision][unsignedp])
2148
    return signed_and_unsigned_types[precision][unsignedp];
2149
 
2150
  if (unsignedp)
2151
    t = make_unsigned_type (precision);
2152
  else
2153
    t = make_signed_type (precision);
2154
 
2155
  if (precision <= 2 * MAX_BITS_PER_WORD)
2156
    signed_and_unsigned_types[precision][unsignedp] = t;
2157
 
2158
  if (!TYPE_NAME (t))
2159
    {
2160
      sprintf (type_name, "%sSIGNED_%d", unsignedp ? "UN" : "", precision);
2161
      TYPE_NAME (t) = get_identifier (type_name);
2162
    }
2163
 
2164
  return t;
2165
}
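/* Illustrative sketch, not part of the original GNAT sources: the
   memoization pattern used by gnat_type_for_size above, over a toy type
   record.  Nodes are cached by (precision, signedness) so that repeated
   requests return the same node.  toy_type, toy_type_for_size and
   TOY_MAX_PREC are hypothetical names; uncached precisions get no overflow
   handling here.  */

#define TOY_MAX_PREC 128

typedef struct { unsigned precision; int unsignedp; } toy_type;

static toy_type *toy_cache[TOY_MAX_PREC + 1][2];

static toy_type *
toy_type_for_size (unsigned precision, int unsignedp)
{
  static toy_type pool[2 * (TOY_MAX_PREC + 1)];
  static unsigned used;
  toy_type *t;

  /* Return the cached node for this (precision, signedness) pair if one was
     already built, like signed_and_unsigned_types above.  */
  if (precision <= TOY_MAX_PREC && toy_cache[precision][unsignedp])
    return toy_cache[precision][unsignedp];

  t = &pool[used++];
  t->precision = precision;
  t->unsignedp = unsignedp;

  if (precision <= TOY_MAX_PREC)
    toy_cache[precision][unsignedp] = t;
  return t;
}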
2166
 
2167
/* Likewise for floating-point types.  */
2168
 
2169
static tree
2170
float_type_for_precision (int precision, enum machine_mode mode)
2171
{
2172
  tree t;
2173
  char type_name[20];
2174
 
2175
  if (float_types[(int) mode])
2176
    return float_types[(int) mode];
2177
 
2178
  float_types[(int) mode] = t = make_node (REAL_TYPE);
2179
  TYPE_PRECISION (t) = precision;
2180
  layout_type (t);
2181
 
2182
  gcc_assert (TYPE_MODE (t) == mode);
2183
  if (!TYPE_NAME (t))
2184
    {
2185
      sprintf (type_name, "FLOAT_%d", precision);
2186
      TYPE_NAME (t) = get_identifier (type_name);
2187
    }
2188
 
2189
  return t;
2190
}
2191
 
2192
/* Return a data type that has machine mode MODE.  UNSIGNEDP selects
2193
   an unsigned type; otherwise a signed type is returned.  */
2194
 
2195
tree
2196
gnat_type_for_mode (enum machine_mode mode, int unsignedp)
2197
{
2198
  if (mode == BLKmode)
2199
    return NULL_TREE;
2200
 
2201
  if (mode == VOIDmode)
2202
    return void_type_node;
2203
 
2204
  if (COMPLEX_MODE_P (mode))
2205
    return NULL_TREE;
2206
 
2207
  if (SCALAR_FLOAT_MODE_P (mode))
2208
    return float_type_for_precision (GET_MODE_PRECISION (mode), mode);
2209
 
2210
  if (SCALAR_INT_MODE_P (mode))
2211
    return gnat_type_for_size (GET_MODE_BITSIZE (mode), unsignedp);
2212
 
2213
  if (VECTOR_MODE_P (mode))
2214
    {
2215
      enum machine_mode inner_mode = GET_MODE_INNER (mode);
2216
      tree inner_type = gnat_type_for_mode (inner_mode, unsignedp);
2217
      if (inner_type)
2218
        return build_vector_type_for_mode (inner_type, mode);
2219
    }
2220
 
2221
  return NULL_TREE;
2222
}
2223
 
2224
/* Return the unsigned version of a TYPE_NODE, a scalar type.  */
2225
 
2226
tree
2227
gnat_unsigned_type (tree type_node)
2228
{
2229
  tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 1);
2230
 
2231
  if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
2232
    {
2233
      type = copy_node (type);
2234
      TREE_TYPE (type) = type_node;
2235
    }
2236
  else if (TREE_TYPE (type_node)
2237
           && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
2238
           && TYPE_MODULAR_P (TREE_TYPE (type_node)))
2239
    {
2240
      type = copy_node (type);
2241
      TREE_TYPE (type) = TREE_TYPE (type_node);
2242
    }
2243
 
2244
  return type;
2245
}
2246
 
2247
/* Return the signed version of a TYPE_NODE, a scalar type.  */
2248
 
2249
tree
2250
gnat_signed_type (tree type_node)
2251
{
2252
  tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 0);
2253
 
2254
  if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
2255
    {
2256
      type = copy_node (type);
2257
      TREE_TYPE (type) = type_node;
2258
    }
2259
  else if (TREE_TYPE (type_node)
2260
           && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
2261
           && TYPE_MODULAR_P (TREE_TYPE (type_node)))
2262
    {
2263
      type = copy_node (type);
2264
      TREE_TYPE (type) = TREE_TYPE (type_node);
2265
    }
2266
 
2267
  return type;
2268
}
2269
 
2270
/* Return 1 if the types T1 and T2 are compatible, i.e. if they can be
2271
   transparently converted to each other.  */
2272
 
2273
int
2274
gnat_types_compatible_p (tree t1, tree t2)
2275
{
2276
  enum tree_code code;
2277
 
2278
  /* This is the default criterion.  */
2279
  if (TYPE_MAIN_VARIANT (t1) == TYPE_MAIN_VARIANT (t2))
2280
    return 1;
2281
 
2282
  /* We only check structural equivalence here.  */
2283
  if ((code = TREE_CODE (t1)) != TREE_CODE (t2))
2284
    return 0;
2285
 
2286
  /* Vector types are also compatible if they have the same number of subparts
2287
     and the same form of (scalar) element type.  */
2288
  if (code == VECTOR_TYPE
2289
      && TYPE_VECTOR_SUBPARTS (t1) == TYPE_VECTOR_SUBPARTS (t2)
2290
      && TREE_CODE (TREE_TYPE (t1)) == TREE_CODE (TREE_TYPE (t2))
2291
      && TYPE_PRECISION (TREE_TYPE (t1)) == TYPE_PRECISION (TREE_TYPE (t2)))
2292
    return 1;
2293
 
2294
  /* Array types are also compatible if they are constrained and have
2295
     the same component type and the same domain.  */
2296
  if (code == ARRAY_TYPE
2297
      && TREE_TYPE (t1) == TREE_TYPE (t2)
2298
      && (TYPE_DOMAIN (t1) == TYPE_DOMAIN (t2)
2299
          || (TYPE_DOMAIN (t1)
2300
              && TYPE_DOMAIN (t2)
2301
              && tree_int_cst_equal (TYPE_MIN_VALUE (TYPE_DOMAIN (t1)),
2302
                                     TYPE_MIN_VALUE (TYPE_DOMAIN (t2)))
2303
              && tree_int_cst_equal (TYPE_MAX_VALUE (TYPE_DOMAIN (t1)),
2304
                                     TYPE_MAX_VALUE (TYPE_DOMAIN (t2))))))
2305
    return 1;
2306
 
2307
  /* Padding record types are also compatible if they pad the same
2308
     type and have the same constant size.  */
2309
  if (code == RECORD_TYPE
2310
      && TYPE_PADDING_P (t1) && TYPE_PADDING_P (t2)
2311
      && TREE_TYPE (TYPE_FIELDS (t1)) == TREE_TYPE (TYPE_FIELDS (t2))
2312
      && tree_int_cst_equal (TYPE_SIZE (t1), TYPE_SIZE (t2)))
2313
    return 1;
2314
 
2315
  return 0;
2316
}
2317
 
2318
/* EXP is an expression for the size of an object.  If this size contains
2319
   discriminant references, replace them with the maximum (if MAX_P) or
2320
   minimum (if !MAX_P) possible value of the discriminant.  */
2321
 
2322
tree
2323
max_size (tree exp, bool max_p)
2324
{
2325
  enum tree_code code = TREE_CODE (exp);
2326
  tree type = TREE_TYPE (exp);
2327
 
2328
  switch (TREE_CODE_CLASS (code))
2329
    {
2330
    case tcc_declaration:
2331
    case tcc_constant:
2332
      return exp;
2333
 
2334
    case tcc_vl_exp:
2335
      if (code == CALL_EXPR)
2336
        {
2337
          tree t, *argarray;
2338
          int n, i;
2339
 
2340
          t = maybe_inline_call_in_expr (exp);
2341
          if (t)
2342
            return max_size (t, max_p);
2343
 
2344
          n = call_expr_nargs (exp);
2345
          gcc_assert (n > 0);
2346
          argarray = (tree *) alloca (n * sizeof (tree));
2347
          for (i = 0; i < n; i++)
2348
            argarray[i] = max_size (CALL_EXPR_ARG (exp, i), max_p);
2349
          return build_call_array (type, CALL_EXPR_FN (exp), n, argarray);
2350
        }
2351
      break;
2352
 
2353
    case tcc_reference:
2354
      /* If this contains a PLACEHOLDER_EXPR, it is the thing we want to
2355
         modify.  Otherwise, we treat it like a variable.  */
2356
      if (!CONTAINS_PLACEHOLDER_P (exp))
2357
        return exp;
2358
 
2359
      type = TREE_TYPE (TREE_OPERAND (exp, 1));
2360
      return
2361
        max_size (max_p ? TYPE_MAX_VALUE (type) : TYPE_MIN_VALUE (type), true);
2362
 
2363
    case tcc_comparison:
2364
      return max_p ? size_one_node : size_zero_node;
2365
 
2366
    case tcc_unary:
2367
    case tcc_binary:
2368
    case tcc_expression:
2369
      switch (TREE_CODE_LENGTH (code))
2370
        {
2371
        case 1:
2372
          if (code == NON_LVALUE_EXPR)
2373
            return max_size (TREE_OPERAND (exp, 0), max_p);
2374
          else
2375
            return
2376
              fold_build1 (code, type,
2377
                           max_size (TREE_OPERAND (exp, 0),
2378
                                     code == NEGATE_EXPR ? !max_p : max_p));
2379
 
2380
        case 2:
2381
          if (code == COMPOUND_EXPR)
2382
            return max_size (TREE_OPERAND (exp, 1), max_p);
2383
 
2384
          /* Calculate "(A ? B : C) - D" as "A ? B - D : C - D" which
2385
             may provide a tighter bound on max_size.  */
2386
          if (code == MINUS_EXPR
2387
              && TREE_CODE (TREE_OPERAND (exp, 0)) == COND_EXPR)
2388
            {
2389
              tree lhs = fold_build2 (MINUS_EXPR, type,
2390
                                      TREE_OPERAND (TREE_OPERAND (exp, 0), 1),
2391
                                      TREE_OPERAND (exp, 1));
2392
              tree rhs = fold_build2 (MINUS_EXPR, type,
2393
                                      TREE_OPERAND (TREE_OPERAND (exp, 0), 2),
2394
                                      TREE_OPERAND (exp, 1));
2395
              return fold_build2 (max_p ? MAX_EXPR : MIN_EXPR, type,
2396
                                  max_size (lhs, max_p),
2397
                                  max_size (rhs, max_p));
2398
            }
2399
 
2400
          {
2401
            tree lhs = max_size (TREE_OPERAND (exp, 0), max_p);
2402
            tree rhs = max_size (TREE_OPERAND (exp, 1),
2403
                                 code == MINUS_EXPR ? !max_p : max_p);
2404
 
2405
            /* Special-case wanting the maximum value of a MIN_EXPR.
2406
               In that case, if one side overflows, return the other.
2407
               sizetype is signed, but we know sizes are non-negative.
2408
               Likewise, handle a MINUS_EXPR or PLUS_EXPR with the LHS
2409
               overflowing or the maximum possible value and the RHS
2410
               a variable.  */
2411
            if (max_p
2412
                && code == MIN_EXPR
2413
                && TREE_CODE (rhs) == INTEGER_CST
2414
                && TREE_OVERFLOW (rhs))
2415
              return lhs;
2416
            else if (max_p
2417
                     && code == MIN_EXPR
2418
                     && TREE_CODE (lhs) == INTEGER_CST
2419
                     && TREE_OVERFLOW (lhs))
2420
              return rhs;
2421
            else if ((code == MINUS_EXPR || code == PLUS_EXPR)
2422
                     && ((TREE_CODE (lhs) == INTEGER_CST
2423
                          && TREE_OVERFLOW (lhs))
2424
                         || operand_equal_p (lhs, TYPE_MAX_VALUE (type), 0))
2425
                     && !TREE_CONSTANT (rhs))
2426
              return lhs;
2427
            else
2428
              return fold_build2 (code, type, lhs, rhs);
2429
          }
2430
 
2431
        case 3:
2432
          if (code == SAVE_EXPR)
2433
            return exp;
2434
          else if (code == COND_EXPR)
2435
            return fold_build2 (max_p ? MAX_EXPR : MIN_EXPR, type,
2436
                                max_size (TREE_OPERAND (exp, 1), max_p),
2437
                                max_size (TREE_OPERAND (exp, 2), max_p));
2438
        }
2439
 
2440
      /* Other tree classes cannot happen.  */
2441
    default:
2442
      break;
2443
    }
2444
 
2445
  gcc_unreachable ();
2446
}
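/* Illustrative sketch, not part of the original GNAT sources: the bound
   propagation rules applied by max_size above, written out for plain
   integer intervals.  The maximum of a difference combines the maximum of
   the minuend with the minimum of the subtrahend, and a conditional is
   bounded by the larger (or smaller) of its two arms.  interval_t and the
   helpers below are hypothetical names.  */

typedef struct { long lo, hi; } interval_t;

/* Mirror of: max_size (A - B, max_p)
     = max_size (A, max_p) - max_size (B, !max_p).  */
static long
interval_bound_minus (interval_t a, interval_t b, int max_p)
{
  return max_p ? a.hi - b.lo : a.lo - b.hi;
}

/* Mirror of: max_size (A ? B : C, max_p)
     = MAX/MIN (max_size (B, max_p), max_size (C, max_p)).  */
static long
interval_bound_cond (interval_t then_arm, interval_t else_arm, int max_p)
{
  long b = max_p ? then_arm.hi : then_arm.lo;
  long c = max_p ? else_arm.hi : else_arm.lo;
  return max_p ? (b > c ? b : c) : (b < c ? b : c);
}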
2447
 
2448
/* Build a template of type TEMPLATE_TYPE from the array bounds of ARRAY_TYPE.
2449
   EXPR is an expression that we can use to locate any PLACEHOLDER_EXPRs.
2450
   Return a constructor for the template.  */
2451
 
2452
tree
2453
build_template (tree template_type, tree array_type, tree expr)
2454
{
2455
  tree template_elts = NULL_TREE;
2456
  tree bound_list = NULL_TREE;
2457
  tree field;
2458
 
2459
  while (TREE_CODE (array_type) == RECORD_TYPE
2460
         && (TYPE_PADDING_P (array_type)
2461
             || TYPE_JUSTIFIED_MODULAR_P (array_type)))
2462
    array_type = TREE_TYPE (TYPE_FIELDS (array_type));
2463
 
2464
  if (TREE_CODE (array_type) == ARRAY_TYPE
2465
      || (TREE_CODE (array_type) == INTEGER_TYPE
2466
          && TYPE_HAS_ACTUAL_BOUNDS_P (array_type)))
2467
    bound_list = TYPE_ACTUAL_BOUNDS (array_type);
2468
 
2469
  /* First make the list for a CONSTRUCTOR for the template.  Go down the
2470
     field list of the template instead of the type chain because this
2471
     array might be an Ada array of arrays and we can't tell where the
2472
     nested arrays stop being the underlying object.  */
2473
 
2474
  for (field = TYPE_FIELDS (template_type); field;
2475
       (bound_list
2476
        ? (bound_list = TREE_CHAIN (bound_list))
2477
        : (array_type = TREE_TYPE (array_type))),
2478
       field = TREE_CHAIN (TREE_CHAIN (field)))
2479
    {
2480
      tree bounds, min, max;
2481
 
2482
      /* If we have a bound list, get the bounds from there.  Likewise
2483
         for an ARRAY_TYPE.  Otherwise, if expr is a PARM_DECL with
2484
         DECL_BY_COMPONENT_PTR_P, use the bounds of the field in the template.
2485
         This will give us a maximum range.  */
2486
      if (bound_list)
2487
        bounds = TREE_VALUE (bound_list);
2488
      else if (TREE_CODE (array_type) == ARRAY_TYPE)
2489
        bounds = TYPE_INDEX_TYPE (TYPE_DOMAIN (array_type));
2490
      else if (expr && TREE_CODE (expr) == PARM_DECL
2491
               && DECL_BY_COMPONENT_PTR_P (expr))
2492
        bounds = TREE_TYPE (field);
2493
      else
2494
        gcc_unreachable ();
2495
 
2496
      min = convert (TREE_TYPE (field), TYPE_MIN_VALUE (bounds));
2497
      max = convert (TREE_TYPE (TREE_CHAIN (field)), TYPE_MAX_VALUE (bounds));
2498
 
2499
      /* If either MIN or MAX involve a PLACEHOLDER_EXPR, we must
2500
         substitute it from OBJECT.  */
2501
      min = SUBSTITUTE_PLACEHOLDER_IN_EXPR (min, expr);
2502
      max = SUBSTITUTE_PLACEHOLDER_IN_EXPR (max, expr);
2503
 
2504
      template_elts = tree_cons (TREE_CHAIN (field), max,
2505
                                 tree_cons (field, min, template_elts));
2506
    }
2507
 
2508
  return gnat_build_constructor (template_type, nreverse (template_elts));
2509
}
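/* Illustrative sketch, not part of the original GNAT sources: the shape of
   the bounds "template" that build_template fills in, shown as a plain C
   aggregate for a two-dimensional unconstrained array.  The template type
   carries one low/high pair per dimension, in the order the loop above
   walks the template's fields, and a fat pointer pairs the data pointer
   with a pointer to such a template.  bounds_2d_t and matrix_fat_ptr_t are
   hypothetical names for exposition only.  */

typedef struct
{
  int dim1_lo, dim1_hi;   /* First pair of template fields.   */
  int dim2_lo, dim2_hi;   /* Second pair of template fields.  */
} bounds_2d_t;

typedef struct
{
  double      *array;     /* Pointer to the array data.       */
  bounds_2d_t *bounds;    /* Pointer to the bounds template.  */
} matrix_fat_ptr_t;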
2510
 
2511
/* Build a 32-bit VMS descriptor from a Mechanism_Type, which must specify
2512
   a descriptor type, and the GCC type of an object.  Each FIELD_DECL
2513
   in the type contains in its DECL_INITIAL the expression to use when
2514
   a constructor is made for the type.  GNAT_ENTITY is an entity used
2515
   to print out an error message if the mechanism cannot be applied to
2516
   an object of that type and also for the name.  */
2517
 
2518
tree
2519
build_vms_descriptor32 (tree type, Mechanism_Type mech, Entity_Id gnat_entity)
2520
{
2521
  tree record_type = make_node (RECORD_TYPE);
2522
  tree pointer32_type;
2523
  tree field_list = 0;
2524
  int klass;
2525
  int dtype = 0;
2526
  tree inner_type;
2527
  int ndim;
2528
  int i;
2529
  tree *idx_arr;
2530
  tree tem;
2531
 
2532
  /* If TYPE is an unconstrained array, use the underlying array type.  */
2533
  if (TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
2534
    type = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type))));
2535
 
2536
  /* If this is an array, compute the number of dimensions in the array,
2537
     get the index types, and point to the inner type.  */
2538
  if (TREE_CODE (type) != ARRAY_TYPE)
2539
    ndim = 0;
2540
  else
2541
    for (ndim = 1, inner_type = type;
2542
         TREE_CODE (TREE_TYPE (inner_type)) == ARRAY_TYPE
2543
         && TYPE_MULTI_ARRAY_P (TREE_TYPE (inner_type));
2544
         ndim++, inner_type = TREE_TYPE (inner_type))
2545
      ;
2546
 
2547
  idx_arr = (tree *) alloca (ndim * sizeof (tree));
2548
 
2549
  if (mech != By_Descriptor_NCA && mech != By_Short_Descriptor_NCA
2550
      && TREE_CODE (type) == ARRAY_TYPE && TYPE_CONVENTION_FORTRAN_P (type))
2551
    for (i = ndim - 1, inner_type = type;
2552
         i >= 0;
2553
         i--, inner_type = TREE_TYPE (inner_type))
2554
      idx_arr[i] = TYPE_DOMAIN (inner_type);
2555
  else
2556
    for (i = 0, inner_type = type;
2557
         i < ndim;
2558
         i++, inner_type = TREE_TYPE (inner_type))
2559
      idx_arr[i] = TYPE_DOMAIN (inner_type);
2560
 
2561
  /* Now get the DTYPE value.  */
2562
  switch (TREE_CODE (type))
2563
    {
2564
    case INTEGER_TYPE:
2565
    case ENUMERAL_TYPE:
2566
    case BOOLEAN_TYPE:
2567
      if (TYPE_VAX_FLOATING_POINT_P (type))
2568
        switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
2569
          {
2570
          case 6:
2571
            dtype = 10;
2572
            break;
2573
          case 9:
2574
            dtype = 11;
2575
            break;
2576
          case 15:
2577
            dtype = 27;
2578
            break;
2579
          }
2580
      else
2581
        switch (GET_MODE_BITSIZE (TYPE_MODE (type)))
2582
          {
2583
          case 8:
2584
            dtype = TYPE_UNSIGNED (type) ? 2 : 6;
2585
            break;
2586
          case 16:
2587
            dtype = TYPE_UNSIGNED (type) ? 3 : 7;
2588
            break;
2589
          case 32:
2590
            dtype = TYPE_UNSIGNED (type) ? 4 : 8;
2591
            break;
2592
          case 64:
2593
            dtype = TYPE_UNSIGNED (type) ? 5 : 9;
2594
            break;
2595
          case 128:
2596
            dtype = TYPE_UNSIGNED (type) ? 25 : 26;
2597
            break;
2598
          }
2599
      break;
2600
 
2601
    case REAL_TYPE:
2602
      dtype = GET_MODE_BITSIZE (TYPE_MODE (type)) == 32 ? 52 : 53;
2603
      break;
2604
 
2605
    case COMPLEX_TYPE:
2606
      if (TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE
2607
          && TYPE_VAX_FLOATING_POINT_P (type))
2608
        switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
2609
          {
2610
          case 6:
2611
            dtype = 12;
2612
            break;
2613
          case 9:
2614
            dtype = 13;
2615
            break;
2616
          case 15:
2617
            dtype = 29;
2618
          }
2619
      else
2620
        dtype = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) == 32 ? 54: 55;
2621
      break;
2622
 
2623
    case ARRAY_TYPE:
2624
      dtype = 14;
2625
      break;
2626
 
2627
    default:
2628
      break;
2629
    }
2630
 
2631
  /* Get the CLASS value.  */
2632
  switch (mech)
2633
    {
2634
    case By_Descriptor_A:
2635
    case By_Short_Descriptor_A:
2636
      klass = 4;
2637
      break;
2638
    case By_Descriptor_NCA:
2639
    case By_Short_Descriptor_NCA:
2640
      klass = 10;
2641
      break;
2642
    case By_Descriptor_SB:
2643
    case By_Short_Descriptor_SB:
2644
      klass = 15;
2645
      break;
2646
    case By_Descriptor:
2647
    case By_Short_Descriptor:
2648
    case By_Descriptor_S:
2649
    case By_Short_Descriptor_S:
2650
    default:
2651
      klass = 1;
2652
      break;
2653
    }
2654
 
2655
  /* Make the type for a descriptor for VMS.  The first four fields
2656
     are the same for all types.  */
2657
 
2658
  field_list
2659
    = chainon (field_list,
2660
               make_descriptor_field
2661
               ("LENGTH", gnat_type_for_size (16, 1), record_type,
2662
                size_in_bytes ((mech == By_Descriptor_A ||
2663
                                mech == By_Short_Descriptor_A)
2664
                               ? inner_type : type)));
2665
 
2666
  field_list = chainon (field_list,
2667
                        make_descriptor_field ("DTYPE",
2668
                                               gnat_type_for_size (8, 1),
2669
                                               record_type, size_int (dtype)));
2670
  field_list = chainon (field_list,
2671
                        make_descriptor_field ("CLASS",
2672
                                               gnat_type_for_size (8, 1),
2673
                                               record_type, size_int (klass)));
2674
 
2675
  /* Of course this will crash at run-time if the address space is not
2676
     within the low 32 bits, but there is nothing else we can do.  */
2677
  pointer32_type = build_pointer_type_for_mode (type, SImode, false);
2678
 
2679
  field_list
2680
    = chainon (field_list,
2681
               make_descriptor_field
2682
               ("POINTER", pointer32_type, record_type,
2683
                build_unary_op (ADDR_EXPR,
2684
                                pointer32_type,
2685
                                build0 (PLACEHOLDER_EXPR, type))));
2686
 
2687
  switch (mech)
2688
    {
2689
    case By_Descriptor:
2690
    case By_Short_Descriptor:
2691
    case By_Descriptor_S:
2692
    case By_Short_Descriptor_S:
2693
      break;
2694
 
2695
    case By_Descriptor_SB:
2696
    case By_Short_Descriptor_SB:
2697
      field_list
2698
        = chainon (field_list,
2699
                   make_descriptor_field
2700
                   ("SB_L1", gnat_type_for_size (32, 1), record_type,
2701
                    TREE_CODE (type) == ARRAY_TYPE
2702
                    ? TYPE_MIN_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
2703
      field_list
2704
        = chainon (field_list,
2705
                   make_descriptor_field
2706
                   ("SB_U1", gnat_type_for_size (32, 1), record_type,
2707
                    TREE_CODE (type) == ARRAY_TYPE
2708
                    ? TYPE_MAX_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
2709
      break;
2710
 
2711
    case By_Descriptor_A:
2712
    case By_Short_Descriptor_A:
2713
    case By_Descriptor_NCA:
2714
    case By_Short_Descriptor_NCA:
2715
      field_list = chainon (field_list,
2716
                            make_descriptor_field ("SCALE",
2717
                                                   gnat_type_for_size (8, 1),
2718
                                                   record_type,
2719
                                                   size_zero_node));
2720
 
2721
      field_list = chainon (field_list,
2722
                            make_descriptor_field ("DIGITS",
2723
                                                   gnat_type_for_size (8, 1),
2724
                                                   record_type,
2725
                                                   size_zero_node));
2726
 
2727
      field_list
2728
        = chainon (field_list,
2729
                   make_descriptor_field
2730
                   ("AFLAGS", gnat_type_for_size (8, 1), record_type,
2731
                    size_int ((mech == By_Descriptor_NCA ||
2732
                              mech == By_Short_Descriptor_NCA)
2733
                              ? 0
2734
                              /* Set FL_COLUMN, FL_COEFF, and FL_BOUNDS.  */
2735
                              : (TREE_CODE (type) == ARRAY_TYPE
2736
                                 && TYPE_CONVENTION_FORTRAN_P (type)
2737
                                 ? 224 : 192))));
2738
 
2739
      field_list = chainon (field_list,
2740
                            make_descriptor_field ("DIMCT",
2741
                                                   gnat_type_for_size (8, 1),
2742
                                                   record_type,
2743
                                                   size_int (ndim)));
2744
 
2745
      field_list = chainon (field_list,
2746
                            make_descriptor_field ("ARSIZE",
2747
                                                   gnat_type_for_size (32, 1),
2748
                                                   record_type,
2749
                                                   size_in_bytes (type)));
2750
 
2751
      /* Now build a pointer to the 0,0,0... element.  */
2752
      tem = build0 (PLACEHOLDER_EXPR, type);
2753
      for (i = 0, inner_type = type; i < ndim;
2754
           i++, inner_type = TREE_TYPE (inner_type))
2755
        tem = build4 (ARRAY_REF, TREE_TYPE (inner_type), tem,
2756
                      convert (TYPE_DOMAIN (inner_type), size_zero_node),
2757
                      NULL_TREE, NULL_TREE);
2758
 
2759
      field_list
2760
        = chainon (field_list,
2761
                   make_descriptor_field
2762
                   ("A0",
2763
                    build_pointer_type_for_mode (inner_type, SImode, false),
2764
                    record_type,
2765
                    build1 (ADDR_EXPR,
2766
                            build_pointer_type_for_mode (inner_type, SImode,
2767
                                                         false),
2768
                            tem)));
2769
 
2770
      /* Next come the addressing coefficients.  */
2771
      tem = size_one_node;
2772
      for (i = 0; i < ndim; i++)
2773
        {
2774
          char fname[3];
2775
          tree idx_length
2776
            = size_binop (MULT_EXPR, tem,
2777
                          size_binop (PLUS_EXPR,
2778
                                      size_binop (MINUS_EXPR,
2779
                                                  TYPE_MAX_VALUE (idx_arr[i]),
2780
                                                  TYPE_MIN_VALUE (idx_arr[i])),
2781
                                      size_int (1)));
2782
 
2783
          fname[0] = ((mech == By_Descriptor_NCA ||
2784
                       mech == By_Short_Descriptor_NCA) ? 'S' : 'M');
2785
          fname[1] = '0' + i, fname[2] = 0;
2786
          field_list
2787
            = chainon (field_list,
2788
                       make_descriptor_field (fname,
2789
                                              gnat_type_for_size (32, 1),
2790
                                              record_type, idx_length));
2791
 
2792
          if (mech == By_Descriptor_NCA || mech == By_Short_Descriptor_NCA)
2793
            tem = idx_length;
2794
        }
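 
      /* For instance, with two index ranges L0 .. U0 and L1 .. U1, the NCA
         mechanisms store S0 = U0 - L0 + 1 and S1 = S0 * (U1 - L1 + 1),
         whereas the other mechanisms store M0 = U0 - L0 + 1 and
         M1 = U1 - L1 + 1, since TEM is only advanced in the NCA case.  */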
2795
 
2796
      /* Finally here are the bounds.  */
2797
      for (i = 0; i < ndim; i++)
2798
        {
2799
          char fname[3];
2800
 
2801
          fname[0] = 'L', fname[1] = '0' + i, fname[2] = 0;
2802
          field_list
2803
            = chainon (field_list,
2804
                       make_descriptor_field
2805
                       (fname, gnat_type_for_size (32, 1), record_type,
2806
                        TYPE_MIN_VALUE (idx_arr[i])));
2807
 
2808
          fname[0] = 'U';
2809
          field_list
2810
            = chainon (field_list,
2811
                       make_descriptor_field
2812
                       (fname, gnat_type_for_size (32, 1), record_type,
2813
                        TYPE_MAX_VALUE (idx_arr[i])));
2814
        }
2815
      break;
2816
 
2817
    default:
2818
      post_error ("unsupported descriptor type for &", gnat_entity);
2819
    }
2820
 
2821
  TYPE_NAME (record_type) = create_concat_name (gnat_entity, "DESC");
2822
  finish_record_type (record_type, field_list, 0, false);
2823
  return record_type;
2824
}
2825
 
2826
/* Build a 64bit VMS descriptor from a Mechanism_Type, which must specify
2827
   a descriptor type, and the GCC type of an object.  Each FIELD_DECL
2828
   in the type contains in its DECL_INITIAL the expression to use when
2829
   a constructor is made for the type.  GNAT_ENTITY is an entity used
2830
   to print out an error message if the mechanism cannot be applied to
2831
   an object of that type and also for the name.  */
2832
 
2833
tree
2834
build_vms_descriptor (tree type, Mechanism_Type mech, Entity_Id gnat_entity)
2835
{
2836
  tree record64_type = make_node (RECORD_TYPE);
2837
  tree pointer64_type;
2838
  tree field_list64 = 0;
2839
  int klass;
2840
  int dtype = 0;
2841
  tree inner_type;
2842
  int ndim;
2843
  int i;
2844
  tree *idx_arr;
2845
  tree tem;
2846
 
2847
  /* If TYPE is an unconstrained array, use the underlying array type.  */
2848
  if (TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
2849
    type = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type))));
2850
 
2851
  /* If this is an array, compute the number of dimensions in the array,
2852
     get the index types, and point to the inner type.  */
2853
  if (TREE_CODE (type) != ARRAY_TYPE)
2854
    ndim = 0;
2855
  else
2856
    for (ndim = 1, inner_type = type;
2857
         TREE_CODE (TREE_TYPE (inner_type)) == ARRAY_TYPE
2858
         && TYPE_MULTI_ARRAY_P (TREE_TYPE (inner_type));
2859
         ndim++, inner_type = TREE_TYPE (inner_type))
2860
      ;
2861
 
2862
  idx_arr = (tree *) alloca (ndim * sizeof (tree));
2863
 
2864
  if (mech != By_Descriptor_NCA
2865
      && TREE_CODE (type) == ARRAY_TYPE && TYPE_CONVENTION_FORTRAN_P (type))
2866
    for (i = ndim - 1, inner_type = type;
2867
         i >= 0;
2868
         i--, inner_type = TREE_TYPE (inner_type))
2869
      idx_arr[i] = TYPE_DOMAIN (inner_type);
2870
  else
2871
    for (i = 0, inner_type = type;
2872
         i < ndim;
2873
         i++, inner_type = TREE_TYPE (inner_type))
2874
      idx_arr[i] = TYPE_DOMAIN (inner_type);
2875
 
2876
  /* Now get the DTYPE value.  */
2877
  switch (TREE_CODE (type))
2878
    {
2879
    case INTEGER_TYPE:
2880
    case ENUMERAL_TYPE:
2881
    case BOOLEAN_TYPE:
2882
      if (TYPE_VAX_FLOATING_POINT_P (type))
2883
        switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
2884
          {
2885
          case 6:
2886
            dtype = 10;
2887
            break;
2888
          case 9:
2889
            dtype = 11;
2890
            break;
2891
          case 15:
2892
            dtype = 27;
2893
            break;
2894
          }
2895
      else
2896
        switch (GET_MODE_BITSIZE (TYPE_MODE (type)))
2897
          {
2898
          case 8:
2899
            dtype = TYPE_UNSIGNED (type) ? 2 : 6;
2900
            break;
2901
          case 16:
2902
            dtype = TYPE_UNSIGNED (type) ? 3 : 7;
2903
            break;
2904
          case 32:
2905
            dtype = TYPE_UNSIGNED (type) ? 4 : 8;
2906
            break;
2907
          case 64:
2908
            dtype = TYPE_UNSIGNED (type) ? 5 : 9;
2909
            break;
2910
          case 128:
2911
            dtype = TYPE_UNSIGNED (type) ? 25 : 26;
2912
            break;
2913
          }
2914
      break;
2915
 
2916
    case REAL_TYPE:
2917
      dtype = GET_MODE_BITSIZE (TYPE_MODE (type)) == 32 ? 52 : 53;
2918
      break;
2919
 
2920
    case COMPLEX_TYPE:
2921
      if (TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE
2922
          && TYPE_VAX_FLOATING_POINT_P (type))
2923
        switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
2924
          {
2925
          case 6:
2926
            dtype = 12;
2927
            break;
2928
          case 9:
2929
            dtype = 13;
2930
            break;
2931
          case 15:
2932
            dtype = 29;
2933
          }
2934
      else
2935
        dtype = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) == 32 ? 54: 55;
2936
      break;
2937
 
2938
    case ARRAY_TYPE:
2939
      dtype = 14;
2940
      break;
2941
 
2942
    default:
2943
      break;
2944
    }
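 
  /* In short: integer-like types get the unsigned or signed byte, word,
     longword, quadword or octaword code, or a VAX floating-point code when
     TYPE_VAX_FLOATING_POINT_P is set; REAL_TYPE and COMPLEX_TYPE select the
     32-bit or 64-bit floating forms; ARRAY_TYPE gets code 14.  */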
2945
 
2946
  /* Get the CLASS value.  */
2947
  switch (mech)
2948
    {
2949
    case By_Descriptor_A:
2950
      klass = 4;
2951
      break;
2952
    case By_Descriptor_NCA:
2953
      klass = 10;
2954
      break;
2955
    case By_Descriptor_SB:
2956
      klass = 15;
2957
      break;
2958
    case By_Descriptor:
2959
    case By_Descriptor_S:
2960
    default:
2961
      klass = 1;
2962
      break;
2963
    }
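 
  /* CLASS 1 is descriptor class S, 4 is class A, 10 is class NCA and 15 is
     class SB; these are the same codes the convert_vms_descriptor routines
     below switch on.  */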
2964
 
2965
  /* Make the type for a 64bit descriptor for VMS.  The first six fields
2966
     are the same for all types.  */
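 
  /* That is: a 16-bit MBO set to 1, an 8-bit DTYPE code, an 8-bit CLASS
     code, a 32-bit MBMO set to -1, a 64-bit LENGTH giving the size in
     bytes, and the 64-bit POINTER to the object, in that order.  */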
2967
 
2968
  field_list64 = chainon (field_list64,
2969
                        make_descriptor_field ("MBO",
2970
                                               gnat_type_for_size (16, 1),
2971
                                               record64_type, size_int (1)));
2972
 
2973
  field_list64 = chainon (field_list64,
2974
                        make_descriptor_field ("DTYPE",
2975
                                               gnat_type_for_size (8, 1),
2976
                                               record64_type, size_int (dtype)));
2977
  field_list64 = chainon (field_list64,
2978
                        make_descriptor_field ("CLASS",
2979
                                               gnat_type_for_size (8, 1),
2980
                                               record64_type, size_int (klass)));
2981
 
2982
  field_list64 = chainon (field_list64,
2983
                        make_descriptor_field ("MBMO",
2984
                                               gnat_type_for_size (32, 1),
2985
                                               record64_type, ssize_int (-1)));
2986
 
2987
  field_list64
2988
    = chainon (field_list64,
2989
               make_descriptor_field
2990
               ("LENGTH", gnat_type_for_size (64, 1), record64_type,
2991
                size_in_bytes (mech == By_Descriptor_A ? inner_type : type)));
2992
 
2993
  pointer64_type = build_pointer_type_for_mode (type, DImode, false);
2994
 
2995
  field_list64
2996
    = chainon (field_list64,
2997
               make_descriptor_field
2998
               ("POINTER", pointer64_type, record64_type,
2999
                build_unary_op (ADDR_EXPR,
3000
                                pointer64_type,
3001
                                build0 (PLACEHOLDER_EXPR, type))));
3002
 
3003
  switch (mech)
3004
    {
3005
    case By_Descriptor:
3006
    case By_Descriptor_S:
3007
      break;
3008
 
3009
    case By_Descriptor_SB:
3010
      field_list64
3011
        = chainon (field_list64,
3012
                   make_descriptor_field
3013
                   ("SB_L1", gnat_type_for_size (64, 1), record64_type,
3014
                    TREE_CODE (type) == ARRAY_TYPE
3015
                    ? TYPE_MIN_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
3016
      field_list64
3017
        = chainon (field_list64,
3018
                   make_descriptor_field
3019
                   ("SB_U1", gnat_type_for_size (64, 1), record64_type,
3020
                    TREE_CODE (type) == ARRAY_TYPE
3021
                    ? TYPE_MAX_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
3022
      break;
3023
 
3024
    case By_Descriptor_A:
3025
    case By_Descriptor_NCA:
3026
      field_list64 = chainon (field_list64,
3027
                            make_descriptor_field ("SCALE",
3028
                                                   gnat_type_for_size (8, 1),
3029
                                                   record64_type,
3030
                                                   size_zero_node));
3031
 
3032
      field_list64 = chainon (field_list64,
3033
                            make_descriptor_field ("DIGITS",
3034
                                                   gnat_type_for_size (8, 1),
3035
                                                   record64_type,
3036
                                                   size_zero_node));
3037
 
3038
      field_list64
3039
        = chainon (field_list64,
3040
                   make_descriptor_field
3041
                   ("AFLAGS", gnat_type_for_size (8, 1), record64_type,
3042
                    size_int (mech == By_Descriptor_NCA
3043
                              ? 0
3044
                              /* Set FL_COLUMN, FL_COEFF, and FL_BOUNDS.  */
3045
                              : (TREE_CODE (type) == ARRAY_TYPE
3046
                                 && TYPE_CONVENTION_FORTRAN_P (type)
3047
                                 ? 224 : 192))));
3048
 
3049
      field_list64 = chainon (field_list64,
3050
                            make_descriptor_field ("DIMCT",
3051
                                                   gnat_type_for_size (8, 1),
3052
                                                   record64_type,
3053
                                                   size_int (ndim)));
3054
 
3055
      field_list64 = chainon (field_list64,
3056
                            make_descriptor_field ("MBZ",
3057
                                                   gnat_type_for_size (32, 1),
3058
                                                   record64_type,
3059
                                                   size_int (0)));
3060
      field_list64 = chainon (field_list64,
3061
                            make_descriptor_field ("ARSIZE",
3062
                                                   gnat_type_for_size (64, 1),
3063
                                                   record64_type,
3064
                                                   size_in_bytes (type)));
3065
 
3066
      /* Now build a pointer to the 0,0,0... element.  */
3067
      tem = build0 (PLACEHOLDER_EXPR, type);
3068
      for (i = 0, inner_type = type; i < ndim;
3069
           i++, inner_type = TREE_TYPE (inner_type))
3070
        tem = build4 (ARRAY_REF, TREE_TYPE (inner_type), tem,
3071
                      convert (TYPE_DOMAIN (inner_type), size_zero_node),
3072
                      NULL_TREE, NULL_TREE);
3073
 
3074
      field_list64
3075
        = chainon (field_list64,
3076
                   make_descriptor_field
3077
                   ("A0",
3078
                    build_pointer_type_for_mode (inner_type, DImode, false),
3079
                    record64_type,
3080
                    build1 (ADDR_EXPR,
3081
                            build_pointer_type_for_mode (inner_type, DImode,
3082
                                                         false),
3083
                            tem)));
3084
 
3085
      /* Next come the addressing coefficients.  */
3086
      tem = size_one_node;
3087
      for (i = 0; i < ndim; i++)
3088
        {
3089
          char fname[3];
3090
          tree idx_length
3091
            = size_binop (MULT_EXPR, tem,
3092
                          size_binop (PLUS_EXPR,
3093
                                      size_binop (MINUS_EXPR,
3094
                                                  TYPE_MAX_VALUE (idx_arr[i]),
3095
                                                  TYPE_MIN_VALUE (idx_arr[i])),
3096
                                      size_int (1)));
3097
 
3098
          fname[0] = (mech == By_Descriptor_NCA ? 'S' : 'M');
3099
          fname[1] = '0' + i, fname[2] = 0;
3100
          field_list64
3101
            = chainon (field_list64,
3102
                       make_descriptor_field (fname,
3103
                                              gnat_type_for_size (64, 1),
3104
                                              record64_type, idx_length));
3105
 
3106
          if (mech == By_Descriptor_NCA)
3107
            tem = idx_length;
3108
        }
3109
 
3110
      /* Finally here are the bounds.  */
3111
      for (i = 0; i < ndim; i++)
3112
        {
3113
          char fname[3];
3114
 
3115
          fname[0] = 'L', fname[1] = '0' + i, fname[2] = 0;
3116
          field_list64
3117
            = chainon (field_list64,
3118
                       make_descriptor_field
3119
                       (fname, gnat_type_for_size (64, 1), record64_type,
3120
                        TYPE_MIN_VALUE (idx_arr[i])));
3121
 
3122
          fname[0] = 'U';
3123
          field_list64
3124
            = chainon (field_list64,
3125
                       make_descriptor_field
3126
                       (fname, gnat_type_for_size (64, 1), record64_type,
3127
                        TYPE_MAX_VALUE (idx_arr[i])));
3128
        }
3129
      break;
3130
 
3131
    default:
3132
      post_error ("unsupported descriptor type for &", gnat_entity);
3133
    }
3134
 
3135
  TYPE_NAME (record64_type) = create_concat_name (gnat_entity, "DESC64");
3136
  finish_record_type (record64_type, field_list64, 0, false);
3137
  return record64_type;
3138
}
3139
 
3140
/* Utility routine for the code above: make a descriptor field.  */
3141
 
3142
static tree
3143
make_descriptor_field (const char *name, tree type,
3144
                       tree rec_type, tree initial)
3145
{
3146
  tree field
3147
    = create_field_decl (get_identifier (name), type, rec_type, 0, 0, 0, 0);
3148
 
3149
  DECL_INITIAL (field) = initial;
3150
  return field;
3151
}
3152
 
3153
/* Convert GNU_EXPR, a pointer to a 64bit VMS descriptor, to GNU_TYPE, a
3154
   regular pointer or fat pointer type.  GNAT_SUBPROG is the subprogram to
3155
   which the VMS descriptor is passed.  */
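 
/* For a fat pointer target, the result is a constructor whose first field
   is the descriptor's POINTER value converted to the P_ARRAY pointer type
   and whose second field is the address of a bounds template, either the
   {1, LENGTH} pair built on the fly or the bounds already present in the
   descriptor, depending on its CLASS.  */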
3156
 
3157
static tree
3158
convert_vms_descriptor64 (tree gnu_type, tree gnu_expr, Entity_Id gnat_subprog)
3159
{
3160
  tree desc_type = TREE_TYPE (TREE_TYPE (gnu_expr));
3161
  tree desc = build1 (INDIRECT_REF, desc_type, gnu_expr);
3162
  /* The CLASS field is the 3rd field in the descriptor.  */
3163
  tree klass = TREE_CHAIN (TREE_CHAIN (TYPE_FIELDS (desc_type)));
3164
  /* The POINTER field is the 6th field in the descriptor.  */
3165
  tree pointer64 = TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (klass)));
3166
 
3167
  /* Retrieve the value of the POINTER field.  */
3168
  tree gnu_expr64
3169
    = build3 (COMPONENT_REF, TREE_TYPE (pointer64), desc, pointer64, NULL_TREE);
3170
 
3171
  if (POINTER_TYPE_P (gnu_type))
3172
    return convert (gnu_type, gnu_expr64);
3173
 
3174
  else if (TYPE_IS_FAT_POINTER_P (gnu_type))
3175
    {
3176
      tree p_array_type = TREE_TYPE (TYPE_FIELDS (gnu_type));
3177
      tree p_bounds_type = TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (gnu_type)));
3178
      tree template_type = TREE_TYPE (p_bounds_type);
3179
      tree min_field = TYPE_FIELDS (template_type);
3180
      tree max_field = TREE_CHAIN (TYPE_FIELDS (template_type));
3181
      tree template_tree, template_addr, aflags, dimct, t, u;
3182
      /* See the head comment of build_vms_descriptor.  */
3183
      int iklass = TREE_INT_CST_LOW (DECL_INITIAL (klass));
3184
      tree lfield, ufield;
3185
 
3186
      /* Convert POINTER to the type of the P_ARRAY field.  */
3187
      gnu_expr64 = convert (p_array_type, gnu_expr64);
3188
 
3189
      switch (iklass)
3190
        {
3191
        case 1:  /* Class S  */
3192
        case 15: /* Class SB */
3193
          /* Build {1, LENGTH} template; LENGTH is the 5th field.  */
3194
          t = TREE_CHAIN (TREE_CHAIN (klass));
3195
          t = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3196
          t = tree_cons (min_field,
3197
                         convert (TREE_TYPE (min_field), integer_one_node),
3198
                         tree_cons (max_field,
3199
                                    convert (TREE_TYPE (max_field), t),
3200
                                    NULL_TREE));
3201
          template_tree = gnat_build_constructor (template_type, t);
3202
          template_addr = build_unary_op (ADDR_EXPR, NULL_TREE, template_tree);
3203
 
3204
          /* For class S, we are done.  */
3205
          if (iklass == 1)
3206
            break;
3207
 
3208
          /* Test that we really have an SB descriptor, like DEC Ada.  */
3209
          t = build3 (COMPONENT_REF, TREE_TYPE (klass), desc, klass, NULL);
3210
          u = convert (TREE_TYPE (klass), DECL_INITIAL (klass));
3211
          u = build_binary_op (EQ_EXPR, integer_type_node, t, u);
3212
          /* If so, there is already a template in the descriptor and
3213
             it is located right after the POINTER field.  The fields are
3214
             64bits so they must be repacked. */
3215
          t = TREE_CHAIN (pointer64);
3216
          lfield = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3217
          lfield = convert (TREE_TYPE (TYPE_FIELDS (template_type)), lfield);
3218
 
3219
          t = TREE_CHAIN (t);
3220
          ufield = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3221
          ufield = convert
3222
           (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (template_type))), ufield);
3223
 
3224
          /* Build the template in the form of a constructor. */
3225
          t = tree_cons (TYPE_FIELDS (template_type), lfield,
3226
                         tree_cons (TREE_CHAIN (TYPE_FIELDS (template_type)),
3227
                                    ufield, NULL_TREE));
3228
          template_tree = gnat_build_constructor (template_type, t);
3229
 
3230
          /* Otherwise use the {1, LENGTH} template we built above.  */
3231
          template_addr = build3 (COND_EXPR, p_bounds_type, u,
3232
                                  build_unary_op (ADDR_EXPR, p_bounds_type,
3233
                                                 template_tree),
3234
                                  template_addr);
3235
          break;
3236
 
3237
        case 4:  /* Class A */
3238
          /* The AFLAGS field is the 3rd field after the pointer in the
3239
             descriptor.  */
3240
          t = TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (pointer64)));
3241
          aflags = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3242
          /* The DIMCT field is the next field in the descriptor after
3243
             aflags.  */
3244
          t = TREE_CHAIN (t);
3245
          dimct = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3246
          /* Raise CONSTRAINT_ERROR if there is more than one dimension
3247
             or if FL_COEFF or FL_BOUNDS is not set.  */
3248
          u = build_int_cst (TREE_TYPE (aflags), 192);
3249
          u = build_binary_op (TRUTH_OR_EXPR, integer_type_node,
3250
                               build_binary_op (NE_EXPR, integer_type_node,
3251
                                                dimct,
3252
                                                convert (TREE_TYPE (dimct),
3253
                                                         size_one_node)),
3254
                               build_binary_op (NE_EXPR, integer_type_node,
3255
                                                build2 (BIT_AND_EXPR,
3256
                                                        TREE_TYPE (aflags),
3257
                                                        aflags, u),
3258
                                                u));
3259
          /* There is already a template in the descriptor and it is located
3260
             in block 3.  The fields are 64bits so they must be repacked. */
3261
          t = TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN
3262
              (t)))));
3263
          lfield = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3264
          lfield = convert (TREE_TYPE (TYPE_FIELDS (template_type)), lfield);
3265
 
3266
          t = TREE_CHAIN (t);
3267
          ufield = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3268
          ufield = convert
3269
           (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (template_type))), ufield);
3270
 
3271
          /* Build the template in the form of a constructor. */
3272
          t = tree_cons (TYPE_FIELDS (template_type), lfield,
3273
                         tree_cons (TREE_CHAIN (TYPE_FIELDS (template_type)),
3274
                                    ufield, NULL_TREE));
3275
          template_tree = gnat_build_constructor (template_type, t);
3276
          template_tree = build3 (COND_EXPR, template_type, u,
3277
                            build_call_raise (CE_Length_Check_Failed, Empty,
3278
                                              N_Raise_Constraint_Error),
3279
                            template_tree);
3280
          template_addr
3281
            = build_unary_op (ADDR_EXPR, p_bounds_type, template_tree);
3282
          break;
3283
 
3284
        case 10: /* Class NCA */
3285
        default:
3286
          post_error ("unsupported descriptor type for &", gnat_subprog);
3287
          template_addr = integer_zero_node;
3288
          break;
3289
        }
3290
 
3291
      /* Build the fat pointer in the form of a constructor.  */
3292
      t = tree_cons (TYPE_FIELDS (gnu_type), gnu_expr64,
3293
                     tree_cons (TREE_CHAIN (TYPE_FIELDS (gnu_type)),
3294
                                template_addr, NULL_TREE));
3295
      return gnat_build_constructor (gnu_type, t);
3296
    }
3297
 
3298
  else
3299
    gcc_unreachable ();
3300
}
3301
 
3302
/* Convert GNU_EXPR, a pointer to a 32bit VMS descriptor, to GNU_TYPE, a
3303
   regular pointer or fat pointer type.  GNAT_SUBPROG is the subprogram to
3304
   which the VMS descriptor is passed.  */
3305
 
3306
static tree
3307
convert_vms_descriptor32 (tree gnu_type, tree gnu_expr, Entity_Id gnat_subprog)
3308
{
3309
  tree desc_type = TREE_TYPE (TREE_TYPE (gnu_expr));
3310
  tree desc = build1 (INDIRECT_REF, desc_type, gnu_expr);
3311
  /* The CLASS field is the 3rd field in the descriptor.  */
3312
  tree klass = TREE_CHAIN (TREE_CHAIN (TYPE_FIELDS (desc_type)));
3313
  /* The POINTER field is the 4th field in the descriptor.  */
3314
  tree pointer = TREE_CHAIN (klass);
3315
 
3316
  /* Retrieve the value of the POINTER field.  */
3317
  tree gnu_expr32
3318
    = build3 (COMPONENT_REF, TREE_TYPE (pointer), desc, pointer, NULL_TREE);
3319
 
3320
  if (POINTER_TYPE_P (gnu_type))
3321
    return convert (gnu_type, gnu_expr32);
3322
 
3323
  else if (TYPE_IS_FAT_POINTER_P (gnu_type))
3324
    {
3325
      tree p_array_type = TREE_TYPE (TYPE_FIELDS (gnu_type));
3326
      tree p_bounds_type = TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (gnu_type)));
3327
      tree template_type = TREE_TYPE (p_bounds_type);
3328
      tree min_field = TYPE_FIELDS (template_type);
3329
      tree max_field = TREE_CHAIN (TYPE_FIELDS (template_type));
3330
      tree template_tree, template_addr, aflags, dimct, t, u;
3331
      /* See the head comment of build_vms_descriptor.  */
3332
      int iklass = TREE_INT_CST_LOW (DECL_INITIAL (klass));
3333
 
3334
      /* Convert POINTER to the type of the P_ARRAY field.  */
3335
      gnu_expr32 = convert (p_array_type, gnu_expr32);
3336
 
3337
      switch (iklass)
3338
        {
3339
        case 1:  /* Class S  */
3340
        case 15: /* Class SB */
3341
          /* Build {1, LENGTH} template; LENGTH is the 1st field.  */
3342
          t = TYPE_FIELDS (desc_type);
3343
          t = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3344
          t = tree_cons (min_field,
3345
                         convert (TREE_TYPE (min_field), integer_one_node),
3346
                         tree_cons (max_field,
3347
                                    convert (TREE_TYPE (max_field), t),
3348
                                    NULL_TREE));
3349
          template_tree = gnat_build_constructor (template_type, t);
3350
          template_addr = build_unary_op (ADDR_EXPR, NULL_TREE, template_tree);
3351
 
3352
          /* For class S, we are done.  */
3353
          if (iklass == 1)
3354
            break;
3355
 
3356
          /* Test that we really have an SB descriptor, like DEC Ada.  */
3357
          t = build3 (COMPONENT_REF, TREE_TYPE (klass), desc, klass, NULL);
3358
          u = convert (TREE_TYPE (klass), DECL_INITIAL (klass));
3359
          u = build_binary_op (EQ_EXPR, integer_type_node, t, u);
3360
          /* If so, there is already a template in the descriptor and
3361
             it is located right after the POINTER field.  */
3362
          t = TREE_CHAIN (pointer);
3363
          template_tree
3364
            = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3365
          /* Otherwise use the {1, LENGTH} template we built above.  */
3366
          template_addr = build3 (COND_EXPR, p_bounds_type, u,
3367
                                  build_unary_op (ADDR_EXPR, p_bounds_type,
3368
                                                 template_tree),
3369
                                  template_addr);
3370
          break;
3371
 
3372
        case 4:  /* Class A */
3373
          /* The AFLAGS field is the 7th field in the descriptor.  */
3374
          t = TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (pointer)));
3375
          aflags = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3376
          /* The DIMCT field is the 8th field in the descriptor.  */
3377
          t = TREE_CHAIN (t);
3378
          dimct = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3379
          /* Raise CONSTRAINT_ERROR if there is more than one dimension
3380
             or if FL_COEFF or FL_BOUNDS is not set.  */
3381
          u = build_int_cst (TREE_TYPE (aflags), 192);
3382
          u = build_binary_op (TRUTH_OR_EXPR, integer_type_node,
3383
                               build_binary_op (NE_EXPR, integer_type_node,
3384
                                                dimct,
3385
                                                convert (TREE_TYPE (dimct),
3386
                                                         size_one_node)),
3387
                               build_binary_op (NE_EXPR, integer_type_node,
3388
                                                build2 (BIT_AND_EXPR,
3389
                                                        TREE_TYPE (aflags),
3390
                                                        aflags, u),
3391
                                                u));
3392
          /* There is already a template in the descriptor and it is
3393
             located at the start of block 3 (12th field).  */
3394
          t = TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (t))));
3395
          template_tree
3396
            = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3397
          template_tree = build3 (COND_EXPR, TREE_TYPE (t), u,
3398
                            build_call_raise (CE_Length_Check_Failed, Empty,
3399
                                              N_Raise_Constraint_Error),
3400
                            template_tree);
3401
          template_addr
3402
            = build_unary_op (ADDR_EXPR, p_bounds_type, template_tree);
3403
          break;
3404
 
3405
        case 10: /* Class NCA */
3406
        default:
3407
          post_error ("unsupported descriptor type for &", gnat_subprog);
3408
          template_addr = integer_zero_node;
3409
          break;
3410
        }
3411
 
3412
      /* Build the fat pointer in the form of a constructor.  */
3413
      t = tree_cons (TYPE_FIELDS (gnu_type), gnu_expr32,
3414
                     tree_cons (TREE_CHAIN (TYPE_FIELDS (gnu_type)),
3415
                                template_addr, NULL_TREE));
3416
 
3417
      return gnat_build_constructor (gnu_type, t);
3418
    }
3419
 
3420
  else
3421
    gcc_unreachable ();
3422
}
3423
 
3424
/* Convert GNU_EXPR, a pointer to a VMS descriptor, to GNU_TYPE, a regular
3425
   pointer or fat pointer type.  GNU_EXPR_ALT_TYPE is the alternate (32-bit)
3426
   pointer type of GNU_EXPR.  GNAT_SUBPROG is the subprogram to which the
3427
   VMS descriptor is passed.  */
3428
 
3429
static tree
3430
convert_vms_descriptor (tree gnu_type, tree gnu_expr, tree gnu_expr_alt_type,
3431
                        Entity_Id gnat_subprog)
3432
{
3433
  tree desc_type = TREE_TYPE (TREE_TYPE (gnu_expr));
3434
  tree desc = build1 (INDIRECT_REF, desc_type, gnu_expr);
3435
  tree mbo = TYPE_FIELDS (desc_type);
3436
  const char *mbostr = IDENTIFIER_POINTER (DECL_NAME (mbo));
3437
  tree mbmo = TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (mbo)));
3438
  tree is64bit, gnu_expr32, gnu_expr64;
3439
 
3440
  /* If the first field is not named MBO, this is a 32-bit descriptor with
3441
     no alternate.  Otherwise the primary is 64-bit and the alternate 32-bit.  */
3442
  if (strcmp (mbostr, "MBO") != 0)
3443
    return convert_vms_descriptor32 (gnu_type, gnu_expr, gnat_subprog);
3444
 
3445
  /* Build the test for 64-bit descriptor.  */
3446
  mbo = build3 (COMPONENT_REF, TREE_TYPE (mbo), desc, mbo, NULL_TREE);
3447
  mbmo = build3 (COMPONENT_REF, TREE_TYPE (mbmo), desc, mbmo, NULL_TREE);
3448
  is64bit
3449
    = build_binary_op (TRUTH_ANDIF_EXPR, integer_type_node,
3450
                       build_binary_op (EQ_EXPR, integer_type_node,
3451
                                        convert (integer_type_node, mbo),
3452
                                        integer_one_node),
3453
                       build_binary_op (EQ_EXPR, integer_type_node,
3454
                                        convert (integer_type_node, mbmo),
3455
                                        integer_minus_one_node));
3456
 
3457
  /* Build the 2 possible end results.  */
3458
  gnu_expr64 = convert_vms_descriptor64 (gnu_type, gnu_expr, gnat_subprog);
3459
  gnu_expr = fold_convert (gnu_expr_alt_type, gnu_expr);
3460
  gnu_expr32 = convert_vms_descriptor32 (gnu_type, gnu_expr, gnat_subprog);
3461
 
3462
  return build3 (COND_EXPR, gnu_type, is64bit, gnu_expr64, gnu_expr32);
3463
}
3464
 
3465
/* Build a stub for the subprogram specified by the GCC tree GNU_SUBPROG
3466
   and the GNAT node GNAT_SUBPROG.  */
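 
/* The stub forwards its parameters to GNU_SUBPROG, first converting any
   parameter passed by descriptor into the reference expected by the real
   subprogram, and returns the result of the call when there is one.  */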
3467
 
3468
void
3469
build_function_stub (tree gnu_subprog, Entity_Id gnat_subprog)
3470
{
3471
  tree gnu_subprog_type, gnu_subprog_addr, gnu_subprog_call;
3472
  tree gnu_stub_param, gnu_param_list, gnu_arg_types, gnu_param;
3473
  tree gnu_stub_decl = DECL_FUNCTION_STUB (gnu_subprog);
3474
  tree gnu_body;
3475
 
3476
  gnu_subprog_type = TREE_TYPE (gnu_subprog);
3477
  gnu_param_list = NULL_TREE;
3478
 
3479
  begin_subprog_body (gnu_stub_decl);
3480
  gnat_pushlevel ();
3481
 
3482
  start_stmt_group ();
3483
 
3484
  /* Loop over the parameters of the stub and translate any of them
3485
     passed by descriptor into a by reference one.  */
3486
  for (gnu_stub_param = DECL_ARGUMENTS (gnu_stub_decl),
3487
       gnu_arg_types = TYPE_ARG_TYPES (gnu_subprog_type);
3488
       gnu_stub_param;
3489
       gnu_stub_param = TREE_CHAIN (gnu_stub_param),
3490
       gnu_arg_types = TREE_CHAIN (gnu_arg_types))
3491
    {
3492
      if (DECL_BY_DESCRIPTOR_P (gnu_stub_param))
3493
        gnu_param
3494
          = convert_vms_descriptor (TREE_VALUE (gnu_arg_types),
3495
                                    gnu_stub_param,
3496
                                    DECL_PARM_ALT_TYPE (gnu_stub_param),
3497
                                    gnat_subprog);
3498
      else
3499
        gnu_param = gnu_stub_param;
3500
 
3501
      gnu_param_list = tree_cons (NULL_TREE, gnu_param, gnu_param_list);
3502
    }
3503
 
3504
  gnu_body = end_stmt_group ();
3505
 
3506
  /* Invoke the internal subprogram.  */
3507
  gnu_subprog_addr = build1 (ADDR_EXPR, build_pointer_type (gnu_subprog_type),
3508
                             gnu_subprog);
3509
  gnu_subprog_call = build_call_list (TREE_TYPE (gnu_subprog_type),
3510
                                      gnu_subprog_addr,
3511
                                      nreverse (gnu_param_list));
3512
 
3513
  /* Propagate the return value, if any.  */
3514
  if (VOID_TYPE_P (TREE_TYPE (gnu_subprog_type)))
3515
    append_to_statement_list (gnu_subprog_call, &gnu_body);
3516
  else
3517
    append_to_statement_list (build_return_expr (DECL_RESULT (gnu_stub_decl),
3518
                                                 gnu_subprog_call),
3519
                              &gnu_body);
3520
 
3521
  gnat_poplevel ();
3522
 
3523
  allocate_struct_function (gnu_stub_decl, false);
3524
  end_subprog_body (gnu_body);
3525
}
3526
 
3527
/* Build a type to be used to represent an aliased object whose nominal
3528
   type is an unconstrained array.  This consists of a RECORD_TYPE containing
3529
   a field of TEMPLATE_TYPE and a field of OBJECT_TYPE, which is an
3530
   ARRAY_TYPE.  If ARRAY_TYPE is that of the unconstrained array, this
3531
   is used to represent an arbitrary unconstrained object.  Use NAME
3532
   as the name of the record.  */
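 
/* The result is roughly:
 
     struct NAME
     {
       TEMPLATE_TYPE BOUNDS;
       OBJECT_TYPE   ARRAY;
     };
 
   with TYPE_CONTAINS_TEMPLATE_P set on it.  */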
3533
 
3534
tree
3535
build_unc_object_type (tree template_type, tree object_type, tree name)
3536
{
3537
  tree type = make_node (RECORD_TYPE);
3538
  tree template_field = create_field_decl (get_identifier ("BOUNDS"),
3539
                                           template_type, type, 0, 0, 0, 1);
3540
  tree array_field = create_field_decl (get_identifier ("ARRAY"), object_type,
3541
                                        type, 0, 0, 0, 1);
3542
 
3543
  TYPE_NAME (type) = name;
3544
  TYPE_CONTAINS_TEMPLATE_P (type) = 1;
3545
  finish_record_type (type,
3546
                      chainon (chainon (NULL_TREE, template_field),
3547
                               array_field),
3548
                      0, true);
3549
 
3550
  return type;
3551
}
3552
 
3553
/* Same, taking a thin or fat pointer type instead of a template type. */
3554
 
3555
tree
3556
build_unc_object_type_from_ptr (tree thin_fat_ptr_type, tree object_type,
3557
                                tree name)
3558
{
3559
  tree template_type;
3560
 
3561
  gcc_assert (TYPE_IS_FAT_OR_THIN_POINTER_P (thin_fat_ptr_type));
3562
 
3563
  template_type
3564
    = (TYPE_IS_FAT_POINTER_P (thin_fat_ptr_type)
3565
       ? TREE_TYPE (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (thin_fat_ptr_type))))
3566
       : TREE_TYPE (TYPE_FIELDS (TREE_TYPE (thin_fat_ptr_type))));
3567
  return build_unc_object_type (template_type, object_type, name);
3568
}
3569
 
3570
/* Shift the component offsets within an unconstrained object TYPE to make it
3571
   suitable for use as a designated type for thin pointers.  */
3572
 
3573
void
3574
shift_unc_components_for_thin_pointers (tree type)
3575
{
3576
  /* Thin pointer values designate the ARRAY data of an unconstrained object,
3577
     allocated past the BOUNDS template.  The designated type is adjusted to
3578
     have ARRAY at position zero and the template at a negative offset, so
3579
     that COMPONENT_REFs on (*thin_ptr) designate the proper location.  */
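 
  /* For instance, if the BOUNDS template takes 16 bytes, ARRAY ends up at
     offset 0 and BOUNDS at offset -16, so the bounds stay reachable just
     before the data the thin pointer points to.  */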
3580
 
3581
  tree bounds_field = TYPE_FIELDS (type);
3582
  tree array_field  = TREE_CHAIN (TYPE_FIELDS (type));
3583
 
3584
  DECL_FIELD_OFFSET (bounds_field)
3585
    = size_binop (MINUS_EXPR, size_zero_node, byte_position (array_field));
3586
 
3587
  DECL_FIELD_OFFSET (array_field) = size_zero_node;
3588
  DECL_FIELD_BIT_OFFSET (array_field) = bitsize_zero_node;
3589
}
3590
 
3591
/* Update anything previously pointing to OLD_TYPE to point to NEW_TYPE.
3592
   In the normal case this is just two adjustments, but we have more to
3593
   do if NEW_TYPE is an UNCONSTRAINED_ARRAY_TYPE.  */
3594
 
3595
void
3596
update_pointer_to (tree old_type, tree new_type)
3597
{
3598
  tree ptr = TYPE_POINTER_TO (old_type);
3599
  tree ref = TYPE_REFERENCE_TO (old_type);
3600
  tree ptr1, ref1;
3601
  tree type;
3602
 
3603
  /* If this is the main variant, process all the other variants first.  */
3604
  if (TYPE_MAIN_VARIANT (old_type) == old_type)
3605
    for (type = TYPE_NEXT_VARIANT (old_type); type;
3606
         type = TYPE_NEXT_VARIANT (type))
3607
      update_pointer_to (type, new_type);
3608
 
3609
  /* If no pointers and no references, we are done.  */
3610
  if (!ptr && !ref)
3611
    return;
3612
 
3613
  /* Merge the old type qualifiers in the new type.
3614
 
3615
     Each old variant has qualifiers for specific reasons, and the new
3616
     designated type as well.  Each set of qualifiers represents useful
3617
     information grabbed at some point, and merging the two simply unifies
3618
     these inputs into the final type description.
3619
 
3620
     Consider for instance a volatile type frozen after an access to constant
3621
     type designating it; after the designated type's freeze, we get here with
3622
     a volatile NEW_TYPE and a dummy OLD_TYPE with a readonly variant, created
3623
     when the access type was processed.  We will make a volatile and readonly
3624
     designated type, because that's what it really is.
3625
 
3626
     We might also get here for a non-dummy OLD_TYPE variant with different
3627
     qualifiers than those of NEW_TYPE, for instance in some cases of pointers
3628
     to private record type elaboration (see the comments around the call to
3629
     this routine in gnat_to_gnu_entity <E_Access_Type>).  We have to merge
3630
     the qualifiers in those cases too, to avoid accidentally discarding the
3631
     initial set, and will often end up with OLD_TYPE == NEW_TYPE then.  */
3632
  new_type
3633
    = build_qualified_type (new_type,
3634
                            TYPE_QUALS (old_type) | TYPE_QUALS (new_type));
3635
 
3636
  /* If old type and new type are identical, there is nothing to do.  */
3637
  if (old_type == new_type)
3638
    return;
3639
 
3640
  /* Otherwise, first handle the simple case.  */
3641
  if (TREE_CODE (new_type) != UNCONSTRAINED_ARRAY_TYPE)
3642
    {
3643
      TYPE_POINTER_TO (new_type) = ptr;
3644
      TYPE_REFERENCE_TO (new_type) = ref;
3645
 
3646
      for (; ptr; ptr = TYPE_NEXT_PTR_TO (ptr))
3647
        for (ptr1 = TYPE_MAIN_VARIANT (ptr); ptr1;
3648
             ptr1 = TYPE_NEXT_VARIANT (ptr1))
3649
          TREE_TYPE (ptr1) = new_type;
3650
 
3651
      for (; ref; ref = TYPE_NEXT_REF_TO (ref))
3652
        for (ref1 = TYPE_MAIN_VARIANT (ref); ref1;
3653
             ref1 = TYPE_NEXT_VARIANT (ref1))
3654
          TREE_TYPE (ref1) = new_type;
3655
    }
3656
 
3657
  /* Now deal with the unconstrained array case.  In this case the "pointer"
3658
     is actually a RECORD_TYPE where both fields are pointers to dummy nodes.
3659
     Turn them into pointers to the correct types using update_pointer_to.  */
3660
  else if (!TYPE_IS_FAT_POINTER_P (ptr))
3661
    gcc_unreachable ();
3662
 
3663
  else
3664
    {
3665
      tree new_obj_rec = TYPE_OBJECT_RECORD_TYPE (new_type);
3666
      tree array_field = TYPE_FIELDS (ptr);
3667
      tree bounds_field = TREE_CHAIN (TYPE_FIELDS (ptr));
3668
      tree new_ptr = TYPE_POINTER_TO (new_type);
3669
      tree new_ref;
3670
      tree var;
3671
 
3672
      /* Make pointers to the dummy template point to the real template.  */
3673
      update_pointer_to
3674
        (TREE_TYPE (TREE_TYPE (bounds_field)),
3675
         TREE_TYPE (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (new_ptr)))));
3676
 
3677
      /* The references to the template bounds present in the array type
3678
         are made through a PLACEHOLDER_EXPR of type NEW_PTR.  Since we
3679
         are updating PTR to make it a full replacement for NEW_PTR as
3680
         pointer to NEW_TYPE, we must rework the PLACEHOLDER_EXPR so as
3681
         to make it of type PTR.  */
3682
      new_ref = build3 (COMPONENT_REF, TREE_TYPE (bounds_field),
3683
                        build0 (PLACEHOLDER_EXPR, ptr),
3684
                        bounds_field, NULL_TREE);
3685
 
3686
      /* Create the new array for the new PLACEHOLDER_EXPR and make pointers
3687
         to the dummy array point to it.  */
3688
      update_pointer_to
3689
        (TREE_TYPE (TREE_TYPE (array_field)),
3690
         substitute_in_type (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (new_ptr))),
3691
                             TREE_CHAIN (TYPE_FIELDS (new_ptr)), new_ref));
3692
 
3693
      /* Make PTR the pointer to NEW_TYPE.  */
3694
      TYPE_POINTER_TO (new_type) = TYPE_REFERENCE_TO (new_type)
3695
        = TREE_TYPE (new_type) = ptr;
3696
 
3697
      /* And show the original pointer NEW_PTR to the debugger.  This is the
3698
         counterpart of the equivalent processing in gnat_pushdecl when the
3699
         unconstrained array type is frozen after access types to it.  Note
3700
         that update_pointer_to can be invoked multiple times on the same
3701
         couple of types because of the type variants.  */
3702
      if (TYPE_NAME (ptr)
3703
          && TREE_CODE (TYPE_NAME (ptr)) == TYPE_DECL
3704
          && !DECL_ORIGINAL_TYPE (TYPE_NAME (ptr)))
3705
        {
3706
          DECL_ORIGINAL_TYPE (TYPE_NAME (ptr)) = new_ptr;
3707
          DECL_ARTIFICIAL (TYPE_NAME (ptr)) = 0;
3708
        }
3709
      for (var = TYPE_MAIN_VARIANT (ptr); var; var = TYPE_NEXT_VARIANT (var))
3710
        SET_TYPE_UNCONSTRAINED_ARRAY (var, new_type);
3711
 
3712
      /* Now handle updating the allocation record, what the thin pointer
3713
         points to.  Update all pointers from the old record into the new
3714
         one, update the type of the array field, and recompute the size.  */
3715
      update_pointer_to (TYPE_OBJECT_RECORD_TYPE (old_type), new_obj_rec);
3716
 
3717
      TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec)))
3718
        = TREE_TYPE (TREE_TYPE (array_field));
3719
 
3720
      /* The size recomputation needs to account for alignment constraints, so
3721
         we let layout_type work it out.  This will reset the field offsets to
3722
         what they would be in a regular record, so we shift them back to what
3723
         we want them to be for a thin pointer designated type afterwards.  */
3724
      DECL_SIZE (TYPE_FIELDS (new_obj_rec)) = 0;
3725
      DECL_SIZE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec))) = 0;
3726
      TYPE_SIZE (new_obj_rec) = 0;
3727
      layout_type (new_obj_rec);
3728
 
3729
      shift_unc_components_for_thin_pointers (new_obj_rec);
3730
 
3731
      /* We are done, at last.  */
3732
      rest_of_record_type_compilation (ptr);
3733
    }
3734
}
3735
 
3736
/* Convert EXPR, a pointer to a constrained array, into a pointer to an
3737
   unconstrained one.  This involves making or finding a template.  */
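 
/* The fat pointer built here has two fields: a pointer to the array data
   and a pointer to a template holding the bounds, the latter either taken
   from a thin pointer's record or created by build_template; both fields
   are simply null when EXPR is null.  */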
3738
 
3739
static tree
3740
convert_to_fat_pointer (tree type, tree expr)
3741
{
3742
  tree template_type = TREE_TYPE (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (type))));
3743
  tree p_array_type = TREE_TYPE (TYPE_FIELDS (type));
3744
  tree etype = TREE_TYPE (expr);
3745
  tree template_tree;
3746
 
3747
  /* If EXPR is null, make a fat pointer that contains null pointers to the
3748
     template and array.  */
3749
  if (integer_zerop (expr))
3750
    return
3751
      gnat_build_constructor
3752
        (type,
3753
         tree_cons (TYPE_FIELDS (type),
3754
                    convert (p_array_type, expr),
3755
                    tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
3756
                               convert (build_pointer_type (template_type),
3757
                                        expr),
3758
                               NULL_TREE)));
3759
 
3760
  /* If EXPR is a thin pointer, make template and data from the record.  */
3761
  else if (TYPE_IS_THIN_POINTER_P (etype))
3762
    {
3763
      tree fields = TYPE_FIELDS (TREE_TYPE (etype));
3764
 
3765
      expr = save_expr (expr);
3766
      if (TREE_CODE (expr) == ADDR_EXPR)
3767
        expr = TREE_OPERAND (expr, 0);
3768
      else
3769
        expr = build1 (INDIRECT_REF, TREE_TYPE (etype), expr);
3770
 
3771
      template_tree = build_component_ref (expr, NULL_TREE, fields, false);
3772
      expr = build_unary_op (ADDR_EXPR, NULL_TREE,
3773
                             build_component_ref (expr, NULL_TREE,
3774
                                                  TREE_CHAIN (fields), false));
3775
    }
3776
 
3777
  /* Otherwise, build the constructor for the template.  */
3778
  else
3779
    template_tree = build_template (template_type, TREE_TYPE (etype), expr);
3780
 
3781
  /* The final result is a constructor for the fat pointer.
3782
 
3783
     If EXPR is an argument of a foreign convention subprogram, the type it
3784
     points to is directly the component type.  In this case, the expression
3785
     type may not match the corresponding FIELD_DECL type at this point, so we
3786
     call "convert" here to fix that up if necessary.  This type consistency is
3787
     required, for instance because it ensures that possible later folding of
3788
     COMPONENT_REFs against this constructor always yields something of the
3789
     same type as the initial reference.
3790
 
3791
     Note that the call to "build_template" above is still fine because it
3792
     will only refer to the provided TEMPLATE_TYPE in this case.  */
3793
  return
3794
    gnat_build_constructor
3795
      (type,
3796
       tree_cons (TYPE_FIELDS (type),
3797
                  convert (p_array_type, expr),
3798
                  tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
3799
                             build_unary_op (ADDR_EXPR, NULL_TREE,
3800
                                             template_tree),
3801
                             NULL_TREE)));
3802
}
3803
 
3804
/* Convert to a thin pointer type, TYPE.  The only thing we know how to convert
3805
   is something that is a fat pointer, so convert to it first if EXPR
3806
   is not already a fat pointer.  */
3807
 
3808
static tree
3809
convert_to_thin_pointer (tree type, tree expr)
3810
{
3811
  if (!TYPE_IS_FAT_POINTER_P (TREE_TYPE (expr)))
3812
    expr
3813
      = convert_to_fat_pointer
3814
        (TREE_TYPE (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type))), expr);
3815
 
3816
  /* We get the pointer to the data and use a NOP_EXPR to make it the
3817
     proper GCC type.  */
3818
  expr = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (TREE_TYPE (expr)),
3819
                              false);
3820
  expr = build1 (NOP_EXPR, type, expr);
3821
 
3822
  return expr;
3823
}
3824
 
3825
/* Create an expression whose value is that of EXPR,
3826
   converted to type TYPE.  The TREE_TYPE of the value
3827
   is always TYPE.  This function implements all reasonable
3828
   conversions; callers should filter out those that are
3829
   not permitted by the language being compiled.  */
3830
 
3831
tree
3832
convert (tree type, tree expr)
3833
{
3834
  enum tree_code code = TREE_CODE (type);
3835
  tree etype = TREE_TYPE (expr);
3836
  enum tree_code ecode = TREE_CODE (etype);
3837
 
3838
  /* If EXPR is already the right type, we are done.  */
3839
  if (type == etype)
3840
    return expr;
3841
 
3842
  /* If both input and output have padding and are of variable size, do this
3843
     as an unchecked conversion.  Likewise if one is a mere variant of the
3844
     other, so we avoid a pointless unpad/repad sequence.  */
3845
  else if (code == RECORD_TYPE && ecode == RECORD_TYPE
3846
           && TYPE_PADDING_P (type) && TYPE_PADDING_P (etype)
3847
           && (!TREE_CONSTANT (TYPE_SIZE (type))
3848
               || !TREE_CONSTANT (TYPE_SIZE (etype))
3849
               || gnat_types_compatible_p (type, etype)
3850
               || TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type)))
3851
                  == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (etype)))))
3852
    ;
3853
 
3854
  /* If the output type has padding, convert to the inner type and make a
3855
     constructor to build the record, unless a variable size is involved.  */
3856
  else if (code == RECORD_TYPE && TYPE_PADDING_P (type))
3857
    {
3858
      /* If we previously converted from another type and our type is
3859
         of variable size, remove the conversion to avoid the need for
3860
         variable-sized temporaries.  Likewise for a conversion between
3861
         original and packable version.  */
3862
      if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
3863
          && (!TREE_CONSTANT (TYPE_SIZE (type))
3864
              || (ecode == RECORD_TYPE
3865
                  && TYPE_NAME (etype)
3866
                     == TYPE_NAME (TREE_TYPE (TREE_OPERAND (expr, 0))))))
3867
        expr = TREE_OPERAND (expr, 0);
3868
 
3869
      /* If we are just removing the padding from expr, convert the original
3870
         object if we have variable size in order to avoid the need for some
3871
         variable-sized temporaries.  Likewise if the padding is a variant
3872
         of the other, so we avoid a pointless unpad/repad sequence.  */
3873
      if (TREE_CODE (expr) == COMPONENT_REF
3874
          && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
3875
          && (!TREE_CONSTANT (TYPE_SIZE (type))
3876
              || gnat_types_compatible_p (type,
3877
                                          TREE_TYPE (TREE_OPERAND (expr, 0)))
3878
              || (ecode == RECORD_TYPE
3879
                  && TYPE_NAME (etype)
3880
                     == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type))))))
3881
        return convert (type, TREE_OPERAND (expr, 0));
3882
 
3883
      /* If the inner type is of self-referential size and the expression type
3884
         is a record, do this as an unchecked conversion.  But first pad the
3885
         expression if possible to have the same size on both sides.  */
3886
      if (TREE_CODE (etype) == RECORD_TYPE
3887
          && CONTAINS_PLACEHOLDER_P (DECL_SIZE (TYPE_FIELDS (type))))
3888
        {
3889
          if (TREE_CONSTANT (TYPE_SIZE (etype)))
3890
            expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0, Empty,
3891
                            false, false, false, true), expr);
3892
          return unchecked_convert (type, expr, false);
3893
        }
3894
 
3895
      /* If we are converting between array types with variable size, do the
3896
         final conversion as an unchecked conversion, again to avoid the need
3897
         for some variable-sized temporaries.  If valid, this conversion is
3898
         very likely purely technical and without real effects.  */
3899
      if (TREE_CODE (etype) == ARRAY_TYPE
3900
          && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type))) == ARRAY_TYPE
3901
          && !TREE_CONSTANT (TYPE_SIZE (etype))
3902
          && !TREE_CONSTANT (TYPE_SIZE (type)))
3903
        return unchecked_convert (type,
3904
                                  convert (TREE_TYPE (TYPE_FIELDS (type)),
3905
                                           expr),
3906
                                  false);
3907
 
3908
      return
3909
        gnat_build_constructor (type,
3910
                                tree_cons (TYPE_FIELDS (type),
3911
                                           convert (TREE_TYPE
3912
                                                    (TYPE_FIELDS (type)),
3913
                                                    expr),
3914
                                           NULL_TREE));
3915
    }
3916
 
3917
  /* If the input type has padding, remove it and convert to the output type.
3918
     The conditions ordering is arranged to ensure that the output type is not
3919
     a padding type here, as it is not clear whether the conversion would
3920
     always be correct if this was to happen.  */
3921
  else if (ecode == RECORD_TYPE && TYPE_PADDING_P (etype))
3922
    {
3923
      tree unpadded;
3924
 
3925
      /* If we have just converted to this padded type, just get the
3926
         inner expression.  */
3927
      if (TREE_CODE (expr) == CONSTRUCTOR
3928
          && !VEC_empty (constructor_elt, CONSTRUCTOR_ELTS (expr))
3929
          && VEC_index (constructor_elt, CONSTRUCTOR_ELTS (expr), 0)->index
3930
             == TYPE_FIELDS (etype))
3931
        unpadded
3932
          = VEC_index (constructor_elt, CONSTRUCTOR_ELTS (expr), 0)->value;
3933
 
3934
      /* Otherwise, build an explicit component reference.  */
3935
      else
3936
        unpadded
3937
          = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (etype), false);
3938
 
3939
      return convert (type, unpadded);
3940
    }
3941
 
3942
  /* If the input is a biased type, adjust first.  */
3943
  if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
3944
    return convert (type, fold_build2 (PLUS_EXPR, TREE_TYPE (etype),
3945
                                       fold_convert (TREE_TYPE (etype),
3946
                                                     expr),
3947
                                       TYPE_MIN_VALUE (etype)));
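 
  /* A biased value is stored as an offset from the type's lower bound, so,
     say, 103 in a type ranging over 100 .. 227 is represented as 3; the
     addition above recovers the logical value before converting it.  */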
3948
 
3949
  /* If the input is a justified modular type, we need to extract the actual
3950
     object before converting it to any other type with the exceptions of an
3951
     unconstrained array or of a mere type variant.  It is useful to avoid the
3952
     extraction and conversion in the type variant case because it could end
3953
     up replacing a VAR_DECL expr by a constructor and we might be about to
3954
     take the address of the result.  */
3955
  if (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)
3956
      && code != UNCONSTRAINED_ARRAY_TYPE
3957
      && TYPE_MAIN_VARIANT (type) != TYPE_MAIN_VARIANT (etype))
3958
    return convert (type, build_component_ref (expr, NULL_TREE,
3959
                                               TYPE_FIELDS (etype), false));
3960
 
3961
  /* If converting to a type that contains a template, convert to the data
3962
     type and then build the template. */
3963
  if (code == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (type))
3964
    {
3965
      tree obj_type = TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (type)));
3966
 
3967
      /* If the source already has a template, get a reference to the
3968
         associated array only, as we are going to rebuild a template
3969
         for the target type anyway.  */
3970
      expr = maybe_unconstrained_array (expr);
3971
 
3972
      return
3973
        gnat_build_constructor
3974
          (type,
3975
           tree_cons (TYPE_FIELDS (type),
3976
                      build_template (TREE_TYPE (TYPE_FIELDS (type)),
3977
                                      obj_type, NULL_TREE),
3978
                      tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
3979
                                 convert (obj_type, expr), NULL_TREE)));
3980
    }
3981
 
3982
  /* There are some special cases of expressions that we process
3983
     specially.  */
3984
  switch (TREE_CODE (expr))
3985
    {
3986
    case ERROR_MARK:
3987
      return expr;
3988
 
3989
    case NULL_EXPR:
3990
      /* Just set its type here.  For TRANSFORM_EXPR, we will do the actual
3991
         conversion in gnat_expand_expr.  NULL_EXPR does not represent
3992
         an actual value, so no conversion is needed.  */
3993
      expr = copy_node (expr);
3994
      TREE_TYPE (expr) = type;
3995
      return expr;
3996
 
3997
    case STRING_CST:
3998
      /* If we are converting a STRING_CST to another constrained array type,
3999
         just make a new one in the proper type.  */
4000
      if (code == ecode && AGGREGATE_TYPE_P (etype)
4001
          && !(TREE_CODE (TYPE_SIZE (etype)) == INTEGER_CST
4002
               && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST))
4003
        {
4004
          expr = copy_node (expr);
4005
          TREE_TYPE (expr) = type;
4006
          return expr;
4007
        }
4008
      break;
4009
 
4010
    case VECTOR_CST:
4011
      /* If we are converting a VECTOR_CST to a mere variant type, just make
4012
         a new one in the proper type.  */
4013
      if (code == ecode && gnat_types_compatible_p (type, etype))
4014
        {
4015
          expr = copy_node (expr);
4016
          TREE_TYPE (expr) = type;
4017
          return expr;
4018
        }
4019
 
4020
    case CONSTRUCTOR:
4021
      /* If we are converting a CONSTRUCTOR to a mere variant type, just make
4022
         a new one in the proper type.  */
4023
      if (code == ecode && gnat_types_compatible_p (type, etype))
4024
        {
4025
          expr = copy_node (expr);
4026
          TREE_TYPE (expr) = type;
4027
          return expr;
4028
        }
4029
 
4030
      /* Likewise for a conversion between original and packable version, but
4031
         we have to work harder in order to preserve type consistency.  */
4032
      if (code == ecode
4033
          && code == RECORD_TYPE
4034
          && TYPE_NAME (type) == TYPE_NAME (etype))
4035
        {
4036
          VEC(constructor_elt,gc) *e = CONSTRUCTOR_ELTS (expr);
4037
          unsigned HOST_WIDE_INT len = VEC_length (constructor_elt, e);
4038
          VEC(constructor_elt,gc) *v = VEC_alloc (constructor_elt, gc, len);
4039
          tree efield = TYPE_FIELDS (etype), field = TYPE_FIELDS (type);
4040
          unsigned HOST_WIDE_INT idx;
4041
          tree index, value;
4042
 
4043
          /* Whether we need to clear TREE_CONSTANT et al. on the output
4044
             constructor when we convert in place.  */
4045
          bool clear_constant = false;
4046
 
4047
          FOR_EACH_CONSTRUCTOR_ELT(e, idx, index, value)
4048
            {
4049
              constructor_elt *elt = VEC_quick_push (constructor_elt, v, NULL);
4050
              /* We expect only simple constructors.  Otherwise, punt.  */
4051
              if (!(index == efield || index == DECL_ORIGINAL_FIELD (efield)))
4052
                break;
4053
              elt->index = field;
4054
              elt->value = convert (TREE_TYPE (field), value);
4055
 
4056
              /* If packing has made this field a bitfield and the input
4057
                 value couldn't be emitted statically any more, we need to
4058
                 clear TREE_CONSTANT on our output.  */
4059
              if (!clear_constant && TREE_CONSTANT (expr)
4060
                  && !CONSTRUCTOR_BITFIELD_P (efield)
4061
                  && CONSTRUCTOR_BITFIELD_P (field)
4062
                  && !initializer_constant_valid_for_bitfield_p (value))
4063
                clear_constant = true;
4064
 
4065
              efield = TREE_CHAIN (efield);
4066
              field = TREE_CHAIN (field);
4067
            }
4068
 
4069
          /* If we have been able to match and convert all the input fields
4070
             to their output type, convert in place now.  We'll fall back to a
4071
             view conversion downstream otherwise.  */
4072
          if (idx == len)
4073
            {
4074
              expr = copy_node (expr);
4075
              TREE_TYPE (expr) = type;
4076
              CONSTRUCTOR_ELTS (expr) = v;
4077
              if (clear_constant)
4078
                TREE_CONSTANT (expr) = TREE_STATIC (expr) = false;
4079
              return expr;
4080
            }
4081
        }
4082
 
4083
      /* Likewise for a conversion between array type and vector type with a
4084
         compatible representative array.  */
4085
      else if (code == VECTOR_TYPE
4086
               && ecode == ARRAY_TYPE
4087
               && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
4088
                                           etype))
4089
        {
4090
          VEC(constructor_elt,gc) *e = CONSTRUCTOR_ELTS (expr);
4091
          unsigned HOST_WIDE_INT len = VEC_length (constructor_elt, e);
4092
          VEC(constructor_elt,gc) *v;
4093
          unsigned HOST_WIDE_INT ix;
4094
          tree value;
4095
 
4096
          /* Build a VECTOR_CST from a *constant* array constructor.  */
4097
          if (TREE_CONSTANT (expr))
4098
            {
4099
              bool constant_p = true;
4100
 
4101
              /* Iterate through elements and check if all constructor
4102
                 elements are *_CSTs.  */
4103
              FOR_EACH_CONSTRUCTOR_VALUE (e, ix, value)
4104
                if (!CONSTANT_CLASS_P (value))
4105
                  {
4106
                    constant_p = false;
4107
                    break;
4108
                  }
4109
 
4110
              if (constant_p)
4111
                return build_vector_from_ctor (type,
4112
                                               CONSTRUCTOR_ELTS (expr));
4113
            }
4114
 
4115
          /* Otherwise, build a regular vector constructor.  */
4116
          v = VEC_alloc (constructor_elt, gc, len);
4117
          FOR_EACH_CONSTRUCTOR_VALUE (e, ix, value)
4118
            {
4119
              constructor_elt *elt = VEC_quick_push (constructor_elt, v, NULL);
4120
              elt->index = NULL_TREE;
4121
              elt->value = value;
4122
            }
4123
          expr = copy_node (expr);
4124
          TREE_TYPE (expr) = type;
4125
          CONSTRUCTOR_ELTS (expr) = v;
4126
          return expr;
4127
        }
4128
      break;
4129
 
4130
    case UNCONSTRAINED_ARRAY_REF:
4131
      /* Convert this to the type of the inner array by getting the address of
4132
         the array from the template.  */
4133
      expr = build_unary_op (INDIRECT_REF, NULL_TREE,
4134
                             build_component_ref (TREE_OPERAND (expr, 0),
4135
                                                  get_identifier ("P_ARRAY"),
4136
                                                  NULL_TREE, false));
4137
      etype = TREE_TYPE (expr);
4138
      ecode = TREE_CODE (etype);
4139
      break;
4140
 
4141
    case VIEW_CONVERT_EXPR:
4142
      {
4143
        /* GCC 4.x is very sensitive to type consistency overall, and view
4144
           conversions thus are very frequent.  Even though just "convert"ing
4145
           the inner operand to the output type is fine in most cases, it
4146
           might expose unexpected input/output type mismatches in special
4147
           circumstances so we avoid such recursive calls when we can.  */
4148
        tree op0 = TREE_OPERAND (expr, 0);
4149
 
4150
        /* If we are converting back to the original type, we can just
4151
           lift the input conversion.  This is a common occurrence with
4152
           switches back-and-forth amongst type variants.  */
4153
        if (type == TREE_TYPE (op0))
4154
          return op0;
4155
 
4156
        /* Otherwise, if we're converting between two aggregate or vector
4157
           types, we might be allowed to substitute the VIEW_CONVERT_EXPR
4158
           target type in place or to just convert the inner expression.  */
4159
        if ((AGGREGATE_TYPE_P (type) && AGGREGATE_TYPE_P (etype))
4160
            || (VECTOR_TYPE_P (type) && VECTOR_TYPE_P (etype)))
4161
          {
4162
            /* If we are converting between mere variants, we can just
4163
               substitute the VIEW_CONVERT_EXPR in place.  */
4164
            if (gnat_types_compatible_p (type, etype))
4165
              return build1 (VIEW_CONVERT_EXPR, type, op0);
4166
 
4167
            /* Otherwise, we may just bypass the input view conversion unless
4168
               one of the types is a fat pointer, which is handled by
4169
               specialized code below which relies on exact type matching.  */
4170
            else if (!TYPE_IS_FAT_POINTER_P (type)
4171
                     && !TYPE_IS_FAT_POINTER_P (etype))
4172
              return convert (type, op0);
4173
          }
4174
      }
4175
      break;
4176
 
4177
    case INDIRECT_REF:
4178
      /* If both types are record types, just convert the pointer and
4179
         make a new INDIRECT_REF.
4180
 
4181
         ??? Disable this for now since it causes problems with the
4182
         code in build_binary_op for MODIFY_EXPR which wants to
4183
         strip off conversions.  But that code really is a mess and
4184
         we need to do this a much better way some time.  */
4185
      if (0
4186
          && (TREE_CODE (type) == RECORD_TYPE
4187
              || TREE_CODE (type) == UNION_TYPE)
4188
          && (TREE_CODE (etype) == RECORD_TYPE
4189
              || TREE_CODE (etype) == UNION_TYPE)
4190
          && !TYPE_IS_FAT_POINTER_P (type) && !TYPE_IS_FAT_POINTER_P (etype))
4191
        return build_unary_op (INDIRECT_REF, NULL_TREE,
4192
                               convert (build_pointer_type (type),
4193
                                        TREE_OPERAND (expr, 0)));
4194
      break;
4195
 
4196
    default:
4197
      break;
4198
    }
4199
 
4200
  /* Check for converting to a pointer to an unconstrained array.  */
4201
  if (TYPE_IS_FAT_POINTER_P (type) && !TYPE_IS_FAT_POINTER_P (etype))
4202
    return convert_to_fat_pointer (type, expr);
4203
 
4204
  /* If we are converting between two aggregate or vector types that are mere
4205
     variants, just make a VIEW_CONVERT_EXPR.  Likewise when we are converting
4206
     to a vector type from its representative array type.  */
4207
  else if ((code == ecode
4208
            && (AGGREGATE_TYPE_P (type) || VECTOR_TYPE_P (type))
4209
            && gnat_types_compatible_p (type, etype))
4210
           || (code == VECTOR_TYPE
4211
               && ecode == ARRAY_TYPE
4212
               && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
4213
                                           etype)))
4214
    return build1 (VIEW_CONVERT_EXPR, type, expr);
4215
 
4216
  /* In all other cases of related types, make a NOP_EXPR.  */
4217
  else if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype)
4218
           || (code == INTEGER_CST && ecode == INTEGER_CST
4219
               && (type == TREE_TYPE (etype) || etype == TREE_TYPE (type))))
4220
    return fold_convert (type, expr);
4221
 
4222
  switch (code)
4223
    {
4224
    case VOID_TYPE:
4225
      return fold_build1 (CONVERT_EXPR, type, expr);
4226
 
4227
    case INTEGER_TYPE:
4228
      if (TYPE_HAS_ACTUAL_BOUNDS_P (type)
4229
          && (ecode == ARRAY_TYPE || ecode == UNCONSTRAINED_ARRAY_TYPE
4230
              || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))))
4231
        return unchecked_convert (type, expr, false);
4232
      else if (TYPE_BIASED_REPRESENTATION_P (type))
4233
        return fold_convert (type,
4234
                             fold_build2 (MINUS_EXPR, TREE_TYPE (type),
4235
                                          convert (TREE_TYPE (type), expr),
4236
                                          TYPE_MIN_VALUE (type)));
4237
 
4238
      /* ... fall through ... */
4239
 
4240
    case ENUMERAL_TYPE:
4241
    case BOOLEAN_TYPE:
4242
      /* If we are converting an additive expression to an integer type
4243
         with lower precision, be wary of the optimization that can be
4244
         applied by convert_to_integer.  There are 2 problematic cases:
4245
           - if the first operand was originally of a biased type,
4246
             because we could be recursively called to convert it
4247
             to an intermediate type and thus rematerialize the
4248
             additive operator endlessly,
4249
           - if the expression contains a placeholder, because an
4250
             intermediate conversion that changes the sign could
4251
             be inserted and thus introduce an artificial overflow
4252
             at compile time when the placeholder is substituted.  */
4253
      if (code == INTEGER_TYPE
4254
          && ecode == INTEGER_TYPE
4255
          && TYPE_PRECISION (type) < TYPE_PRECISION (etype)
4256
          && (TREE_CODE (expr) == PLUS_EXPR || TREE_CODE (expr) == MINUS_EXPR))
4257
        {
4258
          tree op0 = get_unwidened (TREE_OPERAND (expr, 0), type);
4259
 
4260
          if ((TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
4261
               && TYPE_BIASED_REPRESENTATION_P (TREE_TYPE (op0)))
4262
              || CONTAINS_PLACEHOLDER_P (expr))
4263
            return build1 (NOP_EXPR, type, expr);
4264
        }
4265
 
4266
      return fold (convert_to_integer (type, expr));
4267
 
4268
    case POINTER_TYPE:
4269
    case REFERENCE_TYPE:
4270
      /* If converting between two pointers to records denoting
4271
         both a template and type, adjust if needed to account
4272
         for any differing offsets, since one might be negative.  */
4273
      if (TYPE_IS_THIN_POINTER_P (etype) && TYPE_IS_THIN_POINTER_P (type))
4274
        {
4275
          tree bit_diff
4276
            = size_diffop (bit_position (TYPE_FIELDS (TREE_TYPE (etype))),
4277
                           bit_position (TYPE_FIELDS (TREE_TYPE (type))));
4278
          tree byte_diff = size_binop (CEIL_DIV_EXPR, bit_diff,
4279
                                       sbitsize_int (BITS_PER_UNIT));
4280
 
4281
          expr = build1 (NOP_EXPR, type, expr);
4282
          TREE_CONSTANT (expr) = TREE_CONSTANT (TREE_OPERAND (expr, 0));
4283
          if (integer_zerop (byte_diff))
4284
            return expr;
4285
 
4286
          return build_binary_op (POINTER_PLUS_EXPR, type, expr,
4287
                                  fold (convert (sizetype, byte_diff)));
4288
        }
4289
 
4290
      /* If converting to a thin pointer, handle specially.  */
4291
      if (TYPE_IS_THIN_POINTER_P (type)
4292
          && TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type)))
4293
        return convert_to_thin_pointer (type, expr);
4294
 
4295
      /* If converting fat pointer to normal pointer, get the pointer to the
4296
         array and then convert it.  */
4297
      else if (TYPE_IS_FAT_POINTER_P (etype))
4298
        expr = build_component_ref (expr, get_identifier ("P_ARRAY"),
4299
                                    NULL_TREE, false);
4300
 
4301
      return fold (convert_to_pointer (type, expr));
4302
 
4303
    case REAL_TYPE:
4304
      return fold (convert_to_real (type, expr));
4305
 
4306
    case RECORD_TYPE:
4307
      if (TYPE_JUSTIFIED_MODULAR_P (type) && !AGGREGATE_TYPE_P (etype))
4308
        return
4309
          gnat_build_constructor
4310
            (type, tree_cons (TYPE_FIELDS (type),
4311
                              convert (TREE_TYPE (TYPE_FIELDS (type)), expr),
4312
                              NULL_TREE));
4313
 
4314
      /* ... fall through ... */
4315
 
4316
    case ARRAY_TYPE:
4317
      /* In these cases, assume the front-end has validated the conversion.
4318
         If the conversion is valid, it will be a bit-wise conversion, so
4319
         it can be viewed as an unchecked conversion.  */
4320
      return unchecked_convert (type, expr, false);
4321
 
4322
    case UNION_TYPE:
4323
      /* This is either a conversion between a tagged type and some
4324
         subtype, which we have to mark as a UNION_TYPE because of
4325
         overlapping fields or a conversion of an Unchecked_Union.  */
4326
      return unchecked_convert (type, expr, false);
4327
 
4328
    case UNCONSTRAINED_ARRAY_TYPE:
4329
      /* If the input is a VECTOR_TYPE, convert to the representative
4330
         array type first.  */
4331
      if (ecode == VECTOR_TYPE)
4332
        {
4333
          expr = convert (TYPE_REPRESENTATIVE_ARRAY (etype), expr);
4334
          etype = TREE_TYPE (expr);
4335
          ecode = TREE_CODE (etype);
4336
        }
4337
 
4338
      /* If EXPR is a constrained array, take its address, convert it to a
4339
         fat pointer, and then dereference it.  Likewise if EXPR is a
4340
         record containing both a template and a constrained array.
4341
         Note that a record representing a justified modular type
4342
         always represents a packed constrained array.  */
4343
      if (ecode == ARRAY_TYPE
4344
          || (ecode == INTEGER_TYPE && TYPE_HAS_ACTUAL_BOUNDS_P (etype))
4345
          || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))
4346
          || (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)))
4347
        return
4348
          build_unary_op
4349
            (INDIRECT_REF, NULL_TREE,
4350
             convert_to_fat_pointer (TREE_TYPE (type),
4351
                                     build_unary_op (ADDR_EXPR,
4352
                                                     NULL_TREE, expr)));
4353
 
4354
      /* Do something very similar for converting one unconstrained
4355
         array to another.  */
4356
      else if (ecode == UNCONSTRAINED_ARRAY_TYPE)
4357
        return
4358
          build_unary_op (INDIRECT_REF, NULL_TREE,
4359
                          convert (TREE_TYPE (type),
4360
                                   build_unary_op (ADDR_EXPR,
4361
                                                   NULL_TREE, expr)));
4362
      else
4363
        gcc_unreachable ();
4364
 
4365
    case COMPLEX_TYPE:
4366
      return fold (convert_to_complex (type, expr));
4367
 
4368
    default:
4369
      gcc_unreachable ();
4370
    }
4371
}
4372
 
4373
/* Remove all conversions that are done in EXP.  This includes converting
4374
   from a padded type or to a justified modular type.  If TRUE_ADDRESS
4375
   is true, always return the address of the containing object even if
4376
   the address is not bit-aligned.  */
4377
 
4378
tree
4379
remove_conversions (tree exp, bool true_address)
4380
{
4381
  switch (TREE_CODE (exp))
4382
    {
4383
    case CONSTRUCTOR:
4384
      if (true_address
4385
          && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
4386
          && TYPE_JUSTIFIED_MODULAR_P (TREE_TYPE (exp)))
4387
        return
4388
          remove_conversions (VEC_index (constructor_elt,
4389
                                         CONSTRUCTOR_ELTS (exp), 0)->value,
4390
                              true);
4391
      break;
4392
 
4393
    case COMPONENT_REF:
4394
      if (TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
4395
        return remove_conversions (TREE_OPERAND (exp, 0), true_address);
4396
      break;
4397
 
4398
    case VIEW_CONVERT_EXPR:  case NON_LVALUE_EXPR:
4399
    CASE_CONVERT:
4400
      return remove_conversions (TREE_OPERAND (exp, 0), true_address);
4401
 
4402
    default:
4403
      break;
4404
    }
4405
 
4406
  return exp;
4407
}
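/* Editorial sketch, not part of the original source: remove_conversions is
   typically used to peel view conversions and padding off an expression
   before asking what object lies underneath, as in the hypothetical helper
   below (kept under #if 0 so it does not affect compilation).  */
#if 0
static bool
underlying_decl_p (tree gnu_expr)
{
  tree inner = remove_conversions (gnu_expr, false);
  return DECL_P (inner);
}
#endif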
4408
 
4409
/* If EXP's type is an UNCONSTRAINED_ARRAY_TYPE, return an expression that
4410
   refers to the underlying array.  If its type has TYPE_CONTAINS_TEMPLATE_P,
4411
   likewise return an expression pointing to the underlying array.  */
4412
 
4413
tree
4414
maybe_unconstrained_array (tree exp)
4415
{
4416
  enum tree_code code = TREE_CODE (exp);
4417
  tree new_exp;
4418
 
4419
  switch (TREE_CODE (TREE_TYPE (exp)))
4420
    {
4421
    case UNCONSTRAINED_ARRAY_TYPE:
4422
      if (code == UNCONSTRAINED_ARRAY_REF)
4423
        {
4424
          new_exp
4425
            = build_unary_op (INDIRECT_REF, NULL_TREE,
4426
                              build_component_ref (TREE_OPERAND (exp, 0),
4427
                                                   get_identifier ("P_ARRAY"),
4428
                                                   NULL_TREE, false));
4429
          TREE_READONLY (new_exp) = TREE_STATIC (new_exp)
4430
            = TREE_READONLY (exp);
4431
          return new_exp;
4432
        }
4433
 
4434
      else if (code == NULL_EXPR)
4435
        return build1 (NULL_EXPR,
4436
                       TREE_TYPE (TREE_TYPE (TYPE_FIELDS
4437
                                             (TREE_TYPE (TREE_TYPE (exp))))),
4438
                       TREE_OPERAND (exp, 0));
4439
 
4440
    case RECORD_TYPE:
4441
      /* If this is a padded type, convert to the unpadded type and see if
4442
         it contains a template.  */
4443
      if (TYPE_PADDING_P (TREE_TYPE (exp)))
4444
        {
4445
          new_exp = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (exp))), exp);
4446
          if (TREE_CODE (TREE_TYPE (new_exp)) == RECORD_TYPE
4447
              && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (new_exp)))
4448
            return
4449
              build_component_ref (new_exp, NULL_TREE,
4450
                                   TREE_CHAIN
4451
                                   (TYPE_FIELDS (TREE_TYPE (new_exp))),
4452
                                   0);
4453
        }
4454
      else if (TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (exp)))
4455
        return
4456
          build_component_ref (exp, NULL_TREE,
4457
                               TREE_CHAIN (TYPE_FIELDS (TREE_TYPE (exp))), 0);
4458
      break;
4459
 
4460
    default:
4461
      break;
4462
    }
4463
 
4464
  return exp;
4465
}
4466
 
4467
/* If EXP's type is a VECTOR_TYPE, return EXP converted to the associated
4468
   TYPE_REPRESENTATIVE_ARRAY.  */
4469
 
4470
tree
4471
maybe_vector_array (tree exp)
4472
{
4473
  tree etype = TREE_TYPE (exp);
4474
 
4475
  if (VECTOR_TYPE_P (etype))
4476
    exp = convert (TYPE_REPRESENTATIVE_ARRAY (etype), exp);
4477
 
4478
  return exp;
4479
}
4480
 
4481
/* Return true if EXPR is an expression that can be folded as an operand
4482
   of a VIEW_CONVERT_EXPR.  See ada-tree.h for a complete rationale.  */
4483
 
4484
static bool
4485
can_fold_for_view_convert_p (tree expr)
4486
{
4487
  tree t1, t2;
4488
 
4489
  /* The folder will fold NOP_EXPRs between integral types with the same
4490
     precision (in the middle-end's sense).  We cannot allow it if the
4491
     types don't have the same precision in the Ada sense as well.  */
4492
  if (TREE_CODE (expr) != NOP_EXPR)
4493
    return true;
4494
 
4495
  t1 = TREE_TYPE (expr);
4496
  t2 = TREE_TYPE (TREE_OPERAND (expr, 0));
4497
 
4498
  /* Defer to the folder for non-integral conversions.  */
4499
  if (!(INTEGRAL_TYPE_P (t1) && INTEGRAL_TYPE_P (t2)))
4500
    return true;
4501
 
4502
  /* Only fold conversions that preserve both precisions.  */
4503
  if (TYPE_PRECISION (t1) == TYPE_PRECISION (t2)
4504
      && operand_equal_p (rm_size (t1), rm_size (t2), 0))
4505
    return true;
4506
 
4507
  return false;
4508
}
4509
 
4510
/* Return an expression that does an unchecked conversion of EXPR to TYPE.
4511
   If NOTRUNC_P is true, truncation operations should be suppressed.
4512
 
4513
   Special care is required with (source or target) integral types whose
4514
   precision is not equal to their size, to make sure we fetch or assign
4515
   the value bits whose location might depend on the endianness, e.g.
4516
 
4517
     Rmsize : constant := 8;
4518
     subtype Int is Integer range 0 .. 2 ** Rmsize - 1;
4519
 
4520
     type Bit_Array is array (1 .. Rmsize) of Boolean;
4521
     pragma Pack (Bit_Array);
4522
 
4523
     function To_Bit_Array is new Unchecked_Conversion (Int, Bit_Array);
4524
 
4525
     Value : Int := 2#1000_0001#;
4526
     Vbits : Bit_Array := To_Bit_Array (Value);
4527
 
4528
   we expect the 8 bits at Vbits'Address to always contain Value, while
4529
   their original location depends on the endianness, at Value'Address
4530
   on a little-endian architecture but not on a big-endian one.  */
4531
 
4532
tree
4533
unchecked_convert (tree type, tree expr, bool notrunc_p)
4534
{
4535
  tree etype = TREE_TYPE (expr);
4536
 
4537
  /* If the expression is already the right type, we are done.  */
4538
  if (etype == type)
4539
    return expr;
4540
 
4541
  /* If both types are integral, just do a normal conversion.
4542
     Likewise for a conversion to an unconstrained array.  */
4543
  if ((((INTEGRAL_TYPE_P (type)
4544
         && !(TREE_CODE (type) == INTEGER_TYPE
4545
              && TYPE_VAX_FLOATING_POINT_P (type)))
4546
        || (POINTER_TYPE_P (type) && ! TYPE_IS_THIN_POINTER_P (type))
4547
        || (TREE_CODE (type) == RECORD_TYPE
4548
            && TYPE_JUSTIFIED_MODULAR_P (type)))
4549
       && ((INTEGRAL_TYPE_P (etype)
4550
            && !(TREE_CODE (etype) == INTEGER_TYPE
4551
                 && TYPE_VAX_FLOATING_POINT_P (etype)))
4552
           || (POINTER_TYPE_P (etype) && !TYPE_IS_THIN_POINTER_P (etype))
4553
           || (TREE_CODE (etype) == RECORD_TYPE
4554
               && TYPE_JUSTIFIED_MODULAR_P (etype))))
4555
      || TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
4556
    {
4557
      if (TREE_CODE (etype) == INTEGER_TYPE
4558
          && TYPE_BIASED_REPRESENTATION_P (etype))
4559
        {
4560
          tree ntype = copy_type (etype);
4561
          TYPE_BIASED_REPRESENTATION_P (ntype) = 0;
4562
          TYPE_MAIN_VARIANT (ntype) = ntype;
4563
          expr = build1 (NOP_EXPR, ntype, expr);
4564
        }
4565
 
4566
      if (TREE_CODE (type) == INTEGER_TYPE
4567
          && TYPE_BIASED_REPRESENTATION_P (type))
4568
        {
4569
          tree rtype = copy_type (type);
4570
          TYPE_BIASED_REPRESENTATION_P (rtype) = 0;
4571
          TYPE_MAIN_VARIANT (rtype) = rtype;
4572
          expr = convert (rtype, expr);
4573
          expr = build1 (NOP_EXPR, type, expr);
4574
        }
4575
      else
4576
        expr = convert (type, expr);
4577
    }
4578
 
4579
  /* If we are converting to an integral type whose precision is not equal
4580
     to its size, first unchecked convert to a record that contains an
4581
     object of the output type.  Then extract the field. */
4582
  else if (INTEGRAL_TYPE_P (type) && TYPE_RM_SIZE (type)
4583
           && 0 != compare_tree_int (TYPE_RM_SIZE (type),
4584
                                     GET_MODE_BITSIZE (TYPE_MODE (type))))
4585
    {
4586
      tree rec_type = make_node (RECORD_TYPE);
4587
      tree field = create_field_decl (get_identifier ("OBJ"), type,
4588
                                      rec_type, 1, 0, 0, 0);
4589
 
4590
      TYPE_FIELDS (rec_type) = field;
4591
      layout_type (rec_type);
4592
 
4593
      expr = unchecked_convert (rec_type, expr, notrunc_p);
4594
      expr = build_component_ref (expr, NULL_TREE, field, 0);
4595
    }
4596
 
4597
  /* Similarly if we are converting from an integral type whose precision
4598
     is not equal to its size.  */
4599
  else if (INTEGRAL_TYPE_P (etype) && TYPE_RM_SIZE (etype)
4600
      && 0 != compare_tree_int (TYPE_RM_SIZE (etype),
4601
                                GET_MODE_BITSIZE (TYPE_MODE (etype))))
4602
    {
4603
      tree rec_type = make_node (RECORD_TYPE);
4604
      tree field
4605
        = create_field_decl (get_identifier ("OBJ"), etype, rec_type,
4606
                             1, 0, 0, 0);
4607
 
4608
      TYPE_FIELDS (rec_type) = field;
4609
      layout_type (rec_type);
4610
 
4611
      expr = gnat_build_constructor (rec_type, build_tree_list (field, expr));
4612
      expr = unchecked_convert (type, expr, notrunc_p);
4613
    }
4614
 
4615
  /* We have a special case when we are converting between two unconstrained
4616
     array types.  In that case, take the address, convert the fat pointer
4617
     types, and dereference.  */
4618
  else if (TREE_CODE (etype) == UNCONSTRAINED_ARRAY_TYPE
4619
           && TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
4620
    expr = build_unary_op (INDIRECT_REF, NULL_TREE,
4621
                           build1 (VIEW_CONVERT_EXPR, TREE_TYPE (type),
4622
                                   build_unary_op (ADDR_EXPR, NULL_TREE,
4623
                                                   expr)));
4624
 
4625
  /* Another special case is when we are converting to a vector type from its
4626
     representative array type; this is a regular conversion.  */
4627
  else if (TREE_CODE (type) == VECTOR_TYPE
4628
           && TREE_CODE (etype) == ARRAY_TYPE
4629
           && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
4630
                                       etype))
4631
    expr = convert (type, expr);
4632
 
4633
  else
4634
    {
4635
      expr = maybe_unconstrained_array (expr);
4636
      etype = TREE_TYPE (expr);
4637
      if (can_fold_for_view_convert_p (expr))
4638
        expr = fold_build1 (VIEW_CONVERT_EXPR, type, expr);
4639
      else
4640
        expr = build1 (VIEW_CONVERT_EXPR, type, expr);
4641
    }
4642
 
4643
  /* If the result is an integral type whose precision is not equal to its
4644
     size, sign- or zero-extend the result.  We need not do this if the input
4645
     is an integral type of the same precision and signedness or if the output
4646
     is a biased type or if both the input and output are unsigned.  */
4647
  if (!notrunc_p
4648
      && INTEGRAL_TYPE_P (type) && TYPE_RM_SIZE (type)
4649
      && !(TREE_CODE (type) == INTEGER_TYPE
4650
           && TYPE_BIASED_REPRESENTATION_P (type))
4651
      && 0 != compare_tree_int (TYPE_RM_SIZE (type),
4652
                                GET_MODE_BITSIZE (TYPE_MODE (type)))
4653
      && !(INTEGRAL_TYPE_P (etype)
4654
           && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (etype)
4655
           && operand_equal_p (TYPE_RM_SIZE (type),
4656
                               (TYPE_RM_SIZE (etype) != 0
4657
                                ? TYPE_RM_SIZE (etype) : TYPE_SIZE (etype)),
4658
                               0))
4659
      && !(TYPE_UNSIGNED (type) && TYPE_UNSIGNED (etype)))
4660
    {
4661
      tree base_type = gnat_type_for_mode (TYPE_MODE (type),
4662
                                           TYPE_UNSIGNED (type));
4663
      tree shift_expr
4664
        = convert (base_type,
4665
                   size_binop (MINUS_EXPR,
4666
                               bitsize_int
4667
                               (GET_MODE_BITSIZE (TYPE_MODE (type))),
4668
                               TYPE_RM_SIZE (type)));
4669
      expr
4670
        = convert (type,
4671
                   build_binary_op (RSHIFT_EXPR, base_type,
4672
                                    build_binary_op (LSHIFT_EXPR, base_type,
4673
                                                     convert (base_type, expr),
4674
                                                     shift_expr),
4675
                                    shift_expr));
4676
    }
4677
 
4678
  /* An unchecked conversion should never raise Constraint_Error.  The code
4679
     below assumes that GCC's conversion routines overflow the same way that
4680
     the underlying hardware does.  This is probably true.  In the rare case
4681
     when it is false, we can rely on the fact that such conversions are
4682
     erroneous anyway.  */
4683
  if (TREE_CODE (expr) == INTEGER_CST)
4684
    TREE_OVERFLOW (expr) = 0;
4685
 
4686
  /* If the sizes of the types differ and this is a VIEW_CONVERT_EXPR,
4687
     mark the expression as no longer constant.  */
4688
  if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
4689
      && !operand_equal_p (TYPE_SIZE_UNIT (type), TYPE_SIZE_UNIT (etype),
4690
                           OEP_ONLY_CONST))
4691
    TREE_CONSTANT (expr) = 0;
4692
 
4693
  return expr;
4694
}
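/* Editorial illustration, not part of the original source: the LSHIFT/RSHIFT
   pair built near the end of unchecked_convert amounts to the following
   arithmetic on an ordinary integer, sketched here for a hypothetical 5-bit
   RM size carried in a 32-bit signed container, i.e. a shift count of
   GET_MODE_BITSIZE - TYPE_RM_SIZE = 27 (kept under #if 0 on purpose).  */
#if 0
static int
example_extend_rm_size (unsigned int raw)
{
  const int shift = 32 - 5;              /* mode bitsize minus RM size  */
  return (int) (raw << shift) >> shift;  /* left then arithmetic right shift  */
}
#endif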
4695
 
4696
/* Return the appropriate GCC tree code for the specified GNAT_TYPE,
4697
   the latter being a record type as predicated by Is_Record_Type.  */
4698
 
4699
enum tree_code
4700
tree_code_for_record_type (Entity_Id gnat_type)
4701
{
4702
  Node_Id component_list
4703
    = Component_List (Type_Definition
4704
                      (Declaration_Node
4705
                       (Implementation_Base_Type (gnat_type))));
4706
  Node_Id component;
4707
 
4708
 /* Make this a UNION_TYPE unless it's either not an Unchecked_Union or
4709
    we have a non-discriminant field outside a variant.  In either case,
4710
    it's a RECORD_TYPE.  */
4711
 
4712
  if (!Is_Unchecked_Union (gnat_type))
4713
    return RECORD_TYPE;
4714
 
4715
  for (component = First_Non_Pragma (Component_Items (component_list));
4716
       Present (component);
4717
       component = Next_Non_Pragma (component))
4718
    if (Ekind (Defining_Entity (component)) == E_Component)
4719
      return RECORD_TYPE;
4720
 
4721
  return UNION_TYPE;
4722
}
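/* Editorial illustration, not part of the original source: an Ada type in
   the style of the RM B.3.3 example, e.g.

     type T (Flag : Boolean := False) is record
        case Flag is
           when False => F1 : Float;
           when True  => F2 : Integer;
        end case;
     end record;
     pragma Unchecked_Union (T);

   has no non-discriminant component outside its variant part, so the
   function above maps it to UNION_TYPE; dropping the pragma or adding an
   ordinary component outside the variant part yields RECORD_TYPE.  */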
4723
 
4724
/* Return true if GNAT_TYPE is a "double" floating-point type, i.e. whose
4725
   size is equal to 64 bits, or an array of such a type.  Set ALIGN_CLAUSE
4726
   according to the presence of an alignment clause on the type or, if it
4727
   is an array, on the component type.  */
4728
 
4729
bool
4730
is_double_float_or_array (Entity_Id gnat_type, bool *align_clause)
4731
{
4732
  gnat_type = Underlying_Type (gnat_type);
4733
 
4734
  *align_clause = Present (Alignment_Clause (gnat_type));
4735
 
4736
  if (Is_Array_Type (gnat_type))
4737
    {
4738
      gnat_type = Underlying_Type (Component_Type (gnat_type));
4739
      if (Present (Alignment_Clause (gnat_type)))
4740
        *align_clause = true;
4741
    }
4742
 
4743
  if (!Is_Floating_Point_Type (gnat_type))
4744
    return false;
4745
 
4746
  if (UI_To_Int (Esize (gnat_type)) != 64)
4747
    return false;
4748
 
4749
  return true;
4750
}
4751
 
4752
/* Return true if GNAT_TYPE is a "double" or larger scalar type, i.e. whose
4753
   size is greater or equal to 64 bits, or an array of such a type.  Set
4754
   ALIGN_CLAUSE according to the presence of an alignment clause on the
4755
   type or, if it is an array, on the component type.  */
4756
 
4757
bool
4758
is_double_scalar_or_array (Entity_Id gnat_type, bool *align_clause)
4759
{
4760
  gnat_type = Underlying_Type (gnat_type);
4761
 
4762
  *align_clause = Present (Alignment_Clause (gnat_type));
4763
 
4764
  if (Is_Array_Type (gnat_type))
4765
    {
4766
      gnat_type = Underlying_Type (Component_Type (gnat_type));
4767
      if (Present (Alignment_Clause (gnat_type)))
4768
        *align_clause = true;
4769
    }
4770
 
4771
  if (!Is_Scalar_Type (gnat_type))
4772
    return false;
4773
 
4774
  if (UI_To_Int (Esize (gnat_type)) < 64)
4775
    return false;
4776
 
4777
  return true;
4778
}
4779
 
4780
/* Return true if GNU_TYPE is suitable as the type of a non-aliased
4781
   component of an aggregate type.  */
4782
 
4783
bool
4784
type_for_nonaliased_component_p (tree gnu_type)
4785
{
4786
  /* If the type is passed by reference, we may have pointers to the
4787
     component so it cannot be made non-aliased. */
4788
  if (must_pass_by_ref (gnu_type) || default_pass_by_ref (gnu_type))
4789
    return false;
4790
 
4791
  /* We used to say that any component of aggregate type is aliased
4792
     because the front-end may take 'Reference of it.  The front-end
4793
     has been enhanced in the meantime so as to use a renaming instead
4794
     in most cases, but the back-end can probably take the address of
4795
     such a component too so we go for the conservative stance.
4796
 
4797
     For instance, we might need the address of any array type, even
4798
     if normally passed by copy, to construct a fat pointer if the
4799
     component is used as an actual for an unconstrained formal.
4800
 
4801
     Likewise for record types: even if a specific record subtype is
4802
     passed by copy, the parent type might be passed by ref (e.g. if
4803
     it's of variable size) and we might take the address of a child
4804
     component to pass to a parent formal.  We have no way to check
4805
     for such conditions here.  */
4806
  if (AGGREGATE_TYPE_P (gnu_type))
4807
    return false;
4808
 
4809
  return true;
4810
}
4811
 
4812
/* Perform final processing on global variables.  */
4813
 
4814
void
4815
gnat_write_global_declarations (void)
4816
{
4817
  /* Proceed to optimize and emit assembly.
4818
     FIXME: shouldn't be the front end's responsibility to call this.  */
4819
  cgraph_finalize_compilation_unit ();
4820
 
4821
  /* Emit debug info for all global declarations.  */
4822
  emit_debug_global_declarations (VEC_address (tree, global_decls),
4823
                                  VEC_length (tree, global_decls));
4824
}
4825
 
4826
/* ************************************************************************
4827
 * *                           GCC builtins support                       *
4828
 * ************************************************************************ */
4829
 
4830
/* The general scheme is fairly simple:
4831
 
4832
   For each builtin function/type to be declared, gnat_install_builtins calls
4833
   internal facilities which eventually get to gnat_pushdecl, which in turn
4834
   tracks the so-declared builtin function decls in the 'builtin_decls' global
4835
   data structure.  When an Intrinsic subprogram declaration is processed, we
4836
   search this global data structure to retrieve the associated BUILT_IN DECL
4837
   node.  */
4838
 
4839
/* Search the chain of currently available builtin declarations for a node
4840
   corresponding to function NAME (an IDENTIFIER_NODE).  Return the first node
4841
   found, if any, or NULL_TREE otherwise.  */
4842
tree
4843
builtin_decl_for (tree name)
4844
{
4845
  unsigned i;
4846
  tree decl;
4847
 
4848
  for (i = 0; VEC_iterate(tree, builtin_decls, i, decl); i++)
4849
    if (DECL_NAME (decl) == name)
4850
      return decl;
4851
 
4852
  return NULL_TREE;
4853
}
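/* Editorial sketch, not part of the original source: a caller processing an
   Intrinsic subprogram might retrieve the matching builtin decl by name as
   below.  The builtin name is only an example and the helper is hypothetical
   (kept under #if 0); builtin_decl_for returns NULL_TREE when no builtin of
   that name has been installed.  */
#if 0
static tree
example_lookup_builtin_memcpy (void)
{
  return builtin_decl_for (get_identifier ("__builtin_memcpy"));
}
#endif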
4854
 
4855
/* The code below eventually exposes gnat_install_builtins, which declares
4856
   the builtin types and functions we might need, either internally or as
4857
   user accessible facilities.
4858
 
4859
   ??? This is a first implementation shot, still in rough shape.  It is
4860
   heavily inspired from the "C" family implementation, with chunks copied
4861
   verbatim from there.
4862
 
4863
   Two obvious TODO candidates are
4864
   o Use a more efficient name/decl mapping scheme
4865
   o Devise a middle-end infrastructure to avoid having to copy
4866
     pieces between front-ends.  */
4867
 
4868
/* ----------------------------------------------------------------------- *
4869
 *                         BUILTIN ELEMENTARY TYPES                        *
4870
 * ----------------------------------------------------------------------- */
4871
 
4872
/* Standard data types to be used in builtin argument declarations.  */
4873
 
4874
enum c_tree_index
4875
{
4876
    CTI_SIGNED_SIZE_TYPE, /* For format checking only.  */
4877
    CTI_STRING_TYPE,
4878
    CTI_CONST_STRING_TYPE,
4879
 
4880
    CTI_MAX
4881
};
4882
 
4883
static tree c_global_trees[CTI_MAX];
4884
 
4885
#define signed_size_type_node   c_global_trees[CTI_SIGNED_SIZE_TYPE]
4886
#define string_type_node        c_global_trees[CTI_STRING_TYPE]
4887
#define const_string_type_node  c_global_trees[CTI_CONST_STRING_TYPE]
4888
 
4889
/* ??? In addition to some attribute handlers, we currently don't support a
4890
   (small) number of builtin-types, which in turn inhibits support for a
4891
   number of builtin functions.  */
4892
#define wint_type_node    void_type_node
4893
#define intmax_type_node  void_type_node
4894
#define uintmax_type_node void_type_node
4895
 
4896
/* Build the void_list_node (void_type_node having been created).  */
4897
 
4898
static tree
4899
build_void_list_node (void)
4900
{
4901
  tree t = build_tree_list (NULL_TREE, void_type_node);
4902
  return t;
4903
}
4904
 
4905
/* Used to help initialize the builtin-types.def table.  When a type of
4906
   the correct size doesn't exist, use error_mark_node instead of NULL.
4907
   The latter results in segfaults even when a decl using the type doesn't
4908
   get invoked.  */
4909
 
4910
static tree
4911
builtin_type_for_size (int size, bool unsignedp)
4912
{
4913
  tree type = lang_hooks.types.type_for_size (size, unsignedp);
4914
  return type ? type : error_mark_node;
4915
}
4916
 
4917
/* Build/push the elementary type decls that builtin functions/types
4918
   will need.  */
4919
 
4920
static void
4921
install_builtin_elementary_types (void)
4922
{
4923
  signed_size_type_node = size_type_node;
4924
  pid_type_node = integer_type_node;
4925
  void_list_node = build_void_list_node ();
4926
 
4927
  string_type_node = build_pointer_type (char_type_node);
4928
  const_string_type_node
4929
    = build_pointer_type (build_qualified_type
4930
                          (char_type_node, TYPE_QUAL_CONST));
4931
}
4932
 
4933
/* ----------------------------------------------------------------------- *
4934
 *                          BUILTIN FUNCTION TYPES                         *
4935
 * ----------------------------------------------------------------------- */
4936
 
4937
/* Now, builtin function types per se.  */
4938
 
4939
enum c_builtin_type
4940
{
4941
#define DEF_PRIMITIVE_TYPE(NAME, VALUE) NAME,
4942
#define DEF_FUNCTION_TYPE_0(NAME, RETURN) NAME,
4943
#define DEF_FUNCTION_TYPE_1(NAME, RETURN, ARG1) NAME,
4944
#define DEF_FUNCTION_TYPE_2(NAME, RETURN, ARG1, ARG2) NAME,
4945
#define DEF_FUNCTION_TYPE_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
4946
#define DEF_FUNCTION_TYPE_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
4947
#define DEF_FUNCTION_TYPE_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) NAME,
4948
#define DEF_FUNCTION_TYPE_6(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6) NAME,
4949
#define DEF_FUNCTION_TYPE_7(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7) NAME,
4950
#define DEF_FUNCTION_TYPE_VAR_0(NAME, RETURN) NAME,
4951
#define DEF_FUNCTION_TYPE_VAR_1(NAME, RETURN, ARG1) NAME,
4952
#define DEF_FUNCTION_TYPE_VAR_2(NAME, RETURN, ARG1, ARG2) NAME,
4953
#define DEF_FUNCTION_TYPE_VAR_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
4954
#define DEF_FUNCTION_TYPE_VAR_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
4955
#define DEF_FUNCTION_TYPE_VAR_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG6) \
4956
  NAME,
4957
#define DEF_POINTER_TYPE(NAME, TYPE) NAME,
4958
#include "builtin-types.def"
4959
#undef DEF_PRIMITIVE_TYPE
4960
#undef DEF_FUNCTION_TYPE_0
4961
#undef DEF_FUNCTION_TYPE_1
4962
#undef DEF_FUNCTION_TYPE_2
4963
#undef DEF_FUNCTION_TYPE_3
4964
#undef DEF_FUNCTION_TYPE_4
4965
#undef DEF_FUNCTION_TYPE_5
4966
#undef DEF_FUNCTION_TYPE_6
4967
#undef DEF_FUNCTION_TYPE_7
4968
#undef DEF_FUNCTION_TYPE_VAR_0
4969
#undef DEF_FUNCTION_TYPE_VAR_1
4970
#undef DEF_FUNCTION_TYPE_VAR_2
4971
#undef DEF_FUNCTION_TYPE_VAR_3
4972
#undef DEF_FUNCTION_TYPE_VAR_4
4973
#undef DEF_FUNCTION_TYPE_VAR_5
4974
#undef DEF_POINTER_TYPE
4975
  BT_LAST
4976
};
4977
 
4978
typedef enum c_builtin_type builtin_type;
4979
 
4980
/* A temporary array used in communication with def_fn_type.  */
4981
static GTY(()) tree builtin_types[(int) BT_LAST + 1];
4982
 
4983
/* A helper function for install_builtin_types.  Build function type
4984
   for DEF with return type RET and N arguments.  If VAR is true, then the
4985
   function should be variadic after those N arguments.
4986
 
4987
   Takes special care not to ICE if any of the types involved are
4988
   error_mark_node, which indicates that said type is not in fact available
4989
   (see builtin_type_for_size), in which case the function type as a whole
4990
   should be error_mark_node.  */
4991
 
4992
static void
4993
def_fn_type (builtin_type def, builtin_type ret, bool var, int n, ...)
4994
{
4995
  tree args = NULL, t;
4996
  va_list list;
4997
  int i;
4998
 
4999
  va_start (list, n);
5000
  for (i = 0; i < n; ++i)
5001
    {
5002
      builtin_type a = (builtin_type) va_arg (list, int);
5003
      t = builtin_types[a];
5004
      if (t == error_mark_node)
5005
        goto egress;
5006
      args = tree_cons (NULL_TREE, t, args);
5007
    }
5008
  va_end (list);
5009
 
5010
  args = nreverse (args);
5011
  if (!var)
5012
    args = chainon (args, void_list_node);
5013
 
5014
  t = builtin_types[ret];
5015
  if (t == error_mark_node)
5016
    goto egress;
5017
  t = build_function_type (t, args);
5018
 
5019
 egress:
5020
  builtin_types[def] = t;
5021
}
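/* Editorial illustration, not part of the original source: a call such as

     def_fn_type (BT_FN_INT_INT, BT_INT, false, 1, BT_INT);

   stores the function type "int (int)" in builtin_types[BT_FN_INT_INT],
   whereas passing true for VAR would leave the argument list open and yield
   the variadic "int (int, ...)".  The BT_* names follow the naming style of
   builtin-types.def and are shown here only as an illustration.  */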
5022
 
5023
/* Build the builtin function types and install them in the builtin_types
5024
   array for later use in builtin function decls.  */
5025
 
5026
static void
5027
install_builtin_function_types (void)
5028
{
5029
  tree va_list_ref_type_node;
5030
  tree va_list_arg_type_node;
5031
 
5032
  if (TREE_CODE (va_list_type_node) == ARRAY_TYPE)
5033
    {
5034
      va_list_arg_type_node = va_list_ref_type_node =
5035
        build_pointer_type (TREE_TYPE (va_list_type_node));
5036
    }
5037
  else
5038
    {
5039
      va_list_arg_type_node = va_list_type_node;
5040
      va_list_ref_type_node = build_reference_type (va_list_type_node);
5041
    }
5042
 
5043
#define DEF_PRIMITIVE_TYPE(ENUM, VALUE) \
5044
  builtin_types[ENUM] = VALUE;
5045
#define DEF_FUNCTION_TYPE_0(ENUM, RETURN) \
5046
  def_fn_type (ENUM, RETURN, 0, 0);
5047
#define DEF_FUNCTION_TYPE_1(ENUM, RETURN, ARG1) \
5048
  def_fn_type (ENUM, RETURN, 0, 1, ARG1);
5049
#define DEF_FUNCTION_TYPE_2(ENUM, RETURN, ARG1, ARG2) \
5050
  def_fn_type (ENUM, RETURN, 0, 2, ARG1, ARG2);
5051
#define DEF_FUNCTION_TYPE_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
5052
  def_fn_type (ENUM, RETURN, 0, 3, ARG1, ARG2, ARG3);
5053
#define DEF_FUNCTION_TYPE_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
5054
  def_fn_type (ENUM, RETURN, 0, 4, ARG1, ARG2, ARG3, ARG4);
5055
#define DEF_FUNCTION_TYPE_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
5056
  def_fn_type (ENUM, RETURN, 0, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
5057
#define DEF_FUNCTION_TYPE_6(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5058
                            ARG6)                                       \
5059
  def_fn_type (ENUM, RETURN, 0, 6, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6);
5060
#define DEF_FUNCTION_TYPE_7(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5061
                            ARG6, ARG7)                                 \
5062
  def_fn_type (ENUM, RETURN, 0, 7, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7);
5063
#define DEF_FUNCTION_TYPE_VAR_0(ENUM, RETURN) \
5064
  def_fn_type (ENUM, RETURN, 1, 0);
5065
#define DEF_FUNCTION_TYPE_VAR_1(ENUM, RETURN, ARG1) \
5066
  def_fn_type (ENUM, RETURN, 1, 1, ARG1);
5067
#define DEF_FUNCTION_TYPE_VAR_2(ENUM, RETURN, ARG1, ARG2) \
5068
  def_fn_type (ENUM, RETURN, 1, 2, ARG1, ARG2);
5069
#define DEF_FUNCTION_TYPE_VAR_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
5070
  def_fn_type (ENUM, RETURN, 1, 3, ARG1, ARG2, ARG3);
5071
#define DEF_FUNCTION_TYPE_VAR_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
5072
  def_fn_type (ENUM, RETURN, 1, 4, ARG1, ARG2, ARG3, ARG4);
5073
#define DEF_FUNCTION_TYPE_VAR_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
5074
  def_fn_type (ENUM, RETURN, 1, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
5075
#define DEF_POINTER_TYPE(ENUM, TYPE) \
5076
  builtin_types[(int) ENUM] = build_pointer_type (builtin_types[(int) TYPE]);
5077
 
5078
#include "builtin-types.def"
5079
 
5080
#undef DEF_PRIMITIVE_TYPE
5081
#undef DEF_FUNCTION_TYPE_1
5082
#undef DEF_FUNCTION_TYPE_2
5083
#undef DEF_FUNCTION_TYPE_3
5084
#undef DEF_FUNCTION_TYPE_4
5085
#undef DEF_FUNCTION_TYPE_5
5086
#undef DEF_FUNCTION_TYPE_6
5087
#undef DEF_FUNCTION_TYPE_VAR_0
5088
#undef DEF_FUNCTION_TYPE_VAR_1
5089
#undef DEF_FUNCTION_TYPE_VAR_2
5090
#undef DEF_FUNCTION_TYPE_VAR_3
5091
#undef DEF_FUNCTION_TYPE_VAR_4
5092
#undef DEF_FUNCTION_TYPE_VAR_5
5093
#undef DEF_POINTER_TYPE
5094
  builtin_types[(int) BT_LAST] = NULL_TREE;
5095
}
5096
 
5097
/* ----------------------------------------------------------------------- *
5098
 *                            BUILTIN ATTRIBUTES                           *
5099
 * ----------------------------------------------------------------------- */
5100
 
5101
enum built_in_attribute
5102
{
5103
#define DEF_ATTR_NULL_TREE(ENUM) ENUM,
5104
#define DEF_ATTR_INT(ENUM, VALUE) ENUM,
5105
#define DEF_ATTR_IDENT(ENUM, STRING) ENUM,
5106
#define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN) ENUM,
5107
#include "builtin-attrs.def"
5108
#undef DEF_ATTR_NULL_TREE
5109
#undef DEF_ATTR_INT
5110
#undef DEF_ATTR_IDENT
5111
#undef DEF_ATTR_TREE_LIST
5112
  ATTR_LAST
5113
};
5114
 
5115
static GTY(()) tree built_in_attributes[(int) ATTR_LAST];
5116
 
5117
static void
5118
install_builtin_attributes (void)
5119
{
5120
  /* Fill in the built_in_attributes array.  */
5121
#define DEF_ATTR_NULL_TREE(ENUM)                                \
5122
  built_in_attributes[(int) ENUM] = NULL_TREE;
5123
#define DEF_ATTR_INT(ENUM, VALUE)                               \
5124
  built_in_attributes[(int) ENUM] = build_int_cst (NULL_TREE, VALUE);
5125
#define DEF_ATTR_IDENT(ENUM, STRING)                            \
5126
  built_in_attributes[(int) ENUM] = get_identifier (STRING);
5127
#define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN) \
5128
  built_in_attributes[(int) ENUM]                       \
5129
    = tree_cons (built_in_attributes[(int) PURPOSE],    \
5130
                 built_in_attributes[(int) VALUE],      \
5131
                 built_in_attributes[(int) CHAIN]);
5132
#include "builtin-attrs.def"
5133
#undef DEF_ATTR_NULL_TREE
5134
#undef DEF_ATTR_INT
5135
#undef DEF_ATTR_IDENT
5136
#undef DEF_ATTR_TREE_LIST
5137
}
5138
 
5139
/* Handle a "const" attribute; arguments as in
5140
   struct attribute_spec.handler.  */
5141
 
5142
static tree
5143
handle_const_attribute (tree *node, tree ARG_UNUSED (name),
5144
                        tree ARG_UNUSED (args), int ARG_UNUSED (flags),
5145
                        bool *no_add_attrs)
5146
{
5147
  if (TREE_CODE (*node) == FUNCTION_DECL)
5148
    TREE_READONLY (*node) = 1;
5149
  else
5150
    *no_add_attrs = true;
5151
 
5152
  return NULL_TREE;
5153
}
5154
 
5155
/* Handle a "nothrow" attribute; arguments as in
5156
   struct attribute_spec.handler.  */
5157
 
5158
static tree
5159
handle_nothrow_attribute (tree *node, tree ARG_UNUSED (name),
5160
                          tree ARG_UNUSED (args), int ARG_UNUSED (flags),
5161
                          bool *no_add_attrs)
5162
{
5163
  if (TREE_CODE (*node) == FUNCTION_DECL)
5164
    TREE_NOTHROW (*node) = 1;
5165
  else
5166
    *no_add_attrs = true;
5167
 
5168
  return NULL_TREE;
5169
}
5170
 
5171
/* Handle a "pure" attribute; arguments as in
5172
   struct attribute_spec.handler.  */
5173
 
5174
static tree
5175
handle_pure_attribute (tree *node, tree name, tree ARG_UNUSED (args),
5176
                       int ARG_UNUSED (flags), bool *no_add_attrs)
5177
{
5178
  if (TREE_CODE (*node) == FUNCTION_DECL)
5179
    DECL_PURE_P (*node) = 1;
5180
  /* ??? TODO: Support types.  */
5181
  else
5182
    {
5183
      warning (OPT_Wattributes, "%qs attribute ignored",
5184
               IDENTIFIER_POINTER (name));
5185
      *no_add_attrs = true;
5186
    }
5187
 
5188
  return NULL_TREE;
5189
}
5190
 
5191
/* Handle a "no vops" attribute; arguments as in
5192
   struct attribute_spec.handler.  */
5193
 
5194
static tree
5195
handle_novops_attribute (tree *node, tree ARG_UNUSED (name),
5196
                         tree ARG_UNUSED (args), int ARG_UNUSED (flags),
5197
                         bool *ARG_UNUSED (no_add_attrs))
5198
{
5199
  gcc_assert (TREE_CODE (*node) == FUNCTION_DECL);
5200
  DECL_IS_NOVOPS (*node) = 1;
5201
  return NULL_TREE;
5202
}
5203
 
5204
/* Helper for nonnull attribute handling; fetch the operand number
5205
   from the attribute argument list.  */
5206
 
5207
static bool
5208
get_nonnull_operand (tree arg_num_expr, unsigned HOST_WIDE_INT *valp)
5209
{
5210
  /* Verify the arg number is a constant.  */
5211
  if (TREE_CODE (arg_num_expr) != INTEGER_CST
5212
      || TREE_INT_CST_HIGH (arg_num_expr) != 0)
5213
    return false;
5214
 
5215
  *valp = TREE_INT_CST_LOW (arg_num_expr);
5216
  return true;
5217
}
5218
 
5219
/* Handle the "nonnull" attribute.  */
5220
static tree
5221
handle_nonnull_attribute (tree *node, tree ARG_UNUSED (name),
5222
                          tree args, int ARG_UNUSED (flags),
5223
                          bool *no_add_attrs)
5224
{
5225
  tree type = *node;
5226
  unsigned HOST_WIDE_INT attr_arg_num;
5227
 
5228
  /* If no arguments are specified, all pointer arguments should be
5229
     non-null.  Verify a full prototype is given so that the arguments
5230
     will have the correct types when we actually check them later.  */
5231
  if (!args)
5232
    {
5233
      if (!TYPE_ARG_TYPES (type))
5234
        {
5235
          error ("nonnull attribute without arguments on a non-prototype");
5236
          *no_add_attrs = true;
5237
        }
5238
      return NULL_TREE;
5239
    }
5240
 
5241
  /* Argument list specified.  Verify that each argument number references
5242
     a pointer argument.  */
5243
  for (attr_arg_num = 1; args; args = TREE_CHAIN (args))
5244
    {
5245
      tree argument;
5246
      unsigned HOST_WIDE_INT arg_num = 0, ck_num;
5247
 
5248
      if (!get_nonnull_operand (TREE_VALUE (args), &arg_num))
5249
        {
5250
          error ("nonnull argument has invalid operand number (argument %lu)",
5251
                 (unsigned long) attr_arg_num);
5252
          *no_add_attrs = true;
5253
          return NULL_TREE;
5254
        }
5255
 
5256
      argument = TYPE_ARG_TYPES (type);
5257
      if (argument)
5258
        {
5259
          for (ck_num = 1; ; ck_num++)
5260
            {
5261
              if (!argument || ck_num == arg_num)
5262
                break;
5263
              argument = TREE_CHAIN (argument);
5264
            }
5265
 
5266
          if (!argument
5267
              || TREE_CODE (TREE_VALUE (argument)) == VOID_TYPE)
5268
            {
5269
              error ("nonnull argument with out-of-range operand number (argument %lu, operand %lu)",
5270
                     (unsigned long) attr_arg_num, (unsigned long) arg_num);
5271
              *no_add_attrs = true;
5272
              return NULL_TREE;
5273
            }
5274
 
5275
          if (TREE_CODE (TREE_VALUE (argument)) != POINTER_TYPE)
5276
            {
5277
              error ("nonnull argument references non-pointer operand (argument %lu, operand %lu)",
5278
                   (unsigned long) attr_arg_num, (unsigned long) arg_num);
5279
              *no_add_attrs = true;
5280
              return NULL_TREE;
5281
            }
5282
        }
5283
    }
5284
 
5285
  return NULL_TREE;
5286
}
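/* Editorial illustration, not part of the original source: the checks above
   accept a declaration such as

     extern char *my_copy (char *dest, const char *src)
       __attribute__ ((nonnull (1, 2)));

   where each listed operand number must name a pointer parameter of a full
   prototype.  The function name is hypothetical.  */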

/* Handle a "sentinel" attribute.  */

static tree
handle_sentinel_attribute (tree *node, tree name, tree args,
                           int ARG_UNUSED (flags), bool *no_add_attrs)
{
  tree params = TYPE_ARG_TYPES (*node);

  if (!params)
    {
      warning (OPT_Wattributes,
               "%qs attribute requires prototypes with named arguments",
               IDENTIFIER_POINTER (name));
      *no_add_attrs = true;
    }
  else
    {
      while (TREE_CHAIN (params))
        params = TREE_CHAIN (params);

      if (VOID_TYPE_P (TREE_VALUE (params)))
        {
          warning (OPT_Wattributes,
                   "%qs attribute only applies to variadic functions",
                   IDENTIFIER_POINTER (name));
          *no_add_attrs = true;
        }
    }

  if (args)
    {
      tree position = TREE_VALUE (args);

      if (TREE_CODE (position) != INTEGER_CST)
        {
          warning (0, "requested position is not an integer constant");
          *no_add_attrs = true;
        }
      else
        {
          if (tree_int_cst_lt (position, integer_zero_node))
            {
              warning (0, "requested position is less than zero");
              *no_add_attrs = true;
            }
        }
    }

  return NULL_TREE;
}
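
/* For reference, a hedged usage sketch (hypothetical C declaration):

     extern void exec_args (const char *path, ...)
       __attribute__ ((sentinel));

   The attribute is only meaningful on variadic functions; the optional
   integer operand, when present, must be a non-negative constant giving
   the position of the expected NULL sentinel, counted from the end of
   the argument list.  */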

/* Handle a "noreturn" attribute; arguments as in
   struct attribute_spec.handler.  */

static tree
handle_noreturn_attribute (tree *node, tree name, tree ARG_UNUSED (args),
                           int ARG_UNUSED (flags), bool *no_add_attrs)
{
  tree type = TREE_TYPE (*node);

  /* See FIXME comment in c_common_attribute_table.  */
  if (TREE_CODE (*node) == FUNCTION_DECL)
    TREE_THIS_VOLATILE (*node) = 1;
  else if (TREE_CODE (type) == POINTER_TYPE
           && TREE_CODE (TREE_TYPE (type)) == FUNCTION_TYPE)
    TREE_TYPE (*node)
      = build_pointer_type
        (build_type_variant (TREE_TYPE (type),
                             TYPE_READONLY (TREE_TYPE (type)), 1));
  else
    {
      warning (OPT_Wattributes, "%qs attribute ignored",
               IDENTIFIER_POINTER (name));
      *no_add_attrs = true;
    }

  return NULL_TREE;
}
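
/* For reference, a hedged usage sketch (hypothetical C declaration):

     extern void fatal_error (const char *msg) __attribute__ ((noreturn));

   On a FUNCTION_DECL this handler sets TREE_THIS_VOLATILE, which is how
   the middle end records "does not return"; on a pointer to function it
   rebuilds the pointed-to function type with the volatile qualifier.  */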

/* Handle a "malloc" attribute; arguments as in
   struct attribute_spec.handler.  */

static tree
handle_malloc_attribute (tree *node, tree name, tree ARG_UNUSED (args),
                         int ARG_UNUSED (flags), bool *no_add_attrs)
{
  if (TREE_CODE (*node) == FUNCTION_DECL
      && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (*node))))
    DECL_IS_MALLOC (*node) = 1;
  else
    {
      warning (OPT_Wattributes, "%qs attribute ignored",
               IDENTIFIER_POINTER (name));
      *no_add_attrs = true;
    }

  return NULL_TREE;
}
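
/* For reference, a hedged usage sketch (hypothetical C declaration):

     extern void *xmalloc (__SIZE_TYPE__ size) __attribute__ ((malloc));

   The attribute is only accepted on functions returning a pointer; it
   lets the optimizers assume that the returned storage does not alias
   any other live object.  */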

/* Fake handler for attributes we don't properly support.  */

tree
fake_attribute_handler (tree * ARG_UNUSED (node),
                        tree ARG_UNUSED (name),
                        tree ARG_UNUSED (args),
                        int  ARG_UNUSED (flags),
                        bool * ARG_UNUSED (no_add_attrs))
{
  return NULL_TREE;
}

/* Handle a "type_generic" attribute.  */

static tree
handle_type_generic_attribute (tree *node, tree ARG_UNUSED (name),
                               tree ARG_UNUSED (args), int ARG_UNUSED (flags),
                               bool * ARG_UNUSED (no_add_attrs))
{
  tree params;

  /* Ensure we have a function type.  */
  gcc_assert (TREE_CODE (*node) == FUNCTION_TYPE);

  params = TYPE_ARG_TYPES (*node);
  while (params && ! VOID_TYPE_P (TREE_VALUE (params)))
    params = TREE_CHAIN (params);

  /* Ensure we have a variadic function.  */
  gcc_assert (!params);

  return NULL_TREE;
}

/* Handle a "vector_size" attribute; arguments as in
   struct attribute_spec.handler.  */

static tree
handle_vector_size_attribute (tree *node, tree name, tree args,
                              int ARG_UNUSED (flags),
                              bool *no_add_attrs)
{
  unsigned HOST_WIDE_INT vecsize, nunits;
  enum machine_mode orig_mode;
  tree type = *node, new_type, size;

  *no_add_attrs = true;

  size = TREE_VALUE (args);

  if (!host_integerp (size, 1))
    {
      warning (OPT_Wattributes, "%qs attribute ignored",
               IDENTIFIER_POINTER (name));
      return NULL_TREE;
    }

  /* Get the vector size (in bytes).  */
  vecsize = tree_low_cst (size, 1);

  /* We need to provide for vector pointers, vector arrays, and
     functions returning vectors.  For example:

       __attribute__((vector_size(16))) short *foo;

     In this case, the mode is SI, but the type being modified is
     HI, so we need to look further.  */

  while (POINTER_TYPE_P (type)
         || TREE_CODE (type) == FUNCTION_TYPE
         || TREE_CODE (type) == METHOD_TYPE
         || TREE_CODE (type) == ARRAY_TYPE
         || TREE_CODE (type) == OFFSET_TYPE)
    type = TREE_TYPE (type);

  /* Get the mode of the type being modified.  */
  orig_mode = TYPE_MODE (type);

  if ((!INTEGRAL_TYPE_P (type)
       && !SCALAR_FLOAT_TYPE_P (type)
       && !FIXED_POINT_TYPE_P (type))
      || (!SCALAR_FLOAT_MODE_P (orig_mode)
          && GET_MODE_CLASS (orig_mode) != MODE_INT
          && !ALL_SCALAR_FIXED_POINT_MODE_P (orig_mode))
      || !host_integerp (TYPE_SIZE_UNIT (type), 1)
      || TREE_CODE (type) == BOOLEAN_TYPE)
    {
      error ("invalid vector type for attribute %qs",
             IDENTIFIER_POINTER (name));
      return NULL_TREE;
    }

  if (vecsize % tree_low_cst (TYPE_SIZE_UNIT (type), 1))
    {
      error ("vector size not an integral multiple of component size");
      return NULL;
    }

  if (vecsize == 0)
    {
      error ("zero vector size");
      return NULL;
    }

  /* Calculate how many units fit in the vector.  */
  nunits = vecsize / tree_low_cst (TYPE_SIZE_UNIT (type), 1);
  if (nunits & (nunits - 1))
    {
      error ("number of components of the vector not a power of two");
      return NULL_TREE;
    }

  new_type = build_vector_type (type, nunits);

  /* Build back pointers if needed.  */
  *node = lang_hooks.types.reconstruct_complex_type (*node, new_type);

  return NULL_TREE;
}
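
/* For reference, a hedged usage sketch (hypothetical C declaration):

     typedef int v4si __attribute__ ((vector_size (16)));

   On a target where int is 4 bytes wide this yields a vector of four
   ints: 16 is an integral multiple of the component size and the number
   of components, 4, is a power of two, so the checks above pass.  */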

/* Handle a "vector_type" attribute; arguments as in
   struct attribute_spec.handler.  */

static tree
handle_vector_type_attribute (tree *node, tree name, tree ARG_UNUSED (args),
                              int ARG_UNUSED (flags),
                              bool *no_add_attrs)
{
  /* Vector representative type and size.  */
  tree rep_type = *node;
  tree rep_size = TYPE_SIZE_UNIT (rep_type);
  tree rep_name;

  /* Vector size in bytes and number of units.  */
  unsigned HOST_WIDE_INT vec_bytes, vec_units;

  /* Vector element type and mode.  */
  tree elem_type;
  enum machine_mode elem_mode;

  *no_add_attrs = true;

  /* Get the representative array type, possibly nested within a
     padding record e.g. for alignment purposes.  */

  if (TYPE_IS_PADDING_P (rep_type))
    rep_type = TREE_TYPE (TYPE_FIELDS (rep_type));

  if (TREE_CODE (rep_type) != ARRAY_TYPE)
    {
      error ("attribute %qs applies to array types only",
             IDENTIFIER_POINTER (name));
      return NULL_TREE;
    }

  /* Silently punt on variable sizes.  We can't make vector types for them,
     need to ignore them on front-end generated subtypes of unconstrained
     bases, and this attribute is for binding implementors, not end-users, so
     we should never get there from legitimate explicit uses.  */

  if (!host_integerp (rep_size, 1))
    return NULL_TREE;

  /* Get the element type/mode and check this is something we know
     how to make vectors of.  */

  elem_type = TREE_TYPE (rep_type);
  elem_mode = TYPE_MODE (elem_type);

  if ((!INTEGRAL_TYPE_P (elem_type)
       && !SCALAR_FLOAT_TYPE_P (elem_type)
       && !FIXED_POINT_TYPE_P (elem_type))
      || (!SCALAR_FLOAT_MODE_P (elem_mode)
          && GET_MODE_CLASS (elem_mode) != MODE_INT
          && !ALL_SCALAR_FIXED_POINT_MODE_P (elem_mode))
      || !host_integerp (TYPE_SIZE_UNIT (elem_type), 1))
    {
      error ("invalid element type for attribute %qs",
             IDENTIFIER_POINTER (name));
      return NULL_TREE;
    }

  /* Sanity check the vector size and element type consistency.  */

  vec_bytes = tree_low_cst (rep_size, 1);

  if (vec_bytes % tree_low_cst (TYPE_SIZE_UNIT (elem_type), 1))
    {
      error ("vector size not an integral multiple of component size");
      return NULL;
    }

  if (vec_bytes == 0)
    {
      error ("zero vector size");
      return NULL;
    }

  vec_units = vec_bytes / tree_low_cst (TYPE_SIZE_UNIT (elem_type), 1);
  if (vec_units & (vec_units - 1))
    {
      error ("number of components of the vector not a power of two");
      return NULL_TREE;
    }

  /* Build the vector type and replace.  */

  *node = build_vector_type (elem_type, vec_units);
  rep_name = TYPE_NAME (rep_type);
  if (TREE_CODE (rep_name) == TYPE_DECL)
    rep_name = DECL_NAME (rep_name);
  TYPE_NAME (*node) = rep_name;
  TYPE_REPRESENTATIVE_ARRAY (*node) = rep_type;

  return NULL_TREE;
}
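
/* For reference, a hedged sketch of how a binding implementor might reach
   this handler from the Ada side (hypothetical declarations, assuming the
   GNAT Machine_Attribute pragma):

     type Int_Vector is array (1 .. 4) of Integer;
     pragma Machine_Attribute (Int_Vector, "vector_type");

   The constrained array type is then replaced by a vector type with the
   same element type and number of components, and the original array is
   recorded as its representative.  */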

/* ----------------------------------------------------------------------- *
 *                              BUILTIN FUNCTIONS                          *
 * ----------------------------------------------------------------------- */

/* Worker for DEF_BUILTIN.  Possibly define a builtin function with one or two
   names.  Does not declare a non-__builtin_ function if flag_no_builtin, or
   if nonansi_p and flag_no_nonansi_builtin.  */

static void
def_builtin_1 (enum built_in_function fncode,
               const char *name,
               enum built_in_class fnclass,
               tree fntype, tree libtype,
               bool both_p, bool fallback_p,
               bool nonansi_p ATTRIBUTE_UNUSED,
               tree fnattrs, bool implicit_p)
{
  tree decl;
  const char *libname;

  /* Preserve an already installed decl.  It most likely was set up in advance
     (e.g. as part of the internal builtins) for specific reasons.  */
  if (built_in_decls[(int) fncode] != NULL_TREE)
    return;

  gcc_assert ((!both_p && !fallback_p)
              || !strncmp (name, "__builtin_",
                           strlen ("__builtin_")));

  libname = name + strlen ("__builtin_");
  decl = add_builtin_function (name, fntype, fncode, fnclass,
                               (fallback_p ? libname : NULL),
                               fnattrs);
  if (both_p)
    /* ??? This is normally further controlled by command-line options
       like -fno-builtin, but we don't have them for Ada.  */
    add_builtin_function (libname, libtype, fncode, fnclass,
                          NULL, fnattrs);

  built_in_decls[(int) fncode] = decl;
  if (implicit_p)
    implicit_built_in_decls[(int) fncode] = decl;
}
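
/* For illustration (a hedged sketch, not an actual builtins.def entry):
   if def_builtin_1 is called with NAME "__builtin_memcpy" and both_p set,
   libname is computed as "memcpy", both names get declared, and the
   __builtin_ variant is recorded in built_in_decls.  */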

static int flag_isoc94 = 0;
static int flag_isoc99 = 0;

/* Install what the common builtins.def offers.  */

static void
install_builtin_functions (void)
{
#define DEF_BUILTIN(ENUM, NAME, CLASS, TYPE, LIBTYPE, BOTH_P, FALLBACK_P, \
                    NONANSI_P, ATTRS, IMPLICIT, COND)                   \
  if (NAME && COND)                                                     \
    def_builtin_1 (ENUM, NAME, CLASS,                                   \
                   builtin_types[(int) TYPE],                           \
                   builtin_types[(int) LIBTYPE],                        \
                   BOTH_P, FALLBACK_P, NONANSI_P,                       \
                   built_in_attributes[(int) ATTRS], IMPLICIT);
#include "builtins.def"
#undef DEF_BUILTIN
}

/* ----------------------------------------------------------------------- *
 *                              BUILTIN FUNCTIONS                          *
 * ----------------------------------------------------------------------- */

/* Install the builtin functions we might need.  */

void
gnat_install_builtins (void)
{
  install_builtin_elementary_types ();
  install_builtin_function_types ();
  install_builtin_attributes ();

  /* Install builtins used by generic middle-end pieces first.  Some of these
     know about internal specificities and control attributes accordingly, for
     instance __builtin_alloca vs no-throw and -fstack-check.  We will ignore
     the generic definition from builtins.def.  */
  build_common_builtin_nodes ();

  /* Now, install the target specific builtins, such as the AltiVec family on
     ppc, and the common set as exposed by builtins.def.  */
  targetm.init_builtins ();
  install_builtin_functions ();
}

#include "gt-ada-utils.h"
#include "gtype-ada.h"
