/****************************************************************************
 *                                                                          *
 *                         GNAT COMPILER COMPONENTS                         *
 *                                                                          *
 *                               U T I L S 2                                *
 *                                                                          *
 *                          C Implementation File                           *
 *                                                                          *
 *          Copyright (C) 1992-2009, Free Software Foundation, Inc.         *
 *                                                                          *
 * GNAT is free software;  you can  redistribute it  and/or modify it under *
 * terms of the  GNU General Public License as published  by the Free Soft- *
 * ware  Foundation;  either version 3,  or (at your option) any later ver- *
 * sion.  GNAT is distributed in the hope that it will be useful, but WITH- *
 * OUT ANY WARRANTY;  without even the  implied warranty of MERCHANTABILITY *
 * or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License *
 * for  more details.  You should have received a copy of the GNU General   *
 * Public License along with GCC; see the file COPYING3.  If not see        *
 * <http://www.gnu.org/licenses/>.                                          *
 *                                                                          *
 * GNAT was originally developed  by the GNAT team at  New York University. *
 * Extensive contributions were provided by Ada Core Technologies Inc.      *
 *                                                                          *
 ****************************************************************************/

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "ggc.h"
#include "flags.h"
#include "output.h"
#include "tree-inline.h"

#include "ada.h"
#include "types.h"
#include "atree.h"
#include "elists.h"
#include "namet.h"
#include "nlists.h"
#include "snames.h"
#include "stringt.h"
#include "uintp.h"
#include "fe.h"
#include "sinfo.h"
#include "einfo.h"
#include "ada-tree.h"
#include "gigi.h"

static tree find_common_type (tree, tree);
static bool contains_save_expr_p (tree);
static tree contains_null_expr (tree);
static tree compare_arrays (tree, tree, tree);
static tree nonbinary_modular_operation (enum tree_code, tree, tree, tree);
static tree build_simple_component_ref (tree, tree, tree, bool);

/* Return the base type of TYPE.  */

tree
get_base_type (tree type)
{
  if (TREE_CODE (type) == RECORD_TYPE
      && TYPE_JUSTIFIED_MODULAR_P (type))
    type = TREE_TYPE (TYPE_FIELDS (type));

  while (TREE_TYPE (type)
         && (TREE_CODE (type) == INTEGER_TYPE
             || TREE_CODE (type) == REAL_TYPE))
    type = TREE_TYPE (type);

  return type;
}

/* EXP is a GCC tree representing an address.  See if we can find how
   strictly the object at that address is aligned.  Return that alignment
   in bits.  If we don't know anything about the alignment, return 0.  */
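/* Illustrative example (not in the original source): given a base pointer
   known to be 64-bit aligned, a POINTER_PLUS_EXPR adding the INTEGER_CST 4
   yields MIN (64, 32) = 32 on a target with 8-bit units, since the constant
   alone only guarantees BITS_PER_UNIT * (4 & -4) = 32 bits of alignment.  */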

unsigned int
known_alignment (tree exp)
{
  unsigned int this_alignment;
  unsigned int lhs, rhs;

  switch (TREE_CODE (exp))
    {
    CASE_CONVERT:
    case VIEW_CONVERT_EXPR:
    case NON_LVALUE_EXPR:
      /* Conversions between pointers and integers don't change the alignment
         of the underlying object.  */
      this_alignment = known_alignment (TREE_OPERAND (exp, 0));
      break;

    case COMPOUND_EXPR:
      /* The value of a COMPOUND_EXPR is that of its second operand.  */
      this_alignment = known_alignment (TREE_OPERAND (exp, 1));
      break;

    case PLUS_EXPR:
    case MINUS_EXPR:
      /* If two addresses are added, the alignment of the result is the
         minimum of the two alignments.  */
      lhs = known_alignment (TREE_OPERAND (exp, 0));
      rhs = known_alignment (TREE_OPERAND (exp, 1));
      this_alignment = MIN (lhs, rhs);
      break;

    case POINTER_PLUS_EXPR:
      lhs = known_alignment (TREE_OPERAND (exp, 0));
      rhs = known_alignment (TREE_OPERAND (exp, 1));
      /* If we don't know the alignment of the offset, we assume that
         of the base.  */
      if (rhs == 0)
        this_alignment = lhs;
      else
        this_alignment = MIN (lhs, rhs);
      break;

    case COND_EXPR:
      /* If there is a choice between two values, use the smallest one.  */
      lhs = known_alignment (TREE_OPERAND (exp, 1));
      rhs = known_alignment (TREE_OPERAND (exp, 2));
      this_alignment = MIN (lhs, rhs);
      break;

    case INTEGER_CST:
      {
        unsigned HOST_WIDE_INT c = TREE_INT_CST_LOW (exp);
        /* The first part of this represents the lowest bit in the constant,
           but it is originally in bytes, not bits.  */
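        /* (C & -C isolates the lowest set bit: e.g. c = 24 (binary 11000)
           gives c & -c = 8, so a 24-byte offset preserves 8-byte, i.e.
           64-bit, alignment.)  */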
        this_alignment = MIN (BITS_PER_UNIT * (c & -c), BIGGEST_ALIGNMENT);
      }
      break;

    case MULT_EXPR:
      /* If we know the alignment of just one side, use it.  Otherwise,
         use the product of the alignments.  */
      lhs = known_alignment (TREE_OPERAND (exp, 0));
      rhs = known_alignment (TREE_OPERAND (exp, 1));

      if (lhs == 0)
        this_alignment = rhs;
      else if (rhs == 0)
        this_alignment = lhs;
      else
        this_alignment = MIN (lhs * rhs, BIGGEST_ALIGNMENT);
      break;

    case BIT_AND_EXPR:
      /* A bit-and expression is as aligned as the maximum alignment of the
         operands.  We typically get here for a complex lhs and a constant
         negative power of two on the rhs to force an explicit alignment, so
         don't bother looking at the lhs.  */
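      /* (For example, masking an address with the INTEGER_CST -16 clears
         its low four bits, so the rhs alone guarantees 16-byte alignment,
         i.e. 128 bits on a target with 8-bit units, capped at
         BIGGEST_ALIGNMENT.)  */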
      this_alignment = known_alignment (TREE_OPERAND (exp, 1));
      break;

    case ADDR_EXPR:
      this_alignment = expr_align (TREE_OPERAND (exp, 0));
      break;

    case CALL_EXPR:
      {
        tree t = maybe_inline_call_in_expr (exp);
        if (t)
          return known_alignment (t);
      }

      /* Fall through... */

    default:
      /* For other pointer expressions, we assume that the pointed-to object
         is at least as aligned as the pointed-to type.  Beware that we can
         have a dummy type here (e.g. a Taft Amendment type), for which the
         alignment is meaningless and should be ignored.  */
      if (POINTER_TYPE_P (TREE_TYPE (exp))
          && !TYPE_IS_DUMMY_P (TREE_TYPE (TREE_TYPE (exp))))
        this_alignment = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
      else
        this_alignment = 0;
      break;
    }

  return this_alignment;
}

/* We have a comparison or assignment operation on two types, T1 and T2, which
   are either both array types or both record types.  T1 is assumed to be for
   the left hand side operand, and T2 for the right hand side.  Return the
   type that both operands should be converted to for the operation, if any.
   Otherwise return zero.  */
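/* (Summary of the preference order implemented below: a constant-size T1,
   unless T2 also has constant size and is strictly smaller; then a
   non-BLKmode T1; then a constant-size T2; then a non-BLKmode T2; and
   NULL_TREE when both types have variable size and BLKmode.)  */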

static tree
find_common_type (tree t1, tree t2)
{
  /* ??? As of today, various constructs lead here with types of different
     sizes even when both constants (e.g. tagged types, packable vs regular
     component types, padded vs unpadded types, ...).  While some of these
     would better be handled upstream (types should be made consistent before
     calling into build_binary_op), some others are really expected and we
     have to be careful.  */

  /* We must prevent writing more than what the target may hold if this is for
     an assignment and the case of tagged types is handled in build_binary_op
     so use the lhs type if it is known to be smaller, or of constant size and
     the rhs type is not, whatever the modes.  We also force t1 in case of
     constant size equality to minimize occurrences of view conversions on the
     lhs of assignments.  */
  if (TREE_CONSTANT (TYPE_SIZE (t1))
      && (!TREE_CONSTANT (TYPE_SIZE (t2))
          || !tree_int_cst_lt (TYPE_SIZE (t2), TYPE_SIZE (t1))))
    return t1;

  /* Otherwise, if the lhs type is non-BLKmode, use it.  Note that we know
     that we will not have any alignment problems since, if we did, the
     non-BLKmode type could not have been used.  */
  if (TYPE_MODE (t1) != BLKmode)
    return t1;

  /* If the rhs type is of constant size, use it whatever the modes.  At
     this point it is known to be smaller, or of constant size and the
     lhs type is not.  */
  if (TREE_CONSTANT (TYPE_SIZE (t2)))
    return t2;

  /* Otherwise, if the rhs type is non-BLKmode, use it.  */
  if (TYPE_MODE (t2) != BLKmode)
    return t2;

  /* In this case, both types have variable size and BLKmode.  It's
     probably best to leave the "type mismatch" because changing it
     could cause a bad self-referential reference.  */
  return NULL_TREE;
}

/* See if EXP contains a SAVE_EXPR in a position where we would
   normally put it.

   ??? This is a real kludge, but is probably the best approach short
   of some very general solution.  */

static bool
contains_save_expr_p (tree exp)
{
  switch (TREE_CODE (exp))
    {
    case SAVE_EXPR:
      return true;

    case ADDR_EXPR:  case INDIRECT_REF:
    case COMPONENT_REF:
    CASE_CONVERT: case VIEW_CONVERT_EXPR:
      return contains_save_expr_p (TREE_OPERAND (exp, 0));

    case CONSTRUCTOR:
      {
        tree value;
        unsigned HOST_WIDE_INT ix;

        FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), ix, value)
          if (contains_save_expr_p (value))
            return true;
        return false;
      }

    default:
      return false;
    }
}

/* See if EXP contains a NULL_EXPR in an expression we use for sizes.  Return
   it if so.  This is used to detect types whose sizes involve computations
   that are known to raise Constraint_Error.  */

static tree
contains_null_expr (tree exp)
{
  tree tem;

  if (TREE_CODE (exp) == NULL_EXPR)
    return exp;

  switch (TREE_CODE_CLASS (TREE_CODE (exp)))
    {
    case tcc_unary:
      return contains_null_expr (TREE_OPERAND (exp, 0));

    case tcc_comparison:
    case tcc_binary:
      tem = contains_null_expr (TREE_OPERAND (exp, 0));
      if (tem)
        return tem;

      return contains_null_expr (TREE_OPERAND (exp, 1));

    case tcc_expression:
      switch (TREE_CODE (exp))
        {
        case SAVE_EXPR:
          return contains_null_expr (TREE_OPERAND (exp, 0));

        case COND_EXPR:
          tem = contains_null_expr (TREE_OPERAND (exp, 0));
          if (tem)
            return tem;

          tem = contains_null_expr (TREE_OPERAND (exp, 1));
          if (tem)
            return tem;

          return contains_null_expr (TREE_OPERAND (exp, 2));

        default:
          return 0;
        }

    default:
      return 0;
    }
}

/* Return an expression tree representing an equality comparison of
   A1 and A2, two objects of ARRAY_TYPE.  The returned expression should
   be of type RESULT_TYPE.

   Two arrays are equal in one of two ways: (1) if both have zero length
   in some dimension (not necessarily the same dimension) or (2) if the
   lengths in each dimension are equal and the data is equal.  We perform the
   length tests in as efficient a manner as possible.  */
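/* (Note that each per-dimension "length" computed below is UB - LB, i.e.
   the actual length minus 1: an empty dimension with bounds 1 .. 0 gives
   -1, so LENGTH + 1 == 0 is the test for a null dimension.)  */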

static tree
compare_arrays (tree result_type, tree a1, tree a2)
{
  tree t1 = TREE_TYPE (a1);
  tree t2 = TREE_TYPE (a2);
  tree result = convert (result_type, integer_one_node);
  tree a1_is_null = convert (result_type, integer_zero_node);
  tree a2_is_null = convert (result_type, integer_zero_node);
  bool length_zero_p = false;

  /* Process each dimension separately and compare the lengths.  If any
     dimension has a size known to be zero, set LENGTH_ZERO_P to true to
     suppress the comparison of the data.  */
  while (TREE_CODE (t1) == ARRAY_TYPE && TREE_CODE (t2) == ARRAY_TYPE)
    {
      tree lb1 = TYPE_MIN_VALUE (TYPE_DOMAIN (t1));
      tree ub1 = TYPE_MAX_VALUE (TYPE_DOMAIN (t1));
      tree lb2 = TYPE_MIN_VALUE (TYPE_DOMAIN (t2));
      tree ub2 = TYPE_MAX_VALUE (TYPE_DOMAIN (t2));
      tree bt = get_base_type (TREE_TYPE (lb1));
      tree length1 = fold_build2 (MINUS_EXPR, bt, ub1, lb1);
      tree length2 = fold_build2 (MINUS_EXPR, bt, ub2, lb2);
      tree nbt;
      tree tem;
      tree comparison, this_a1_is_null, this_a2_is_null;

      /* If the length of the first array is a constant, swap our operands
         unless the length of the second array is the constant zero.
         Note that we have set the `length' values to the length - 1.  */
      if (TREE_CODE (length1) == INTEGER_CST
          && !integer_zerop (fold_build2 (PLUS_EXPR, bt, length2,
                                          convert (bt, integer_one_node))))
        {
          tem = a1, a1 = a2, a2 = tem;
          tem = t1, t1 = t2, t2 = tem;
          tem = lb1, lb1 = lb2, lb2 = tem;
          tem = ub1, ub1 = ub2, ub2 = tem;
          tem = length1, length1 = length2, length2 = tem;
          tem = a1_is_null, a1_is_null = a2_is_null, a2_is_null = tem;
        }

      /* If the length of this dimension in the second array is the constant
         zero, we can just go inside the original bounds for the first
         array and see if last < first.  */
      if (integer_zerop (fold_build2 (PLUS_EXPR, bt, length2,
                                      convert (bt, integer_one_node))))
        {
          tree ub = TYPE_MAX_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1)));
          tree lb = TYPE_MIN_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1)));

          comparison = build_binary_op (LT_EXPR, result_type, ub, lb);
          comparison = SUBSTITUTE_PLACEHOLDER_IN_EXPR (comparison, a1);
          length1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (length1, a1);

          length_zero_p = true;
          this_a1_is_null = comparison;
          this_a2_is_null = convert (result_type, integer_one_node);
        }

      /* If the length is some other constant value, we know that
         this dimension in the first array cannot be superflat, so we
         can just use its length from the actual stored bounds.  */
      else if (TREE_CODE (length2) == INTEGER_CST)
        {
          ub1 = TYPE_MAX_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1)));
          lb1 = TYPE_MIN_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1)));
          ub2 = TYPE_MAX_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t2)));
          lb2 = TYPE_MIN_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t2)));
          nbt = get_base_type (TREE_TYPE (ub1));

          comparison
            = build_binary_op (EQ_EXPR, result_type,
                               build_binary_op (MINUS_EXPR, nbt, ub1, lb1),
                               build_binary_op (MINUS_EXPR, nbt, ub2, lb2));

          /* Note that we know that UB2 and LB2 are constant and hence
             cannot contain a PLACEHOLDER_EXPR.  */

          comparison = SUBSTITUTE_PLACEHOLDER_IN_EXPR (comparison, a1);
          length1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (length1, a1);

          this_a1_is_null = build_binary_op (LT_EXPR, result_type, ub1, lb1);
          this_a2_is_null = convert (result_type, integer_zero_node);
        }

      /* Otherwise compare the computed lengths.  */
      else
        {
          length1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (length1, a1);
          length2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (length2, a2);

          comparison
            = build_binary_op (EQ_EXPR, result_type, length1, length2);

          this_a1_is_null
            = build_binary_op (LT_EXPR, result_type, length1,
                               convert (bt, integer_zero_node));
          this_a2_is_null
            = build_binary_op (LT_EXPR, result_type, length2,
                               convert (bt, integer_zero_node));
        }

      result = build_binary_op (TRUTH_ANDIF_EXPR, result_type,
                                result, comparison);

      a1_is_null = build_binary_op (TRUTH_ORIF_EXPR, result_type,
                                    this_a1_is_null, a1_is_null);
      a2_is_null = build_binary_op (TRUTH_ORIF_EXPR, result_type,
                                    this_a2_is_null, a2_is_null);

      t1 = TREE_TYPE (t1);
      t2 = TREE_TYPE (t2);
    }

  /* Unless the size of some bound is known to be zero, compare the
     data in the array.  */
  if (!length_zero_p)
    {
      tree type = find_common_type (TREE_TYPE (a1), TREE_TYPE (a2));

      if (type)
        a1 = convert (type, a1), a2 = convert (type, a2);

      result = build_binary_op (TRUTH_ANDIF_EXPR, result_type, result,
                                fold_build2 (EQ_EXPR, result_type, a1, a2));
    }

  /* The result is also true if both sizes are zero.  */
  result = build_binary_op (TRUTH_ORIF_EXPR, result_type,
                            build_binary_op (TRUTH_ANDIF_EXPR, result_type,
                                             a1_is_null, a2_is_null),
                            result);

  /* If either operand contains SAVE_EXPRs, they have to be evaluated before
     starting the comparison above, since the place where they would
     otherwise be evaluated would be wrong.  */

  if (contains_save_expr_p (a1))
    result = build2 (COMPOUND_EXPR, result_type, a1, result);

  if (contains_save_expr_p (a2))
    result = build2 (COMPOUND_EXPR, result_type, a2, result);

  return result;
}

/* Compute the result of applying OP_CODE to LHS and RHS, where both are of
   type TYPE.  We know that TYPE is a modular type with a nonbinary
   modulus.  */
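/* (For instance, with modulus 7, tree_floor_log2 (7) + 1 = 3 bits hold the
   values 0 .. 6; addition and subtraction need one more bit, and
   multiplication twice as many, since 6 * 6 = 36 fits in 6 bits.)  */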

static tree
nonbinary_modular_operation (enum tree_code op_code, tree type, tree lhs,
                             tree rhs)
{
  tree modulus = TYPE_MODULUS (type);
  unsigned int needed_precision = tree_floor_log2 (modulus) + 1;
  unsigned int precision;
  bool unsignedp = true;
  tree op_type = type;
  tree result;

  /* If this is an addition of a constant, convert it to a subtraction
     of a constant since we can do that faster.  */
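  /* (E.g. with modulus 7, X + 3 becomes X - 4, since 3 == -4 (mod 7).)  */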
  if (op_code == PLUS_EXPR && TREE_CODE (rhs) == INTEGER_CST)
    {
      rhs = fold_build2 (MINUS_EXPR, type, modulus, rhs);
      op_code = MINUS_EXPR;
    }

  /* For the logical operations, we only need PRECISION bits.  For
     addition and subtraction, we need one more, and for multiplication we
     need twice as many.  But we never want to use a precision narrower
     than that of the type.  */
  if (op_code == PLUS_EXPR || op_code == MINUS_EXPR)
    needed_precision += 1;
  else if (op_code == MULT_EXPR)
    needed_precision *= 2;

  precision = MAX (needed_precision, TYPE_PRECISION (op_type));

  /* Unsigned will do for everything but subtraction.  */
  if (op_code == MINUS_EXPR)
    unsignedp = false;

  /* If our type is the wrong signedness or isn't wide enough, make a new
     type and convert both our operands to it.  */
  if (TYPE_PRECISION (op_type) < precision
      || TYPE_UNSIGNED (op_type) != unsignedp)
    {
      /* Copy the node so we ensure it can be modified to make it modular.  */
      op_type = copy_node (gnat_type_for_size (precision, unsignedp));
      modulus = convert (op_type, modulus);
      SET_TYPE_MODULUS (op_type, modulus);
      TYPE_MODULAR_P (op_type) = 1;
      lhs = convert (op_type, lhs);
      rhs = convert (op_type, rhs);
    }

  /* Do the operation, then we'll fix it up.  */
  result = fold_build2 (op_code, op_type, lhs, rhs);

  /* For multiplication, we have no choice but to do a full modulus
     operation.  However, we want to do this in the narrowest
     possible size.  */
  if (op_code == MULT_EXPR)
    {
      tree div_type = copy_node (gnat_type_for_size (needed_precision, 1));
      modulus = convert (div_type, modulus);
      SET_TYPE_MODULUS (div_type, modulus);
      TYPE_MODULAR_P (div_type) = 1;
      result = convert (op_type,
                        fold_build2 (TRUNC_MOD_EXPR, div_type,
                                     convert (div_type, result), modulus));
    }

  /* For subtraction, add the modulus back if we are negative.  */
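  /* (E.g. with modulus 7, 2 - 5 yields -3, and adding back 7 gives 4.)  */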
  else if (op_code == MINUS_EXPR)
    {
      result = save_expr (result);
      result = fold_build3 (COND_EXPR, op_type,
                            fold_build2 (LT_EXPR, integer_type_node, result,
                                         convert (op_type, integer_zero_node)),
                            fold_build2 (PLUS_EXPR, op_type, result, modulus),
                            result);
    }

  /* For the other operations, subtract the modulus if we are >= it.  */
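  /* (E.g. with modulus 7, 5 + 4 yields 9 >= 7, so we subtract 7 to get 2.)  */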
  else
    {
      result = save_expr (result);
      result = fold_build3 (COND_EXPR, op_type,
                            fold_build2 (GE_EXPR, integer_type_node,
                                         result, modulus),
                            fold_build2 (MINUS_EXPR, op_type,
                                         result, modulus),
                            result);
    }

  return convert (type, result);
}

/* Make a binary operation of kind OP_CODE.  RESULT_TYPE is the type
   desired for the result.  Usually the operation is to be performed
   in that type.  For MODIFY_EXPR and ARRAY_REF, RESULT_TYPE may be 0,
   in which case the type to be used will be derived from the operands.

   This function is very much unlike the ones for C and C++ since we
   have already done any type conversion and matching required.  All we
   have to do here is validate the work done by SEM and handle subtypes.  */

tree
build_binary_op (enum tree_code op_code, tree result_type,
                 tree left_operand, tree right_operand)
{
  tree left_type  = TREE_TYPE (left_operand);
  tree right_type = TREE_TYPE (right_operand);
  tree left_base_type = get_base_type (left_type);
  tree right_base_type = get_base_type (right_type);
  tree operation_type = result_type;
  tree best_type = NULL_TREE;
  tree modulus, result;
  bool has_side_effects = false;

  if (operation_type
      && TREE_CODE (operation_type) == RECORD_TYPE
      && TYPE_JUSTIFIED_MODULAR_P (operation_type))
    operation_type = TREE_TYPE (TYPE_FIELDS (operation_type));

  if (operation_type
      && !AGGREGATE_TYPE_P (operation_type)
      && TYPE_EXTRA_SUBTYPE_P (operation_type))
    operation_type = get_base_type (operation_type);

  modulus = (operation_type
             && TREE_CODE (operation_type) == INTEGER_TYPE
             && TYPE_MODULAR_P (operation_type)
             ? TYPE_MODULUS (operation_type) : NULL_TREE);

  switch (op_code)
    {
    case MODIFY_EXPR:
      /* If there were integral or pointer conversions on the LHS, remove
         them; we'll be putting them back below if needed.  Likewise for
         conversions between array and record types, except for justified
         modular types.  But don't do this if the right operand is not
         BLKmode (for packed arrays) unless we are not changing the mode.  */
      while ((CONVERT_EXPR_P (left_operand)
              || TREE_CODE (left_operand) == VIEW_CONVERT_EXPR)
             && (((INTEGRAL_TYPE_P (left_type)
                   || POINTER_TYPE_P (left_type))
                  && (INTEGRAL_TYPE_P (TREE_TYPE
                                       (TREE_OPERAND (left_operand, 0)))
                      || POINTER_TYPE_P (TREE_TYPE
                                         (TREE_OPERAND (left_operand, 0)))))
                 || (((TREE_CODE (left_type) == RECORD_TYPE
                       && !TYPE_JUSTIFIED_MODULAR_P (left_type))
                      || TREE_CODE (left_type) == ARRAY_TYPE)
                     && ((TREE_CODE (TREE_TYPE
                                     (TREE_OPERAND (left_operand, 0)))
                          == RECORD_TYPE)
                         || (TREE_CODE (TREE_TYPE
                                        (TREE_OPERAND (left_operand, 0)))
                             == ARRAY_TYPE))
                     && (TYPE_MODE (right_type) == BLKmode
                         || (TYPE_MODE (left_type)
                             == TYPE_MODE (TREE_TYPE
                                           (TREE_OPERAND
                                            (left_operand, 0))))))))
        {
          left_operand = TREE_OPERAND (left_operand, 0);
          left_type = TREE_TYPE (left_operand);
        }

      /* If a class-wide type may be involved, force use of the RHS type.  */
      if ((TREE_CODE (right_type) == RECORD_TYPE
           || TREE_CODE (right_type) == UNION_TYPE)
          && TYPE_ALIGN_OK (right_type))
        operation_type = right_type;

      /* If we are copying between padded objects with compatible types, use
         the padded view of the objects; this is very likely more efficient.
         Likewise for a padded object that is assigned a constructor, if we
         can convert the constructor to the inner type, to avoid putting a
         VIEW_CONVERT_EXPR on the LHS.  But don't do so if we wouldn't have
         actually copied anything.  */
      else if (TYPE_IS_PADDING_P (left_type)
               && TREE_CONSTANT (TYPE_SIZE (left_type))
               && ((TREE_CODE (right_operand) == COMPONENT_REF
                    && TYPE_IS_PADDING_P
                       (TREE_TYPE (TREE_OPERAND (right_operand, 0)))
                    && gnat_types_compatible_p
                       (left_type,
                        TREE_TYPE (TREE_OPERAND (right_operand, 0))))
                   || (TREE_CODE (right_operand) == CONSTRUCTOR
                       && !CONTAINS_PLACEHOLDER_P
                           (DECL_SIZE (TYPE_FIELDS (left_type)))))
               && !integer_zerop (TYPE_SIZE (right_type)))
        operation_type = left_type;

      /* Find the best type to use for copying between aggregate types.  */
      else if (((TREE_CODE (left_type) == ARRAY_TYPE
                 && TREE_CODE (right_type) == ARRAY_TYPE)
                || (TREE_CODE (left_type) == RECORD_TYPE
                    && TREE_CODE (right_type) == RECORD_TYPE))
               && (best_type = find_common_type (left_type, right_type)))
        operation_type = best_type;

      /* Otherwise use the LHS type.  */
      else if (!operation_type)
        operation_type = left_type;

      /* Ensure everything on the LHS is valid.  If we have a field reference,
         strip anything that get_inner_reference can handle.  Then remove any
         conversions between types having the same code and mode.  And mark
         VIEW_CONVERT_EXPRs with TREE_ADDRESSABLE.  When done, we must have
         either an INDIRECT_REF, a NULL_EXPR or a DECL node.  */
      result = left_operand;
      while (true)
        {
          tree restype = TREE_TYPE (result);

          if (TREE_CODE (result) == COMPONENT_REF
              || TREE_CODE (result) == ARRAY_REF
              || TREE_CODE (result) == ARRAY_RANGE_REF)
            while (handled_component_p (result))
              result = TREE_OPERAND (result, 0);
          else if (TREE_CODE (result) == REALPART_EXPR
                   || TREE_CODE (result) == IMAGPART_EXPR
                   || (CONVERT_EXPR_P (result)
                       && (((TREE_CODE (restype)
                             == TREE_CODE (TREE_TYPE
                                           (TREE_OPERAND (result, 0))))
                             && (TYPE_MODE (TREE_TYPE
                                            (TREE_OPERAND (result, 0)))
                                 == TYPE_MODE (restype)))
                           || TYPE_ALIGN_OK (restype))))
            result = TREE_OPERAND (result, 0);
          else if (TREE_CODE (result) == VIEW_CONVERT_EXPR)
            {
              TREE_ADDRESSABLE (result) = 1;
              result = TREE_OPERAND (result, 0);
            }
          else
            break;
        }

      gcc_assert (TREE_CODE (result) == INDIRECT_REF
                  || TREE_CODE (result) == NULL_EXPR
                  || DECL_P (result));

      /* Convert the right operand to the operation type unless it is
         already of the correct type or the type involves a placeholder,
         since the RHS may not have the same record type.  */
      if (operation_type != right_type
          && !CONTAINS_PLACEHOLDER_P (TYPE_SIZE (operation_type)))
        {
          right_operand = convert (operation_type, right_operand);
          right_type = operation_type;
        }

      /* If the left operand is not of the same type as the operation
         type, wrap it up in a VIEW_CONVERT_EXPR.  */
      if (left_type != operation_type)
        left_operand = unchecked_convert (operation_type, left_operand, false);

      has_side_effects = true;
      modulus = NULL_TREE;
      break;

    case ARRAY_REF:
      if (!operation_type)
        operation_type = TREE_TYPE (left_type);

      /* ... fall through ... */

    case ARRAY_RANGE_REF:
      /* First look through conversion between type variants.  Note that
         this changes neither the operation type nor the type domain.  */
      if (TREE_CODE (left_operand) == VIEW_CONVERT_EXPR
          && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (left_operand, 0)))
             == TYPE_MAIN_VARIANT (left_type))
        {
          left_operand = TREE_OPERAND (left_operand, 0);
          left_type = TREE_TYPE (left_operand);
        }

      /* For a range, make sure the element type is consistent.  */
      if (op_code == ARRAY_RANGE_REF
          && TREE_TYPE (operation_type) != TREE_TYPE (left_type))
        operation_type = build_array_type (TREE_TYPE (left_type),
                                           TYPE_DOMAIN (operation_type));

      /* Then convert the right operand to its base type.  This will prevent
         unneeded sign conversions when sizetype is wider than integer.  */
      right_operand = convert (right_base_type, right_operand);
      right_operand = convert (sizetype, right_operand);

      if (!TREE_CONSTANT (right_operand)
          || !TREE_CONSTANT (TYPE_MIN_VALUE (right_type)))
        gnat_mark_addressable (left_operand);

      modulus = NULL_TREE;
      break;

    case GE_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case LT_EXPR:
      gcc_assert (!POINTER_TYPE_P (left_type));

      /* ... fall through ... */

    case EQ_EXPR:
    case NE_EXPR:
      /* If either operand is a NULL_EXPR, just return a new one.  */
      if (TREE_CODE (left_operand) == NULL_EXPR)
        return build2 (op_code, result_type,
                       build1 (NULL_EXPR, integer_type_node,
                               TREE_OPERAND (left_operand, 0)),
                       integer_zero_node);

      else if (TREE_CODE (right_operand) == NULL_EXPR)
        return build2 (op_code, result_type,
                       build1 (NULL_EXPR, integer_type_node,
                               TREE_OPERAND (right_operand, 0)),
                       integer_zero_node);

      /* If either object is a justified modular type, get the
         fields from within.  */
      if (TREE_CODE (left_type) == RECORD_TYPE
          && TYPE_JUSTIFIED_MODULAR_P (left_type))
        {
          left_operand = convert (TREE_TYPE (TYPE_FIELDS (left_type)),
                                  left_operand);
          left_type = TREE_TYPE (left_operand);
          left_base_type = get_base_type (left_type);
        }

      if (TREE_CODE (right_type) == RECORD_TYPE
          && TYPE_JUSTIFIED_MODULAR_P (right_type))
        {
          right_operand = convert (TREE_TYPE (TYPE_FIELDS (right_type)),
                                   right_operand);
          right_type = TREE_TYPE (right_operand);
          right_base_type = get_base_type (right_type);
        }

      /* If both objects are arrays, compare them specially.  */
      if ((TREE_CODE (left_type) == ARRAY_TYPE
           || (TREE_CODE (left_type) == INTEGER_TYPE
               && TYPE_HAS_ACTUAL_BOUNDS_P (left_type)))
          && (TREE_CODE (right_type) == ARRAY_TYPE
              || (TREE_CODE (right_type) == INTEGER_TYPE
                  && TYPE_HAS_ACTUAL_BOUNDS_P (right_type))))
        {
          result = compare_arrays (result_type, left_operand, right_operand);

          if (op_code == NE_EXPR)
            result = invert_truthvalue (result);
          else
            gcc_assert (op_code == EQ_EXPR);

          return result;
        }

      /* Otherwise, the base types must be the same, unless they are both fat
         pointer types or record types.  In the latter case, use the best type
         and convert both operands to that type.  */
      if (left_base_type != right_base_type)
        {
          if (TYPE_IS_FAT_POINTER_P (left_base_type)
              && TYPE_IS_FAT_POINTER_P (right_base_type))
            {
              gcc_assert (TYPE_MAIN_VARIANT (left_base_type)
                          == TYPE_MAIN_VARIANT (right_base_type));
              best_type = left_base_type;
            }

          else if (TREE_CODE (left_base_type) == RECORD_TYPE
                   && TREE_CODE (right_base_type) == RECORD_TYPE)
            {
              /* The only way this is permitted is if both types have the same
                 name.  In that case, one of them must not be self-referential.
                 Use it as the best type.  Even better with a fixed size.  */
              gcc_assert (TYPE_NAME (left_base_type)
                          && TYPE_NAME (left_base_type)
                             == TYPE_NAME (right_base_type));

              if (TREE_CONSTANT (TYPE_SIZE (left_base_type)))
                best_type = left_base_type;
              else if (TREE_CONSTANT (TYPE_SIZE (right_base_type)))
                best_type = right_base_type;
              else if (!CONTAINS_PLACEHOLDER_P (TYPE_SIZE (left_base_type)))
                best_type = left_base_type;
              else if (!CONTAINS_PLACEHOLDER_P (TYPE_SIZE (right_base_type)))
                best_type = right_base_type;
              else
                gcc_unreachable ();
            }

          else
            gcc_unreachable ();

          left_operand = convert (best_type, left_operand);
          right_operand = convert (best_type, right_operand);
        }
      else
        {
          left_operand = convert (left_base_type, left_operand);
          right_operand = convert (right_base_type, right_operand);
        }

      /* If we are comparing a fat pointer against zero, we just need to
         compare the data pointer.  */
      if (TYPE_IS_FAT_POINTER_P (left_base_type)
          && TREE_CODE (right_operand) == CONSTRUCTOR
          && integer_zerop (VEC_index (constructor_elt,
                                       CONSTRUCTOR_ELTS (right_operand),
                                       0)->value))
        {
          left_operand
            = build_component_ref (left_operand, NULL_TREE,
                                   TYPE_FIELDS (left_base_type), false);
          right_operand
            = convert (TREE_TYPE (left_operand), integer_zero_node);
        }

      modulus = NULL_TREE;
      break;

    case PREINCREMENT_EXPR:
    case PREDECREMENT_EXPR:
    case POSTINCREMENT_EXPR:
    case POSTDECREMENT_EXPR:
      /* These operations are not used anymore.  */
      gcc_unreachable ();

    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      /* The RHS of a shift can be any type.  Also, ignore any modulus
         (we used to abort, but this is needed for unchecked conversion
         to modular types).  Otherwise, processing is the same as normal.  */
      gcc_assert (operation_type == left_base_type);
      modulus = NULL_TREE;
      left_operand = convert (operation_type, left_operand);
      break;

    case BIT_AND_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
      /* For binary modulus, if the inputs are in range, so are the
         outputs.  */
      if (modulus && integer_pow2p (modulus))
        modulus = NULL_TREE;
      goto common;

    case COMPLEX_EXPR:
      gcc_assert (TREE_TYPE (result_type) == left_base_type
                  && TREE_TYPE (result_type) == right_base_type);
      left_operand = convert (left_base_type, left_operand);
      right_operand = convert (right_base_type, right_operand);
      break;

    case TRUNC_DIV_EXPR:   case TRUNC_MOD_EXPR:
    case CEIL_DIV_EXPR:    case CEIL_MOD_EXPR:
    case FLOOR_DIV_EXPR:   case FLOOR_MOD_EXPR:
    case ROUND_DIV_EXPR:   case ROUND_MOD_EXPR:
      /* These always produce results lower than either operand.  */
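      /* (Modular operands are already in the range 0 .. modulus - 1 and
         these operations cannot exceed their operands, so the result
         needs no reduction by the modulus.)  */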
      modulus = NULL_TREE;
      goto common;

    case POINTER_PLUS_EXPR:
      gcc_assert (operation_type == left_base_type
                  && sizetype == right_base_type);
      left_operand = convert (operation_type, left_operand);
      right_operand = convert (sizetype, right_operand);
      break;

    case PLUS_NOMOD_EXPR:
    case MINUS_NOMOD_EXPR:
      if (op_code == PLUS_NOMOD_EXPR)
        op_code = PLUS_EXPR;
      else
        op_code = MINUS_EXPR;
      modulus = NULL_TREE;

      /* ... fall through ... */

    case PLUS_EXPR:
    case MINUS_EXPR:
      /* Avoid doing arithmetic in ENUMERAL_TYPE or BOOLEAN_TYPE like the
         other compilers.  Contrary to C, Ada doesn't allow arithmetic in
         these types but can generate addition/subtraction for Succ/Pred.  */
      if (operation_type
          && (TREE_CODE (operation_type) == ENUMERAL_TYPE
              || TREE_CODE (operation_type) == BOOLEAN_TYPE))
        operation_type = left_base_type = right_base_type
          = gnat_type_for_mode (TYPE_MODE (operation_type),
                                TYPE_UNSIGNED (operation_type));

      /* ... fall through ... */

    default:
    common:
      /* The result type should be the same as the base types of
         both operands (and they should be the same).  Convert
         everything to the result type.  */

      gcc_assert (operation_type == left_base_type
                  && left_base_type == right_base_type);
      left_operand = convert (operation_type, left_operand);
      right_operand = convert (operation_type, right_operand);
    }

  if (modulus && !integer_pow2p (modulus))
    {
      result = nonbinary_modular_operation (op_code, operation_type,
                                            left_operand, right_operand);
      modulus = NULL_TREE;
    }
  /* If either operand is a NULL_EXPR, just return a new one.  */
  else if (TREE_CODE (left_operand) == NULL_EXPR)
    return build1 (NULL_EXPR, operation_type, TREE_OPERAND (left_operand, 0));
  else if (TREE_CODE (right_operand) == NULL_EXPR)
    return build1 (NULL_EXPR, operation_type, TREE_OPERAND (right_operand, 0));
  else if (op_code == ARRAY_REF || op_code == ARRAY_RANGE_REF)
    result = fold (build4 (op_code, operation_type, left_operand,
                           right_operand, NULL_TREE, NULL_TREE));
  else
    result
      = fold_build2 (op_code, operation_type, left_operand, right_operand);

  TREE_SIDE_EFFECTS (result) |= has_side_effects;
  TREE_CONSTANT (result)
    |= (TREE_CONSTANT (left_operand) & TREE_CONSTANT (right_operand)
        && op_code != ARRAY_REF && op_code != ARRAY_RANGE_REF);

  if ((op_code == ARRAY_REF || op_code == ARRAY_RANGE_REF)
      && TYPE_VOLATILE (operation_type))
    TREE_THIS_VOLATILE (result) = 1;

  /* If we are working with modular types, perform the MOD operation
     if something above hasn't eliminated the need for it.  */
  if (modulus)
    result = fold_build2 (FLOOR_MOD_EXPR, operation_type, result,
                          convert (operation_type, modulus));

  if (result_type && result_type != operation_type)
    result = convert (result_type, result);

  return result;
}

/* Similar, but for unary operations.  */

tree
build_unary_op (enum tree_code op_code, tree result_type, tree operand)
{
  tree type = TREE_TYPE (operand);
  tree base_type = get_base_type (type);
  tree operation_type = result_type;
  tree result;
  bool side_effects = false;

  if (operation_type
      && TREE_CODE (operation_type) == RECORD_TYPE
      && TYPE_JUSTIFIED_MODULAR_P (operation_type))
    operation_type = TREE_TYPE (TYPE_FIELDS (operation_type));

  if (operation_type
      && !AGGREGATE_TYPE_P (operation_type)
      && TYPE_EXTRA_SUBTYPE_P (operation_type))
    operation_type = get_base_type (operation_type);

  switch (op_code)
    {
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      if (!operation_type)
        result_type = operation_type = TREE_TYPE (type);
      else
        gcc_assert (result_type == TREE_TYPE (type));

      result = fold_build1 (op_code, operation_type, operand);
      break;

    case TRUTH_NOT_EXPR:
      gcc_assert (result_type == base_type);
      result = invert_truthvalue (operand);
      break;

    case ATTR_ADDR_EXPR:
    case ADDR_EXPR:
      switch (TREE_CODE (operand))
        {
        case INDIRECT_REF:
        case UNCONSTRAINED_ARRAY_REF:
          result = TREE_OPERAND (operand, 0);

          /* Make sure the type here is a pointer, not a reference.
             GCC wants pointer types for function addresses.  */
          if (!result_type)
            result_type = build_pointer_type (type);

          /* If the underlying object can alias everything, propagate the
             property since we are effectively retrieving the object.  */
          if (POINTER_TYPE_P (TREE_TYPE (result))
              && TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (result)))
            {
              if (TREE_CODE (result_type) == POINTER_TYPE
                  && !TYPE_REF_CAN_ALIAS_ALL (result_type))
                result_type
                  = build_pointer_type_for_mode (TREE_TYPE (result_type),
                                                 TYPE_MODE (result_type),
                                                 true);
              else if (TREE_CODE (result_type) == REFERENCE_TYPE
                       && !TYPE_REF_CAN_ALIAS_ALL (result_type))
                result_type
                  = build_reference_type_for_mode (TREE_TYPE (result_type),
                                                   TYPE_MODE (result_type),
                                                   true);
            }
          break;

        case NULL_EXPR:
          result = operand;
          TREE_TYPE (result) = type = build_pointer_type (type);
          break;

        case ARRAY_REF:
        case ARRAY_RANGE_REF:
        case COMPONENT_REF:
        case BIT_FIELD_REF:
          /* If this is for 'Address, find the address of the prefix and
             add the offset to the field.  Otherwise, do this the normal
             way.  */
          if (op_code == ATTR_ADDR_EXPR)
            {
              HOST_WIDE_INT bitsize;
              HOST_WIDE_INT bitpos;
              tree offset, inner;
              enum machine_mode mode;
              int unsignedp, volatilep;

              inner = get_inner_reference (operand, &bitsize, &bitpos, &offset,
                                           &mode, &unsignedp, &volatilep,
                                           false);

              /* If INNER is a padding type whose field has a self-referential
                 size, convert to that inner type.  We know the offset is zero
                 and we need to have that type visible.  */
              if (TYPE_IS_PADDING_P (TREE_TYPE (inner))
                  && CONTAINS_PLACEHOLDER_P
                     (TYPE_SIZE (TREE_TYPE (TYPE_FIELDS
                                            (TREE_TYPE (inner))))))
                inner = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (inner))),
                                 inner);

              /* Compute the offset as a byte offset from INNER.  */
              if (!offset)
                offset = size_zero_node;

              if (bitpos % BITS_PER_UNIT != 0)
                post_error
                  ("taking address of object not aligned on storage unit?",
                   error_gnat_node);

              offset = size_binop (PLUS_EXPR, offset,
                                   size_int (bitpos / BITS_PER_UNIT));

              /* Take the address of INNER, convert the offset to void *, and
                 add them.  It will later be converted to the desired result
                 type, if any.  */
              inner = build_unary_op (ADDR_EXPR, NULL_TREE, inner);
              inner = convert (ptr_void_type_node, inner);
              result = build_binary_op (POINTER_PLUS_EXPR, ptr_void_type_node,
                                        inner, offset);
              result = convert (build_pointer_type (TREE_TYPE (operand)),
                                result);
              break;
            }
          goto common;

        case CONSTRUCTOR:
          /* If this is just a constructor for a padded record, we can
             just take the address of the single field and convert it to
             a pointer to our type.  */
          if (TYPE_IS_PADDING_P (type))
            {
              result = VEC_index (constructor_elt,
                                  CONSTRUCTOR_ELTS (operand),
                                  0)->value;
              result = convert (build_pointer_type (TREE_TYPE (operand)),
                                build_unary_op (ADDR_EXPR, NULL_TREE, result));
              break;
            }

          goto common;

        case NOP_EXPR:
          if (AGGREGATE_TYPE_P (type)
              && AGGREGATE_TYPE_P (TREE_TYPE (TREE_OPERAND (operand, 0))))
            return build_unary_op (ADDR_EXPR, result_type,
                                   TREE_OPERAND (operand, 0));

          /* ... fallthru ... */

        case VIEW_CONVERT_EXPR:
          /* If this is just a variant conversion or if the conversion doesn't
             change the mode, get the result type from this type and go down.
             This is needed for conversions of CONST_DECLs, to eventually get
             to the address of their CORRESPONDING_VARs.  */
          if ((TYPE_MAIN_VARIANT (type)
               == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (operand, 0))))
              || (TYPE_MODE (type) != BLKmode
                  && (TYPE_MODE (type)
                      == TYPE_MODE (TREE_TYPE (TREE_OPERAND (operand, 0))))))
            return build_unary_op (ADDR_EXPR,
                                   (result_type ? result_type
                                    : build_pointer_type (type)),
                                   TREE_OPERAND (operand, 0));
          goto common;

        case CONST_DECL:
          operand = DECL_CONST_CORRESPONDING_VAR (operand);

          /* ... fall through ... */

        default:
        common:

          /* If we are taking the address of a padded record whose field
             contains a template, take the address of the template.  */
          if (TYPE_IS_PADDING_P (type)
              && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type))) == RECORD_TYPE
              && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (TYPE_FIELDS (type))))
            {
              type = TREE_TYPE (TYPE_FIELDS (type));
              operand = convert (type, operand);
            }

          if (type != error_mark_node)
            operation_type = build_pointer_type (type);

          gnat_mark_addressable (operand);
          result = fold_build1 (ADDR_EXPR, operation_type, operand);
        }

      TREE_CONSTANT (result) = staticp (operand) || TREE_CONSTANT (operand);
      break;

    case INDIRECT_REF:
      /* If we want to refer to an entire unconstrained array,
         make up an expression to do so.  This will never survive to
         the backend.  If TYPE is a thin pointer, first convert the
         operand to a fat pointer.  */
      if (TYPE_IS_THIN_POINTER_P (type)
          && TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type)))
        {
          operand
            = convert (TREE_TYPE (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type))),
                       operand);
          type = TREE_TYPE (operand);
        }

      if (TYPE_IS_FAT_POINTER_P (type))
        {
          result = build1 (UNCONSTRAINED_ARRAY_REF,
                           TYPE_UNCONSTRAINED_ARRAY (type), operand);
          TREE_READONLY (result) = TREE_STATIC (result)
            = TYPE_READONLY (TYPE_UNCONSTRAINED_ARRAY (type));
        }
      else if (TREE_CODE (operand) == ADDR_EXPR)
        result = TREE_OPERAND (operand, 0);

      else
        {
          result = fold_build1 (op_code, TREE_TYPE (type), operand);
          TREE_READONLY (result) = TYPE_READONLY (TREE_TYPE (type));
        }

      side_effects
        = (!TYPE_IS_FAT_POINTER_P (type) && TYPE_VOLATILE (TREE_TYPE (type)));
      break;

    case NEGATE_EXPR:
    case BIT_NOT_EXPR:
      {
        tree modulus = ((operation_type
                         && TREE_CODE (operation_type) == INTEGER_TYPE
                         && TYPE_MODULAR_P (operation_type))
                        ? TYPE_MODULUS (operation_type) : NULL_TREE);
        int mod_pow2 = modulus && integer_pow2p (modulus);

        /* If this is a modular type, there are various possibilities
           depending on the operation and whether the modulus is a
           power of two or not.  */

        if (modulus)
          {
            gcc_assert (operation_type == base_type);
            operand = convert (operation_type, operand);

            /* The fastest in the negate case for binary modulus is
               the straightforward code; the TRUNC_MOD_EXPR below
               is an AND operation.  */
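            /* (E.g. with modulus 16, negating 5 gives -5 and the
               TRUNC_MOD_EXPR by 16 folds to "& 15", yielding 11.)  */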
            if (op_code == NEGATE_EXPR && mod_pow2)
              result = fold_build2 (TRUNC_MOD_EXPR, operation_type,
                                    fold_build1 (NEGATE_EXPR, operation_type,
                                                 operand),
                                    modulus);

            /* For the nonbinary negate case, return zero for a zero operand,
               else return the modulus minus the operand.  If the modulus
               is a power of two minus one, we can do the subtraction
               as an XOR since it is equivalent and faster on most machines.  */
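            /* (E.g. with modulus 15, 15 - X == X ^ 15 for X in 0 .. 15:
               for X = 5, 15 - 5 = 10 = binary 0101 ^ 1111.)  */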
            else if (op_code == NEGATE_EXPR && !mod_pow2)
              {
                if (integer_pow2p (fold_build2 (PLUS_EXPR, operation_type,
                                                modulus,
                                                convert (operation_type,
                                                         integer_one_node))))
                  result = fold_build2 (BIT_XOR_EXPR, operation_type,
                                        operand, modulus);
                else
                  result = fold_build2 (MINUS_EXPR, operation_type,
                                        modulus, operand);

                result = fold_build3 (COND_EXPR, operation_type,
                                      fold_build2 (NE_EXPR,
                                                   integer_type_node,
                                                   operand,
                                                   convert
                                                     (operation_type,
                                                      integer_zero_node)),
                                      result, operand);
              }
            else
              {
                /* For the NOT cases, we need a constant equal to
                   the modulus minus one.  For a binary modulus, we
                   XOR against that constant; for a nonbinary modulus,
                   we subtract the operand from it.  */

                tree cnst = fold_build2 (MINUS_EXPR, operation_type, modulus,
                                         convert (operation_type,
                                                  integer_one_node));

                if (mod_pow2)
                  result = fold_build2 (BIT_XOR_EXPR, operation_type,
                                        operand, cnst);
                else
                  result = fold_build2 (MINUS_EXPR, operation_type,
                                        cnst, operand);
              }

            break;
          }
      }

      /* ... fall through ... */

    default:
      gcc_assert (operation_type == base_type);
      result = fold_build1 (op_code, operation_type,
                            convert (operation_type, operand));
    }

  if (side_effects)
    {
      TREE_SIDE_EFFECTS (result) = 1;
      if (TREE_CODE (result) == INDIRECT_REF)
        TREE_THIS_VOLATILE (result) = TYPE_VOLATILE (TREE_TYPE (result));
    }

  if (result_type && TREE_TYPE (result) != result_type)
    result = convert (result_type, result);

  return result;
}

/* Similar, but for COND_EXPR.  */

tree
build_cond_expr (tree result_type, tree condition_operand,
                 tree true_operand, tree false_operand)
{
  bool addr_p = false;
  tree result;

  /* The front-end verified that the result, true and false operands have
     the same base type.  Convert everything to the result type.  */
  true_operand = convert (result_type, true_operand);
  false_operand = convert (result_type, false_operand);

  /* If the result type is unconstrained, take the address of the operands
     and then dereference our result.  */
  if (TREE_CODE (result_type) == UNCONSTRAINED_ARRAY_TYPE
      || CONTAINS_PLACEHOLDER_P (TYPE_SIZE (result_type)))
    {
      result_type = build_pointer_type (result_type);
      true_operand = build_unary_op (ADDR_EXPR, result_type, true_operand);
      false_operand = build_unary_op (ADDR_EXPR, result_type, false_operand);
      addr_p = true;
    }

  result = fold_build3 (COND_EXPR, result_type, condition_operand,
                        true_operand, false_operand);

  /* If we have a common SAVE_EXPR (possibly surrounded by arithmetics)
     in both arms, make sure it gets evaluated by moving it ahead of the
     conditional expression.  This is necessary because it is evaluated
     in only one place at run time and would otherwise be uninitialized
     in one of the arms.  */
  true_operand = skip_simple_arithmetic (true_operand);
  false_operand = skip_simple_arithmetic (false_operand);

  if (true_operand == false_operand && TREE_CODE (true_operand) == SAVE_EXPR)
    result = build2 (COMPOUND_EXPR, result_type, true_operand, result);

  if (addr_p)
    result = build_unary_op (INDIRECT_REF, NULL_TREE, result);

  return result;
}
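
/* Editorial illustration (not part of the original source): a minimal
   sketch of how build_cond_expr might be used, assuming GNU_X is an
   already-elaborated integer expression.  It builds the equivalent of
   "x >= 0 ? x : -x"; the helper name is hypothetical.  */
#if 0
static tree
example_abs_expr (tree gnu_x)
{
  tree gnu_type = TREE_TYPE (gnu_x);
  tree gnu_cond
    = build_binary_op (GE_EXPR, integer_type_node, gnu_x,
                       convert (gnu_type, integer_zero_node));

  return build_cond_expr (gnu_type, gnu_cond, gnu_x,
                          build_unary_op (NEGATE_EXPR, gnu_type, gnu_x));
}
#endif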

/* Similar, but for RETURN_EXPR.  If RESULT_DECL is non-zero, build
   a RETURN_EXPR around the assignment of RET_VAL to RESULT_DECL.
   If RESULT_DECL is zero, build a bare RETURN_EXPR.  */

tree
build_return_expr (tree result_decl, tree ret_val)
{
  tree result_expr;

  if (result_decl)
    {
      /* The gimplifier explicitly enforces the following invariant:

           RETURN_EXPR
               |
           MODIFY_EXPR
           /        \
          /          \
      RESULT_DECL    ...

      As a consequence, type-homogeneity dictates that we use the type
      of the RESULT_DECL as the operation type.  */

      tree operation_type = TREE_TYPE (result_decl);

      /* Convert the right operand to the operation type.  Note that
         it's the same transformation as in the MODIFY_EXPR case of
         build_binary_op with the additional guarantee that the type
         cannot involve a placeholder, since otherwise the function
         would use the "target pointer" return mechanism.  */

      if (operation_type != TREE_TYPE (ret_val))
        ret_val = convert (operation_type, ret_val);

      result_expr
        = build2 (MODIFY_EXPR, operation_type, result_decl, ret_val);
    }
  else
    result_expr = NULL_TREE;

  return build1 (RETURN_EXPR, void_type_node, result_expr);
}
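
/* Editorial illustration (not part of the original source): a sketch of
   the two forms build_return_expr produces; the variable names are
   hypothetical.  */
#if 0
  /* "return VAL;" for a function whose DECL_RESULT is GNU_RESULT_DECL:  */
  tree gnu_ret = build_return_expr (gnu_result_decl, gnu_val);

  /* A bare "return;" for a procedure:  */
  tree gnu_bare_ret = build_return_expr (NULL_TREE, NULL_TREE);
#endif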

/* Build a CALL_EXPR to call FUNDECL with one argument, ARG.  Return
   the CALL_EXPR.  */

tree
build_call_1_expr (tree fundecl, tree arg)
{
  tree call = build_call_nary (TREE_TYPE (TREE_TYPE (fundecl)),
                               build_unary_op (ADDR_EXPR, NULL_TREE, fundecl),
                               1, arg);
  TREE_SIDE_EFFECTS (call) = 1;
  return call;
}

/* Build a CALL_EXPR to call FUNDECL with two arguments, ARG1 & ARG2.  Return
   the CALL_EXPR.  */

tree
build_call_2_expr (tree fundecl, tree arg1, tree arg2)
{
  tree call = build_call_nary (TREE_TYPE (TREE_TYPE (fundecl)),
                               build_unary_op (ADDR_EXPR, NULL_TREE, fundecl),
                               2, arg1, arg2);
  TREE_SIDE_EFFECTS (call) = 1;
  return call;
}

/* Likewise to call FUNDECL with no arguments.  */

tree
build_call_0_expr (tree fundecl)
{
  /* We rely on build_call_nary to compute TREE_SIDE_EFFECTS.  This makes
     it possible to propagate DECL_IS_PURE on parameterless functions.  */
  tree call = build_call_nary (TREE_TYPE (TREE_TYPE (fundecl)),
                               build_unary_op (ADDR_EXPR, NULL_TREE, fundecl),
                               0);
  return call;
}
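
/* Editorial illustration (not part of the original source): these helpers
   wrap build_call_nary with the callee's return type and address.  As a
   sketch, releasing PTR through the runtime routine free_decl (used later
   in this file) would look like this:  */
#if 0
  tree gnu_free_call = build_call_1_expr (free_decl, ptr);
#endif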

/* Call a function that raises an exception and pass the line number and file
   name, if requested.  MSG says which exception function to call.

   GNAT_NODE is the gnat node conveying the source location for which the
   error should be signaled, or Empty in which case the error is signaled on
   the current ref_file_name/input_line.

   KIND says which kind of exception this is for
   (N_Raise_{Constraint,Storage,Program}_Error).  */

tree
build_call_raise (int msg, Node_Id gnat_node, char kind)
{
  tree fndecl = gnat_raise_decls[msg];
  tree label = get_exception_label (kind);
  tree filename;
  int line_number;
  const char *str;
  int len;

  /* If this is to be done as a goto, handle that case.  */
  if (label)
    {
      Entity_Id local_raise = Get_Local_Raise_Call_Entity ();
      tree gnu_result = build1 (GOTO_EXPR, void_type_node, label);

      /* If Local_Raise is present, generate
         Local_Raise (exception'Identity);  */
      if (Present (local_raise))
        {
          tree gnu_local_raise
            = gnat_to_gnu_entity (local_raise, NULL_TREE, 0);
          tree gnu_exception_entity
            = gnat_to_gnu_entity (Get_RT_Exception_Entity (msg), NULL_TREE, 0);
          tree gnu_call
            = build_call_1_expr (gnu_local_raise,
                                 build_unary_op (ADDR_EXPR, NULL_TREE,
                                                 gnu_exception_entity));

          gnu_result = build2 (COMPOUND_EXPR, void_type_node,
                               gnu_call, gnu_result);
        }

      return gnu_result;
    }

  str
    = (Debug_Flag_NN || Exception_Locations_Suppressed)
      ? ""
      : (gnat_node != Empty && Sloc (gnat_node) != No_Location)
        ? IDENTIFIER_POINTER
          (get_identifier (Get_Name_String
                           (Debug_Source_Name
                            (Get_Source_File_Index (Sloc (gnat_node))))))
        : ref_filename;

  len = strlen (str);
  filename = build_string (len, str);
  line_number
    = (gnat_node != Empty && Sloc (gnat_node) != No_Location)
      ? Get_Logical_Line_Number (Sloc (gnat_node)) : input_line;

  TREE_TYPE (filename)
    = build_array_type (char_type_node, build_index_type (size_int (len)));

  return
    build_call_2_expr (fndecl,
                       build1 (ADDR_EXPR, build_pointer_type (char_type_node),
                               filename),
                       build_int_cst (NULL_TREE, line_number));
}
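
/* Editorial illustration (not part of the original source): a minimal
   sketch of raising Constraint_Error at GNAT_NODE's source location,
   using constants that appear elsewhere in this file.  */
#if 0
  tree gnu_raise
    = build_call_raise (CE_Range_Check_Failed, gnat_node,
                        N_Raise_Constraint_Error);
#endif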

/* qsort comparer for the bit positions of two constructor elements
   for record components.  */

static int
compare_elmt_bitpos (const PTR rt1, const PTR rt2)
{
  const_tree const elmt1 = * (const_tree const *) rt1;
  const_tree const elmt2 = * (const_tree const *) rt2;
  const_tree const field1 = TREE_PURPOSE (elmt1);
  const_tree const field2 = TREE_PURPOSE (elmt2);
  const int ret
    = tree_int_cst_compare (bit_position (field1), bit_position (field2));

  return ret ? ret : (int) (DECL_UID (field1) - DECL_UID (field2));
}

/* Return a CONSTRUCTOR of TYPE whose list is LIST.  */

tree
gnat_build_constructor (tree type, tree list)
{
  bool allconstant = (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST);
  bool side_effects = false;
  tree elmt, result;
  int n_elmts;

  /* Scan the elements to see if they are all constant or if any has side
     effects, to let us set global flags on the resulting constructor.  Count
     the elements along the way for possible sorting purposes below.  */
  for (n_elmts = 0, elmt = list; elmt; elmt = TREE_CHAIN (elmt), n_elmts++)
    {
      tree obj = TREE_PURPOSE (elmt);
      tree val = TREE_VALUE (elmt);

      /* The predicate must be in keeping with output_constructor.  */
      if (!TREE_CONSTANT (val)
          || (TREE_CODE (type) == RECORD_TYPE
              && CONSTRUCTOR_BITFIELD_P (obj)
              && !initializer_constant_valid_for_bitfield_p (val))
          || !initializer_constant_valid_p (val, TREE_TYPE (val)))
        allconstant = false;

      if (TREE_SIDE_EFFECTS (val))
        side_effects = true;

      /* Propagate a NULL_EXPR from the size of the type.  We won't ever
         be executing the code we generate here in that case, but handle it
         specially to avoid the compiler blowing up.  */
      if (TREE_CODE (type) == RECORD_TYPE
          && (result = contains_null_expr (DECL_SIZE (obj))) != NULL_TREE)
        return build1 (NULL_EXPR, type, TREE_OPERAND (result, 0));
    }

  /* For record types with constant components only, sort the field list
     by increasing bit position.  This is necessary to ensure the
     constructor can be output as static data.  */
  if (allconstant && TREE_CODE (type) == RECORD_TYPE && n_elmts > 1)
    {
      /* Fill an array with an element tree per index, and ask qsort to order
         them according to what a bitpos comparison function says.  */
      tree *gnu_arr = (tree *) alloca (sizeof (tree) * n_elmts);
      int i;

      for (i = 0, elmt = list; elmt; elmt = TREE_CHAIN (elmt), i++)
        gnu_arr[i] = elmt;

      qsort (gnu_arr, n_elmts, sizeof (tree), compare_elmt_bitpos);

      /* Then reconstruct the list from the sorted array contents.  */
      list = NULL_TREE;
      for (i = n_elmts - 1; i >= 0; i--)
        {
          TREE_CHAIN (gnu_arr[i]) = list;
          list = gnu_arr[i];
        }
    }

  result = build_constructor_from_list (type, list);
  TREE_CONSTANT (result) = TREE_STATIC (result) = allconstant;
  TREE_SIDE_EFFECTS (result) = side_effects;
  TREE_READONLY (result) = TYPE_READONLY (type) || allconstant;
  return result;
}
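
/* Editorial illustration (not part of the original source): a minimal
   sketch of building a record aggregate for a two-field record type.
   The helper name is hypothetical; note that gnat_build_constructor
   itself re-sorts all-constant record elements by bit position, so the
   list order need not match the declaration order.  */
#if 0
static tree
example_build_pair (tree record_type, tree val1, tree val2)
{
  tree field1 = TYPE_FIELDS (record_type);
  tree field2 = TREE_CHAIN (field1);
  tree list
    = tree_cons (field1, convert (TREE_TYPE (field1), val1),
                 tree_cons (field2, convert (TREE_TYPE (field2), val2),
                            NULL_TREE));

  return gnat_build_constructor (record_type, list);
}
#endif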

/* Return a COMPONENT_REF to access a field that is given by COMPONENT,
   an IDENTIFIER_NODE giving the name of the field, or FIELD, a FIELD_DECL,
   for the field.  Don't fold the result if NO_FOLD_P is true.

   We also handle the fact that we might have been passed a pointer to the
   actual record and know how to look for fields in variant parts.  */

static tree
build_simple_component_ref (tree record_variable, tree component,
                            tree field, bool no_fold_p)
{
  tree record_type = TYPE_MAIN_VARIANT (TREE_TYPE (record_variable));
  tree ref, inner_variable;

  gcc_assert ((TREE_CODE (record_type) == RECORD_TYPE
               || TREE_CODE (record_type) == UNION_TYPE
               || TREE_CODE (record_type) == QUAL_UNION_TYPE)
              && TYPE_SIZE (record_type)
              && (component != 0) != (field != 0));

  /* If no field was specified, look for a field with the specified name
     in the current record only.  */
  if (!field)
    for (field = TYPE_FIELDS (record_type); field;
         field = TREE_CHAIN (field))
      if (DECL_NAME (field) == component)
        break;

  if (!field)
    return NULL_TREE;

  /* If this field is not in the specified record, see if we can find
     something in the record whose original field is the same as this one.  */
  if (DECL_CONTEXT (field) != record_type)
    /* Check if there is a field with name COMPONENT in the record.  */
    {
      tree new_field;

      /* First loop through the normal components.  */

      for (new_field = TYPE_FIELDS (record_type); new_field;
           new_field = TREE_CHAIN (new_field))
        if (field == new_field
            || DECL_ORIGINAL_FIELD (new_field) == field
            || new_field == DECL_ORIGINAL_FIELD (field)
            || (DECL_ORIGINAL_FIELD (field)
                && (DECL_ORIGINAL_FIELD (field)
                    == DECL_ORIGINAL_FIELD (new_field))))
          break;

      /* Next, loop through the DECL_INTERNAL_P components if we haven't
         found the component in the first search.  Doing this search in
         two steps is required to avoid hidden homonymous fields in the
         _Parent field.  */

      if (!new_field)
        for (new_field = TYPE_FIELDS (record_type); new_field;
             new_field = TREE_CHAIN (new_field))
          if (DECL_INTERNAL_P (new_field))
            {
              tree field_ref
                = build_simple_component_ref (record_variable,
                                              NULL_TREE, new_field, no_fold_p);
              ref = build_simple_component_ref (field_ref, NULL_TREE, field,
                                                no_fold_p);

              if (ref)
                return ref;
            }

      field = new_field;
    }

  if (!field)
    return NULL_TREE;

  /* If the field's offset has overflowed, do not attempt to access it,
     as doing so may trigger sanity checks deeper in the back-end.
     Note that we don't need to warn since this will be done on trying
     to declare the object.  */
  if (TREE_CODE (DECL_FIELD_OFFSET (field)) == INTEGER_CST
      && TREE_OVERFLOW (DECL_FIELD_OFFSET (field)))
    return NULL_TREE;

  /* Look through a conversion between type variants.  Note that this
     is transparent as far as the field is concerned.  */
  if (TREE_CODE (record_variable) == VIEW_CONVERT_EXPR
      && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (record_variable, 0)))
         == record_type)
    inner_variable = TREE_OPERAND (record_variable, 0);
  else
    inner_variable = record_variable;

  ref = build3 (COMPONENT_REF, TREE_TYPE (field), inner_variable, field,
                NULL_TREE);

  if (TREE_READONLY (record_variable) || TREE_READONLY (field))
    TREE_READONLY (ref) = 1;
  if (TREE_THIS_VOLATILE (record_variable) || TREE_THIS_VOLATILE (field)
      || TYPE_VOLATILE (record_type))
    TREE_THIS_VOLATILE (ref) = 1;

  if (no_fold_p)
    return ref;

  /* The generic folder may punt in this case because the inner array type
     can be self-referential, but folding is in fact not problematic.  */
  else if (TREE_CODE (record_variable) == CONSTRUCTOR
           && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (record_variable)))
    {
      VEC(constructor_elt,gc) *elts = CONSTRUCTOR_ELTS (record_variable);
      unsigned HOST_WIDE_INT idx;
      tree index, value;
      FOR_EACH_CONSTRUCTOR_ELT (elts, idx, index, value)
        if (index == field)
          return value;
      return ref;
    }

  else
    return fold (ref);
}

/* Like build_simple_component_ref, except that we give an error if the
   reference could not be found.  */

tree
build_component_ref (tree record_variable, tree component,
                     tree field, bool no_fold_p)
{
  tree ref = build_simple_component_ref (record_variable, component, field,
                                         no_fold_p);

  if (ref)
    return ref;

  /* If FIELD was specified, assume this is an invalid user field so raise
     Constraint_Error.  Otherwise, we have no type to return so abort.  */
  gcc_assert (field);
  return build1 (NULL_EXPR, TREE_TYPE (field),
                 build_call_raise (CE_Discriminant_Check_Failed, Empty,
                                   N_Raise_Constraint_Error));
}
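
/* Editorial illustration (not part of the original source): a sketch of
   fetching a component of GNU_RECORD by name; "P_BOUNDS" is only an
   example identifier here.  Passing a FIELD_DECL instead would use the
   FIELD parameter with COMPONENT left as NULL_TREE.  */
#if 0
  tree gnu_bounds
    = build_component_ref (gnu_record, get_identifier ("P_BOUNDS"),
                           NULL_TREE, false);
#endif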

/* Helper for build_call_alloc_dealloc, with arguments to be interpreted
   identically.  Process the case where a GNAT_PROC to call is provided.  */

static inline tree
build_call_alloc_dealloc_proc (tree gnu_obj, tree gnu_size, tree gnu_type,
                               Entity_Id gnat_proc, Entity_Id gnat_pool)
{
  tree gnu_proc = gnat_to_gnu (gnat_proc);
  tree gnu_proc_addr = build_unary_op (ADDR_EXPR, NULL_TREE, gnu_proc);
  tree gnu_call;

  /* The storage pools are obviously always tagged types, but the
     secondary stack uses the same mechanism and is not tagged.  */
  if (Is_Tagged_Type (Etype (gnat_pool)))
    {
      /* The size is the third parameter; the alignment parameter has
         the same type.  */
      Entity_Id gnat_size_type
        = Etype (Next_Formal (Next_Formal (First_Formal (gnat_proc))));
      tree gnu_size_type = gnat_to_gnu_type (gnat_size_type);

      tree gnu_pool = gnat_to_gnu (gnat_pool);
      tree gnu_pool_addr = build_unary_op (ADDR_EXPR, NULL_TREE, gnu_pool);
      tree gnu_align = size_int (TYPE_ALIGN (gnu_type) / BITS_PER_UNIT);

      gnu_size = convert (gnu_size_type, gnu_size);
      gnu_align = convert (gnu_size_type, gnu_align);

      /* The first arg is always the address of the storage pool; next
         comes the address of the object, for a deallocator, then the
         size and alignment.  */
      if (gnu_obj)
        gnu_call = build_call_nary (TREE_TYPE (TREE_TYPE (gnu_proc)),
                                    gnu_proc_addr, 4, gnu_pool_addr,
                                    gnu_obj, gnu_size, gnu_align);
      else
        gnu_call = build_call_nary (TREE_TYPE (TREE_TYPE (gnu_proc)),
                                    gnu_proc_addr, 3, gnu_pool_addr,
                                    gnu_size, gnu_align);
    }

  /* Secondary stack case.  */
  else
    {
      /* The size is the second parameter.  */
      Entity_Id gnat_size_type
        = Etype (Next_Formal (First_Formal (gnat_proc)));
      tree gnu_size_type = gnat_to_gnu_type (gnat_size_type);

      gnu_size = convert (gnu_size_type, gnu_size);

      /* The first arg is the address of the object, for a deallocator,
         then the size.  */
      if (gnu_obj)
        gnu_call = build_call_nary (TREE_TYPE (TREE_TYPE (gnu_proc)),
                                    gnu_proc_addr, 2, gnu_obj, gnu_size);
      else
        gnu_call = build_call_nary (TREE_TYPE (TREE_TYPE (gnu_proc)),
                                    gnu_proc_addr, 1, gnu_size);
    }

  TREE_SIDE_EFFECTS (gnu_call) = 1;
  return gnu_call;
}

/* Helper for build_call_alloc_dealloc, to build and return an allocator for
   DATA_SIZE bytes aimed at containing a DATA_TYPE object, using the default
   __gnat_malloc allocator.  Honor DATA_TYPE alignments greater than what the
   latter offers.  */

static inline tree
maybe_wrap_malloc (tree data_size, tree data_type, Node_Id gnat_node)
{
  /* When the DATA_TYPE alignment is stricter than what malloc offers
     (super-aligned case), we allocate an "aligning" wrapper type and return
     the address of its single data field with the malloc's return value
     stored just in front.  */

  unsigned int data_align = TYPE_ALIGN (data_type);
  unsigned int default_allocator_alignment
      = get_target_default_allocator_alignment () * BITS_PER_UNIT;

  tree aligning_type
    = ((data_align > default_allocator_alignment)
       ? make_aligning_type (data_type, data_align, data_size,
                             default_allocator_alignment,
                             POINTER_SIZE / BITS_PER_UNIT)
       : NULL_TREE);

  tree size_to_malloc
    = aligning_type ? TYPE_SIZE_UNIT (aligning_type) : data_size;

  tree malloc_ptr;

  /* On VMS, if 64-bit memory is disabled or pointers are 64-bit and the
     allocator size is 32-bit or Convention C, allocate 32-bit memory.  */
  if (TARGET_ABI_OPEN_VMS
      && (!TARGET_MALLOC64
          || (POINTER_SIZE == 64
              && (UI_To_Int (Esize (Etype (gnat_node))) == 32
                  || Convention (Etype (gnat_node)) == Convention_C))))
    malloc_ptr = build_call_1_expr (malloc32_decl, size_to_malloc);
  else
    malloc_ptr = build_call_1_expr (malloc_decl, size_to_malloc);

  if (aligning_type)
    {
      /* Latch malloc's return value and get a pointer to the aligning field
         first.  */
      tree storage_ptr = save_expr (malloc_ptr);

      tree aligning_record_addr
        = convert (build_pointer_type (aligning_type), storage_ptr);

      tree aligning_record
        = build_unary_op (INDIRECT_REF, NULL_TREE, aligning_record_addr);

      tree aligning_field
        = build_component_ref (aligning_record, NULL_TREE,
                               TYPE_FIELDS (aligning_type), 0);

      tree aligning_field_addr
        = build_unary_op (ADDR_EXPR, NULL_TREE, aligning_field);

      /* Then arrange to store the allocator's return value ahead
         and return.  */
      tree storage_ptr_slot_addr
        = build_binary_op (POINTER_PLUS_EXPR, ptr_void_type_node,
                           convert (ptr_void_type_node, aligning_field_addr),
                           size_int (-(HOST_WIDE_INT) POINTER_SIZE
                                     / BITS_PER_UNIT));

      tree storage_ptr_slot
        = build_unary_op (INDIRECT_REF, NULL_TREE,
                          convert (build_pointer_type (ptr_void_type_node),
                                   storage_ptr_slot_addr));

      return
        build2 (COMPOUND_EXPR, TREE_TYPE (aligning_field_addr),
                build_binary_op (MODIFY_EXPR, NULL_TREE,
                                 storage_ptr_slot, storage_ptr),
                aligning_field_addr);
    }
  else
    return malloc_ptr;
}
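
/* Editorial illustration (not part of the original source): a plain-C
   analogue of the run-time effect of the super-aligned path above, with
   hypothetical names; DATA_OFFSET stands for the byte offset of the
   single data field inside the aligning wrapper type.  Assumes
   <stdlib.h>.  */
#if 0
static void *
example_aligning_malloc (size_t wrapper_size, size_t data_offset)
{
  char *storage = (char *) malloc (wrapper_size); /* malloc's value     */
  char *field = storage + data_offset;            /* aligned data field */

  ((void **) field)[-1] = storage;  /* latch malloc's value in front    */
  return field;                     /* hand out the aligned address     */
}
#endif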

/* Helper for build_call_alloc_dealloc, to release a DATA_TYPE object
   designated by DATA_PTR using the __gnat_free entry point.  */

static inline tree
maybe_wrap_free (tree data_ptr, tree data_type)
{
  /* In the regular alignment case, we pass the data pointer straight to free.
     In the superaligned case, we need to retrieve the initial allocator
     return value, stored in front of the data block at allocation time.  */

  unsigned int data_align = TYPE_ALIGN (data_type);
  unsigned int default_allocator_alignment
      = get_target_default_allocator_alignment () * BITS_PER_UNIT;

  tree free_ptr;

  if (data_align > default_allocator_alignment)
    {
      /* DATA_FRONT_PTR (void *)
         = (void *)DATA_PTR - sizeof (void *)  */
      tree data_front_ptr
        = build_binary_op
          (POINTER_PLUS_EXPR, ptr_void_type_node,
           convert (ptr_void_type_node, data_ptr),
           size_int (-(HOST_WIDE_INT) POINTER_SIZE / BITS_PER_UNIT));

      /* FREE_PTR (void *) = *(void **)DATA_FRONT_PTR  */
      free_ptr
        = build_unary_op
          (INDIRECT_REF, NULL_TREE,
           convert (build_pointer_type (ptr_void_type_node), data_front_ptr));
    }
  else
    free_ptr = data_ptr;

  return build_call_1_expr (free_decl, free_ptr);
}
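
/* Editorial illustration (not part of the original source): the plain-C
   analogue of the superaligned release path above, matching the sketch
   after maybe_wrap_malloc.  Assumes <stdlib.h>.  */
#if 0
static void
example_aligning_free (void *data_ptr)
{
  /* Fetch the original malloc value stored one pointer slot in front
     of the data block, then release it.  */
  free (((void **) data_ptr)[-1]);
}
#endif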

/* Build a GCC tree to call an allocation or deallocation function.
   If GNU_OBJ is nonzero, it is an object to deallocate.  Otherwise,
   generate an allocator.

   GNU_SIZE is the number of bytes to allocate and GNU_TYPE is the contained
   object type, used to determine the to-be-honored address alignment.
   GNAT_PROC, if present, is a procedure to call and GNAT_POOL is the storage
   pool to use.  If not present, malloc and free are used.  GNAT_NODE is used
   to provide an error location for restriction violation messages.  */

tree
build_call_alloc_dealloc (tree gnu_obj, tree gnu_size, tree gnu_type,
                          Entity_Id gnat_proc, Entity_Id gnat_pool,
                          Node_Id gnat_node)
{
  gnu_size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (gnu_size, gnu_obj);

  /* Explicit proc to call?  This one is assumed to deal with the type
     alignment constraints.  */
  if (Present (gnat_proc))
    return build_call_alloc_dealloc_proc (gnu_obj, gnu_size, gnu_type,
                                          gnat_proc, gnat_pool);

  /* Otherwise, object to "free" or "malloc" with possible special processing
     for alignments stricter than what the default allocator honors.  */
  else if (gnu_obj)
    return maybe_wrap_free (gnu_obj, gnu_type);
  else
    {
      /* Assert that we can no longer be called with this special pool.  */
      gcc_assert (gnat_pool != -1);

      /* Check that we aren't violating the associated restriction.  */
      if (!(Nkind (gnat_node) == N_Allocator && Comes_From_Source (gnat_node)))
        Check_No_Implicit_Heap_Alloc (gnat_node);

      return maybe_wrap_malloc (gnu_size, gnu_type, gnat_node);
    }
}
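
/* Editorial illustration (not part of the original source): a sketch of
   the two directions, assuming GNAT_PROC and GNAT_POOL are Empty so the
   default malloc/free path is taken.  */
#if 0
  /* Allocate GNU_SIZE bytes for a GNU_TYPE object:  */
  tree gnu_addr
    = build_call_alloc_dealloc (NULL_TREE, gnu_size, gnu_type,
                                Empty, Empty, gnat_node);

  /* Deallocate the object designated by GNU_OBJ:  */
  tree gnu_free
    = build_call_alloc_dealloc (gnu_obj, gnu_size, gnu_type,
                                Empty, Empty, gnat_node);
#endif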

/* Build a GCC tree to correspond to allocating an object of TYPE whose
   initial value is INIT, if INIT is nonzero.  Convert the expression to
   RESULT_TYPE, which must be some type of pointer.  Return the tree.

   GNAT_PROC and GNAT_POOL optionally give the procedure to call and
   the storage pool to use.  GNAT_NODE is used to provide an error
   location for restriction violation messages.  If IGNORE_INIT_TYPE is
   true, ignore the type of INIT for the purpose of determining the size;
   this will cause the maximum size to be allocated if TYPE is of
   self-referential size.  */

tree
build_allocator (tree type, tree init, tree result_type, Entity_Id gnat_proc,
                 Entity_Id gnat_pool, Node_Id gnat_node, bool ignore_init_type)
{
  tree size = TYPE_SIZE_UNIT (type);
  tree result;

  /* If the initializer, if present, is a NULL_EXPR, just return a new one.  */
  if (init && TREE_CODE (init) == NULL_EXPR)
    return build1 (NULL_EXPR, result_type, TREE_OPERAND (init, 0));

  /* If RESULT_TYPE is a fat or thin pointer, set SIZE to be the sum of the
     sizes of the object and its template.  Allocate the whole thing and
     fill in the parts that are known.  */
  else if (TYPE_IS_FAT_OR_THIN_POINTER_P (result_type))
    {
      tree storage_type
        = build_unc_object_type_from_ptr (result_type, type,
                                          get_identifier ("ALLOC"));
      tree template_type = TREE_TYPE (TYPE_FIELDS (storage_type));
      tree storage_ptr_type = build_pointer_type (storage_type);
      tree storage;
      tree template_cons = NULL_TREE;

      size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (storage_type),
                                             init);

      /* If the size overflows, pass -1 so the allocator will raise
         Storage_Error.  */
      if (TREE_CODE (size) == INTEGER_CST && TREE_OVERFLOW (size))
        size = ssize_int (-1);

      storage = build_call_alloc_dealloc (NULL_TREE, size, storage_type,
                                          gnat_proc, gnat_pool, gnat_node);
      storage = convert (storage_ptr_type, protect_multiple_eval (storage));

      if (TYPE_IS_PADDING_P (type))
        {
          type = TREE_TYPE (TYPE_FIELDS (type));
          if (init)
            init = convert (type, init);
        }

      /* If there is an initializing expression, make a constructor for
         the entire object including the bounds and copy it into the
         object.  If there is no initializing expression, just set the
         bounds.  */
      if (init)
        {
          template_cons = tree_cons (TREE_CHAIN (TYPE_FIELDS (storage_type)),
                                     init, NULL_TREE);
          template_cons = tree_cons (TYPE_FIELDS (storage_type),
                                     build_template (template_type, type,
                                                     init),
                                     template_cons);

          return convert
            (result_type,
             build2 (COMPOUND_EXPR, storage_ptr_type,
                     build_binary_op
                     (MODIFY_EXPR, storage_type,
                      build_unary_op (INDIRECT_REF, NULL_TREE,
                                      convert (storage_ptr_type, storage)),
                      gnat_build_constructor (storage_type, template_cons)),
                     convert (storage_ptr_type, storage)));
        }
      else
        return build2
          (COMPOUND_EXPR, result_type,
           build_binary_op
           (MODIFY_EXPR, template_type,
            build_component_ref
            (build_unary_op (INDIRECT_REF, NULL_TREE,
                             convert (storage_ptr_type, storage)),
             NULL_TREE, TYPE_FIELDS (storage_type), 0),
            build_template (template_type, type, NULL_TREE)),
           convert (result_type, convert (storage_ptr_type, storage)));
    }

  /* If we have an initializing expression, see if its size is simpler
     than the size from the type.  */
  if (!ignore_init_type && init && TYPE_SIZE_UNIT (TREE_TYPE (init))
      && (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (init))) == INTEGER_CST
          || CONTAINS_PLACEHOLDER_P (size)))
    size = TYPE_SIZE_UNIT (TREE_TYPE (init));

  /* If the size is still self-referential, reference the initializing
     expression, if it is present.  If not, this must have been a
     call to allocate a library-level object, in which case we use
     the maximum size.  */
  if (CONTAINS_PLACEHOLDER_P (size))
    {
      if (!ignore_init_type && init)
        size = substitute_placeholder_in_expr (size, init);
      else
        size = max_size (size, true);
    }

  /* If the size overflows, pass -1 so the allocator will raise
     Storage_Error.  */
  if (TREE_CODE (size) == INTEGER_CST && TREE_OVERFLOW (size))
    size = ssize_int (-1);

  result = convert (result_type,
                    build_call_alloc_dealloc (NULL_TREE, size, type,
                                              gnat_proc, gnat_pool,
                                              gnat_node));

  /* If we have an initial value, put the new address into a SAVE_EXPR, assign
     the value, and return the address.  Do this with a COMPOUND_EXPR.  */

  if (init)
    {
      result = save_expr (result);
      result
        = build2 (COMPOUND_EXPR, TREE_TYPE (result),
                  build_binary_op
                  (MODIFY_EXPR, NULL_TREE,
                   build_unary_op (INDIRECT_REF,
                                   TREE_TYPE (TREE_TYPE (result)), result),
                   init),
                  result);
    }

  return convert (result_type, result);
}

/* Fill in a VMS descriptor for EXPR and return a constructor for it.
   GNAT_FORMAL is how we find the descriptor record.  GNAT_ACTUAL is
   how we derive the source location to raise C_E on an out of range
   pointer.  */

tree
fill_vms_descriptor (tree expr, Entity_Id gnat_formal, Node_Id gnat_actual)
{
  tree field;
  tree parm_decl = get_gnu_tree (gnat_formal);
  tree const_list = NULL_TREE;
  tree record_type = TREE_TYPE (TREE_TYPE (parm_decl));
  int do_range_check
    = strcmp ("MBO",
              IDENTIFIER_POINTER (DECL_NAME (TYPE_FIELDS (record_type))));

  expr = maybe_unconstrained_array (expr);
  gnat_mark_addressable (expr);

  for (field = TYPE_FIELDS (record_type); field; field = TREE_CHAIN (field))
    {
      tree conexpr = convert (TREE_TYPE (field),
                              SUBSTITUTE_PLACEHOLDER_IN_EXPR
                              (DECL_INITIAL (field), expr));

      /* Check to ensure that only 32-bit pointers are passed in
         32-bit descriptors.  */
      if (do_range_check
          && strcmp (IDENTIFIER_POINTER (DECL_NAME (field)), "POINTER") == 0)
        {
          tree pointer64type
            = build_pointer_type_for_mode (void_type_node, DImode, false);
          tree addr64expr = build_unary_op (ADDR_EXPR, pointer64type, expr);
          tree malloc64low
            = build_int_cstu (long_integer_type_node, 0x80000000);

          add_stmt (build3 (COND_EXPR, void_type_node,
                            build_binary_op (GE_EXPR, long_integer_type_node,
                                             convert (long_integer_type_node,
                                                      addr64expr),
                                             malloc64low),
                            build_call_raise (CE_Range_Check_Failed,
                                              gnat_actual,
                                              N_Raise_Constraint_Error),
                            NULL_TREE));
        }
      const_list = tree_cons (field, conexpr, const_list);
    }

  return gnat_build_constructor (record_type, nreverse (const_list));
}
2194
 
2195
/* Indicate that we need to make the address of EXPR_NODE and it therefore
2196
   should not be allocated in a register.  Returns true if successful.  */
2197
 
2198
bool
2199
gnat_mark_addressable (tree expr_node)
2200
{
2201
  while (1)
2202
    switch (TREE_CODE (expr_node))
2203
      {
2204
      case ADDR_EXPR:
2205
      case COMPONENT_REF:
2206
      case ARRAY_REF:
2207
      case ARRAY_RANGE_REF:
2208
      case REALPART_EXPR:
2209
      case IMAGPART_EXPR:
2210
      case VIEW_CONVERT_EXPR:
2211
      case NON_LVALUE_EXPR:
2212
      CASE_CONVERT:
2213
        expr_node = TREE_OPERAND (expr_node, 0);
2214
        break;
2215
 
2216
      case CONSTRUCTOR:
2217
        TREE_ADDRESSABLE (expr_node) = 1;
2218
        return true;
2219
 
2220
      case VAR_DECL:
2221
      case PARM_DECL:
2222
      case RESULT_DECL:
2223
        TREE_ADDRESSABLE (expr_node) = 1;
2224
        return true;
2225
 
2226
      case FUNCTION_DECL:
2227
        TREE_ADDRESSABLE (expr_node) = 1;
2228
        return true;
2229
 
2230
      case CONST_DECL:
2231
        return (DECL_CONST_CORRESPONDING_VAR (expr_node)
2232
                && (gnat_mark_addressable
2233
                    (DECL_CONST_CORRESPONDING_VAR (expr_node))));
2234
      default:
2235
        return true;
2236
    }
2237
}
