/****************************************************************************
 *                                                                          *
 *                         GNAT COMPILER COMPONENTS                         *
 *                                                                          *
 *                               U T I L S 2                                *
 *                                                                          *
 *                          C Implementation File                           *
 *                                                                          *
 *          Copyright (C) 1992-2012, Free Software Foundation, Inc.         *
 *                                                                          *
 * GNAT is free software;  you can  redistribute it  and/or modify it under *
 * terms of the  GNU General Public License as published  by the Free Soft- *
 * ware  Foundation;  either version 3,  or (at your option) any later ver- *
 * sion.  GNAT is distributed in the hope that it will be useful, but WITH- *
 * OUT ANY WARRANTY;  without even the  implied warranty of MERCHANTABILITY *
 * or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License *
 * for  more details.  You should have received a copy of the GNU General   *
 * Public License along with GCC; see the file COPYING3.  If not see        *
 * <http://www.gnu.org/licenses/>.                                          *
 *                                                                          *
 * GNAT was originally developed  by the GNAT team at  New York University. *
 * Extensive contributions were provided by Ada Core Technologies Inc.      *
 *                                                                          *
 ****************************************************************************/

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "flags.h"
#include "toplev.h"
#include "ggc.h"
#include "output.h"
#include "tree-inline.h"

#include "ada.h"
#include "types.h"
#include "atree.h"
#include "elists.h"
#include "namet.h"
#include "nlists.h"
#include "snames.h"
#include "stringt.h"
#include "uintp.h"
#include "fe.h"
#include "sinfo.h"
#include "einfo.h"
#include "ada-tree.h"
#include "gigi.h"

/* Return the base type of TYPE.  */

tree
get_base_type (tree type)
{
  if (TREE_CODE (type) == RECORD_TYPE
      && TYPE_JUSTIFIED_MODULAR_P (type))
    type = TREE_TYPE (TYPE_FIELDS (type));

  while (TREE_TYPE (type)
         && (TREE_CODE (type) == INTEGER_TYPE
             || TREE_CODE (type) == REAL_TYPE))
    type = TREE_TYPE (type);

  return type;
}
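
/* Editor's note (illustrative, not part of the original source): for an Ada
   declaration such as "subtype Small is Integer range 1 .. 10", gigi makes
   an INTEGER_TYPE whose TREE_TYPE points at the type for Integer, so the
   loop above simply walks the subtype chain up to the base type.  */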

/* EXP is a GCC tree representing an address.  See if we can find how
   strictly the object at that address is aligned.  Return that alignment
   in bits.  If we don't know anything about the alignment, return 0.  */

unsigned int
known_alignment (tree exp)
{
  unsigned int this_alignment;
  unsigned int lhs, rhs;

  switch (TREE_CODE (exp))
    {
    CASE_CONVERT:
    case VIEW_CONVERT_EXPR:
    case NON_LVALUE_EXPR:
      /* Conversions between pointers and integers don't change the alignment
         of the underlying object.  */
      this_alignment = known_alignment (TREE_OPERAND (exp, 0));
      break;

    case COMPOUND_EXPR:
      /* The value of a COMPOUND_EXPR is that of its second operand.  */
      this_alignment = known_alignment (TREE_OPERAND (exp, 1));
      break;

    case PLUS_EXPR:
    case MINUS_EXPR:
      /* If two addresses are added, the alignment of the result is the
         minimum of the two alignments.  */
      lhs = known_alignment (TREE_OPERAND (exp, 0));
      rhs = known_alignment (TREE_OPERAND (exp, 1));
      this_alignment = MIN (lhs, rhs);
      break;

    case POINTER_PLUS_EXPR:
      lhs = known_alignment (TREE_OPERAND (exp, 0));
      rhs = known_alignment (TREE_OPERAND (exp, 1));
      /* If we don't know the alignment of the offset, we assume that
         of the base.  */
      if (rhs == 0)
        this_alignment = lhs;
      else
        this_alignment = MIN (lhs, rhs);
      break;

    case COND_EXPR:
      /* If there is a choice between two values, use the smaller one.  */
      lhs = known_alignment (TREE_OPERAND (exp, 1));
      rhs = known_alignment (TREE_OPERAND (exp, 2));
      this_alignment = MIN (lhs, rhs);
      break;

    case INTEGER_CST:
      {
        unsigned HOST_WIDE_INT c = TREE_INT_CST_LOW (exp);
        /* The first part of this represents the lowest bit in the constant,
           but it is originally in bytes, not bits.  */
        this_alignment = MIN (BITS_PER_UNIT * (c & -c), BIGGEST_ALIGNMENT);
      }
      break;

    case MULT_EXPR:
      /* If we know the alignment of just one side, use it.  Otherwise,
         use the product of the alignments.  */
      lhs = known_alignment (TREE_OPERAND (exp, 0));
      rhs = known_alignment (TREE_OPERAND (exp, 1));

      if (lhs == 0)
        this_alignment = rhs;
      else if (rhs == 0)
        this_alignment = lhs;
      else
        this_alignment = MIN (lhs * rhs, BIGGEST_ALIGNMENT);
      break;

    case BIT_AND_EXPR:
      /* A bit-and expression is as aligned as the maximum alignment of the
         operands.  We typically get here for a complex lhs and a constant
         negative power of two on the rhs to force an explicit alignment, so
         don't bother looking at the lhs.  */
      this_alignment = known_alignment (TREE_OPERAND (exp, 1));
      break;

    case ADDR_EXPR:
      this_alignment = expr_align (TREE_OPERAND (exp, 0));
      break;

    case CALL_EXPR:
      {
        tree t = maybe_inline_call_in_expr (exp);
        if (t)
          return known_alignment (t);
      }

      /* ... fall through ... */

    default:
      /* For other pointer expressions, we assume that the pointed-to object
         is at least as aligned as the pointed-to type.  Beware that we can
         have a dummy type here (e.g. a Taft Amendment type), for which the
         alignment is meaningless and should be ignored.  */
      if (POINTER_TYPE_P (TREE_TYPE (exp))
          && !TYPE_IS_DUMMY_P (TREE_TYPE (TREE_TYPE (exp))))
        this_alignment = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
      else
        this_alignment = 0;
      break;
    }

  return this_alignment;
}
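
/* Editor's illustrative sketch (not part of the original source): a caller
   could use the result to guard an aligned access, e.g.

     unsigned int align = known_alignment (gnu_addr);
     if (align != 0 && align >= TYPE_ALIGN (gnu_type))
       ... the access through GNU_ADDR is known to be aligned ...

   where GNU_ADDR and GNU_TYPE are hypothetical names.  For the INTEGER_CST
   case above, c & -c isolates the lowest set bit of the constant: an
   address constant of 12 (binary 1100) gives c & -c = 4 and thus a known
   alignment of 4 * BITS_PER_UNIT = 32 bits on a typical byte-addressed
   target.  */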

/* We have a comparison or assignment operation on two types, T1 and T2, which
   are either both array types or both record types.  T1 is assumed to be for
   the left hand side operand, and T2 for the right hand side.  Return the
   type that both operands should be converted to for the operation, if any.
   Otherwise return zero.  */

static tree
find_common_type (tree t1, tree t2)
{
  /* ??? As of today, various constructs lead here with types of different
     sizes even when both are constant (e.g. tagged types, packable vs regular
     component types, padded vs unpadded types, ...).  While some of these
     would better be handled upstream (types should be made consistent before
     calling into build_binary_op), some others are really expected and we
     have to be careful.  */

  /* We must avoid writing more than what the target can hold if this is for
     an assignment; the case of tagged types is handled in build_binary_op.
     So we use the lhs type if it is known to be smaller or of constant size
     while the rhs type is not, whatever the modes.  We also force t1 in case
     of constant size equality to minimize occurrences of view conversions on
     the lhs of an assignment, except for the case of record types with a
     variant part on the lhs but not on the rhs to make the conversion
     simpler.  */
  if (TREE_CONSTANT (TYPE_SIZE (t1))
      && (!TREE_CONSTANT (TYPE_SIZE (t2))
          || tree_int_cst_lt (TYPE_SIZE (t1), TYPE_SIZE (t2))
          || (TYPE_SIZE (t1) == TYPE_SIZE (t2)
              && !(TREE_CODE (t1) == RECORD_TYPE
                   && TREE_CODE (t2) == RECORD_TYPE
                   && get_variant_part (t1) != NULL_TREE
                   && get_variant_part (t2) == NULL_TREE))))
    return t1;

  /* Otherwise, if the lhs type is non-BLKmode, use it.  Note that we know
     that we will not have any alignment problems since, if we did, the
     non-BLKmode type could not have been used.  */
  if (TYPE_MODE (t1) != BLKmode)
    return t1;

  /* If the rhs type is of constant size, use it whatever the modes.  At
     this point it is known to be smaller, or of constant size and the
     lhs type is not.  */
  if (TREE_CONSTANT (TYPE_SIZE (t2)))
    return t2;

  /* Otherwise, if the rhs type is non-BLKmode, use it.  */
  if (TYPE_MODE (t2) != BLKmode)
    return t2;

  /* In this case, both types have variable size and BLKmode.  It's
     probably best to leave the "type mismatch" because changing it
     could cause a bad self-referential reference.  */
  return NULL_TREE;
}
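
/* Editor's illustrative note (not part of the original source): for an
   assignment between two views of the same record where the lhs type T1 has
   constant size and the rhs type T2 has variable size, the first test above
   selects T1, so the generated copy moves a compile-time amount of data
   instead of a runtime-computed one.  */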

/* Return an expression tree representing an equality comparison of A1 and A2,
   two objects of type ARRAY_TYPE.  The result should be of type RESULT_TYPE.

   Two arrays are equal in one of two ways: (1) if both have zero length in
   some dimension (not necessarily the same dimension) or (2) if the lengths
   in each dimension are equal and the data is equal.  We perform the length
   tests in as efficient a manner as possible.  */

static tree
compare_arrays (location_t loc, tree result_type, tree a1, tree a2)
{
  tree result = convert (result_type, boolean_true_node);
  tree a1_is_null = convert (result_type, boolean_false_node);
  tree a2_is_null = convert (result_type, boolean_false_node);
  tree t1 = TREE_TYPE (a1);
  tree t2 = TREE_TYPE (a2);
  bool a1_side_effects_p = TREE_SIDE_EFFECTS (a1);
  bool a2_side_effects_p = TREE_SIDE_EFFECTS (a2);
  bool length_zero_p = false;

  /* If either operand has side-effects, they have to be evaluated only once
     in spite of the multiple references to the operand in the comparison.  */
  if (a1_side_effects_p)
    a1 = gnat_protect_expr (a1);

  if (a2_side_effects_p)
    a2 = gnat_protect_expr (a2);

  /* Process each dimension separately and compare the lengths.  If any
     dimension has a length known to be zero, set LENGTH_ZERO_P to true
     in order to suppress the comparison of the data at the end.  */
  while (TREE_CODE (t1) == ARRAY_TYPE && TREE_CODE (t2) == ARRAY_TYPE)
    {
      tree lb1 = TYPE_MIN_VALUE (TYPE_DOMAIN (t1));
      tree ub1 = TYPE_MAX_VALUE (TYPE_DOMAIN (t1));
      tree lb2 = TYPE_MIN_VALUE (TYPE_DOMAIN (t2));
      tree ub2 = TYPE_MAX_VALUE (TYPE_DOMAIN (t2));
      tree length1 = size_binop (PLUS_EXPR, size_binop (MINUS_EXPR, ub1, lb1),
                                 size_one_node);
      tree length2 = size_binop (PLUS_EXPR, size_binop (MINUS_EXPR, ub2, lb2),
                                 size_one_node);
      tree comparison, this_a1_is_null, this_a2_is_null;

      /* If the length of the first array is a constant, swap our operands
         unless the length of the second array is the constant zero.  */
      if (TREE_CODE (length1) == INTEGER_CST && !integer_zerop (length2))
        {
          tree tem;
          bool btem;

          tem = a1, a1 = a2, a2 = tem;
          tem = t1, t1 = t2, t2 = tem;
          tem = lb1, lb1 = lb2, lb2 = tem;
          tem = ub1, ub1 = ub2, ub2 = tem;
          tem = length1, length1 = length2, length2 = tem;
          tem = a1_is_null, a1_is_null = a2_is_null, a2_is_null = tem;
          btem = a1_side_effects_p, a1_side_effects_p = a2_side_effects_p,
          a2_side_effects_p = btem;
        }

      /* If the length of the second array is the constant zero, we can just
         use the original stored bounds for the first array and see whether
         last < first holds.  */
      if (integer_zerop (length2))
        {
          length_zero_p = true;

          ub1 = TYPE_MAX_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1)));
          lb1 = TYPE_MIN_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1)));

          comparison = fold_build2_loc (loc, LT_EXPR, result_type, ub1, lb1);
          comparison = SUBSTITUTE_PLACEHOLDER_IN_EXPR (comparison, a1);
          if (EXPR_P (comparison))
            SET_EXPR_LOCATION (comparison, loc);

          this_a1_is_null = comparison;
          this_a2_is_null = convert (result_type, boolean_true_node);
        }

      /* Otherwise, if the length is some other constant value, we know that
         this dimension in the second array cannot be superflat, so we can
         just use its length computed from the actual stored bounds.  */
      else if (TREE_CODE (length2) == INTEGER_CST)
        {
          tree bt;

          ub1 = TYPE_MAX_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1)));
          lb1 = TYPE_MIN_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1)));
          /* Note that we know that UB2 and LB2 are constant and hence
             cannot contain a PLACEHOLDER_EXPR.  */
          ub2 = TYPE_MAX_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t2)));
          lb2 = TYPE_MIN_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t2)));
          bt = get_base_type (TREE_TYPE (ub1));

          comparison
            = fold_build2_loc (loc, EQ_EXPR, result_type,
                               build_binary_op (MINUS_EXPR, bt, ub1, lb1),
                               build_binary_op (MINUS_EXPR, bt, ub2, lb2));
          comparison = SUBSTITUTE_PLACEHOLDER_IN_EXPR (comparison, a1);
          if (EXPR_P (comparison))
            SET_EXPR_LOCATION (comparison, loc);

          this_a1_is_null
            = fold_build2_loc (loc, LT_EXPR, result_type, ub1, lb1);

          this_a2_is_null = convert (result_type, boolean_false_node);
        }

      /* Otherwise, compare the computed lengths.  */
      else
        {
          length1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (length1, a1);
          length2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (length2, a2);

          comparison
            = fold_build2_loc (loc, EQ_EXPR, result_type, length1, length2);

          /* If the length expression is of the form (cond ? val : 0), assume
             that cond is equivalent to (length != 0).  That's guaranteed by
             construction of the array types in gnat_to_gnu_entity.  */
          if (TREE_CODE (length1) == COND_EXPR
              && integer_zerop (TREE_OPERAND (length1, 2)))
            this_a1_is_null
              = invert_truthvalue_loc (loc, TREE_OPERAND (length1, 0));
          else
            this_a1_is_null = fold_build2_loc (loc, EQ_EXPR, result_type,
                                               length1, size_zero_node);

          /* Likewise for the second array.  */
          if (TREE_CODE (length2) == COND_EXPR
              && integer_zerop (TREE_OPERAND (length2, 2)))
            this_a2_is_null
              = invert_truthvalue_loc (loc, TREE_OPERAND (length2, 0));
          else
            this_a2_is_null = fold_build2_loc (loc, EQ_EXPR, result_type,
                                               length2, size_zero_node);
        }

      /* Append expressions for this dimension to the final expressions.  */
      result = build_binary_op (TRUTH_ANDIF_EXPR, result_type,
                                result, comparison);

      a1_is_null = build_binary_op (TRUTH_ORIF_EXPR, result_type,
                                    this_a1_is_null, a1_is_null);

      a2_is_null = build_binary_op (TRUTH_ORIF_EXPR, result_type,
                                    this_a2_is_null, a2_is_null);

      t1 = TREE_TYPE (t1);
      t2 = TREE_TYPE (t2);
    }

  /* Unless the length of some dimension is known to be zero, compare the
     data in the array.  */
  if (!length_zero_p)
    {
      tree type = find_common_type (TREE_TYPE (a1), TREE_TYPE (a2));
      tree comparison;

      if (type)
        {
          a1 = convert (type, a1);
          a2 = convert (type, a2);
        }

      comparison = fold_build2_loc (loc, EQ_EXPR, result_type, a1, a2);

      result
        = build_binary_op (TRUTH_ANDIF_EXPR, result_type, result, comparison);
    }

  /* The result is also true if both sizes are zero.  */
  result = build_binary_op (TRUTH_ORIF_EXPR, result_type,
                            build_binary_op (TRUTH_ANDIF_EXPR, result_type,
                                             a1_is_null, a2_is_null),
                            result);

  /* If either operand has side-effects, they have to be evaluated before
     starting the comparison above since the place they would be otherwise
     evaluated could be wrong.  */
  if (a1_side_effects_p)
    result = build2 (COMPOUND_EXPR, result_type, a1, result);

  if (a2_side_effects_p)
    result = build2 (COMPOUND_EXPR, result_type, a2, result);

  return result;
}
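
/* Editor's illustrative note (not part of the original source): per the
   rules implemented above, two null arrays compare equal even when their
   bounds differ, e.g. in Ada

     A : String (1 .. 0);
     B : String (5 .. 2);

   A = B is True: both lengths are zero, so the data comparison is
   suppressed and the result reduces to A1_IS_NULL and A2_IS_NULL.  */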

/* Return an expression tree representing an equality comparison of P1 and P2,
   two objects of fat pointer type.  The result should be of type RESULT_TYPE.

   Two fat pointers are equal in one of two ways: (1) if both have a null
   pointer to the array or (2) if they contain the same pair of pointers.
   We perform the comparison in as efficient a manner as possible.  */

static tree
compare_fat_pointers (location_t loc, tree result_type, tree p1, tree p2)
{
  tree p1_array, p2_array, p1_bounds, p2_bounds, same_array, same_bounds;
  tree p1_array_is_null, p2_array_is_null;

  /* If either operand has side-effects, they have to be evaluated only once
     in spite of the multiple references to the operand in the comparison.  */
  p1 = gnat_protect_expr (p1);
  p2 = gnat_protect_expr (p2);

  /* The constant folder doesn't fold fat pointer types so we do it here.  */
  if (TREE_CODE (p1) == CONSTRUCTOR)
    p1_array = VEC_index (constructor_elt, CONSTRUCTOR_ELTS (p1), 0)->value;
  else
    p1_array = build_component_ref (p1, NULL_TREE,
                                    TYPE_FIELDS (TREE_TYPE (p1)), true);

  p1_array_is_null
    = fold_build2_loc (loc, EQ_EXPR, result_type, p1_array,
                       fold_convert_loc (loc, TREE_TYPE (p1_array),
                                         null_pointer_node));

  if (TREE_CODE (p2) == CONSTRUCTOR)
    p2_array = VEC_index (constructor_elt, CONSTRUCTOR_ELTS (p2), 0)->value;
  else
    p2_array = build_component_ref (p2, NULL_TREE,
                                    TYPE_FIELDS (TREE_TYPE (p2)), true);

  p2_array_is_null
    = fold_build2_loc (loc, EQ_EXPR, result_type, p2_array,
                       fold_convert_loc (loc, TREE_TYPE (p2_array),
                                         null_pointer_node));

  /* If one of the pointers to the array is null, just compare the other.  */
  if (integer_zerop (p1_array))
    return p2_array_is_null;
  else if (integer_zerop (p2_array))
    return p1_array_is_null;

  /* Otherwise, do the fully-fledged comparison.  */
  same_array
    = fold_build2_loc (loc, EQ_EXPR, result_type, p1_array, p2_array);

  if (TREE_CODE (p1) == CONSTRUCTOR)
    p1_bounds = VEC_index (constructor_elt, CONSTRUCTOR_ELTS (p1), 1)->value;
  else
    p1_bounds
      = build_component_ref (p1, NULL_TREE,
                             DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (p1))), true);

  if (TREE_CODE (p2) == CONSTRUCTOR)
    p2_bounds = VEC_index (constructor_elt, CONSTRUCTOR_ELTS (p2), 1)->value;
  else
    p2_bounds
      = build_component_ref (p2, NULL_TREE,
                             DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (p2))), true);

  same_bounds
    = fold_build2_loc (loc, EQ_EXPR, result_type, p1_bounds, p2_bounds);

  /* P1_ARRAY == P2_ARRAY && (P1_ARRAY == NULL || P1_BOUNDS == P2_BOUNDS).  */
  return build_binary_op (TRUTH_ANDIF_EXPR, result_type, same_array,
                          build_binary_op (TRUTH_ORIF_EXPR, result_type,
                                           p1_array_is_null, same_bounds));
}
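
/* Editor's note (grounded in the accesses above, not in the original
   comments): a fat pointer record has the pointer to the array as its first
   field (TYPE_FIELDS) and the pointer to the bounds as the second
   (DECL_CHAIN of TYPE_FIELDS), so the final expression reads: the array
   pointers are equal, and either they are null or the bounds pointers are
   equal too.  */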

/* Compute the result of applying OP_CODE to LHS and RHS, where both are of
   type TYPE.  We know that TYPE is a modular type with a nonbinary
   modulus.  */

static tree
nonbinary_modular_operation (enum tree_code op_code, tree type, tree lhs,
                             tree rhs)
{
  tree modulus = TYPE_MODULUS (type);
  unsigned int needed_precision = tree_floor_log2 (modulus) + 1;
  unsigned int precision;
  bool unsignedp = true;
  tree op_type = type;
  tree result;

  /* If this is an addition of a constant, convert it to a subtraction
     of a constant since we can do that faster.  */
  if (op_code == PLUS_EXPR && TREE_CODE (rhs) == INTEGER_CST)
    {
      rhs = fold_build2 (MINUS_EXPR, type, modulus, rhs);
      op_code = MINUS_EXPR;
    }

  /* For the logical operations, we only need PRECISION bits.  For addition
     and subtraction, we need one more, and for multiplication we need twice
     as many.  But we never want to make a size smaller than our size.  */
  if (op_code == PLUS_EXPR || op_code == MINUS_EXPR)
    needed_precision += 1;
  else if (op_code == MULT_EXPR)
    needed_precision *= 2;

  precision = MAX (needed_precision, TYPE_PRECISION (op_type));

  /* Unsigned will do for everything but subtraction.  */
  if (op_code == MINUS_EXPR)
    unsignedp = false;

  /* If our type is the wrong signedness or isn't wide enough, make a new
     type and convert both our operands to it.  */
  if (TYPE_PRECISION (op_type) < precision
      || TYPE_UNSIGNED (op_type) != unsignedp)
    {
      /* Copy the node so we ensure it can be modified to make it modular.  */
      op_type = copy_node (gnat_type_for_size (precision, unsignedp));
      modulus = convert (op_type, modulus);
      SET_TYPE_MODULUS (op_type, modulus);
      TYPE_MODULAR_P (op_type) = 1;
      lhs = convert (op_type, lhs);
      rhs = convert (op_type, rhs);
    }

  /* Do the operation, then we'll fix it up.  */
  result = fold_build2 (op_code, op_type, lhs, rhs);

  /* For multiplication, we have no choice but to do a full modulus
     operation.  However, we want to do this in the narrowest
     possible size.  */
  if (op_code == MULT_EXPR)
    {
      tree div_type = copy_node (gnat_type_for_size (needed_precision, 1));
      modulus = convert (div_type, modulus);
      SET_TYPE_MODULUS (div_type, modulus);
      TYPE_MODULAR_P (div_type) = 1;
      result = convert (op_type,
                        fold_build2 (TRUNC_MOD_EXPR, div_type,
                                     convert (div_type, result), modulus));
    }

  /* For subtraction, add the modulus back if we are negative.  */
  else if (op_code == MINUS_EXPR)
    {
      result = gnat_protect_expr (result);
      result = fold_build3 (COND_EXPR, op_type,
                            fold_build2 (LT_EXPR, boolean_type_node, result,
                                         convert (op_type, integer_zero_node)),
                            fold_build2 (PLUS_EXPR, op_type, result, modulus),
                            result);
    }

  /* For the other operations, subtract the modulus if we are >= it.  */
  else
    {
      result = gnat_protect_expr (result);
      result = fold_build3 (COND_EXPR, op_type,
                            fold_build2 (GE_EXPR, boolean_type_node,
                                         result, modulus),
                            fold_build2 (MINUS_EXPR, op_type,
                                         result, modulus),
                            result);
    }

  return convert (type, result);
}
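
/* Editor's worked example (not part of the original source): for a modular
   type with modulus 7, tree_floor_log2 (7) + 1 = 3 bits cover the values
   0 .. 6.  Addition gets one more bit (6 + 6 = 12 fits in 4 bits) and the
   final COND_EXPR subtracts 7 when the raw result is >= 7, e.g.
   5 + 4 = 9 -> 9 - 7 = 2.  Multiplication doubles the precision
   (6 * 6 = 36 fits in 6 bits) and uses a genuine TRUNC_MOD_EXPR instead.  */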

/* This page contains routines that implement the Ada semantics with regard
   to atomic objects.  They are fully piggybacked on the middle-end support
   for atomic loads and stores.

   *** Memory barriers and volatile objects ***

   We implement the weakened form of the C.6(16) clause that was introduced
   in Ada 2012 (AI05-117).  Earlier forms of this clause wouldn't have been
   implementable without significant performance hits on modern platforms.

   We also take advantage of the requirements imposed on shared variables by
   9.10 (conditions for sequential actions) to have non-erroneous execution
   and consider that C.6(16) and C.6(17) only prescribe a uniform order of
   volatile updates with regard to sequential actions, i.e. with regard to
   reads or updates of atomic objects.

   As such, an update of an atomic object by a task requires that all earlier
   accesses to volatile objects have completed.  Similarly, later accesses to
   volatile objects cannot be reordered before the update of the atomic object.
   So, memory barriers both before and after the atomic update are needed.

   For a read of an atomic object, to avoid seeing writes of volatile objects
   by a task earlier than by the other tasks, a memory barrier is needed before
   the atomic read.  Finally, to avoid reordering later reads or updates of
   volatile objects to before the atomic read, a barrier is needed after the
   atomic read.

   So, memory barriers are needed before and after atomic reads and updates.
   And, in order to simplify the implementation, we use full memory barriers
   in all cases, i.e. we enforce sequential consistency for atomic accesses.  */

/* Return the size of TYPE, which must be a positive power of 2.  */

static unsigned int
resolve_atomic_size (tree type)
{
  unsigned HOST_WIDE_INT size = tree_low_cst (TYPE_SIZE_UNIT (type), 1);

  if (size == 1 || size == 2 || size == 4 || size == 8 || size == 16)
    return size;

  /* We shouldn't reach here without having already detected that the size
     isn't compatible with an atomic access.  */
  gcc_assert (Serious_Errors_Detected);

  return 0;
}
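
/* Editor's note (not part of the original source): the accepted sizes are
   exactly the object sizes for which the middle-end provides sized atomic
   built-ins (__atomic_load_1 .. __atomic_load_16 and their store
   counterparts), as used by the two builders below.  */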

/* Build an atomic load for the underlying atomic object in SRC.  */

tree
build_atomic_load (tree src)
{
  tree ptr_type
    = build_pointer_type
      (build_qualified_type (void_type_node, TYPE_QUAL_VOLATILE));
  tree mem_model = build_int_cst (integer_type_node, MEMMODEL_SEQ_CST);
  tree orig_src = src;
  tree type = TREE_TYPE (src);
  tree t, val;
  unsigned int size;
  int fncode;

  src = remove_conversions (src, false);
  size = resolve_atomic_size (TREE_TYPE (src));
  if (size == 0)
    return orig_src;

  fncode = (int) BUILT_IN_ATOMIC_LOAD_N + exact_log2 (size) + 1;
  t = builtin_decl_implicit ((enum built_in_function) fncode);

  src = build_unary_op (ADDR_EXPR, ptr_type, src);
  val = build_call_expr (t, 2, src, mem_model);

  return unchecked_convert (type, val, true);
}
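
/* Editor's illustrative note (not part of the original source): for a
   4-byte atomic object, exact_log2 (4) + 1 = 3 selects
   BUILT_IN_ATOMIC_LOAD_4, so the call built above is conceptually

     __atomic_load_4 ((volatile void *) &obj, __ATOMIC_SEQ_CST)

   with the result view-converted back to the object's original type.  */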

/* Build an atomic store from SRC to the underlying atomic object in DEST.  */

tree
build_atomic_store (tree dest, tree src)
{
  tree ptr_type
    = build_pointer_type
      (build_qualified_type (void_type_node, TYPE_QUAL_VOLATILE));
  tree mem_model = build_int_cst (integer_type_node, MEMMODEL_SEQ_CST);
  tree orig_dest = dest;
  tree t, int_type;
  unsigned int size;
  int fncode;

  dest = remove_conversions (dest, false);
  size = resolve_atomic_size (TREE_TYPE (dest));
  if (size == 0)
    return build_binary_op (MODIFY_EXPR, NULL_TREE, orig_dest, src);

  fncode = (int) BUILT_IN_ATOMIC_STORE_N + exact_log2 (size) + 1;
  t = builtin_decl_implicit ((enum built_in_function) fncode);
  int_type = gnat_type_for_size (BITS_PER_UNIT * size, 1);

  dest = build_unary_op (ADDR_EXPR, ptr_type, dest);
  src = unchecked_convert (int_type, src, true);

  return build_call_expr (t, 3, dest, src, mem_model);
}
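
/* Editor's illustrative note (not part of the original source):
   symmetrically, a store to a 4-byte atomic object becomes conceptually

     __atomic_store_4 ((volatile void *) &obj, val, __ATOMIC_SEQ_CST)

   where VAL is SRC view-converted to the 32-bit unsigned integer type
   returned by gnat_type_for_size.  */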

/* Make a binary operation of kind OP_CODE.  RESULT_TYPE is the type
   desired for the result.  Usually the operation is to be performed
   in that type.  For INIT_EXPR and MODIFY_EXPR, RESULT_TYPE must be
   NULL_TREE.  For ARRAY_REF, RESULT_TYPE may be NULL_TREE, in which
   case the type to be used will be derived from the operands.

   This function is very much unlike the ones for C and C++ since we
   have already done any type conversion and matching required.  All we
   have to do here is validate the work done by SEM and handle subtypes.  */

tree
build_binary_op (enum tree_code op_code, tree result_type,
                 tree left_operand, tree right_operand)
{
  tree left_type  = TREE_TYPE (left_operand);
  tree right_type = TREE_TYPE (right_operand);
  tree left_base_type = get_base_type (left_type);
  tree right_base_type = get_base_type (right_type);
  tree operation_type = result_type;
  tree best_type = NULL_TREE;
  tree modulus, result;
  bool has_side_effects = false;

  if (operation_type
      && TREE_CODE (operation_type) == RECORD_TYPE
      && TYPE_JUSTIFIED_MODULAR_P (operation_type))
    operation_type = TREE_TYPE (TYPE_FIELDS (operation_type));

  if (operation_type
      && TREE_CODE (operation_type) == INTEGER_TYPE
      && TYPE_EXTRA_SUBTYPE_P (operation_type))
    operation_type = get_base_type (operation_type);

  modulus = (operation_type
             && TREE_CODE (operation_type) == INTEGER_TYPE
             && TYPE_MODULAR_P (operation_type)
             ? TYPE_MODULUS (operation_type) : NULL_TREE);

  switch (op_code)
    {
    case INIT_EXPR:
    case MODIFY_EXPR:
#ifdef ENABLE_CHECKING
      gcc_assert (result_type == NULL_TREE);
#endif
      /* If there were integral or pointer conversions on the LHS, remove
         them; we'll be putting them back below if needed.  Likewise for
         conversions between array and record types, except for justified
         modular types.  But don't do this if the right operand is not
         BLKmode (for packed arrays) unless we are not changing the mode.  */
      while ((CONVERT_EXPR_P (left_operand)
              || TREE_CODE (left_operand) == VIEW_CONVERT_EXPR)
             && (((INTEGRAL_TYPE_P (left_type)
                   || POINTER_TYPE_P (left_type))
                  && (INTEGRAL_TYPE_P (TREE_TYPE
                                       (TREE_OPERAND (left_operand, 0)))
                      || POINTER_TYPE_P (TREE_TYPE
                                         (TREE_OPERAND (left_operand, 0)))))
                 || (((TREE_CODE (left_type) == RECORD_TYPE
                       && !TYPE_JUSTIFIED_MODULAR_P (left_type))
                      || TREE_CODE (left_type) == ARRAY_TYPE)
                     && ((TREE_CODE (TREE_TYPE
                                     (TREE_OPERAND (left_operand, 0)))
                          == RECORD_TYPE)
                         || (TREE_CODE (TREE_TYPE
                                        (TREE_OPERAND (left_operand, 0)))
                             == ARRAY_TYPE))
                     && (TYPE_MODE (right_type) == BLKmode
                         || (TYPE_MODE (left_type)
                             == TYPE_MODE (TREE_TYPE
                                           (TREE_OPERAND
                                            (left_operand, 0))))))))
        {
          left_operand = TREE_OPERAND (left_operand, 0);
          left_type = TREE_TYPE (left_operand);
        }

      /* If a class-wide type may be involved, force use of the RHS type.  */
      if ((TREE_CODE (right_type) == RECORD_TYPE
           || TREE_CODE (right_type) == UNION_TYPE)
          && TYPE_ALIGN_OK (right_type))
        operation_type = right_type;

      /* If we are copying between padded objects with compatible types, use
         the padded view of the objects, this is very likely more efficient.
         Likewise for a padded object that is assigned a constructor, if we
         can convert the constructor to the inner type, to avoid putting a
         VIEW_CONVERT_EXPR on the LHS.  But don't do so if we wouldn't have
         actually copied anything.  */
      else if (TYPE_IS_PADDING_P (left_type)
               && TREE_CONSTANT (TYPE_SIZE (left_type))
               && ((TREE_CODE (right_operand) == COMPONENT_REF
                    && TYPE_IS_PADDING_P
                       (TREE_TYPE (TREE_OPERAND (right_operand, 0)))
                    && gnat_types_compatible_p
                       (left_type,
                        TREE_TYPE (TREE_OPERAND (right_operand, 0))))
                   || (TREE_CODE (right_operand) == CONSTRUCTOR
                       && !CONTAINS_PLACEHOLDER_P
                           (DECL_SIZE (TYPE_FIELDS (left_type)))))
               && !integer_zerop (TYPE_SIZE (right_type)))
        operation_type = left_type;

      /* If we have a call to a function that returns an unconstrained type
         with default discriminant on the RHS, use the RHS type (which is
         padded) as we cannot compute the size of the actual assignment.  */
      else if (TREE_CODE (right_operand) == CALL_EXPR
               && TYPE_IS_PADDING_P (right_type)
               && CONTAINS_PLACEHOLDER_P
                  (TYPE_SIZE (TREE_TYPE (TYPE_FIELDS (right_type)))))
        operation_type = right_type;

      /* Find the best type to use for copying between aggregate types.  */
      else if (((TREE_CODE (left_type) == ARRAY_TYPE
                 && TREE_CODE (right_type) == ARRAY_TYPE)
                || (TREE_CODE (left_type) == RECORD_TYPE
                    && TREE_CODE (right_type) == RECORD_TYPE))
               && (best_type = find_common_type (left_type, right_type)))
        operation_type = best_type;

      /* Otherwise use the LHS type.  */
      else
        operation_type = left_type;

      /* Ensure everything on the LHS is valid.  If we have a field reference,
         strip anything that get_inner_reference can handle.  Then remove any
         conversions between types having the same code and mode.  And mark
         VIEW_CONVERT_EXPRs with TREE_ADDRESSABLE.  When done, we must have
         either an INDIRECT_REF, a NULL_EXPR or a DECL node.  */
      result = left_operand;
      while (true)
        {
          tree restype = TREE_TYPE (result);

          if (TREE_CODE (result) == COMPONENT_REF
              || TREE_CODE (result) == ARRAY_REF
              || TREE_CODE (result) == ARRAY_RANGE_REF)
            while (handled_component_p (result))
              result = TREE_OPERAND (result, 0);
          else if (TREE_CODE (result) == REALPART_EXPR
                   || TREE_CODE (result) == IMAGPART_EXPR
                   || (CONVERT_EXPR_P (result)
                       && (((TREE_CODE (restype)
                             == TREE_CODE (TREE_TYPE
                                           (TREE_OPERAND (result, 0))))
                             && (TYPE_MODE (TREE_TYPE
                                            (TREE_OPERAND (result, 0)))
                                 == TYPE_MODE (restype)))
                           || TYPE_ALIGN_OK (restype))))
            result = TREE_OPERAND (result, 0);
          else if (TREE_CODE (result) == VIEW_CONVERT_EXPR)
            {
              TREE_ADDRESSABLE (result) = 1;
              result = TREE_OPERAND (result, 0);
            }
          else
            break;
        }

      gcc_assert (TREE_CODE (result) == INDIRECT_REF
                  || TREE_CODE (result) == NULL_EXPR
                  || DECL_P (result));

      /* Convert the right operand to the operation type unless it is
         either already of the correct type or if the type involves a
         placeholder, since the RHS may not have the same record type.  */
      if (operation_type != right_type
          && !CONTAINS_PLACEHOLDER_P (TYPE_SIZE (operation_type)))
        {
          right_operand = convert (operation_type, right_operand);
          right_type = operation_type;
        }

      /* If the left operand is not of the same type as the operation
         type, wrap it up in a VIEW_CONVERT_EXPR.  */
      if (left_type != operation_type)
        left_operand = unchecked_convert (operation_type, left_operand, false);

      has_side_effects = true;
      modulus = NULL_TREE;
      break;

    case ARRAY_REF:
      if (!operation_type)
        operation_type = TREE_TYPE (left_type);

      /* ... fall through ... */

    case ARRAY_RANGE_REF:
      /* First look through conversion between type variants.  Note that
         this changes neither the operation type nor the type domain.  */
      if (TREE_CODE (left_operand) == VIEW_CONVERT_EXPR
          && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (left_operand, 0)))
             == TYPE_MAIN_VARIANT (left_type))
        {
          left_operand = TREE_OPERAND (left_operand, 0);
          left_type = TREE_TYPE (left_operand);
        }

      /* For a range, make sure the element type is consistent.  */
      if (op_code == ARRAY_RANGE_REF
          && TREE_TYPE (operation_type) != TREE_TYPE (left_type))
        operation_type = build_array_type (TREE_TYPE (left_type),
                                           TYPE_DOMAIN (operation_type));

      /* Then convert the right operand to its base type.  This will prevent
         unneeded sign conversions when sizetype is wider than integer.  */
      right_operand = convert (right_base_type, right_operand);
      right_operand = convert_to_index_type (right_operand);
      modulus = NULL_TREE;
      break;

    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
#ifdef ENABLE_CHECKING
      gcc_assert (TREE_CODE (get_base_type (result_type)) == BOOLEAN_TYPE);
#endif
      operation_type = left_base_type;
      left_operand = convert (operation_type, left_operand);
      right_operand = convert (operation_type, right_operand);
      break;

    case GE_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case LT_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
#ifdef ENABLE_CHECKING
      gcc_assert (TREE_CODE (get_base_type (result_type)) == BOOLEAN_TYPE);
#endif
      /* If either operand is a NULL_EXPR, just return a new one.  */
      if (TREE_CODE (left_operand) == NULL_EXPR)
        return build2 (op_code, result_type,
                       build1 (NULL_EXPR, integer_type_node,
                               TREE_OPERAND (left_operand, 0)),
                       integer_zero_node);

      else if (TREE_CODE (right_operand) == NULL_EXPR)
        return build2 (op_code, result_type,
                       build1 (NULL_EXPR, integer_type_node,
                               TREE_OPERAND (right_operand, 0)),
                       integer_zero_node);

      /* If either object is of a justified modular type, get the
         field from within.  */
      if (TREE_CODE (left_type) == RECORD_TYPE
          && TYPE_JUSTIFIED_MODULAR_P (left_type))
        {
          left_operand = convert (TREE_TYPE (TYPE_FIELDS (left_type)),
                                  left_operand);
          left_type = TREE_TYPE (left_operand);
          left_base_type = get_base_type (left_type);
        }

      if (TREE_CODE (right_type) == RECORD_TYPE
          && TYPE_JUSTIFIED_MODULAR_P (right_type))
        {
          right_operand = convert (TREE_TYPE (TYPE_FIELDS (right_type)),
                                   right_operand);
          right_type = TREE_TYPE (right_operand);
          right_base_type = get_base_type (right_type);
        }

      /* If both objects are arrays, compare them specially.  */
      if ((TREE_CODE (left_type) == ARRAY_TYPE
           || (TREE_CODE (left_type) == INTEGER_TYPE
               && TYPE_HAS_ACTUAL_BOUNDS_P (left_type)))
          && (TREE_CODE (right_type) == ARRAY_TYPE
              || (TREE_CODE (right_type) == INTEGER_TYPE
                  && TYPE_HAS_ACTUAL_BOUNDS_P (right_type))))
        {
          result = compare_arrays (input_location,
                                   result_type, left_operand, right_operand);
          if (op_code == NE_EXPR)
            result = invert_truthvalue_loc (EXPR_LOCATION (result), result);
          else
            gcc_assert (op_code == EQ_EXPR);

          return result;
        }

      /* Otherwise, the base types must be the same, unless they are both fat
         pointer types or record types.  In the latter case, use the best type
         and convert both operands to that type.  */
      if (left_base_type != right_base_type)
        {
          if (TYPE_IS_FAT_POINTER_P (left_base_type)
              && TYPE_IS_FAT_POINTER_P (right_base_type))
            {
              gcc_assert (TYPE_MAIN_VARIANT (left_base_type)
                          == TYPE_MAIN_VARIANT (right_base_type));
              best_type = left_base_type;
            }

          else if (TREE_CODE (left_base_type) == RECORD_TYPE
                   && TREE_CODE (right_base_type) == RECORD_TYPE)
            {
              /* The only way this is permitted is if both types have the same
                 name.  In that case, one of them must not be self-referential.
                 Use it as the best type.  Even better with a fixed size.  */
              gcc_assert (TYPE_NAME (left_base_type)
                          && TYPE_NAME (left_base_type)
                             == TYPE_NAME (right_base_type));

              if (TREE_CONSTANT (TYPE_SIZE (left_base_type)))
                best_type = left_base_type;
              else if (TREE_CONSTANT (TYPE_SIZE (right_base_type)))
                best_type = right_base_type;
              else if (!CONTAINS_PLACEHOLDER_P (TYPE_SIZE (left_base_type)))
                best_type = left_base_type;
              else if (!CONTAINS_PLACEHOLDER_P (TYPE_SIZE (right_base_type)))
                best_type = right_base_type;
              else
                gcc_unreachable ();
            }

          else
            gcc_unreachable ();

          left_operand = convert (best_type, left_operand);
          right_operand = convert (best_type, right_operand);
        }
      else
        {
          left_operand = convert (left_base_type, left_operand);
          right_operand = convert (right_base_type, right_operand);
        }

      /* If both objects are fat pointers, compare them specially.  */
      if (TYPE_IS_FAT_POINTER_P (left_base_type))
        {
          result
            = compare_fat_pointers (input_location,
                                    result_type, left_operand, right_operand);
          if (op_code == NE_EXPR)
            result = invert_truthvalue_loc (EXPR_LOCATION (result), result);
          else
            gcc_assert (op_code == EQ_EXPR);

          return result;
        }

      modulus = NULL_TREE;
      break;

    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      /* The RHS of a shift can be any type.  Also, ignore any modulus
         (we used to abort, but this is needed for unchecked conversion
         to modular types).  Otherwise, processing is the same as normal.  */
      gcc_assert (operation_type == left_base_type);
      modulus = NULL_TREE;
      left_operand = convert (operation_type, left_operand);
      break;

    case BIT_AND_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
      /* For binary modulus, if the inputs are in range, so are the
         outputs.  */
      if (modulus && integer_pow2p (modulus))
        modulus = NULL_TREE;
      goto common;

    case COMPLEX_EXPR:
      gcc_assert (TREE_TYPE (result_type) == left_base_type
                  && TREE_TYPE (result_type) == right_base_type);
      left_operand = convert (left_base_type, left_operand);
      right_operand = convert (right_base_type, right_operand);
      break;

    case TRUNC_DIV_EXPR:   case TRUNC_MOD_EXPR:
    case CEIL_DIV_EXPR:    case CEIL_MOD_EXPR:
    case FLOOR_DIV_EXPR:   case FLOOR_MOD_EXPR:
    case ROUND_DIV_EXPR:   case ROUND_MOD_EXPR:
      /* These always produce results lower than either operand.  */
      modulus = NULL_TREE;
      goto common;

    case POINTER_PLUS_EXPR:
      gcc_assert (operation_type == left_base_type
                  && sizetype == right_base_type);
      left_operand = convert (operation_type, left_operand);
      right_operand = convert (sizetype, right_operand);
      break;

    case PLUS_NOMOD_EXPR:
    case MINUS_NOMOD_EXPR:
      if (op_code == PLUS_NOMOD_EXPR)
        op_code = PLUS_EXPR;
      else
        op_code = MINUS_EXPR;
      modulus = NULL_TREE;

      /* ... fall through ... */

    case PLUS_EXPR:
    case MINUS_EXPR:
      /* Avoid doing arithmetic in ENUMERAL_TYPE or BOOLEAN_TYPE like the
         other compilers.  Contrary to C, Ada doesn't allow arithmetic in
         these types but can generate addition/subtraction for Succ/Pred.  */
      if (operation_type
          && (TREE_CODE (operation_type) == ENUMERAL_TYPE
              || TREE_CODE (operation_type) == BOOLEAN_TYPE))
        operation_type = left_base_type = right_base_type
          = gnat_type_for_mode (TYPE_MODE (operation_type),
                                TYPE_UNSIGNED (operation_type));

      /* ... fall through ... */

    default:
    common:
      /* The result type should be the same as the base types of both
         operands (and they should be the same).  Convert everything to
         the result type.  */

      gcc_assert (operation_type == left_base_type
                  && left_base_type == right_base_type);
      left_operand = convert (operation_type, left_operand);
      right_operand = convert (operation_type, right_operand);
    }

  if (modulus && !integer_pow2p (modulus))
    {
      result = nonbinary_modular_operation (op_code, operation_type,
                                            left_operand, right_operand);
      modulus = NULL_TREE;
    }
  /* If either operand is a NULL_EXPR, just return a new one.  */
  else if (TREE_CODE (left_operand) == NULL_EXPR)
    return build1 (NULL_EXPR, operation_type, TREE_OPERAND (left_operand, 0));
  else if (TREE_CODE (right_operand) == NULL_EXPR)
    return build1 (NULL_EXPR, operation_type, TREE_OPERAND (right_operand, 0));
  else if (op_code == ARRAY_REF || op_code == ARRAY_RANGE_REF)
    result = fold (build4 (op_code, operation_type, left_operand,
                           right_operand, NULL_TREE, NULL_TREE));
  else if (op_code == INIT_EXPR || op_code == MODIFY_EXPR)
    result = build2 (op_code, void_type_node, left_operand, right_operand);
  else
    result
      = fold_build2 (op_code, operation_type, left_operand, right_operand);

  if (TREE_CONSTANT (result))
    ;
  else if (op_code == ARRAY_REF || op_code == ARRAY_RANGE_REF)
    {
      TREE_THIS_NOTRAP (result) = 1;
      if (TYPE_VOLATILE (operation_type))
        TREE_THIS_VOLATILE (result) = 1;
    }
  else
    TREE_CONSTANT (result)
      |= (TREE_CONSTANT (left_operand) && TREE_CONSTANT (right_operand));

  TREE_SIDE_EFFECTS (result) |= has_side_effects;

  /* If we are working with modular types, perform the MOD operation
     if something above hasn't eliminated the need for it.  */
  if (modulus)
    result = fold_build2 (FLOOR_MOD_EXPR, operation_type, result,
                          convert (operation_type, modulus));

  if (result_type && result_type != operation_type)
    result = convert (result_type, result);

  return result;
}
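
/* Editor's illustrative sketch (not part of the original source): typical
   invocations from the rest of gigi, with GNU_LHS, GNU_RHS and GNU_TYPE as
   hypothetical trees of compatible types:

     gnu_result = build_binary_op (MODIFY_EXPR, NULL_TREE, gnu_lhs, gnu_rhs);

   builds an assignment (RESULT_TYPE must be NULL_TREE; the result is a void
   MODIFY_EXPR with TREE_SIDE_EFFECTS set), while

     gnu_sum = build_binary_op (PLUS_EXPR, gnu_type, gnu_lhs, gnu_rhs);

   performs the addition in GNU_TYPE, which must be the common base type of
   the two operands.  */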
1173
 
1174
/* Similar, but for unary operations.  */
1175
 
1176
tree
1177
build_unary_op (enum tree_code op_code, tree result_type, tree operand)
1178
{
1179
  tree type = TREE_TYPE (operand);
1180
  tree base_type = get_base_type (type);
1181
  tree operation_type = result_type;
1182
  tree result;
1183
 
1184
  if (operation_type
1185
      && TREE_CODE (operation_type) == RECORD_TYPE
1186
      && TYPE_JUSTIFIED_MODULAR_P (operation_type))
1187
    operation_type = TREE_TYPE (TYPE_FIELDS (operation_type));
1188
 
1189
  if (operation_type
1190
      && TREE_CODE (operation_type) == INTEGER_TYPE
1191
      && TYPE_EXTRA_SUBTYPE_P (operation_type))
1192
    operation_type = get_base_type (operation_type);
1193
 
1194
  switch (op_code)
1195
    {
1196
    case REALPART_EXPR:
1197
    case IMAGPART_EXPR:
1198
      if (!operation_type)
1199
        result_type = operation_type = TREE_TYPE (type);
1200
      else
1201
        gcc_assert (result_type == TREE_TYPE (type));
1202
 
1203
      result = fold_build1 (op_code, operation_type, operand);
1204
      break;
1205
 
1206
    case TRUTH_NOT_EXPR:
1207
#ifdef ENABLE_CHECKING
1208
      gcc_assert (TREE_CODE (get_base_type (result_type)) == BOOLEAN_TYPE);
1209
#endif
1210
      result = invert_truthvalue_loc (EXPR_LOCATION (operand), operand);
1211
      /* When not optimizing, fold the result as invert_truthvalue_loc
1212
         doesn't fold the result of comparisons.  This is intended to undo
1213
         the trick used for boolean rvalues in gnat_to_gnu.  */
1214
      if (!optimize)
1215
        result = fold (result);
1216
      break;
1217
 
1218
    case ATTR_ADDR_EXPR:
1219
    case ADDR_EXPR:
1220
      switch (TREE_CODE (operand))
1221
        {
1222
        case INDIRECT_REF:
1223
        case UNCONSTRAINED_ARRAY_REF:
1224
          result = TREE_OPERAND (operand, 0);
1225
 
1226
          /* Make sure the type here is a pointer, not a reference.
1227
             GCC wants pointer types for function addresses.  */
1228
          if (!result_type)
1229
            result_type = build_pointer_type (type);
1230
 
1231
          /* If the underlying object can alias everything, propagate the
1232
             property since we are effectively retrieving the object.  */
1233
          if (POINTER_TYPE_P (TREE_TYPE (result))
1234
              && TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (result)))
1235
            {
1236
              if (TREE_CODE (result_type) == POINTER_TYPE
1237
                  && !TYPE_REF_CAN_ALIAS_ALL (result_type))
1238
                result_type
1239
                  = build_pointer_type_for_mode (TREE_TYPE (result_type),
1240
                                                 TYPE_MODE (result_type),
1241
                                                 true);
1242
              else if (TREE_CODE (result_type) == REFERENCE_TYPE
1243
                       && !TYPE_REF_CAN_ALIAS_ALL (result_type))
1244
                result_type
1245
                  = build_reference_type_for_mode (TREE_TYPE (result_type),
1246
                                                   TYPE_MODE (result_type),
1247
                                                   true);
1248
            }
1249
          break;
1250
 
1251
        case NULL_EXPR:
1252
          result = operand;
1253
          TREE_TYPE (result) = type = build_pointer_type (type);
1254
          break;
1255
 
1256
        case COMPOUND_EXPR:
1257
          /* Fold a compound expression if it has unconstrained array type
1258
             since the middle-end cannot handle it.  But we don't it in the
1259
             general case because it may introduce aliasing issues if the
1260
             first operand is an indirect assignment and the second operand
1261
             the corresponding address, e.g. for an allocator.  */
1262
          if (TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
1263
            {
1264
              result = build_unary_op (ADDR_EXPR, result_type,
1265
                                       TREE_OPERAND (operand, 1));
1266
              result = build2 (COMPOUND_EXPR, TREE_TYPE (result),
1267
                               TREE_OPERAND (operand, 0), result);
1268
              break;
1269
            }
1270
          goto common;

        case ARRAY_REF:
        case ARRAY_RANGE_REF:
        case COMPONENT_REF:
        case BIT_FIELD_REF:
          /* If this is for 'Address, find the address of the prefix and add
             the offset to the field.  Otherwise, do this the normal way.  */
          if (op_code == ATTR_ADDR_EXPR)
            {
              HOST_WIDE_INT bitsize;
              HOST_WIDE_INT bitpos;
              tree offset, inner;
              enum machine_mode mode;
              int unsignedp, volatilep;

              inner = get_inner_reference (operand, &bitsize, &bitpos, &offset,
                                           &mode, &unsignedp, &volatilep,
                                           false);

              /* If INNER is a padding type whose field has a self-referential
                 size, convert to that inner type.  We know the offset is zero
                 and we need to have that type visible.  */
              if (TYPE_IS_PADDING_P (TREE_TYPE (inner))
                  && CONTAINS_PLACEHOLDER_P
                     (TYPE_SIZE (TREE_TYPE (TYPE_FIELDS
                                            (TREE_TYPE (inner))))))
                inner = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (inner))),
                                 inner);

              /* Compute the offset as a byte offset from INNER.  */
              if (!offset)
                offset = size_zero_node;

              offset = size_binop (PLUS_EXPR, offset,
                                   size_int (bitpos / BITS_PER_UNIT));

              /* Take the address of INNER, convert it to void *, and add
                 the offset.  It will later be converted to the desired result
                 type, if any.  */
              inner = build_unary_op (ADDR_EXPR, NULL_TREE, inner);
              inner = convert (ptr_void_type_node, inner);
              result = build_binary_op (POINTER_PLUS_EXPR, ptr_void_type_node,
                                        inner, offset);
              result = convert (build_pointer_type (TREE_TYPE (operand)),
                                result);
              break;
            }
          goto common;

        case CONSTRUCTOR:
          /* If this is just a constructor for a padded record, we can
             just take the address of the single field and convert it to
             a pointer to our type.  */
          if (TYPE_IS_PADDING_P (type))
            {
              result = VEC_index (constructor_elt,
                                  CONSTRUCTOR_ELTS (operand),
                                  0)->value;
              result = convert (build_pointer_type (TREE_TYPE (operand)),
                                build_unary_op (ADDR_EXPR, NULL_TREE, result));
              break;
            }

          goto common;

        case NOP_EXPR:
          if (AGGREGATE_TYPE_P (type)
              && AGGREGATE_TYPE_P (TREE_TYPE (TREE_OPERAND (operand, 0))))
            return build_unary_op (ADDR_EXPR, result_type,
                                   TREE_OPERAND (operand, 0));

          /* ... fallthru ... */

        case VIEW_CONVERT_EXPR:
          /* If this is just a variant conversion or if the conversion doesn't
             change the mode, get the result type from this type and go down.
             This is needed for conversions of CONST_DECLs, to eventually get
             to the address of their CORRESPONDING_VARs.  */
          if ((TYPE_MAIN_VARIANT (type)
               == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (operand, 0))))
              || (TYPE_MODE (type) != BLKmode
                  && (TYPE_MODE (type)
                      == TYPE_MODE (TREE_TYPE (TREE_OPERAND (operand, 0))))))
            return build_unary_op (ADDR_EXPR,
                                   (result_type ? result_type
                                    : build_pointer_type (type)),
                                   TREE_OPERAND (operand, 0));
          goto common;

        case CONST_DECL:
          operand = DECL_CONST_CORRESPONDING_VAR (operand);

          /* ... fall through ... */

        default:
        common:

          /* If we are taking the address of a padded record whose field
             contains a template, take the address of the field.  */
          if (TYPE_IS_PADDING_P (type)
              && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type))) == RECORD_TYPE
              && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (TYPE_FIELDS (type))))
            {
              type = TREE_TYPE (TYPE_FIELDS (type));
              operand = convert (type, operand);
            }

          gnat_mark_addressable (operand);
          result = build_fold_addr_expr (operand);
        }

      TREE_CONSTANT (result) = staticp (operand) || TREE_CONSTANT (operand);
      break;

    case INDIRECT_REF:
      {
        tree t = remove_conversions (operand, false);
        bool can_never_be_null = DECL_P (t) && DECL_CAN_NEVER_BE_NULL_P (t);

        /* If TYPE is a thin pointer, either first retrieve the base if this
           is an expression with an offset built for the initialization of an
           object with an unconstrained nominal subtype, or else convert to
           the fat pointer.  */
        if (TYPE_IS_THIN_POINTER_P (type))
          {
            tree rec_type = TREE_TYPE (type);

            if (TREE_CODE (operand) == POINTER_PLUS_EXPR
                && integer_zerop
                   (size_binop (PLUS_EXPR, TREE_OPERAND (operand, 1),
                                DECL_FIELD_OFFSET (TYPE_FIELDS (rec_type))))
                && TREE_CODE (TREE_OPERAND (operand, 0)) == NOP_EXPR)
              {
                operand = TREE_OPERAND (TREE_OPERAND (operand, 0), 0);
                type = TREE_TYPE (operand);
              }
            else if (TYPE_UNCONSTRAINED_ARRAY (rec_type))
              {
                operand
                  = convert (TREE_TYPE (TYPE_UNCONSTRAINED_ARRAY (rec_type)),
                             operand);
                type = TREE_TYPE (operand);
              }
          }

        /* If we want to refer to an unconstrained array, use the appropriate
           expression.  But this will never survive down to the back-end.  */
        if (TYPE_IS_FAT_POINTER_P (type))
          {
            result = build1 (UNCONSTRAINED_ARRAY_REF,
                             TYPE_UNCONSTRAINED_ARRAY (type), operand);
            TREE_READONLY (result)
              = TYPE_READONLY (TYPE_UNCONSTRAINED_ARRAY (type));
          }

        /* If we are dereferencing an ADDR_EXPR, return its operand.  */
        else if (TREE_CODE (operand) == ADDR_EXPR)
          result = TREE_OPERAND (operand, 0);

        /* Otherwise, build and fold the indirect reference.  */
        else
          {
            result = build_fold_indirect_ref (operand);
            TREE_READONLY (result) = TYPE_READONLY (TREE_TYPE (type));
          }

        if (!TYPE_IS_FAT_POINTER_P (type) && TYPE_VOLATILE (TREE_TYPE (type)))
          {
            TREE_SIDE_EFFECTS (result) = 1;
            if (TREE_CODE (result) == INDIRECT_REF)
              TREE_THIS_VOLATILE (result) = TYPE_VOLATILE (TREE_TYPE (result));
          }

        if ((TREE_CODE (result) == INDIRECT_REF
             || TREE_CODE (result) == UNCONSTRAINED_ARRAY_REF)
            && can_never_be_null)
          TREE_THIS_NOTRAP (result) = 1;

        break;
      }

    case NEGATE_EXPR:
    case BIT_NOT_EXPR:
      {
        tree modulus = ((operation_type
                         && TREE_CODE (operation_type) == INTEGER_TYPE
                         && TYPE_MODULAR_P (operation_type))
                        ? TYPE_MODULUS (operation_type) : NULL_TREE);
        int mod_pow2 = modulus && integer_pow2p (modulus);

        /* If this is a modular type, there are various possibilities
           depending on the operation and whether the modulus is a
           power of two or not.  */

        if (modulus)
          {
            gcc_assert (operation_type == base_type);
            operand = convert (operation_type, operand);

            /* For the negate case with a binary modulus, the fastest approach
               is the straightforward code; the TRUNC_MOD_EXPR below
               is an AND operation.  */
            if (op_code == NEGATE_EXPR && mod_pow2)
              result = fold_build2 (TRUNC_MOD_EXPR, operation_type,
                                    fold_build1 (NEGATE_EXPR, operation_type,
                                                 operand),
                                    modulus);

            /* For the nonbinary negate case, return zero for a zero operand,
               else return the modulus minus the operand.  If the modulus
               is a power of two minus one, we can do the subtraction
               as an XOR since it is equivalent and faster on most
               machines.  */
            else if (op_code == NEGATE_EXPR && !mod_pow2)
              {
                if (integer_pow2p (fold_build2 (PLUS_EXPR, operation_type,
                                                modulus,
                                                convert (operation_type,
                                                         integer_one_node))))
                  result = fold_build2 (BIT_XOR_EXPR, operation_type,
                                        operand, modulus);
                else
                  result = fold_build2 (MINUS_EXPR, operation_type,
                                        modulus, operand);

                result = fold_build3 (COND_EXPR, operation_type,
                                      fold_build2 (NE_EXPR,
                                                   boolean_type_node,
                                                   operand,
                                                   convert
                                                     (operation_type,
                                                      integer_zero_node)),
                                      result, operand);
              }
            else
              {
                /* For the NOT cases, we need a constant equal to the modulus
                   minus one.  For a binary modulus, we XOR the operand
                   against that constant; for a nonbinary modulus, we subtract
                   the operand from it.  */

                tree cnst = fold_build2 (MINUS_EXPR, operation_type, modulus,
                                         convert (operation_type,
                                                  integer_one_node));

                if (mod_pow2)
                  result = fold_build2 (BIT_XOR_EXPR, operation_type,
                                        operand, cnst);
                else
                  result = fold_build2 (MINUS_EXPR, operation_type,
                                        cnst, operand);
              }

            break;
          }
      }

      /* ... fall through ... */

    default:
      gcc_assert (operation_type == base_type);
      result = fold_build1 (op_code, operation_type,
                            convert (operation_type, operand));
    }

  if (result_type && TREE_TYPE (result) != result_type)
    result = convert (result_type, result);

  return result;
}
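
/* Added worked example (not in the original source): the modular identities
   above on small values.  For a nonbinary modulus M = 7:  -3 mod 7
   = 7 - 3 = 4, and "not 3" = (7 - 1) - 3 = 3.  When M + 1 is a power of two
   (e.g. M = 7), M - X equals M XOR X, hence the BIT_XOR_EXPR shortcut.  For
   a binary modulus M = 8, negation reduces to (-X) & 7 since the operation
   type is unsigned, which is what the TRUNC_MOD_EXPR above folds to.  */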

/* Similar, but for COND_EXPR.  */

tree
build_cond_expr (tree result_type, tree condition_operand,
                 tree true_operand, tree false_operand)
{
  bool addr_p = false;
  tree result;

  /* The front-end verified that the result, true and false operands have
     the same base type.  Convert everything to the result type.  */
  true_operand = convert (result_type, true_operand);
  false_operand = convert (result_type, false_operand);

  /* If the result type is unconstrained, take the address of the operands and
     then dereference the result.  Likewise if the result type is passed by
     reference, because creating a temporary of this type is not allowed.  */
  if (TREE_CODE (result_type) == UNCONSTRAINED_ARRAY_TYPE
      || TYPE_IS_BY_REFERENCE_P (result_type)
      || CONTAINS_PLACEHOLDER_P (TYPE_SIZE (result_type)))
    {
      result_type = build_pointer_type (result_type);
      true_operand = build_unary_op (ADDR_EXPR, result_type, true_operand);
      false_operand = build_unary_op (ADDR_EXPR, result_type, false_operand);
      addr_p = true;
    }

  result = fold_build3 (COND_EXPR, result_type, condition_operand,
                        true_operand, false_operand);

  /* If we have a common SAVE_EXPR (possibly surrounded by arithmetics)
     in both arms, make sure it gets evaluated by moving it ahead of the
     conditional expression.  This is necessary because it is evaluated
     in only one place at run time and would otherwise be uninitialized
     in one of the arms.  */
  true_operand = skip_simple_arithmetic (true_operand);
  false_operand = skip_simple_arithmetic (false_operand);

  if (true_operand == false_operand && TREE_CODE (true_operand) == SAVE_EXPR)
    result = build2 (COMPOUND_EXPR, result_type, true_operand, result);

  if (addr_p)
    result = build_unary_op (INDIRECT_REF, NULL_TREE, result);

  return result;
}
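
/* Illustrative sketch (not part of the original source): building the
   GENERIC equivalent of "if X >= 0 then X else -X" with build_cond_expr,
   assuming gnu_x is an already-translated signed integer expression.  */
#if 0
static tree
example_abs_value (tree gnu_x)
{
  tree gnu_type = TREE_TYPE (gnu_x);
  tree gnu_cond = build_binary_op (GE_EXPR, boolean_type_node, gnu_x,
                                   convert (gnu_type, integer_zero_node));

  /* Both arms are converted to GNU_TYPE by build_cond_expr itself.  */
  return build_cond_expr (gnu_type, gnu_cond, gnu_x,
                          build_unary_op (NEGATE_EXPR, NULL_TREE, gnu_x));
}
#endif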

/* Similar, but for COMPOUND_EXPR.  */

tree
build_compound_expr (tree result_type, tree stmt_operand, tree expr_operand)
{
  bool addr_p = false;
  tree result;

  /* If the result type is unconstrained, take the address of the operand and
     then dereference the result.  Likewise if the result type is passed by
     reference, but this is natively handled in the gimplifier.  */
  if (TREE_CODE (result_type) == UNCONSTRAINED_ARRAY_TYPE
      || CONTAINS_PLACEHOLDER_P (TYPE_SIZE (result_type)))
    {
      result_type = build_pointer_type (result_type);
      expr_operand = build_unary_op (ADDR_EXPR, result_type, expr_operand);
      addr_p = true;
    }

  result = fold_build2 (COMPOUND_EXPR, result_type, stmt_operand,
                        expr_operand);

  if (addr_p)
    result = build_unary_op (INDIRECT_REF, NULL_TREE, result);

  return result;
}

/* Conveniently construct a function call expression.  FNDECL names the
   function to be called, N is the number of arguments, and the "..."
   parameters are the argument expressions.  Unlike build_call_expr,
   this doesn't fold the call, hence it will always return a CALL_EXPR.  */

tree
build_call_n_expr (tree fndecl, int n, ...)
{
  va_list ap;
  tree fntype = TREE_TYPE (fndecl);
  tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);

  va_start (ap, n);
  fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
  va_end (ap);
  return fn;
}
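
/* Usage sketch (illustrative only): the allocation helpers further down use
   this routine to call into the runtime, e.g. a 1-argument call to the
   __gnat_free entry point through its FUNCTION_DECL:

     gnu_call = build_call_n_expr (free_decl, 1, free_ptr);

   The count N must match the number of trailing argument expressions.  */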

/* Call a function that raises an exception and pass the line number and file
   name, if requested.  MSG says which exception function to call.

   GNAT_NODE is the gnat node conveying the source location for which the
   error should be signaled, or Empty in which case the error is signaled on
   the current ref_file_name/input_line.

   KIND says which kind of exception this is for
   (N_Raise_{Constraint,Storage,Program}_Error).  */

tree
build_call_raise (int msg, Node_Id gnat_node, char kind)
{
  tree fndecl = gnat_raise_decls[msg];
  tree label = get_exception_label (kind);
  tree filename;
  int line_number;
  const char *str;
  int len;

  /* If this is to be done as a goto, handle that case.  */
  if (label)
    {
      Entity_Id local_raise = Get_Local_Raise_Call_Entity ();
      tree gnu_result = build1 (GOTO_EXPR, void_type_node, label);

      /* If Local_Raise is present, generate
         Local_Raise (exception'Identity);  */
      if (Present (local_raise))
        {
          tree gnu_local_raise
            = gnat_to_gnu_entity (local_raise, NULL_TREE, 0);
          tree gnu_exception_entity
            = gnat_to_gnu_entity (Get_RT_Exception_Entity (msg), NULL_TREE, 0);
          tree gnu_call
            = build_call_n_expr (gnu_local_raise, 1,
                                 build_unary_op (ADDR_EXPR, NULL_TREE,
                                                 gnu_exception_entity));

          gnu_result = build2 (COMPOUND_EXPR, void_type_node,
                               gnu_call, gnu_result);
        }

      return gnu_result;
    }

  str
    = (Debug_Flag_NN || Exception_Locations_Suppressed)
      ? ""
      : (gnat_node != Empty && Sloc (gnat_node) != No_Location)
        ? IDENTIFIER_POINTER
          (get_identifier (Get_Name_String
                           (Debug_Source_Name
                            (Get_Source_File_Index (Sloc (gnat_node))))))
        : ref_filename;

  len = strlen (str);
  filename = build_string (len, str);
  line_number
    = (gnat_node != Empty && Sloc (gnat_node) != No_Location)
      ? Get_Logical_Line_Number (Sloc (gnat_node)) : input_line;

  TREE_TYPE (filename) = build_array_type (unsigned_char_type_node,
                                           build_index_type (size_int (len)));

  return
    build_call_n_expr (fndecl, 2,
                       build1 (ADDR_EXPR,
                               build_pointer_type (unsigned_char_type_node),
                               filename),
                       build_int_cst (NULL_TREE, line_number));
}
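
/* Sketch of the result (illustrative; the exact runtime entry point names
   are an assumption here): for a check failing at line 42 of foo.adb, the
   returned tree is a call of the form

     __gnat_rcheck_NN ("foo.adb", 42);

   where NN is selected by MSG's slot in gnat_raise_decls.  */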

/* Similar to build_call_raise, for an index or range check exception as
   determined by MSG, with extra information generated of the form
   "INDEX out of range FIRST..LAST".  */

tree
build_call_raise_range (int msg, Node_Id gnat_node,
                        tree index, tree first, tree last)
{
  tree fndecl = gnat_raise_decls_ext[msg];
  tree filename;
  int line_number, column_number;
  const char *str;
  int len;

  str
    = (Debug_Flag_NN || Exception_Locations_Suppressed)
      ? ""
      : (gnat_node != Empty && Sloc (gnat_node) != No_Location)
        ? IDENTIFIER_POINTER
          (get_identifier (Get_Name_String
                           (Debug_Source_Name
                            (Get_Source_File_Index (Sloc (gnat_node))))))
        : ref_filename;

  len = strlen (str);
  filename = build_string (len, str);
  if (gnat_node != Empty && Sloc (gnat_node) != No_Location)
    {
      line_number = Get_Logical_Line_Number (Sloc (gnat_node));
      column_number = Get_Column_Number (Sloc (gnat_node));
    }
  else
    {
      line_number = input_line;
      column_number = 0;
    }

  TREE_TYPE (filename) = build_array_type (unsigned_char_type_node,
                                           build_index_type (size_int (len)));

  return
    build_call_n_expr (fndecl, 6,
                       build1 (ADDR_EXPR,
                               build_pointer_type (unsigned_char_type_node),
                               filename),
                       build_int_cst (NULL_TREE, line_number),
                       build_int_cst (NULL_TREE, column_number),
                       convert (integer_type_node, index),
                       convert (integer_type_node, first),
                       convert (integer_type_node, last));
}

/* Similar to build_call_raise, with extra information about the column
   where the check failed.  */

tree
build_call_raise_column (int msg, Node_Id gnat_node)
{
  tree fndecl = gnat_raise_decls_ext[msg];
  tree filename;
  int line_number, column_number;
  const char *str;
  int len;

  str
    = (Debug_Flag_NN || Exception_Locations_Suppressed)
      ? ""
      : (gnat_node != Empty && Sloc (gnat_node) != No_Location)
        ? IDENTIFIER_POINTER
          (get_identifier (Get_Name_String
                           (Debug_Source_Name
                            (Get_Source_File_Index (Sloc (gnat_node))))))
        : ref_filename;

  len = strlen (str);
  filename = build_string (len, str);
  if (gnat_node != Empty && Sloc (gnat_node) != No_Location)
    {
      line_number = Get_Logical_Line_Number (Sloc (gnat_node));
      column_number = Get_Column_Number (Sloc (gnat_node));
    }
  else
    {
      line_number = input_line;
      column_number = 0;
    }

  TREE_TYPE (filename) = build_array_type (unsigned_char_type_node,
                                           build_index_type (size_int (len)));

  return
    build_call_n_expr (fndecl, 3,
                       build1 (ADDR_EXPR,
                               build_pointer_type (unsigned_char_type_node),
                               filename),
                       build_int_cst (NULL_TREE, line_number),
                       build_int_cst (NULL_TREE, column_number));
}

/* qsort comparer for the bit positions of two constructor elements
   for record components.  */

static int
compare_elmt_bitpos (const PTR rt1, const PTR rt2)
{
  const constructor_elt * const elmt1 = (const constructor_elt * const) rt1;
  const constructor_elt * const elmt2 = (const constructor_elt * const) rt2;
  const_tree const field1 = elmt1->index;
  const_tree const field2 = elmt2->index;
  const int ret
    = tree_int_cst_compare (bit_position (field1), bit_position (field2));

  return ret ? ret : (int) (DECL_UID (field1) - DECL_UID (field2));
}
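
/* Added note: qsort is not a stable sort, so ties on the bit position are
   broken with DECL_UID above; this keeps the element order deterministic
   when two fields happen to share a position.  */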

/* Return a CONSTRUCTOR of TYPE whose elements are V.  */

tree
gnat_build_constructor (tree type, VEC(constructor_elt,gc) *v)
{
  bool allconstant = (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST);
  bool side_effects = false;
  tree result, obj, val;
  unsigned int n_elmts;

  /* Scan the elements to see if they are all constant or if any has side
     effects, to let us set global flags on the resulting constructor.  Count
     the elements along the way for possible sorting purposes below.  */
  FOR_EACH_CONSTRUCTOR_ELT (v, n_elmts, obj, val)
    {
      /* The predicate must be in keeping with output_constructor.  */
      if ((!TREE_CONSTANT (val) && !TREE_STATIC (val))
          || (TREE_CODE (type) == RECORD_TYPE
              && CONSTRUCTOR_BITFIELD_P (obj)
              && !initializer_constant_valid_for_bitfield_p (val))
          || !initializer_constant_valid_p (val, TREE_TYPE (val)))
        allconstant = false;

      if (TREE_SIDE_EFFECTS (val))
        side_effects = true;
    }

  /* For record types with constant components only, sort field list
     by increasing bit position.  This is necessary to ensure the
     constructor can be output as static data.  */
  if (allconstant && TREE_CODE (type) == RECORD_TYPE && n_elmts > 1)
    VEC_qsort (constructor_elt, v, compare_elmt_bitpos);

  result = build_constructor (type, v);
  TREE_CONSTANT (result) = TREE_STATIC (result) = allconstant;
  TREE_SIDE_EFFECTS (result) = side_effects;
  TREE_READONLY (result) = TYPE_READONLY (type) || allconstant;
  return result;
}
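
/* Example (illustrative only, not in the original source): building a
   constructor for a two-field record type gnu_rec_type from two translated
   values, mirroring the pattern used by build_allocator below.  */
#if 0
static tree
example_build_pair (tree gnu_rec_type, tree gnu_val1, tree gnu_val2)
{
  VEC(constructor_elt,gc) *v = VEC_alloc (constructor_elt, gc, 2);

  /* Elements are keyed by the FIELD_DECLs chained off TYPE_FIELDS.  */
  CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (gnu_rec_type), gnu_val1);
  CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (gnu_rec_type)),
                          gnu_val2);
  return gnat_build_constructor (gnu_rec_type, v);
}
#endif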

/* Return a COMPONENT_REF to access a field that is given by COMPONENT,
   an IDENTIFIER_NODE giving the name of the field, or FIELD, a FIELD_DECL,
   for the field.  Don't fold the result if NO_FOLD_P is true.

   We also handle the fact that we might have been passed a pointer to the
   actual record and know how to look for fields in variant parts.  */

static tree
build_simple_component_ref (tree record_variable, tree component,
                            tree field, bool no_fold_p)
{
  tree record_type = TYPE_MAIN_VARIANT (TREE_TYPE (record_variable));
  tree ref, inner_variable;

  gcc_assert (RECORD_OR_UNION_TYPE_P (record_type)
              && COMPLETE_TYPE_P (record_type)
              && (component == NULL_TREE) != (field == NULL_TREE));

  /* If no field was specified, look for a field with the specified name in
     the current record only.  */
  if (!field)
    for (field = TYPE_FIELDS (record_type);
         field;
         field = DECL_CHAIN (field))
      if (DECL_NAME (field) == component)
        break;

  if (!field)
    return NULL_TREE;

  /* If this field is not in the specified record, see if we can find a field
     in the specified record whose original field is the same as this one.  */
  if (DECL_CONTEXT (field) != record_type)
    {
      tree new_field;

      /* First loop thru normal components.  */
      for (new_field = TYPE_FIELDS (record_type);
           new_field;
           new_field = DECL_CHAIN (new_field))
        if (SAME_FIELD_P (field, new_field))
          break;

      /* Next, see if we're looking for an inherited component in an extension.
         If so, look thru the extension directly.  */
      if (!new_field
          && TREE_CODE (record_variable) == VIEW_CONVERT_EXPR
          && TYPE_ALIGN_OK (record_type)
          && TREE_CODE (TREE_TYPE (TREE_OPERAND (record_variable, 0)))
             == RECORD_TYPE
          && TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (record_variable, 0))))
        {
          ref = build_simple_component_ref (TREE_OPERAND (record_variable, 0),
                                            NULL_TREE, field, no_fold_p);
          if (ref)
            return ref;
        }

      /* Next, loop thru DECL_INTERNAL_P components if we haven't found the
         component in the first search.  Doing this search in two steps is
         required to avoid hidden homonymous fields in the _Parent field.  */
      if (!new_field)
        for (new_field = TYPE_FIELDS (record_type);
             new_field;
             new_field = DECL_CHAIN (new_field))
          if (DECL_INTERNAL_P (new_field))
            {
              tree field_ref
                = build_simple_component_ref (record_variable,
                                              NULL_TREE, new_field, no_fold_p);
              ref = build_simple_component_ref (field_ref, NULL_TREE, field,
                                                no_fold_p);
              if (ref)
                return ref;
            }

      field = new_field;
    }

  if (!field)
    return NULL_TREE;

  /* If the field's offset has overflowed, do not try to access it, as doing
     so may trigger sanity checks deeper in the back-end.  Note that we don't
     need to warn since this will be done on trying to declare the object.  */
  if (TREE_CODE (DECL_FIELD_OFFSET (field)) == INTEGER_CST
      && TREE_OVERFLOW (DECL_FIELD_OFFSET (field)))
    return NULL_TREE;

  /* Look through conversion between type variants.  This is transparent as
     far as the field is concerned.  */
  if (TREE_CODE (record_variable) == VIEW_CONVERT_EXPR
      && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (record_variable, 0)))
         == record_type)
    inner_variable = TREE_OPERAND (record_variable, 0);
  else
    inner_variable = record_variable;

  ref = build3 (COMPONENT_REF, TREE_TYPE (field), inner_variable, field,
                NULL_TREE);

  if (TREE_READONLY (record_variable)
      || TREE_READONLY (field)
      || TYPE_READONLY (record_type))
    TREE_READONLY (ref) = 1;

  if (TREE_THIS_VOLATILE (record_variable)
      || TREE_THIS_VOLATILE (field)
      || TYPE_VOLATILE (record_type))
    TREE_THIS_VOLATILE (ref) = 1;

  if (no_fold_p)
    return ref;

  /* The generic folder may punt in this case because the inner array type
     can be self-referential, but folding is in fact not problematic.  */
  if (TREE_CODE (record_variable) == CONSTRUCTOR
      && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (record_variable)))
    {
      VEC(constructor_elt,gc) *elts = CONSTRUCTOR_ELTS (record_variable);
      unsigned HOST_WIDE_INT idx;
      tree index, value;
      FOR_EACH_CONSTRUCTOR_ELT (elts, idx, index, value)
        if (index == field)
          return value;
      return ref;
    }

  return fold (ref);
}
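
/* Added commentary (illustrative): for a record extension, a component
   inherited from the parent is not among the extension's own fields, so the
   first pass above fails; the second pass then recurses through internal
   fields such as the _Parent component, so that a reference like Obj.C is
   effectively rewritten as Obj._Parent.C in the tree.  */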

/* Like build_simple_component_ref, except that we give an error if the
   reference could not be found.  */

tree
build_component_ref (tree record_variable, tree component,
                     tree field, bool no_fold_p)
{
  tree ref = build_simple_component_ref (record_variable, component, field,
                                         no_fold_p);

  if (ref)
    return ref;

  /* If FIELD was specified, assume this is an invalid user field so raise
     Constraint_Error.  Otherwise, we have no type to return so abort.  */
  gcc_assert (field);
  return build1 (NULL_EXPR, TREE_TYPE (field),
                 build_call_raise (CE_Discriminant_Check_Failed, Empty,
                                   N_Raise_Constraint_Error));
}

/* Helper for build_call_alloc_dealloc, with arguments to be interpreted
   identically.  Process the case where a GNAT_PROC to call is provided.  */

static inline tree
build_call_alloc_dealloc_proc (tree gnu_obj, tree gnu_size, tree gnu_type,
                               Entity_Id gnat_proc, Entity_Id gnat_pool)
{
  tree gnu_proc = gnat_to_gnu (gnat_proc);
  tree gnu_call;

  /* A storage pool's underlying type is a record type (for both predefined
     storage pools and GNAT simple storage pools).  The secondary stack uses
     the same mechanism, but its pool object (SS_Pool) is an integer.  */
  if (Is_Record_Type (Underlying_Type (Etype (gnat_pool))))
    {
      /* The size is the third parameter; the alignment is the fourth
         parameter and has the same type.  */
      Entity_Id gnat_size_type
        = Etype (Next_Formal (Next_Formal (First_Formal (gnat_proc))));
      tree gnu_size_type = gnat_to_gnu_type (gnat_size_type);

      tree gnu_pool = gnat_to_gnu (gnat_pool);
      tree gnu_pool_addr = build_unary_op (ADDR_EXPR, NULL_TREE, gnu_pool);
      tree gnu_align = size_int (TYPE_ALIGN (gnu_type) / BITS_PER_UNIT);

      gnu_size = convert (gnu_size_type, gnu_size);
      gnu_align = convert (gnu_size_type, gnu_align);

      /* The first arg is always the address of the storage pool; next
         comes the address of the object, for a deallocator, then the
         size and alignment.  */
      if (gnu_obj)
        gnu_call = build_call_n_expr (gnu_proc, 4, gnu_pool_addr, gnu_obj,
                                      gnu_size, gnu_align);
      else
        gnu_call = build_call_n_expr (gnu_proc, 3, gnu_pool_addr,
                                      gnu_size, gnu_align);
    }

  /* Secondary stack case.  */
  else
    {
      /* The size is the second parameter.  */
      Entity_Id gnat_size_type
        = Etype (Next_Formal (First_Formal (gnat_proc)));
      tree gnu_size_type = gnat_to_gnu_type (gnat_size_type);

      gnu_size = convert (gnu_size_type, gnu_size);

      /* The first arg is the address of the object, for a deallocator,
         then the size.  */
      if (gnu_obj)
        gnu_call = build_call_n_expr (gnu_proc, 2, gnu_obj, gnu_size);
      else
        gnu_call = build_call_n_expr (gnu_proc, 1, gnu_size);
    }

  return gnu_call;
}
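
/* Added sketch (illustrative): the two call shapes built above are

     storage pool:    proc (pool'address, [obj,] size, align)
     secondary stack: proc ([obj,] size)

   with the object address present only in the deallocation case.  */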

/* Helper for build_call_alloc_dealloc, to build and return an allocator for
   DATA_SIZE bytes aimed at containing a DATA_TYPE object, using the default
   __gnat_malloc allocator.  Honor DATA_TYPE alignments greater than what the
   latter offers.  */

static inline tree
maybe_wrap_malloc (tree data_size, tree data_type, Node_Id gnat_node)
{
  /* When the DATA_TYPE alignment is stricter than what malloc offers
     (super-aligned case), we allocate an "aligning" wrapper type and return
     the address of its single data field with the malloc's return value
     stored just in front.  */

  unsigned int data_align = TYPE_ALIGN (data_type);
  unsigned int system_allocator_alignment
      = get_target_system_allocator_alignment () * BITS_PER_UNIT;

  tree aligning_type
    = ((data_align > system_allocator_alignment)
       ? make_aligning_type (data_type, data_align, data_size,
                             system_allocator_alignment,
                             POINTER_SIZE / BITS_PER_UNIT)
       : NULL_TREE);

  tree size_to_malloc
    = aligning_type ? TYPE_SIZE_UNIT (aligning_type) : data_size;

  tree malloc_ptr;

  /* On VMS, if pointers are 64-bit and the allocator size is 32-bit or
     Convention C, allocate 32-bit memory.  */
  if (TARGET_ABI_OPEN_VMS
      && POINTER_SIZE == 64
      && Nkind (gnat_node) == N_Allocator
      && (UI_To_Int (Esize (Etype (gnat_node))) == 32
          || Convention (Etype (gnat_node)) == Convention_C))
    malloc_ptr = build_call_n_expr (malloc32_decl, 1, size_to_malloc);
  else
    malloc_ptr = build_call_n_expr (malloc_decl, 1, size_to_malloc);

  if (aligning_type)
    {
      /* Latch malloc's return value and get a pointer to the aligning field
         first.  */
      tree storage_ptr = gnat_protect_expr (malloc_ptr);

      tree aligning_record_addr
        = convert (build_pointer_type (aligning_type), storage_ptr);

      tree aligning_record
        = build_unary_op (INDIRECT_REF, NULL_TREE, aligning_record_addr);

      tree aligning_field
        = build_component_ref (aligning_record, NULL_TREE,
                               TYPE_FIELDS (aligning_type), false);

      tree aligning_field_addr
        = build_unary_op (ADDR_EXPR, NULL_TREE, aligning_field);

      /* Then arrange to store the allocator's return value ahead
         and return.  */
      tree storage_ptr_slot_addr
        = build_binary_op (POINTER_PLUS_EXPR, ptr_void_type_node,
                           convert (ptr_void_type_node, aligning_field_addr),
                           size_int (-(HOST_WIDE_INT) POINTER_SIZE
                                     / BITS_PER_UNIT));

      tree storage_ptr_slot
        = build_unary_op (INDIRECT_REF, NULL_TREE,
                          convert (build_pointer_type (ptr_void_type_node),
                                   storage_ptr_slot_addr));

      return
        build2 (COMPOUND_EXPR, TREE_TYPE (aligning_field_addr),
                build_binary_op (INIT_EXPR, NULL_TREE,
                                 storage_ptr_slot, storage_ptr),
                aligning_field_addr);
    }
  else
    return malloc_ptr;
}
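
/* Added commentary (illustrative): layout of a super-aligned allocation as
   built above, assuming the saved pointer occupies POINTER_SIZE bits:

     malloc result -> [...padding...][saved malloc ptr][data field ...]
                                                       ^
                                     returned, suitably aligned address

   maybe_wrap_free below reads the saved pointer back from just in front of
   the data in order to undo this wrapping.  */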

/* Helper for build_call_alloc_dealloc, to release a DATA_TYPE object
   designated by DATA_PTR using the __gnat_free entry point.  */

static inline tree
maybe_wrap_free (tree data_ptr, tree data_type)
{
  /* In the regular alignment case, we pass the data pointer straight to free.
     In the superaligned case, we need to retrieve the initial allocator
     return value, stored in front of the data block at allocation time.  */

  unsigned int data_align = TYPE_ALIGN (data_type);
  unsigned int system_allocator_alignment
      = get_target_system_allocator_alignment () * BITS_PER_UNIT;

  tree free_ptr;

  if (data_align > system_allocator_alignment)
    {
      /* DATA_FRONT_PTR (void *) = (void *)DATA_PTR - sizeof (void *)  */
      tree data_front_ptr
        = build_binary_op
          (POINTER_PLUS_EXPR, ptr_void_type_node,
           convert (ptr_void_type_node, data_ptr),
           size_int (-(HOST_WIDE_INT) POINTER_SIZE / BITS_PER_UNIT));

      /* FREE_PTR (void *) = *(void **)DATA_FRONT_PTR  */
      free_ptr
        = build_unary_op
          (INDIRECT_REF, NULL_TREE,
           convert (build_pointer_type (ptr_void_type_node), data_front_ptr));
    }
  else
    free_ptr = data_ptr;

  return build_call_n_expr (free_decl, 1, free_ptr);
}

/* Build a GCC tree to call an allocation or deallocation function.
   If GNU_OBJ is nonzero, it is an object to deallocate.  Otherwise,
   generate an allocator.

   GNU_SIZE is the number of bytes to allocate and GNU_TYPE is the contained
   object type, used to determine the to-be-honored address alignment.
   GNAT_PROC, if present, is a procedure to call and GNAT_POOL is the storage
   pool to use.  If not present, malloc and free are used.  GNAT_NODE is used
   to provide an error location for restriction violation messages.  */

tree
build_call_alloc_dealloc (tree gnu_obj, tree gnu_size, tree gnu_type,
                          Entity_Id gnat_proc, Entity_Id gnat_pool,
                          Node_Id gnat_node)
{
  gnu_size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (gnu_size, gnu_obj);

  /* Explicit proc to call?  This one is assumed to deal with the type
     alignment constraints.  */
  if (Present (gnat_proc))
    return build_call_alloc_dealloc_proc (gnu_obj, gnu_size, gnu_type,
                                          gnat_proc, gnat_pool);

  /* Otherwise, object to "free" or "malloc" with possible special processing
     for alignments stricter than what the default allocator honors.  */
  else if (gnu_obj)
    return maybe_wrap_free (gnu_obj, gnu_type);
  else
    {
      /* Assert that we no longer can be called with this special pool.  */
      gcc_assert (gnat_pool != -1);

      /* Check that we aren't violating the associated restriction.  */
      if (!(Nkind (gnat_node) == N_Allocator && Comes_From_Source (gnat_node)))
        Check_No_Implicit_Heap_Alloc (gnat_node);

      return maybe_wrap_malloc (gnu_size, gnu_type, gnat_node);
    }
}

/* Build a GCC tree that corresponds to allocating an object of TYPE whose
   initial value is INIT, if INIT is nonzero.  Convert the expression to
   RESULT_TYPE, which must be some pointer type, and return the result.

   GNAT_PROC and GNAT_POOL optionally give the procedure to call and
   the storage pool to use.  GNAT_NODE is used to provide an error
   location for restriction violation messages.  If IGNORE_INIT_TYPE is
   true, ignore the type of INIT for the purpose of determining the size;
   this will cause the maximum size to be allocated if TYPE is of
   self-referential size.  */

tree
build_allocator (tree type, tree init, tree result_type, Entity_Id gnat_proc,
                 Entity_Id gnat_pool, Node_Id gnat_node, bool ignore_init_type)
{
  tree size, storage, storage_deref, storage_init;

  /* If the initializer, if present, is a NULL_EXPR, just return a new one.  */
  if (init && TREE_CODE (init) == NULL_EXPR)
    return build1 (NULL_EXPR, result_type, TREE_OPERAND (init, 0));

  /* If the initializer, if present, is a COND_EXPR, deal with each branch.  */
  else if (init && TREE_CODE (init) == COND_EXPR)
    return build3 (COND_EXPR, result_type, TREE_OPERAND (init, 0),
                   build_allocator (type, TREE_OPERAND (init, 1), result_type,
                                    gnat_proc, gnat_pool, gnat_node,
                                    ignore_init_type),
                   build_allocator (type, TREE_OPERAND (init, 2), result_type,
                                    gnat_proc, gnat_pool, gnat_node,
                                    ignore_init_type));

  /* If RESULT_TYPE is a fat or thin pointer, set SIZE to be the sum of the
     sizes of the object and its template.  Allocate the whole thing and
     fill in the parts that are known.  */
  else if (TYPE_IS_FAT_OR_THIN_POINTER_P (result_type))
    {
      tree storage_type
        = build_unc_object_type_from_ptr (result_type, type,
                                          get_identifier ("ALLOC"), false);
      tree template_type = TREE_TYPE (TYPE_FIELDS (storage_type));
      tree storage_ptr_type = build_pointer_type (storage_type);

      size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (storage_type),
                                             init);

      /* If the size overflows, pass -1 so Storage_Error will be raised.  */
      if (TREE_CODE (size) == INTEGER_CST && TREE_OVERFLOW (size))
        size = ssize_int (-1);

      storage = build_call_alloc_dealloc (NULL_TREE, size, storage_type,
                                          gnat_proc, gnat_pool, gnat_node);
      storage = convert (storage_ptr_type, gnat_protect_expr (storage));
      storage_deref = build_unary_op (INDIRECT_REF, NULL_TREE, storage);
      TREE_THIS_NOTRAP (storage_deref) = 1;

      /* If there is an initializing expression, then make a constructor for
         the entire object including the bounds and copy it into the object.
         If there is no initializing expression, just set the bounds.  */
      if (init)
        {
          VEC(constructor_elt,gc) *v = VEC_alloc (constructor_elt, gc, 2);

          CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (storage_type),
                                  build_template (template_type, type, init));
          CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (storage_type)),
                                  init);
          storage_init
            = build_binary_op (INIT_EXPR, NULL_TREE, storage_deref,
                               gnat_build_constructor (storage_type, v));
        }
      else
        storage_init
          = build_binary_op (INIT_EXPR, NULL_TREE,
                             build_component_ref (storage_deref, NULL_TREE,
                                                  TYPE_FIELDS (storage_type),
                                                  false),
                             build_template (template_type, type, NULL_TREE));

      return build2 (COMPOUND_EXPR, result_type,
                     storage_init, convert (result_type, storage));
    }

  size = TYPE_SIZE_UNIT (type);

  /* If we have an initializing expression, see if its size is simpler
     than the size from the type.  */
  if (!ignore_init_type && init && TYPE_SIZE_UNIT (TREE_TYPE (init))
      && (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (init))) == INTEGER_CST
          || CONTAINS_PLACEHOLDER_P (size)))
    size = TYPE_SIZE_UNIT (TREE_TYPE (init));

  /* If the size is still self-referential, reference the initializing
     expression, if it is present.  If not, this must have been a
     call to allocate a library-level object, in which case we use
     the maximum size.  */
  if (CONTAINS_PLACEHOLDER_P (size))
    {
      if (!ignore_init_type && init)
        size = substitute_placeholder_in_expr (size, init);
      else
        size = max_size (size, true);
    }

  /* If the size overflows, pass -1 so Storage_Error will be raised.  */
  if (TREE_CODE (size) == INTEGER_CST && TREE_OVERFLOW (size))
    size = ssize_int (-1);

  storage = convert (result_type,
                     build_call_alloc_dealloc (NULL_TREE, size, type,
                                               gnat_proc, gnat_pool,
                                               gnat_node));

  /* If we have an initial value, protect the new address, assign the value
     and return the address with a COMPOUND_EXPR.  */
  if (init)
    {
      storage = gnat_protect_expr (storage);
      storage_deref = build_unary_op (INDIRECT_REF, NULL_TREE, storage);
      TREE_THIS_NOTRAP (storage_deref) = 1;
      storage_init
        = build_binary_op (INIT_EXPR, NULL_TREE, storage_deref, init);
      return build2 (COMPOUND_EXPR, result_type, storage_init, storage);
    }

  return storage;
}
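
/* Usage sketch (illustrative only; the gnu_/gnat_ names are assumptions):
   a simple allocator such as "new Natural'(0)" reaches this routine
   roughly as

     gnu_ptr = build_allocator (gnu_nat_type,
                                convert (gnu_nat_type, integer_zero_node),
                                gnu_access_type, Empty, Empty,
                                gnat_alloc_node, false);

   with no user-defined storage pool, so __gnat_malloc is used underneath.  */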

/* Indicate that we need to take the address of T and that it therefore
   should not be allocated in a register.  Returns true if successful.  */

bool
gnat_mark_addressable (tree t)
{
  while (true)
    switch (TREE_CODE (t))
      {
      case ADDR_EXPR:
      case COMPONENT_REF:
      case ARRAY_REF:
      case ARRAY_RANGE_REF:
      case REALPART_EXPR:
      case IMAGPART_EXPR:
      case VIEW_CONVERT_EXPR:
      case NON_LVALUE_EXPR:
      CASE_CONVERT:
        t = TREE_OPERAND (t, 0);
        break;

      case COMPOUND_EXPR:
        t = TREE_OPERAND (t, 1);
        break;

      case CONSTRUCTOR:
        TREE_ADDRESSABLE (t) = 1;
        return true;

      case VAR_DECL:
      case PARM_DECL:
      case RESULT_DECL:
        TREE_ADDRESSABLE (t) = 1;
        return true;

      case FUNCTION_DECL:
        TREE_ADDRESSABLE (t) = 1;
        return true;

      case CONST_DECL:
        return DECL_CONST_CORRESPONDING_VAR (t)
               && gnat_mark_addressable (DECL_CONST_CORRESPONDING_VAR (t));

      default:
        return true;
      }
}

/* Save EXP for later use or reuse.  This is equivalent to save_expr in tree.c
   but we know how to handle our own nodes.  */

tree
gnat_save_expr (tree exp)
{
  tree type = TREE_TYPE (exp);
  enum tree_code code = TREE_CODE (exp);

  if (TREE_CONSTANT (exp) || code == SAVE_EXPR || code == NULL_EXPR)
    return exp;

  if (code == UNCONSTRAINED_ARRAY_REF)
    {
      tree t = build1 (code, type, gnat_save_expr (TREE_OPERAND (exp, 0)));
      TREE_READONLY (t) = TYPE_READONLY (type);
      return t;
    }

  /* If this is a COMPONENT_REF of a fat pointer, save the entire fat pointer.
     This may be more efficient, but will also allow us to more easily find
     the match for the PLACEHOLDER_EXPR.  */
  if (code == COMPONENT_REF
      && TYPE_IS_FAT_POINTER_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
    return build3 (code, type, gnat_save_expr (TREE_OPERAND (exp, 0)),
                   TREE_OPERAND (exp, 1), TREE_OPERAND (exp, 2));

  return save_expr (exp);
}

/* Protect EXP for immediate reuse.  This is a variant of gnat_save_expr that
   is optimized under the assumption that EXP's value doesn't change before
   its subsequent reuse(s) except through its potential reevaluation.  */

tree
gnat_protect_expr (tree exp)
{
  tree type = TREE_TYPE (exp);
  enum tree_code code = TREE_CODE (exp);

  if (TREE_CONSTANT (exp) || code == SAVE_EXPR || code == NULL_EXPR)
    return exp;

  /* If EXP has no side effects, we theoretically don't need to do anything.
     However, we may be recursively passed more and more complex expressions
     involving checks which will be reused multiple times and eventually be
     unshared for gimplification; in order to avoid a complexity explosion
     at that point, we protect any expressions more complex than a simple
     arithmetic expression.  */
  if (!TREE_SIDE_EFFECTS (exp))
    {
      tree inner = skip_simple_arithmetic (exp);
      if (!EXPR_P (inner) || REFERENCE_CLASS_P (inner))
        return exp;
    }

  /* If this is a conversion, protect what's inside the conversion.  */
  if (code == NON_LVALUE_EXPR
      || CONVERT_EXPR_CODE_P (code)
      || code == VIEW_CONVERT_EXPR)
    return build1 (code, type, gnat_protect_expr (TREE_OPERAND (exp, 0)));

  /* If we're indirectly referencing something, we only need to protect the
     address since the data itself can't change in these situations.  */
  if (code == INDIRECT_REF || code == UNCONSTRAINED_ARRAY_REF)
    {
      tree t = build1 (code, type, gnat_protect_expr (TREE_OPERAND (exp, 0)));
      TREE_READONLY (t) = TYPE_READONLY (type);
      return t;
    }

  /* If this is a COMPONENT_REF of a fat pointer, save the entire fat pointer.
     This may be more efficient, but will also allow us to more easily find
     the match for the PLACEHOLDER_EXPR.  */
  if (code == COMPONENT_REF
      && TYPE_IS_FAT_POINTER_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
    return build3 (code, type, gnat_protect_expr (TREE_OPERAND (exp, 0)),
                   TREE_OPERAND (exp, 1), TREE_OPERAND (exp, 2));

  /* If this is a fat pointer or something that can be placed in a register,
     just make a SAVE_EXPR.  Likewise for a CALL_EXPR as large objects are
     returned via invisible reference in most ABIs so the temporary will
     directly be filled by the callee.  */
  if (TYPE_IS_FAT_POINTER_P (type)
      || TYPE_MODE (type) != BLKmode
      || code == CALL_EXPR)
    return save_expr (exp);

  /* Otherwise, take a reference: protect the address and dereference it.  */
  return
    build_unary_op (INDIRECT_REF, type,
                    save_expr (build_unary_op (ADDR_EXPR,
                                               build_reference_type (type),
                                               exp)));
}
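
/* Example (illustrative only): protecting an expression that the caller
   will reuse, as done for the malloc result in maybe_wrap_malloc above:

     tree storage_ptr = gnat_protect_expr (malloc_ptr);

   A CALL_EXPR is wrapped in a SAVE_EXPR, so both later uses of storage_ptr
   reuse one temporary instead of calling malloc twice.  */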

/* This is equivalent to stabilize_reference_1 in tree.c but we take an extra
   argument to force evaluation of everything.  */

static tree
gnat_stabilize_reference_1 (tree e, bool force)
{
  enum tree_code code = TREE_CODE (e);
  tree type = TREE_TYPE (e);
  tree result;

  /* We cannot ignore const expressions because E might be a reference to a
     const array whose index contains side-effects.  But we can ignore things
     that are actual constants or that have already been handled by this
     function.  */
  if (TREE_CONSTANT (e) || code == SAVE_EXPR)
    return e;

  switch (TREE_CODE_CLASS (code))
    {
    case tcc_exceptional:
    case tcc_declaration:
    case tcc_comparison:
    case tcc_expression:
    case tcc_reference:
    case tcc_vl_exp:
      /* If this is a COMPONENT_REF of a fat pointer, save the entire
         fat pointer.  This may be more efficient, but will also allow
         us to more easily find the match for the PLACEHOLDER_EXPR.  */
      if (code == COMPONENT_REF
          && TYPE_IS_FAT_POINTER_P (TREE_TYPE (TREE_OPERAND (e, 0))))
        result
          = build3 (code, type,
                    gnat_stabilize_reference_1 (TREE_OPERAND (e, 0), force),
                    TREE_OPERAND (e, 1), TREE_OPERAND (e, 2));
      /* If the expression has side-effects, then encase it in a SAVE_EXPR
         so that it will only be evaluated once.  */
      /* The tcc_reference and tcc_comparison classes could be handled as
         below, but it is generally faster to only evaluate them once.  */
      else if (TREE_SIDE_EFFECTS (e) || force)
        return save_expr (e);
      else
        return e;
      break;

    case tcc_binary:
      /* Recursively stabilize each operand.  */
      result
        = build2 (code, type,
                  gnat_stabilize_reference_1 (TREE_OPERAND (e, 0), force),
                  gnat_stabilize_reference_1 (TREE_OPERAND (e, 1), force));
      break;

    case tcc_unary:
      /* Recursively stabilize each operand.  */
      result
        = build1 (code, type,
                  gnat_stabilize_reference_1 (TREE_OPERAND (e, 0), force));
      break;

    default:
      gcc_unreachable ();
    }

  /* See similar handling in gnat_stabilize_reference.  */
  TREE_READONLY (result) = TREE_READONLY (e);
  TREE_SIDE_EFFECTS (result) |= TREE_SIDE_EFFECTS (e);
  TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (e);

  if (code == INDIRECT_REF
      || code == UNCONSTRAINED_ARRAY_REF
      || code == ARRAY_REF
      || code == ARRAY_RANGE_REF)
    TREE_THIS_NOTRAP (result) = TREE_THIS_NOTRAP (e);

  return result;
}
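
/* Added commentary (illustrative): for a reference such as A (F (I)),
   gnat_stabilize_reference below rebuilds the ARRAY_REF and routes the
   index through gnat_stabilize_reference_1, which wraps the call F (I) in
   a SAVE_EXPR; every later use of the stabilized reference then reuses the
   same index value instead of calling F again.  */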

/* This is equivalent to stabilize_reference in tree.c but we know how to
   handle our own nodes and we take extra arguments.  FORCE says whether to
   force evaluation of everything.  We set SUCCESS to true unless we walk
   through something we don't know how to stabilize.  */

tree
gnat_stabilize_reference (tree ref, bool force, bool *success)
{
  tree type = TREE_TYPE (ref);
  enum tree_code code = TREE_CODE (ref);
  tree result;

  /* Assume we'll succeed unless proven otherwise.  */
  if (success)
    *success = true;

  switch (code)
    {
    case CONST_DECL:
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      /* No action is needed in this case.  */
      return ref;

    case ADDR_EXPR:
    CASE_CONVERT:
    case FLOAT_EXPR:
    case FIX_TRUNC_EXPR:
    case VIEW_CONVERT_EXPR:
      result
        = build1 (code, type,
                  gnat_stabilize_reference (TREE_OPERAND (ref, 0), force,
                                            success));
      break;

    case INDIRECT_REF:
    case UNCONSTRAINED_ARRAY_REF:
      result = build1 (code, type,
                       gnat_stabilize_reference_1 (TREE_OPERAND (ref, 0),
                                                   force));
      break;

    case COMPONENT_REF:
      result = build3 (COMPONENT_REF, type,
                       gnat_stabilize_reference (TREE_OPERAND (ref, 0), force,
                                                 success),
                       TREE_OPERAND (ref, 1), NULL_TREE);
      break;

    case BIT_FIELD_REF:
      result = build3 (BIT_FIELD_REF, type,
                       gnat_stabilize_reference (TREE_OPERAND (ref, 0), force,
                                                 success),
                       gnat_stabilize_reference_1 (TREE_OPERAND (ref, 1),
                                                   force),
                       gnat_stabilize_reference_1 (TREE_OPERAND (ref, 2),
                                                   force));
      break;

    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      result = build4 (code, type,
                       gnat_stabilize_reference (TREE_OPERAND (ref, 0), force,
                                                 success),
                       gnat_stabilize_reference_1 (TREE_OPERAND (ref, 1),
                                                   force),
                       NULL_TREE, NULL_TREE);
      break;

    case CALL_EXPR:
      result = gnat_stabilize_reference_1 (ref, force);
      break;

    case COMPOUND_EXPR:
      result = build2 (COMPOUND_EXPR, type,
                       gnat_stabilize_reference (TREE_OPERAND (ref, 0), force,
                                                 success),
                       gnat_stabilize_reference (TREE_OPERAND (ref, 1), force,
                                                 success));
      break;

    case CONSTRUCTOR:
      /* Constructors with 1 element are used extensively to formally
         convert objects to special wrapping types.  */
      if (TREE_CODE (type) == RECORD_TYPE
          && VEC_length (constructor_elt, CONSTRUCTOR_ELTS (ref)) == 1)
        {
          tree index
            = VEC_index (constructor_elt, CONSTRUCTOR_ELTS (ref), 0)->index;
          tree value
            = VEC_index (constructor_elt, CONSTRUCTOR_ELTS (ref), 0)->value;
          result
            = build_constructor_single (type, index,
                                        gnat_stabilize_reference_1 (value,
                                                                    force));
        }
      else
        {
          if (success)
            *success = false;
          return ref;
        }
      break;

    case ERROR_MARK:
      ref = error_mark_node;

      /* ...  fall through to failure ...  */

      /* If REF isn't a kind of lvalue we recognize, make no change.
         The caller should recognize the error for an invalid lvalue.  */
    default:
      if (success)
        *success = false;
      return ref;
    }

  /* TREE_THIS_VOLATILE and TREE_SIDE_EFFECTS set on the initial expression
     may not be sustained across some paths, such as the path via build1 for
     INDIRECT_REF.  We reset those flags here in the general case, which is
     consistent with the GCC version of this routine.

     Special care should be taken regarding TREE_SIDE_EFFECTS, because some
     paths introduce side-effects where there were none initially (e.g. if a
     SAVE_EXPR is built) and we also want to keep track of that.  */
  TREE_READONLY (result) = TREE_READONLY (ref);
  TREE_SIDE_EFFECTS (result) |= TREE_SIDE_EFFECTS (ref);
  TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (ref);

  if (code == INDIRECT_REF
      || code == UNCONSTRAINED_ARRAY_REF
      || code == ARRAY_REF
      || code == ARRAY_RANGE_REF)
    TREE_THIS_NOTRAP (result) = TREE_THIS_NOTRAP (ref);

  return result;
}
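
/* A minimal usage sketch for gnat_stabilize_reference; the function and
   identifiers below are hypothetical, not from this file.  Stabilizing a
   reference lets the caller use it several times while any side-effecting
   subexpressions (e.g. in array subscripts) are evaluated only once, and
   SUCCESS reports whether every node in the walk was recognized.  */
#if 0
static tree
example_read_twice (tree gnu_ref)
{
  bool ok;
  tree stable = gnat_stabilize_reference (gnu_ref, false, &ok);

  /* If some node could not be stabilized, fall back to the original
     reference and accept the double evaluation.  */
  if (!ok)
    stable = gnu_ref;

  /* Both occurrences of STABLE now denote the same object.  */
  return build2 (PLUS_EXPR, TREE_TYPE (stable), stable, stable);
}
#endif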

/* If EXPR is an expression that is invariant in the current function, in the
   sense that it can be evaluated anywhere in the function and any number of
   times, return EXPR or an equivalent expression.  Otherwise return NULL.  */

tree
gnat_invariant_expr (tree expr)
{
  tree type = TREE_TYPE (expr), t;

  expr = remove_conversions (expr, false);

  while ((TREE_CODE (expr) == CONST_DECL
          || (TREE_CODE (expr) == VAR_DECL && TREE_READONLY (expr)))
         && decl_function_context (expr) == current_function_decl
         && DECL_INITIAL (expr))
    expr = remove_conversions (DECL_INITIAL (expr), false);

  if (TREE_CONSTANT (expr))
    return fold_convert (type, expr);

  t = expr;

  while (true)
    {
      switch (TREE_CODE (t))
        {
        case COMPONENT_REF:
          if (TREE_OPERAND (t, 2) != NULL_TREE)
            return NULL_TREE;
          break;

        case ARRAY_REF:
        case ARRAY_RANGE_REF:
          if (!TREE_CONSTANT (TREE_OPERAND (t, 1))
              || TREE_OPERAND (t, 2) != NULL_TREE
              || TREE_OPERAND (t, 3) != NULL_TREE)
            return NULL_TREE;
          break;

        case BIT_FIELD_REF:
        case VIEW_CONVERT_EXPR:
        case REALPART_EXPR:
        case IMAGPART_EXPR:
          break;

        case INDIRECT_REF:
          if (!TREE_READONLY (t)
              || TREE_SIDE_EFFECTS (t)
              || !TREE_THIS_NOTRAP (t))
            return NULL_TREE;
          break;

        default:
          goto object;
        }

      t = TREE_OPERAND (t, 0);
    }

object:
  if (TREE_SIDE_EFFECTS (t))
    return NULL_TREE;

  if (TREE_CODE (t) == CONST_DECL
      && (DECL_EXTERNAL (t)
          || decl_function_context (t) != current_function_decl))
    return fold_convert (type, expr);

  if (!TREE_READONLY (t))
    return NULL_TREE;

  if (TREE_CODE (t) == PARM_DECL)
    return fold_convert (type, expr);

  if (TREE_CODE (t) == VAR_DECL
      && (DECL_EXTERNAL (t)
          || decl_function_context (t) != current_function_decl))
    return fold_convert (type, expr);

  return NULL_TREE;
}
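
/* A minimal usage sketch for gnat_invariant_expr; the function below is
   hypothetical, not from this file.  A NULL_TREE result means the
   expression may yield different values at different points in the
   function, so a caller that needs one stable value must instead force a
   single evaluation, e.g. with save_expr.  */
#if 0
static tree
example_hoistable_bound (tree gnu_bound)
{
  tree inv = gnat_invariant_expr (gnu_bound);

  /* Invariant expressions are safe to re-evaluate anywhere and any
     number of times; anything else is evaluated once and reused.  */
  return inv != NULL_TREE ? inv : save_expr (gnu_bound);
}
#endif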
