/* C-compiler utilities for types and variables storage layout
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1996, 1998,
   1999, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "tm_p.h"
#include "flags.h"
#include "function.h"
#include "expr.h"
#include "output.h"
#include "toplev.h"
#include "ggc.h"
#include "target.h"
#include "langhooks.h"
#include "regs.h"
#include "params.h"

/* Data type for the expressions representing sizes of data types.
   It is the first integer type laid out.  */
tree sizetype_tab[(int) TYPE_KIND_LAST];

/* If nonzero, this is an upper limit on alignment of structure fields.
   The value is measured in bits.  */
unsigned int maximum_field_alignment = TARGET_DEFAULT_PACK_STRUCT * BITS_PER_UNIT;
/* ... and its original value in bytes, specified via -fpack-struct=<value>.  */
unsigned int initial_max_fld_align = TARGET_DEFAULT_PACK_STRUCT;

/* Nonzero if all REFERENCE_TYPEs are internal and hence should be
   allocated in Pmode, not ptr_mode.  Set only by internal_reference_types,
   which is called only by a front end.  */
static int reference_types_internal = 0;

static void finalize_record_size (record_layout_info);
static void finalize_type_size (tree);
static void place_union_field (record_layout_info, tree);
#if defined (PCC_BITFIELD_TYPE_MATTERS) || defined (BITFIELD_NBYTES_LIMITED)
static int excess_unit_span (HOST_WIDE_INT, HOST_WIDE_INT, HOST_WIDE_INT,
                             HOST_WIDE_INT, tree);
#endif
extern void debug_rli (record_layout_info);

/* SAVE_EXPRs for sizes of types and decls, waiting to be expanded.  */

static GTY(()) tree pending_sizes;

/* Show that REFERENCE_TYPES are internal and should be Pmode.  Called only
   by front end.  */

void
internal_reference_types (void)
{
  reference_types_internal = 1;
}

/* Get a list of all the objects put on the pending sizes list.  */

tree
get_pending_sizes (void)
{
  tree chain = pending_sizes;

  pending_sizes = 0;
  return chain;
}

/* Add EXPR to the pending sizes list.  */

void
put_pending_size (tree expr)
{
  /* Strip any simple arithmetic from EXPR to see if it has an underlying
     SAVE_EXPR.  */
  expr = skip_simple_arithmetic (expr);

  if (TREE_CODE (expr) == SAVE_EXPR)
    pending_sizes = tree_cons (NULL_TREE, expr, pending_sizes);
}

/* Put a chain of objects into the pending sizes list, which must be
   empty.  */

void
put_pending_sizes (tree chain)
{
  gcc_assert (!pending_sizes);
  pending_sizes = chain;
}

/* Given a size SIZE that may not be a constant, return a SAVE_EXPR
   to serve as the actual size-expression for a type or decl.  */

tree
variable_size (tree size)
{
  tree save;

  /* If the language-processor is to take responsibility for variable-sized
     items (e.g., languages which have elaboration procedures like Ada),
     just return SIZE unchanged.  Likewise for self-referential sizes and
     constant sizes.  */
  if (TREE_CONSTANT (size)
      || lang_hooks.decls.global_bindings_p () < 0
      || CONTAINS_PLACEHOLDER_P (size))
    return size;

  size = save_expr (size);

  /* If an array with a variable number of elements is declared, and
     the elements require destruction, we will emit a cleanup for the
     array.  That cleanup is run both on normal exit from the block
     and in the exception-handler for the block.  Normally, when code
     is used in both ordinary code and in an exception handler it is
     `unsaved', i.e., all SAVE_EXPRs are recalculated.  However, we do
     not wish to do that here; the array-size is the same in both
     places.  */
  save = skip_simple_arithmetic (size);

  if (cfun && cfun->x_dont_save_pending_sizes_p)
    /* The front-end doesn't want us to keep a list of the expressions
       that determine sizes for variable size objects.  Trust it.  */
    return size;

  if (lang_hooks.decls.global_bindings_p ())
    {
      if (TREE_CONSTANT (size))
        error ("type size can%'t be explicitly evaluated");
      else
        error ("variable-size type declared outside of any function");

      return size_one_node;
    }

  put_pending_size (save);

  return size;
}
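
/* Illustrative example (an assumption-laden sketch, not taken from this
   file): for a C99 variable-length array such as `char buf[n];' declared
   inside a function, the size expression involving `n' is not
   TREE_CONSTANT, so variable_size wraps it in a SAVE_EXPR and, unless the
   front end set x_dont_save_pending_sizes_p, records it with
   put_pending_size so that it is evaluated exactly once.  At file scope
   the same expression instead triggers the "variable-size type declared
   outside of any function" error above.  */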

#ifndef MAX_FIXED_MODE_SIZE
#define MAX_FIXED_MODE_SIZE GET_MODE_BITSIZE (DImode)
#endif

/* Return the machine mode to use for a nonscalar of SIZE bits.  The
   mode must be in class CLASS, and have exactly that many value bits;
   it may have padding as well.  If LIMIT is nonzero, modes wider
   than MAX_FIXED_MODE_SIZE will not be used.  */

enum machine_mode
mode_for_size (unsigned int size, enum mode_class class, int limit)
{
  enum machine_mode mode;

  if (limit && size > MAX_FIXED_MODE_SIZE)
    return BLKmode;

  /* Get the first mode which has this size, in the specified class.  */
  for (mode = GET_CLASS_NARROWEST_MODE (class); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    if (GET_MODE_PRECISION (mode) == size)
      return mode;

  return BLKmode;
}
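
/* Illustrative example (assumes a typical target whose integer modes are
   QImode, HImode, SImode and DImode of 8, 16, 32 and 64 bits):
   mode_for_size (32, MODE_INT, 1) returns SImode, while
   mode_for_size (24, MODE_INT, 1) finds no 24-bit integer mode and
   returns BLKmode.  */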

/* Similar, except passed a tree node.  */

enum machine_mode
mode_for_size_tree (tree size, enum mode_class class, int limit)
{
  if (TREE_CODE (size) != INTEGER_CST
      || TREE_OVERFLOW (size)
      /* What we really want to say here is that the size can fit in a
         host integer, but we know there's no way we'd find a mode for
         this many bits, so there's no point in doing the precise test.  */
      || compare_tree_int (size, 1000) > 0)
    return BLKmode;
  else
    return mode_for_size (tree_low_cst (size, 1), class, limit);
}

/* Similar, but never return BLKmode; return the narrowest mode that
   contains at least the requested number of value bits.  */

enum machine_mode
smallest_mode_for_size (unsigned int size, enum mode_class class)
{
  enum machine_mode mode;

  /* Get the first mode which has at least this size, in the
     specified class.  */
  for (mode = GET_CLASS_NARROWEST_MODE (class); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    if (GET_MODE_PRECISION (mode) >= size)
      return mode;

  gcc_unreachable ();
}
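
/* Illustrative example (same assumed set of integer modes as above):
   smallest_mode_for_size (17, MODE_INT) returns SImode, the narrowest
   mode with at least 17 value bits.  Unlike mode_for_size, this routine
   never returns BLKmode; if nothing is wide enough it aborts via
   gcc_unreachable.  */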

/* Find an integer mode of the exact same size, or BLKmode on failure.  */

enum machine_mode
int_mode_for_mode (enum machine_mode mode)
{
  switch (GET_MODE_CLASS (mode))
    {
    case MODE_INT:
    case MODE_PARTIAL_INT:
      break;

    case MODE_COMPLEX_INT:
    case MODE_COMPLEX_FLOAT:
    case MODE_FLOAT:
    case MODE_VECTOR_INT:
    case MODE_VECTOR_FLOAT:
      mode = mode_for_size (GET_MODE_BITSIZE (mode), MODE_INT, 0);
      break;

    case MODE_RANDOM:
      if (mode == BLKmode)
        break;

      /* ... fall through ...  */

    case MODE_CC:
    default:
      gcc_unreachable ();
    }

  return mode;
}
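
/* Illustrative example (assumes a target where SFmode is 32 bits wide):
   int_mode_for_mode (SFmode) goes through the MODE_FLOAT case and yields
   SImode, the 32-bit integer mode; int_mode_for_mode (BLKmode) hits the
   MODE_RANDOM case and returns BLKmode unchanged.  */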

/* Return the alignment of MODE.  This will be bounded by 1 and
   BIGGEST_ALIGNMENT.  */

unsigned int
get_mode_alignment (enum machine_mode mode)
{
  return MIN (BIGGEST_ALIGNMENT, MAX (1, mode_base_align[mode] * BITS_PER_UNIT));
}
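
/* Illustrative example (assumes mode_base_align[SImode] is 4 bytes on the
   target): get_mode_alignment (SImode) yields 4 * BITS_PER_UNIT = 32 bits,
   capped at BIGGEST_ALIGNMENT; the MAX with 1 guards against a zero table
   entry, so the result is never 0.  */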

/* Subroutine of layout_decl: Force alignment required for the data type.
   But if the decl itself wants greater alignment, don't override that.  */

static inline void
do_type_align (tree type, tree decl)
{
  if (TYPE_ALIGN (type) > DECL_ALIGN (decl))
    {
      DECL_ALIGN (decl) = TYPE_ALIGN (type);
      if (TREE_CODE (decl) == FIELD_DECL)
        DECL_USER_ALIGN (decl) = TYPE_USER_ALIGN (type);
    }
}

/* Set the size, mode and alignment of a ..._DECL node.
   TYPE_DECL does need this for C++.
   Note that LABEL_DECL and CONST_DECL nodes do not need this,
   and FUNCTION_DECL nodes have them set up in a special (and simple) way.
   Don't call layout_decl for them.

   KNOWN_ALIGN is the amount of alignment we can assume this
   decl has with no special effort.  It is relevant only for FIELD_DECLs
   and depends on the previous fields.
   All that matters about KNOWN_ALIGN is which powers of 2 divide it.
   If KNOWN_ALIGN is 0, it means, "as much alignment as you like":
   the record will be aligned to suit.  */

void
layout_decl (tree decl, unsigned int known_align)
{
  tree type = TREE_TYPE (decl);
  enum tree_code code = TREE_CODE (decl);
  rtx rtl = NULL_RTX;

  if (code == CONST_DECL)
    return;

  gcc_assert (code == VAR_DECL || code == PARM_DECL || code == RESULT_DECL
              || code == TYPE_DECL || code == FIELD_DECL);

  rtl = DECL_RTL_IF_SET (decl);

  if (type == error_mark_node)
    type = void_type_node;

  /* Usually the size and mode come from the data type without change,
     however, the front-end may set the explicit width of the field, so its
     size may not be the same as the size of its type.  This happens with
     bitfields, of course (an `int' bitfield may be only 2 bits, say), but it
     also happens with other fields.  For example, the C++ front-end creates
     zero-sized fields corresponding to empty base classes, and depends on
     layout_type setting DECL_FIELD_BITPOS correctly for the field.  Set the
     size in bytes from the size in bits.  If we have already set the mode,
     don't set it again since we can be called twice for FIELD_DECLs.  */

  DECL_UNSIGNED (decl) = TYPE_UNSIGNED (type);
  if (DECL_MODE (decl) == VOIDmode)
    DECL_MODE (decl) = TYPE_MODE (type);

  if (DECL_SIZE (decl) == 0)
    {
      DECL_SIZE (decl) = TYPE_SIZE (type);
      DECL_SIZE_UNIT (decl) = TYPE_SIZE_UNIT (type);
    }
  else if (DECL_SIZE_UNIT (decl) == 0)
    DECL_SIZE_UNIT (decl)
      = fold_convert (sizetype, size_binop (CEIL_DIV_EXPR, DECL_SIZE (decl),
                                            bitsize_unit_node));

  if (code != FIELD_DECL)
    /* For non-fields, update the alignment from the type.  */
    do_type_align (type, decl);
  else
    /* For fields, it's a bit more complicated...  */
    {
      bool old_user_align = DECL_USER_ALIGN (decl);
      bool zero_bitfield = false;
      unsigned int mfa;

      if (DECL_BIT_FIELD (decl))
        {
          DECL_BIT_FIELD_TYPE (decl) = type;

          /* A zero-length bit-field affects the alignment of the next
             field.  */
          if (integer_zerop (DECL_SIZE (decl))
              && ! targetm.ms_bitfield_layout_p (DECL_FIELD_CONTEXT (decl)))
            {
              zero_bitfield = true;
#ifdef PCC_BITFIELD_TYPE_MATTERS
              if (PCC_BITFIELD_TYPE_MATTERS)
                do_type_align (type, decl);
              else
#endif
                {
#ifdef EMPTY_FIELD_BOUNDARY
                  if (EMPTY_FIELD_BOUNDARY > DECL_ALIGN (decl))
                    {
                      DECL_ALIGN (decl) = EMPTY_FIELD_BOUNDARY;
                      DECL_USER_ALIGN (decl) = 0;
                    }
#endif
                }
            }

          /* See if we can use an ordinary integer mode for a bit-field.
             Conditions are: a fixed size that is correct for another mode
             and occupying a complete byte or bytes on proper boundary.  */
          if (TYPE_SIZE (type) != 0
              && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
              && GET_MODE_CLASS (TYPE_MODE (type)) == MODE_INT)
            {
              enum machine_mode xmode
                = mode_for_size_tree (DECL_SIZE (decl), MODE_INT, 1);

              if (xmode != BLKmode
                  && (known_align == 0
                      || known_align >= GET_MODE_ALIGNMENT (xmode)))
                {
                  DECL_ALIGN (decl) = MAX (GET_MODE_ALIGNMENT (xmode),
                                           DECL_ALIGN (decl));
                  DECL_MODE (decl) = xmode;
                  DECL_BIT_FIELD (decl) = 0;
                }
            }

          /* Turn off DECL_BIT_FIELD if we won't need it set.  */
          if (TYPE_MODE (type) == BLKmode && DECL_MODE (decl) == BLKmode
              && known_align >= TYPE_ALIGN (type)
              && DECL_ALIGN (decl) >= TYPE_ALIGN (type))
            DECL_BIT_FIELD (decl) = 0;
        }
      else if (DECL_PACKED (decl) && DECL_USER_ALIGN (decl))
        /* Don't touch DECL_ALIGN.  For other packed fields, go ahead and
           round up; we'll reduce it again below.  We want packing to
           supersede USER_ALIGN inherited from the type, but defer to
           alignment explicitly specified on the field decl.  */;
      else
        do_type_align (type, decl);

      /* If the field is of variable size, we can't misalign it since we
         have no way to make a temporary to align the result.  But this
         isn't an issue if the decl is not addressable.  Likewise if it
         is of unknown size.

         Note that do_type_align may set DECL_USER_ALIGN, so we need to
         check old_user_align instead.  */
      if (DECL_PACKED (decl)
          && !old_user_align
          && !zero_bitfield
          && (DECL_NONADDRESSABLE_P (decl)
              || DECL_SIZE_UNIT (decl) == 0
              || TREE_CODE (DECL_SIZE_UNIT (decl)) == INTEGER_CST))
        DECL_ALIGN (decl) = MIN (DECL_ALIGN (decl), BITS_PER_UNIT);

      if (! DECL_USER_ALIGN (decl) && (! DECL_PACKED (decl) || zero_bitfield))
        {
          /* Some targets (e.g. i386, VMS) limit struct field alignment
             to a lower boundary than alignment of variables unless
             it was overridden by attribute aligned.  */
#ifdef BIGGEST_FIELD_ALIGNMENT
          DECL_ALIGN (decl)
            = MIN (DECL_ALIGN (decl), (unsigned) BIGGEST_FIELD_ALIGNMENT);
#endif
#ifdef ADJUST_FIELD_ALIGN
          DECL_ALIGN (decl) = ADJUST_FIELD_ALIGN (decl, DECL_ALIGN (decl));
#endif
        }

      if (zero_bitfield)
        mfa = initial_max_fld_align * BITS_PER_UNIT;
      else
        mfa = maximum_field_alignment;
      /* Should this be controlled by DECL_USER_ALIGN, too?  */
      if (mfa != 0)
        DECL_ALIGN (decl) = MIN (DECL_ALIGN (decl), mfa);
    }

  /* Evaluate nonconstant size only once, either now or as soon as safe.  */
  if (DECL_SIZE (decl) != 0 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
    DECL_SIZE (decl) = variable_size (DECL_SIZE (decl));
  if (DECL_SIZE_UNIT (decl) != 0
      && TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST)
    DECL_SIZE_UNIT (decl) = variable_size (DECL_SIZE_UNIT (decl));

  /* If requested, warn about definitions of large data objects.  */
  if (warn_larger_than
      && (code == VAR_DECL || code == PARM_DECL)
      && ! DECL_EXTERNAL (decl))
    {
      tree size = DECL_SIZE_UNIT (decl);

      if (size != 0 && TREE_CODE (size) == INTEGER_CST
          && compare_tree_int (size, larger_than_size) > 0)
        {
          int size_as_int = TREE_INT_CST_LOW (size);

          if (compare_tree_int (size, size_as_int) == 0)
            warning (0, "size of %q+D is %d bytes", decl, size_as_int);
          else
            warning (0, "size of %q+D is larger than %wd bytes",
                     decl, larger_than_size);
        }
    }

  /* If the RTL was already set, update its mode and mem attributes.  */
  if (rtl)
    {
      PUT_MODE (rtl, DECL_MODE (decl));
      SET_DECL_RTL (decl, 0);
      set_mem_attributes (rtl, decl, 1);
      SET_DECL_RTL (decl, rtl);
    }
}
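
/* Illustrative examples (assumption-laden, not from this file): a front
   end that gives a FIELD_DECL an explicit 24-bit DECL_SIZE but no
   DECL_SIZE_UNIT gets DECL_SIZE_UNIT = 3 from the CEIL_DIV_EXPR above.
   And, assuming a target with a 16-bit HImode, an `int' bit-field of
   width 16 whose KNOWN_ALIGN is at least GET_MODE_ALIGNMENT (HImode) is
   promoted to HImode and has DECL_BIT_FIELD cleared, since it occupies
   complete, properly aligned bytes.  */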

/* Given a VAR_DECL, PARM_DECL or RESULT_DECL, clears the results of
   a previous call to layout_decl and calls it again.  */

void
relayout_decl (tree decl)
{
  DECL_SIZE (decl) = DECL_SIZE_UNIT (decl) = 0;
  DECL_MODE (decl) = VOIDmode;
  DECL_ALIGN (decl) = 0;
  SET_DECL_RTL (decl, 0);

  layout_decl (decl, 0);
}

/* Hook for a front-end function that can modify the record layout as needed
   immediately before it is finalized.  */

static void (*lang_adjust_rli) (record_layout_info) = 0;

void
set_lang_adjust_rli (void (*f) (record_layout_info))
{
  lang_adjust_rli = f;
}

/* Begin laying out type T, which may be a RECORD_TYPE, UNION_TYPE, or
   QUAL_UNION_TYPE.  Return a pointer to a struct record_layout_info which
   is to be passed to all other layout functions for this record.  It is the
   responsibility of the caller to call `free' for the storage returned.
   Note that garbage collection is not permitted until we finish laying
   out the record.  */

record_layout_info
start_record_layout (tree t)
{
  record_layout_info rli = xmalloc (sizeof (struct record_layout_info_s));

  rli->t = t;

  /* If the type has a minimum specified alignment (via an attribute
     declaration, for example) use it -- otherwise, start with a
     one-byte alignment.  */
  rli->record_align = MAX (BITS_PER_UNIT, TYPE_ALIGN (t));
  rli->unpacked_align = rli->record_align;
  rli->offset_align = MAX (rli->record_align, BIGGEST_ALIGNMENT);

#ifdef STRUCTURE_SIZE_BOUNDARY
  /* Packed structures don't need to have a minimum size.  */
  if (! TYPE_PACKED (t))
    rli->record_align = MAX (rli->record_align, (unsigned) STRUCTURE_SIZE_BOUNDARY);
#endif

  rli->offset = size_zero_node;
  rli->bitpos = bitsize_zero_node;
  rli->prev_field = 0;
  rli->pending_statics = 0;
  rli->packed_maybe_necessary = 0;

  return rli;
}

/* These four routines perform computations that convert between
   the offset/bitpos forms and byte and bit offsets.  */

tree
bit_from_pos (tree offset, tree bitpos)
{
  return size_binop (PLUS_EXPR, bitpos,
                     size_binop (MULT_EXPR,
                                 fold_convert (bitsizetype, offset),
                                 bitsize_unit_node));
}

tree
byte_from_pos (tree offset, tree bitpos)
{
  return size_binop (PLUS_EXPR, offset,
                     fold_convert (sizetype,
                                   size_binop (TRUNC_DIV_EXPR, bitpos,
                                               bitsize_unit_node)));
}
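
/* Worked example (assumes 8-bit bytes): for offset = 2 bytes and
   bitpos = 5 bits, bit_from_pos returns 5 + 2 * 8 = 21 bits, while
   byte_from_pos returns 2 + 5 / 8 = 2, truncating the partial byte.  */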

void
pos_from_bit (tree *poffset, tree *pbitpos, unsigned int off_align,
              tree pos)
{
  *poffset = size_binop (MULT_EXPR,
                         fold_convert (sizetype,
                                       size_binop (FLOOR_DIV_EXPR, pos,
                                                   bitsize_int (off_align))),
                         size_int (off_align / BITS_PER_UNIT));
  *pbitpos = size_binop (FLOOR_MOD_EXPR, pos, bitsize_int (off_align));
}
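
/* Worked example (assumes 8-bit bytes): with off_align = 32 and
   pos = 70 bits, 70 = 2 * 32 + 6, so *poffset becomes 2 * 4 = 8 bytes
   and *pbitpos becomes 6 bits.  */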

/* Given a pointer to bit and byte offsets and an offset alignment,
   normalize the offsets so they are within the alignment.  */

void
normalize_offset (tree *poffset, tree *pbitpos, unsigned int off_align)
{
  /* If the bit position is now larger than it should be, adjust it
     downwards.  */
  if (compare_tree_int (*pbitpos, off_align) >= 0)
    {
      tree extra_aligns = size_binop (FLOOR_DIV_EXPR, *pbitpos,
                                      bitsize_int (off_align));

      *poffset
        = size_binop (PLUS_EXPR, *poffset,
                      size_binop (MULT_EXPR,
                                  fold_convert (sizetype, extra_aligns),
                                  size_int (off_align / BITS_PER_UNIT)));

      *pbitpos
        = size_binop (FLOOR_MOD_EXPR, *pbitpos, bitsize_int (off_align));
    }
}
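
/* Worked example (assumes 8-bit bytes): with off_align = 32, an
   offset/bitpos pair of (0, 40) normalizes to (4, 8): one full 32-bit
   unit (4 bytes) moves from the bit position into the byte offset,
   leaving 40 - 32 = 8 bits.  */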

/* Print debugging information about the information in RLI.  */

void
debug_rli (record_layout_info rli)
{
  print_node_brief (stderr, "type", rli->t, 0);
  print_node_brief (stderr, "\noffset", rli->offset, 0);
  print_node_brief (stderr, " bitpos", rli->bitpos, 0);

  fprintf (stderr, "\naligns: rec = %u, unpack = %u, off = %u\n",
           rli->record_align, rli->unpacked_align,
           rli->offset_align);
  if (rli->packed_maybe_necessary)
    fprintf (stderr, "packed may be necessary\n");

  if (rli->pending_statics)
    {
      fprintf (stderr, "pending statics:\n");
      debug_tree (rli->pending_statics);
    }
}

/* Given an RLI with a possibly-incremented BITPOS, adjust OFFSET and
   BITPOS if necessary to keep BITPOS below OFFSET_ALIGN.  */

void
normalize_rli (record_layout_info rli)
{
  normalize_offset (&rli->offset, &rli->bitpos, rli->offset_align);
}

/* Returns the size in bytes allocated so far.  */

tree
rli_size_unit_so_far (record_layout_info rli)
{
  return byte_from_pos (rli->offset, rli->bitpos);
}

/* Returns the size in bits allocated so far.  */

tree
rli_size_so_far (record_layout_info rli)
{
  return bit_from_pos (rli->offset, rli->bitpos);
}

/* FIELD is about to be added to RLI->T.  The alignment (in bits) of
   the next available location within the record is given by KNOWN_ALIGN.
   Update the variable alignment fields in RLI, and return the alignment
   to give the FIELD.  */

unsigned int
update_alignment_for_field (record_layout_info rli, tree field,
                            unsigned int known_align)
{
  /* The alignment required for FIELD.  */
  unsigned int desired_align;
  /* The type of this field.  */
  tree type = TREE_TYPE (field);
  /* True if the field was explicitly aligned by the user.  */
  bool user_align;
  bool is_bitfield;

  /* Do not attempt to align an ERROR_MARK node.  */
  if (TREE_CODE (type) == ERROR_MARK)
    return 0;

  /* Lay out the field so we know what alignment it needs.  */
  layout_decl (field, known_align);
  desired_align = DECL_ALIGN (field);
  user_align = DECL_USER_ALIGN (field);

  is_bitfield = (type != error_mark_node
                 && DECL_BIT_FIELD_TYPE (field)
                 && ! integer_zerop (TYPE_SIZE (type)));

  /* Record must have at least as much alignment as any field.
     Otherwise, the alignment of the field within the record is
     meaningless.  */
  if (is_bitfield && targetm.ms_bitfield_layout_p (rli->t))
    {
      /* Here, the alignment of the underlying type of a bitfield can
         affect the alignment of a record; even a zero-sized field
         can do this.  The alignment should be to the alignment of
         the type, except that for zero-size bitfields this only
         applies if there was an immediately prior, nonzero-size
         bitfield.  (That's the way it is, experimentally.) */
      if (! integer_zerop (DECL_SIZE (field))
          ? ! DECL_PACKED (field)
          : (rli->prev_field
             && DECL_BIT_FIELD_TYPE (rli->prev_field)
             && ! integer_zerop (DECL_SIZE (rli->prev_field))))
        {
          unsigned int type_align = TYPE_ALIGN (type);
          type_align = MAX (type_align, desired_align);
          if (maximum_field_alignment != 0)
            type_align = MIN (type_align, maximum_field_alignment);
          rli->record_align = MAX (rli->record_align, type_align);
          rli->unpacked_align = MAX (rli->unpacked_align, TYPE_ALIGN (type));
          /* If we start a new run, make sure we start it properly aligned.  */
          if ((!rli->prev_field
               || integer_zerop (DECL_SIZE (field))
               || integer_zerop (DECL_SIZE (rli->prev_field))
               || !host_integerp (DECL_SIZE (rli->prev_field), 0)
               || !host_integerp (TYPE_SIZE (type), 0)
               || !simple_cst_equal (TYPE_SIZE (type),
                                     TYPE_SIZE (TREE_TYPE (rli->prev_field)))
               || (rli->remaining_in_alignment
                   < tree_low_cst (DECL_SIZE (field), 0)))
              && desired_align < type_align)
            desired_align = type_align;
        }
    }
#ifdef PCC_BITFIELD_TYPE_MATTERS
  else if (is_bitfield && PCC_BITFIELD_TYPE_MATTERS)
    {
      /* Named bit-fields cause the entire structure to have the
         alignment implied by their type.  Some targets also apply the same
         rules to unnamed bitfields.  */
      if (DECL_NAME (field) != 0
          || targetm.align_anon_bitfield ())
        {
          unsigned int type_align = TYPE_ALIGN (type);

#ifdef ADJUST_FIELD_ALIGN
          if (! TYPE_USER_ALIGN (type))
            type_align = ADJUST_FIELD_ALIGN (field, type_align);
#endif

          /* Targets might choose to handle unnamed and hence possibly
             zero-width bitfields.  Those are not influenced by #pragmas
             or packed attributes.  */
          if (integer_zerop (DECL_SIZE (field)))
            {
              if (initial_max_fld_align)
                type_align = MIN (type_align,
                                  initial_max_fld_align * BITS_PER_UNIT);
            }
          else if (maximum_field_alignment != 0)
            type_align = MIN (type_align, maximum_field_alignment);
          else if (DECL_PACKED (field))
            type_align = MIN (type_align, BITS_PER_UNIT);

          /* The alignment of the record is increased to the maximum
             of the current alignment, the alignment indicated on the
             field (i.e., the alignment specified by an __aligned__
             attribute), and the alignment indicated by the type of
             the field.  */
          rli->record_align = MAX (rli->record_align, desired_align);
          rli->record_align = MAX (rli->record_align, type_align);

          if (warn_packed)
            rli->unpacked_align = MAX (rli->unpacked_align, TYPE_ALIGN (type));
          user_align |= TYPE_USER_ALIGN (type);
        }
    }
#endif
  else
    {
      rli->record_align = MAX (rli->record_align, desired_align);
      rli->unpacked_align = MAX (rli->unpacked_align, TYPE_ALIGN (type));
    }

  TYPE_USER_ALIGN (rli->t) |= user_align;

  return desired_align;
}
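
/* Illustrative example (assumes a typical 32-bit target): while laying
   out `struct { char c; int i; }', the call for `i' raises
   rli->record_align from 8 to 32 bits, which is what later forces three
   bytes of padding between `c' and `i'.  */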

/* Called from place_field to handle unions.  */

static void
place_union_field (record_layout_info rli, tree field)
{
  update_alignment_for_field (rli, field, /*known_align=*/0);

  DECL_FIELD_OFFSET (field) = size_zero_node;
  DECL_FIELD_BIT_OFFSET (field) = bitsize_zero_node;
  SET_DECL_OFFSET_ALIGN (field, BIGGEST_ALIGNMENT);

  /* If this is an ERROR_MARK, return *after* having set the
     field at the start of the union.  This helps when parsing
     invalid fields.  */
  if (TREE_CODE (TREE_TYPE (field)) == ERROR_MARK)
    return;

  /* We assume the union's size will be a multiple of a byte so we don't
     bother with BITPOS.  */
  if (TREE_CODE (rli->t) == UNION_TYPE)
    rli->offset = size_binop (MAX_EXPR, rli->offset, DECL_SIZE_UNIT (field));
  else if (TREE_CODE (rli->t) == QUAL_UNION_TYPE)
    rli->offset = fold_build3 (COND_EXPR, sizetype,
                               DECL_QUALIFIER (field),
                               DECL_SIZE_UNIT (field), rli->offset);
}
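
/* Illustrative example (assumes a typical target with a 64-bit double):
   for `union { char c; double d; }' every member is placed at offset 0,
   and rli->offset ends up as the MAX_EXPR of the member sizes, 8 bytes.
   For a QUAL_UNION_TYPE (an Ada variant record) the size is instead a
   COND_EXPR selected by each field's DECL_QUALIFIER.  */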

#if defined (PCC_BITFIELD_TYPE_MATTERS) || defined (BITFIELD_NBYTES_LIMITED)
/* A bitfield of SIZE with a required access alignment of ALIGN is allocated
   at BYTE_OFFSET / BIT_OFFSET.  Return nonzero if the field would span more
   units of alignment than the underlying TYPE.  */
static int
excess_unit_span (HOST_WIDE_INT byte_offset, HOST_WIDE_INT bit_offset,
                  HOST_WIDE_INT size, HOST_WIDE_INT align, tree type)
{
  /* Note that the calculation of OFFSET might overflow; we calculate it so
     that we still get the right result as long as ALIGN is a power of two.  */
  unsigned HOST_WIDE_INT offset = byte_offset * BITS_PER_UNIT + bit_offset;

  offset = offset % align;
  return ((offset + size + align - 1) / align
          > ((unsigned HOST_WIDE_INT) tree_low_cst (TYPE_SIZE (type), 1)
             / align));
}
#endif
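
/* Worked example (assumes 8-bit bytes): for byte_offset = 3,
   bit_offset = 6, size = 5 and align = 8 with an 8-bit TYPE, the field
   starts at bit 3 * 8 + 6 = 30, i.e. bit 6 within its unit, and would
   occupy (6 + 5 + 7) / 8 = 2 units, while the type spans 8 / 8 = 1 unit;
   excess_unit_span therefore returns nonzero and the caller advances the
   field to the next boundary.  */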

/* RLI contains information about the layout of a RECORD_TYPE.  FIELD
   is a FIELD_DECL to be added after those fields already present in
   T.  (FIELD is not actually added to the TYPE_FIELDS list here;
   callers that desire that behavior must manually perform that step.)  */

void
place_field (record_layout_info rli, tree field)
{
  /* The alignment required for FIELD.  */
  unsigned int desired_align;
  /* The alignment FIELD would have if we just dropped it into the
     record as it presently stands.  */
  unsigned int known_align;
  unsigned int actual_align;
  /* The type of this field.  */
  tree type = TREE_TYPE (field);

  gcc_assert (TREE_CODE (field) != ERROR_MARK);

  /* If FIELD is static, then treat it like a separate variable, not
     really like a structure field.  If it is a FUNCTION_DECL, it's a
     method.  In both cases, all we do is lay out the decl, and we do
     it *after* the record is laid out.  */
  if (TREE_CODE (field) == VAR_DECL)
    {
      rli->pending_statics = tree_cons (NULL_TREE, field,
                                        rli->pending_statics);
      return;
    }

  /* Enumerators and enum types which are local to this class need not
     be laid out.  Likewise for initialized constant fields.  */
  else if (TREE_CODE (field) != FIELD_DECL)
    return;

  /* Unions are laid out very differently than records, so split
     that code off to another function.  */
  else if (TREE_CODE (rli->t) != RECORD_TYPE)
    {
      place_union_field (rli, field);
      return;
    }

  else if (TREE_CODE (type) == ERROR_MARK)
    {
      /* Place this field at the current allocation position, so we
         maintain monotonicity.  */
      DECL_FIELD_OFFSET (field) = rli->offset;
      DECL_FIELD_BIT_OFFSET (field) = rli->bitpos;
      SET_DECL_OFFSET_ALIGN (field, rli->offset_align);
      return;
    }

  /* Work out the known alignment so far.  Note that A & (-A) is the
     value of the least-significant bit in A that is one.  */
  if (! integer_zerop (rli->bitpos))
    known_align = (tree_low_cst (rli->bitpos, 1)
                   & - tree_low_cst (rli->bitpos, 1));
  else if (integer_zerop (rli->offset))
    known_align = 0;
  else if (host_integerp (rli->offset, 1))
    known_align = (BITS_PER_UNIT
                   * (tree_low_cst (rli->offset, 1)
                      & - tree_low_cst (rli->offset, 1)));
  else
    known_align = rli->offset_align;

  desired_align = update_alignment_for_field (rli, field, known_align);
  if (known_align == 0)
    known_align = MAX (BIGGEST_ALIGNMENT, rli->record_align);

  if (warn_packed && DECL_PACKED (field))
    {
      if (known_align >= TYPE_ALIGN (type))
        {
          if (TYPE_ALIGN (type) > desired_align)
            {
              if (STRICT_ALIGNMENT)
                warning (OPT_Wattributes, "packed attribute causes "
                         "inefficient alignment for %q+D", field);
              else
                warning (OPT_Wattributes, "packed attribute is "
                         "unnecessary for %q+D", field);
            }
        }
      else
        rli->packed_maybe_necessary = 1;
    }

  /* Does this field automatically have alignment it needs by virtue
     of the fields that precede it and the record's own alignment?  */
  if (known_align < desired_align)
    {
      /* No, we need to skip space before this field.
         Bump the cumulative size to multiple of field alignment.  */

      warning (OPT_Wpadded, "padding struct to align %q+D", field);

      /* If the alignment is still within offset_align, just align
         the bit position.  */
      if (desired_align < rli->offset_align)
        rli->bitpos = round_up (rli->bitpos, desired_align);
      else
        {
          /* First adjust OFFSET by the partial bits, then align.  */
          rli->offset
            = size_binop (PLUS_EXPR, rli->offset,
                          fold_convert (sizetype,
                                        size_binop (CEIL_DIV_EXPR, rli->bitpos,
                                                    bitsize_unit_node)));
          rli->bitpos = bitsize_zero_node;

          rli->offset = round_up (rli->offset, desired_align / BITS_PER_UNIT);
        }

      if (! TREE_CONSTANT (rli->offset))
        rli->offset_align = desired_align;
    }

  /* Handle compatibility with PCC.  Note that if the record has any
     variable-sized fields, we need not worry about compatibility.  */
#ifdef PCC_BITFIELD_TYPE_MATTERS
  if (PCC_BITFIELD_TYPE_MATTERS
      && ! targetm.ms_bitfield_layout_p (rli->t)
      && TREE_CODE (field) == FIELD_DECL
      && type != error_mark_node
      && DECL_BIT_FIELD (field)
      && ! DECL_PACKED (field)
      && maximum_field_alignment == 0
      && ! integer_zerop (DECL_SIZE (field))
      && host_integerp (DECL_SIZE (field), 1)
      && host_integerp (rli->offset, 1)
      && host_integerp (TYPE_SIZE (type), 1))
    {
      unsigned int type_align = TYPE_ALIGN (type);
      tree dsize = DECL_SIZE (field);
      HOST_WIDE_INT field_size = tree_low_cst (dsize, 1);
      HOST_WIDE_INT offset = tree_low_cst (rli->offset, 0);
      HOST_WIDE_INT bit_offset = tree_low_cst (rli->bitpos, 0);

#ifdef ADJUST_FIELD_ALIGN
      if (! TYPE_USER_ALIGN (type))
        type_align = ADJUST_FIELD_ALIGN (field, type_align);
#endif

      /* A bit field may not span more units of alignment of its type
         than its type itself.  Advance to next boundary if necessary.  */
      if (excess_unit_span (offset, bit_offset, field_size, type_align, type))
        rli->bitpos = round_up (rli->bitpos, type_align);

      TYPE_USER_ALIGN (rli->t) |= TYPE_USER_ALIGN (type);
    }
#endif

#ifdef BITFIELD_NBYTES_LIMITED
  if (BITFIELD_NBYTES_LIMITED
      && ! targetm.ms_bitfield_layout_p (rli->t)
      && TREE_CODE (field) == FIELD_DECL
      && type != error_mark_node
      && DECL_BIT_FIELD_TYPE (field)
      && ! DECL_PACKED (field)
      && ! integer_zerop (DECL_SIZE (field))
      && host_integerp (DECL_SIZE (field), 1)
      && host_integerp (rli->offset, 1)
      && host_integerp (TYPE_SIZE (type), 1))
    {
      unsigned int type_align = TYPE_ALIGN (type);
      tree dsize = DECL_SIZE (field);
      HOST_WIDE_INT field_size = tree_low_cst (dsize, 1);
      HOST_WIDE_INT offset = tree_low_cst (rli->offset, 0);
      HOST_WIDE_INT bit_offset = tree_low_cst (rli->bitpos, 0);

#ifdef ADJUST_FIELD_ALIGN
      if (! TYPE_USER_ALIGN (type))
        type_align = ADJUST_FIELD_ALIGN (field, type_align);
#endif

      if (maximum_field_alignment != 0)
        type_align = MIN (type_align, maximum_field_alignment);
      /* ??? This test is opposite the test in the containing if
         statement, so this code is unreachable currently.  */
      else if (DECL_PACKED (field))
        type_align = MIN (type_align, BITS_PER_UNIT);

      /* A bit field may not span the unit of alignment of its type.
         Advance to next boundary if necessary.  */
      if (excess_unit_span (offset, bit_offset, field_size, type_align, type))
        rli->bitpos = round_up (rli->bitpos, type_align);

      TYPE_USER_ALIGN (rli->t) |= TYPE_USER_ALIGN (type);
    }
#endif

  /* See the docs for TARGET_MS_BITFIELD_LAYOUT_P for details.
     A subtlety:
        When a bit field is inserted into a packed record, the whole
        size of the underlying type is used by one or more same-size
        adjacent bitfields.  (That is, if it's long:3, 32 bits is
        used in the record, and any additional adjacent long bitfields are
        packed into the same chunk of 32 bits.  However, if the size
        changes, a new field of that size is allocated.)  In an unpacked
        record, this is the same as using alignment, but not equivalent
        when packing.

     Note: for compatibility, we use the type size, not the type alignment
     to determine alignment, since that matches the documentation.  */

  if (targetm.ms_bitfield_layout_p (rli->t)
      && ((DECL_BIT_FIELD_TYPE (field) && ! DECL_PACKED (field))
          || (rli->prev_field && ! DECL_PACKED (rli->prev_field))))
    {
      /* At this point, either the prior or current field is a bitfield
         (possibly both), and we're dealing with MS packing.  */
      tree prev_saved = rli->prev_field;

      /* Is the prior field a bitfield?  If so, handle "runs" of same
         type size fields.  */
      if (rli->prev_field /* necessarily a bitfield if it exists.  */)
        {
          /* If both are bitfields, nonzero, and the same size, this is
             the middle of a run.  Zero declared size fields are special
             and handled as "end of run".  (Note: it's nonzero declared
             size, but equal type sizes!) (Since we know that both
             the current and previous fields are bitfields by the
             time we check it, DECL_SIZE must be present for both.) */
          if (DECL_BIT_FIELD_TYPE (field)
              && !integer_zerop (DECL_SIZE (field))
              && !integer_zerop (DECL_SIZE (rli->prev_field))
              && host_integerp (DECL_SIZE (rli->prev_field), 0)
              && host_integerp (TYPE_SIZE (type), 0)
              && simple_cst_equal (TYPE_SIZE (type),
                                   TYPE_SIZE (TREE_TYPE (rli->prev_field))))
            {
              /* We're in the middle of a run of equal type size fields; make
                 sure we realign if we run out of bits.  (Not decl size,
                 type size!) */
              HOST_WIDE_INT bitsize = tree_low_cst (DECL_SIZE (field), 0);

              if (rli->remaining_in_alignment < bitsize)
                {
                  /* If PREV_FIELD is packed, and we haven't lumped
                     non-packed bitfields with it, treat this as if PREV_FIELD
                     was not a bitfield.  This avoids anomalies where a packed
                     bitfield with long long base type can take up more
                     space than a same-size bitfield with base type short.  */
                  if (rli->prev_packed)
                    rli->prev_field = prev_saved = NULL;
                  else
                    {
                      /* Out of bits; bump up to the next 'word'.  */
                      rli->offset = DECL_FIELD_OFFSET (rli->prev_field);
                      rli->bitpos
                        = size_binop (PLUS_EXPR, TYPE_SIZE (type),
                                      DECL_FIELD_BIT_OFFSET (rli->prev_field));
                      rli->prev_field = field;
                      rli->remaining_in_alignment
                        = tree_low_cst (TYPE_SIZE (type), 0) - bitsize;
                    }
                }
              else
                rli->remaining_in_alignment -= bitsize;
            }
          else if (rli->prev_packed)
            rli->prev_field = prev_saved = NULL;
          else
            {
              /* End of a run: if leaving a run of bitfields of the same type
                 size, we have to "use up" the rest of the bits of the type
                 size.

                 Compute the new position as the sum of the size for the prior
                 type and where we first started working on that type.
                 Note: since the beginning of the field was aligned then
                 of course the end will be too.  No round needed.  */

              if (!integer_zerop (DECL_SIZE (rli->prev_field)))
                {
                  tree type_size = TYPE_SIZE (TREE_TYPE (rli->prev_field));

                  /* If the desired alignment is greater or equal to TYPE_SIZE,
                     we have already adjusted rli->bitpos / rli->offset above.
                   */
                  if ((unsigned HOST_WIDE_INT) tree_low_cst (type_size, 0)
                      > desired_align)
                    rli->bitpos
                      = size_binop (PLUS_EXPR, type_size,
                                    DECL_FIELD_BIT_OFFSET (rli->prev_field));
                }
              else
                /* We "use up" size zero fields; the code below should behave
                   as if the prior field was not a bitfield.  */
                prev_saved = NULL;

              /* Cause a new bitfield to be captured, either this time (if
                 currently a bitfield) or next time we see one.  */
              if (!DECL_BIT_FIELD_TYPE (field)
                  || integer_zerop (DECL_SIZE (field)))
                rli->prev_field = NULL;
            }

          rli->prev_packed = 0;
          normalize_rli (rli);
        }

      /* If we're starting a new run of same size type bitfields
         (or a run of non-bitfields), set up the "first of the run"
         fields.

         That is, if the current field is not a bitfield, or if there
         was a prior bitfield and the type sizes differ, or if there wasn't
         a prior bitfield and the size of the current field is nonzero.

         Note: we must be sure to test ONLY the type size if there was
         a prior bitfield and ONLY for the current field being zero if
         there wasn't.  */

      if (!DECL_BIT_FIELD_TYPE (field)
          || (prev_saved != NULL
              ? !simple_cst_equal (TYPE_SIZE (type),
                                   TYPE_SIZE (TREE_TYPE (prev_saved)))
              : !integer_zerop (DECL_SIZE (field))))
        {
          /* Never smaller than a byte for compatibility.  */
          unsigned int type_align = BITS_PER_UNIT;

          /* When not a bitfield, we could be seeing a flex array (with
             no DECL_SIZE).  Since we won't be using remaining_in_alignment
             until we see a bitfield (and come by here again) we just skip
             calculating it.  */
          if (DECL_SIZE (field) != NULL
              && host_integerp (TYPE_SIZE (TREE_TYPE (field)), 0)
              && host_integerp (DECL_SIZE (field), 0))
            rli->remaining_in_alignment
              = tree_low_cst (TYPE_SIZE (TREE_TYPE (field)), 0)
                - tree_low_cst (DECL_SIZE (field), 0);

          /* Now align (conventionally) for the new type.  */
          if (!DECL_PACKED (field))
            type_align = MAX (TYPE_ALIGN (type), type_align);

          if (prev_saved
              && DECL_BIT_FIELD_TYPE (prev_saved)
              /* If the previous bit-field is zero-sized, we've already
                 accounted for its alignment needs (or ignored it, if
                 appropriate) while placing it.  */
              && ! integer_zerop (DECL_SIZE (prev_saved)))
            type_align = MAX (type_align,
                              TYPE_ALIGN (TREE_TYPE (prev_saved)));

          if (maximum_field_alignment != 0)
            type_align = MIN (type_align, maximum_field_alignment);

          rli->bitpos = round_up (rli->bitpos, type_align);

          /* If we really aligned, don't allow subsequent bitfields
             to undo that.  */
          rli->prev_field = NULL;
        }
    }

  /* Offset so far becomes the position of this field after normalizing.  */
  normalize_rli (rli);
  DECL_FIELD_OFFSET (field) = rli->offset;
  DECL_FIELD_BIT_OFFSET (field) = rli->bitpos;
  SET_DECL_OFFSET_ALIGN (field, rli->offset_align);

  /* If this field ended up more aligned than we thought it would be (we
     approximate this by seeing if its position changed), lay out the field
     again; perhaps we can use an integral mode for it now.  */
  if (! integer_zerop (DECL_FIELD_BIT_OFFSET (field)))
    actual_align = (tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
                    & - tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1));
  else if (integer_zerop (DECL_FIELD_OFFSET (field)))
    actual_align = MAX (BIGGEST_ALIGNMENT, rli->record_align);
  else if (host_integerp (DECL_FIELD_OFFSET (field), 1))
    actual_align = (BITS_PER_UNIT
                    * (tree_low_cst (DECL_FIELD_OFFSET (field), 1)
                       & - tree_low_cst (DECL_FIELD_OFFSET (field), 1)));
  else
    actual_align = DECL_OFFSET_ALIGN (field);
  /* ACTUAL_ALIGN is still the actual alignment *within the record*;
     store / extract bit field operations will check the alignment of the
     record against the mode of bit fields.  */

  if (known_align != actual_align)
    layout_decl (field, actual_align);

  if (DECL_BIT_FIELD_TYPE (field))
    {
      unsigned int type_align = TYPE_ALIGN (type);
      unsigned int mfa = maximum_field_alignment;

      if (integer_zerop (DECL_SIZE (field)))
        mfa = initial_max_fld_align * BITS_PER_UNIT;

      /* Only the MS bitfields use this.  We used to also put any kind of
         packed bit fields into prev_field, but that makes no sense, because
         an 8 bit packed bit field shouldn't impose more restrictions on
         following fields than a char field, and the alignment requirements
         are also not fulfilled.
         There is no sane value to set rli->remaining_in_alignment to when
         a packed bitfield in prev_field is unaligned.  */
      if (mfa != 0)
        type_align = MIN (type_align, mfa);
      gcc_assert (rli->prev_field
                  || actual_align >= type_align || DECL_PACKED (field)
                  || integer_zerop (DECL_SIZE (field))
                  || !targetm.ms_bitfield_layout_p (rli->t));
      if (rli->prev_field == NULL && actual_align >= type_align
          && !integer_zerop (DECL_SIZE (field)))
        {
          rli->prev_field = field;
          /* rli->remaining_in_alignment has not been set if the bitfield
             has size zero, or if it is a packed bitfield.  */
          rli->remaining_in_alignment
            = (tree_low_cst (TYPE_SIZE (TREE_TYPE (field)), 0)
               - tree_low_cst (DECL_SIZE (field), 0));
          rli->prev_packed = DECL_PACKED (field);
        }
      else if (rli->prev_field && DECL_PACKED (field))
        {
          HOST_WIDE_INT bitsize = tree_low_cst (DECL_SIZE (field), 0);

          if (rli->remaining_in_alignment < bitsize)
            rli->prev_field = NULL;
          else
            rli->remaining_in_alignment -= bitsize;
        }
    }

  /* Now add size of this field to the size of the record.  If the size is
     not constant, treat the field as being a multiple of bytes and just
     adjust the offset, resetting the bit position.  Otherwise, apportion the
     size amongst the bit position and offset.  First handle the case of an
     unspecified size, which can happen when we have an invalid nested struct
     definition, such as struct j { struct j { int i; } }.  The error message
     is printed in finish_struct.  */
  if (DECL_SIZE (field) == 0)
    /* Do nothing.  */;
  else if (TREE_CODE (DECL_SIZE_UNIT (field)) != INTEGER_CST
           || TREE_CONSTANT_OVERFLOW (DECL_SIZE_UNIT (field)))
    {
      rli->offset
        = size_binop (PLUS_EXPR, rli->offset,
                      fold_convert (sizetype,
                                    size_binop (CEIL_DIV_EXPR, rli->bitpos,
                                                bitsize_unit_node)));
      rli->offset
        = size_binop (PLUS_EXPR, rli->offset, DECL_SIZE_UNIT (field));
      rli->bitpos = bitsize_zero_node;
      rli->offset_align = MIN (rli->offset_align, desired_align);
    }
  else
    {
      rli->bitpos = size_binop (PLUS_EXPR, rli->bitpos, DECL_SIZE (field));
      normalize_rli (rli);
    }
}
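
/* Illustrative example (assumes a typical 32-bit target with 8-bit
   bytes): placing the fields of `struct { char c; int i; }', `c' lands
   at offset 0 and advances rli->bitpos to 8.  For `i', known_align is 8
   while desired_align is 32, so the OPT_Wpadded "padding struct to
   align" warning fires and the position is rounded up to the next
   32-bit boundary, giving `i' a byte offset of 4.  */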

/* Assuming that all the fields have been laid out, this function uses
   RLI to compute the final TYPE_SIZE, TYPE_ALIGN, etc. for the type
   indicated by RLI.  */

static void
finalize_record_size (record_layout_info rli)
{
  tree unpadded_size, unpadded_size_unit;

  /* Now we want just byte and bit offsets, so set the offset alignment
     to be a byte and then normalize.  */
  rli->offset_align = BITS_PER_UNIT;
  normalize_rli (rli);

  /* Determine the desired alignment.  */
#ifdef ROUND_TYPE_ALIGN
  TYPE_ALIGN (rli->t) = ROUND_TYPE_ALIGN (rli->t, TYPE_ALIGN (rli->t),
                                          rli->record_align);
#else
  TYPE_ALIGN (rli->t) = MAX (TYPE_ALIGN (rli->t), rli->record_align);
#endif

  /* Compute the size so far.  Be sure to allow for extra bits in the
     size in bytes.  We have guaranteed above that it will be no more
     than a single byte.  */
  unpadded_size = rli_size_so_far (rli);
  unpadded_size_unit = rli_size_unit_so_far (rli);
  if (! integer_zerop (rli->bitpos))
    unpadded_size_unit
      = size_binop (PLUS_EXPR, unpadded_size_unit, size_one_node);

  /* Round the size up to be a multiple of the required alignment.  */
  TYPE_SIZE (rli->t) = round_up (unpadded_size, TYPE_ALIGN (rli->t));
  TYPE_SIZE_UNIT (rli->t)
    = round_up (unpadded_size_unit, TYPE_ALIGN_UNIT (rli->t));

  if (TREE_CONSTANT (unpadded_size)
      && simple_cst_equal (unpadded_size, TYPE_SIZE (rli->t)) == 0)
    warning (OPT_Wpadded, "padding struct size to alignment boundary");

  if (warn_packed && TREE_CODE (rli->t) == RECORD_TYPE
      && TYPE_PACKED (rli->t) && ! rli->packed_maybe_necessary
      && TREE_CONSTANT (unpadded_size))
    {
      tree unpacked_size;

#ifdef ROUND_TYPE_ALIGN
      rli->unpacked_align
        = ROUND_TYPE_ALIGN (rli->t, TYPE_ALIGN (rli->t), rli->unpacked_align);
#else
      rli->unpacked_align = MAX (TYPE_ALIGN (rli->t), rli->unpacked_align);
#endif

      unpacked_size = round_up (TYPE_SIZE (rli->t), rli->unpacked_align);
      if (simple_cst_equal (unpacked_size, TYPE_SIZE (rli->t)))
        {
          TYPE_PACKED (rli->t) = 0;

          if (TYPE_NAME (rli->t))
            {
              const char *name;

              if (TREE_CODE (TYPE_NAME (rli->t)) == IDENTIFIER_NODE)
                name = IDENTIFIER_POINTER (TYPE_NAME (rli->t));
              else
                name = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (rli->t)));

              if (STRICT_ALIGNMENT)
                warning (OPT_Wpacked, "packed attribute causes inefficient "
                         "alignment for %qs", name);
              else
                warning (OPT_Wpacked,
                         "packed attribute is unnecessary for %qs", name);
            }
          else
            {
              if (STRICT_ALIGNMENT)
                warning (OPT_Wpacked,
                         "packed attribute causes inefficient alignment");
              else
                warning (OPT_Wpacked, "packed attribute is unnecessary");
            }
        }
    }
}
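
/* Illustrative example (assumes a typical 32-bit target): for
   `struct { int i; char c; }' the unpadded size is 40 bits (5 bytes);
   rounding up to the 32-bit record alignment yields TYPE_SIZE = 64 bits
   and TYPE_SIZE_UNIT = 8, and since 40 != 64 the OPT_Wpadded "padding
   struct size to alignment boundary" warning above fires.  */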

/* Compute the TYPE_MODE for the TYPE (which is a RECORD_TYPE).  */

void
compute_record_mode (tree type)
{
  tree field;
  enum machine_mode mode = VOIDmode;

  /* Most RECORD_TYPEs have BLKmode, so we start off assuming that.
     However, if possible, we use a mode that fits in a register
     instead, in order to allow for better optimization down the
     line.  */
  TYPE_MODE (type) = BLKmode;

  if (! host_integerp (TYPE_SIZE (type), 1))
    return;

  /* A record which has any BLKmode members must itself be
     BLKmode; it can't go in a register.  Unless the member is
     BLKmode only because it isn't aligned.  */
  for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
    {
      if (TREE_CODE (field) != FIELD_DECL)
        continue;

      if (TREE_CODE (TREE_TYPE (field)) == ERROR_MARK
          || (TYPE_MODE (TREE_TYPE (field)) == BLKmode
              && ! TYPE_NO_FORCE_BLK (TREE_TYPE (field))
              && !(TYPE_SIZE (TREE_TYPE (field)) != 0
                   && integer_zerop (TYPE_SIZE (TREE_TYPE (field)))))
          || ! host_integerp (bit_position (field), 1)
          || DECL_SIZE (field) == 0
          || ! host_integerp (DECL_SIZE (field), 1))
        return;

      /* If this field is the whole struct, remember its mode so
         that, say, we can put a double in a class into a DF
         register instead of forcing it to live in the stack.  */
      if (simple_cst_equal (TYPE_SIZE (type), DECL_SIZE (field)))
        mode = DECL_MODE (field);

#ifdef MEMBER_TYPE_FORCES_BLK
      /* With some targets, e.g. c4x, it is sub-optimal
         to access an aligned BLKmode structure as a scalar.  */

      if (MEMBER_TYPE_FORCES_BLK (field, mode))
        return;
#endif /* MEMBER_TYPE_FORCES_BLK  */
    }

  /* If we only have one real field, use its mode if that mode's size
     matches the type's size.  This only applies to RECORD_TYPE.  This
     does not apply to unions.  */
  if (TREE_CODE (type) == RECORD_TYPE && mode != VOIDmode
      && host_integerp (TYPE_SIZE (type), 1)
      && GET_MODE_BITSIZE (mode) == TREE_INT_CST_LOW (TYPE_SIZE (type)))
    TYPE_MODE (type) = mode;
  else
    TYPE_MODE (type) = mode_for_size_tree (TYPE_SIZE (type), MODE_INT, 1);

  /* If structure's known alignment is less than what the scalar
     mode would need, and it matters, then stick with BLKmode.  */
  if (TYPE_MODE (type) != BLKmode
      && STRICT_ALIGNMENT
      && ! (TYPE_ALIGN (type) >= BIGGEST_ALIGNMENT
            || TYPE_ALIGN (type) >= GET_MODE_ALIGNMENT (TYPE_MODE (type))))
    {
      /* If this is the only reason this type is BLKmode, then
         don't force containing types to be BLKmode.  */
      TYPE_NO_FORCE_BLK (type) = 1;
      TYPE_MODE (type) = BLKmode;
    }
}
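
/* Illustration (a sketch, assuming a target with DFmode doubles and
   no MEMBER_TYPE_FORCES_BLK; not part of the original source): for

       struct wrapper { double d; };

   the loop above records mode == DFmode because the single field
   spans the whole struct, and since GET_MODE_BITSIZE (DFmode)
   matches TYPE_SIZE, the struct itself gets DFmode and can be kept
   in a floating-point register rather than forced into memory as
   BLKmode.  */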

/* Compute TYPE_SIZE and TYPE_ALIGN for TYPE, once it has been laid
   out.  */

static void
finalize_type_size (tree type)
{
  /* Normally, use the alignment corresponding to the mode chosen.
     However, where strict alignment is not required, avoid
     over-aligning structures, since most compilers do not do this
     alignment.  */

  if (TYPE_MODE (type) != BLKmode && TYPE_MODE (type) != VOIDmode
      && (STRICT_ALIGNMENT
          || (TREE_CODE (type) != RECORD_TYPE && TREE_CODE (type) != UNION_TYPE
              && TREE_CODE (type) != QUAL_UNION_TYPE
              && TREE_CODE (type) != ARRAY_TYPE)))
    {
      unsigned mode_align = GET_MODE_ALIGNMENT (TYPE_MODE (type));

      /* Don't override a larger alignment requirement coming from a user
         alignment of one of the fields.  */
      if (mode_align >= TYPE_ALIGN (type))
        {
          TYPE_ALIGN (type) = mode_align;
          TYPE_USER_ALIGN (type) = 0;
        }
    }

  /* Do machine-dependent extra alignment.  */
#ifdef ROUND_TYPE_ALIGN
  TYPE_ALIGN (type)
    = ROUND_TYPE_ALIGN (type, TYPE_ALIGN (type), BITS_PER_UNIT);
#endif

  /* If we failed to find a simple way to calculate the unit size
     of the type, find it by division.  */
  if (TYPE_SIZE_UNIT (type) == 0 && TYPE_SIZE (type) != 0)
    /* TYPE_SIZE (type) is computed in bitsizetype.  After the division, the
       result will fit in sizetype.  We will get more efficient code using
       sizetype, so we force a conversion.  */
    TYPE_SIZE_UNIT (type)
      = fold_convert (sizetype,
                      size_binop (FLOOR_DIV_EXPR, TYPE_SIZE (type),
                                  bitsize_unit_node));

  if (TYPE_SIZE (type) != 0)
    {
      TYPE_SIZE (type) = round_up (TYPE_SIZE (type), TYPE_ALIGN (type));
      TYPE_SIZE_UNIT (type) = round_up (TYPE_SIZE_UNIT (type),
                                        TYPE_ALIGN_UNIT (type));
    }

  /* Evaluate nonconstant sizes only once, either now or as soon as safe.  */
  if (TYPE_SIZE (type) != 0 && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
    TYPE_SIZE (type) = variable_size (TYPE_SIZE (type));
  if (TYPE_SIZE_UNIT (type) != 0
      && TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST)
    TYPE_SIZE_UNIT (type) = variable_size (TYPE_SIZE_UNIT (type));

  /* Also layout any other variants of the type.  */
  if (TYPE_NEXT_VARIANT (type)
      || type != TYPE_MAIN_VARIANT (type))
    {
      tree variant;
      /* Record layout info of this variant.  */
      tree size = TYPE_SIZE (type);
      tree size_unit = TYPE_SIZE_UNIT (type);
      unsigned int align = TYPE_ALIGN (type);
      unsigned int user_align = TYPE_USER_ALIGN (type);
      enum machine_mode mode = TYPE_MODE (type);

      /* Copy it into all variants.  */
      for (variant = TYPE_MAIN_VARIANT (type);
           variant != 0;
           variant = TYPE_NEXT_VARIANT (variant))
        {
          TYPE_SIZE (variant) = size;
          TYPE_SIZE_UNIT (variant) = size_unit;
          TYPE_ALIGN (variant) = align;
          TYPE_USER_ALIGN (variant) = user_align;
          TYPE_MODE (variant) = mode;
        }
    }
}
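
/* Worked example (illustrative, not part of the original source): if
   a type reaches this point with TYPE_SIZE == 40 bits, no
   TYPE_SIZE_UNIT, and TYPE_ALIGN == 32, the division above yields
   TYPE_SIZE_UNIT == 5 bytes, and the round_up calls then produce
   TYPE_SIZE == 64 bits and TYPE_SIZE_UNIT == 8 bytes; those values
   are copied into every variant of the type.  */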

/* Do all of the work required to lay out the type indicated by RLI,
   once the fields have been laid out.  This function will call `free'
   for RLI, unless FREE_P is false.  Passing a value other than false
   for FREE_P is bad practice; this option only exists to support the
   G++ 3.2 ABI.  */

void
finish_record_layout (record_layout_info rli, int free_p)
{
  /* Compute the final size.  */
  finalize_record_size (rli);

  /* Compute the TYPE_MODE for the record.  */
  compute_record_mode (rli->t);

  /* Perform any last tweaks to the TYPE_SIZE, etc.  */
  finalize_type_size (rli->t);

  /* Lay out any static members.  This is done now because their type
     may use the record's type.  */
  while (rli->pending_statics)
    {
      layout_decl (TREE_VALUE (rli->pending_statics), 0);
      rli->pending_statics = TREE_CHAIN (rli->pending_statics);
    }

  /* Clean up.  */
  if (free_p)
    free (rli);
}

/* Finish processing a builtin RECORD_TYPE type TYPE.  Its name is
   NAME, its fields are chained in reverse on FIELDS.

   If ALIGN_TYPE is non-null, it is given the same alignment as
   ALIGN_TYPE.  */

void
finish_builtin_struct (tree type, const char *name, tree fields,
                       tree align_type)
{
  tree tail, next;

  for (tail = NULL_TREE; fields; tail = fields, fields = next)
    {
      DECL_FIELD_CONTEXT (fields) = type;
      next = TREE_CHAIN (fields);
      TREE_CHAIN (fields) = tail;
    }
  TYPE_FIELDS (type) = tail;

  if (align_type)
    {
      TYPE_ALIGN (type) = TYPE_ALIGN (align_type);
      TYPE_USER_ALIGN (type) = TYPE_USER_ALIGN (align_type);
    }

  layout_type (type);
#if 0 /* not yet, should get fixed properly later */
  TYPE_NAME (type) = make_type_decl (get_identifier (name), type);
#else
  TYPE_NAME (type) = build_decl (TYPE_DECL, get_identifier (name), type);
#endif
  TYPE_STUB_DECL (type) = TYPE_NAME (type);
  layout_decl (TYPE_NAME (type), 0);
}
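
/* Usage sketch (illustrative only; the field and record names here
   are invented for the example): a front end can build a builtin
   two-field record like so:

       tree f_ptr = build_decl (FIELD_DECL, get_identifier ("ptr"),
                                ptr_type_node);
       tree f_len = build_decl (FIELD_DECL, get_identifier ("len"),
                                integer_type_node);
       tree rec = make_node (RECORD_TYPE);
       TREE_CHAIN (f_len) = f_ptr;
       finish_builtin_struct (rec, "__example_pair", f_len, NULL_TREE);

   Note that FIELDS is passed in reverse; the loop above re-reverses
   the chain before laying out the record.  */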

/* Calculate the mode, size, and alignment for TYPE.
   For an array type, calculate the element separation as well.
   Record TYPE on the chain of permanent or temporary types
   so that dbxout will find out about it.

   TYPE_SIZE of a type is nonzero if the type has been laid out already.
   layout_type does nothing on such a type.

   If the type is incomplete, its TYPE_SIZE remains zero.  */

void
layout_type (tree type)
{
  gcc_assert (type);

  if (type == error_mark_node)
    return;

  /* Do nothing if type has been laid out before.  */
  if (TYPE_SIZE (type))
    return;

  switch (TREE_CODE (type))
    {
    case LANG_TYPE:
      /* This kind of type is the responsibility
         of the language-specific code.  */
      gcc_unreachable ();

    case BOOLEAN_TYPE:  /* Used for Java, Pascal, and Chill.  */
      if (TYPE_PRECISION (type) == 0)
        TYPE_PRECISION (type) = 1; /* default to one byte/boolean.  */

      /* ... fall through ...  */

    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case CHAR_TYPE:
      if (TREE_CODE (TYPE_MIN_VALUE (type)) == INTEGER_CST
          && tree_int_cst_sgn (TYPE_MIN_VALUE (type)) >= 0)
        TYPE_UNSIGNED (type) = 1;

      TYPE_MODE (type) = smallest_mode_for_size (TYPE_PRECISION (type),
                                                 MODE_INT);
      TYPE_SIZE (type) = bitsize_int (GET_MODE_BITSIZE (TYPE_MODE (type)));
      TYPE_SIZE_UNIT (type) = size_int (GET_MODE_SIZE (TYPE_MODE (type)));
      break;
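
    /* Example (illustrative, not part of the original source): a
       7-bit INTEGER_TYPE is given QImode by smallest_mode_for_size,
       so its TYPE_SIZE becomes 8 bits and TYPE_SIZE_UNIT 1 byte,
       while TYPE_PRECISION stays 7.  */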

    case REAL_TYPE:
      TYPE_MODE (type) = mode_for_size (TYPE_PRECISION (type), MODE_FLOAT, 0);
      TYPE_SIZE (type) = bitsize_int (GET_MODE_BITSIZE (TYPE_MODE (type)));
      TYPE_SIZE_UNIT (type) = size_int (GET_MODE_SIZE (TYPE_MODE (type)));
      break;

    case COMPLEX_TYPE:
      TYPE_UNSIGNED (type) = TYPE_UNSIGNED (TREE_TYPE (type));
      TYPE_MODE (type)
        = mode_for_size (2 * TYPE_PRECISION (TREE_TYPE (type)),
                         (TREE_CODE (TREE_TYPE (type)) == REAL_TYPE
                          ? MODE_COMPLEX_FLOAT : MODE_COMPLEX_INT),
                         0);
      TYPE_SIZE (type) = bitsize_int (GET_MODE_BITSIZE (TYPE_MODE (type)));
      TYPE_SIZE_UNIT (type) = size_int (GET_MODE_SIZE (TYPE_MODE (type)));
      break;

    case VECTOR_TYPE:
      {
        int nunits = TYPE_VECTOR_SUBPARTS (type);
        tree nunits_tree = build_int_cst (NULL_TREE, nunits);
        tree innertype = TREE_TYPE (type);

        gcc_assert (!(nunits & (nunits - 1)));

        /* Find an appropriate mode for the vector type.  */
        if (TYPE_MODE (type) == VOIDmode)
          {
            enum machine_mode innermode = TYPE_MODE (innertype);
            enum machine_mode mode;

            /* First, look for a supported vector type.  */
            if (GET_MODE_CLASS (innermode) == MODE_FLOAT)
              mode = MIN_MODE_VECTOR_FLOAT;
            else
              mode = MIN_MODE_VECTOR_INT;

            for (; mode != VOIDmode; mode = GET_MODE_WIDER_MODE (mode))
              if (GET_MODE_NUNITS (mode) == nunits
                  && GET_MODE_INNER (mode) == innermode
                  && targetm.vector_mode_supported_p (mode))
                break;

            /* For integers, try mapping it to a same-sized scalar mode.  */
            if (mode == VOIDmode
                && GET_MODE_CLASS (innermode) == MODE_INT)
              mode = mode_for_size (nunits * GET_MODE_BITSIZE (innermode),
                                    MODE_INT, 0);

            if (mode == VOIDmode || !have_regs_of_mode[mode])
              TYPE_MODE (type) = BLKmode;
            else
              TYPE_MODE (type) = mode;
          }

        TYPE_UNSIGNED (type) = TYPE_UNSIGNED (TREE_TYPE (type));
        TYPE_SIZE_UNIT (type) = int_const_binop (MULT_EXPR,
                                                 TYPE_SIZE_UNIT (innertype),
                                                 nunits_tree, 0);
        TYPE_SIZE (type) = int_const_binop (MULT_EXPR, TYPE_SIZE (innertype),
                                            nunits_tree, 0);

        /* Always naturally align vectors.  This prevents ABI changes
           depending on whether or not native vector modes are supported.  */
        TYPE_ALIGN (type) = tree_low_cst (TYPE_SIZE (type), 0);
        break;
      }
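
    /* Example (illustrative sketch, assuming a target with SFmode
       floats): for a vector of 4 floats the loop above walks the
       vector modes starting at MIN_MODE_VECTOR_FLOAT and stops at
       V4SFmode if targetm.vector_mode_supported_p accepts it;
       otherwise the type stays BLKmode, but the natural-alignment
       rule above still gives it the full 128-bit alignment, so the
       ABI does not depend on native vector support.  */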

    case VOID_TYPE:
      /* This is an incomplete type and so doesn't have a size.  */
      TYPE_ALIGN (type) = 1;
      TYPE_USER_ALIGN (type) = 0;
      TYPE_MODE (type) = VOIDmode;
      break;

    case OFFSET_TYPE:
      TYPE_SIZE (type) = bitsize_int (POINTER_SIZE);
      TYPE_SIZE_UNIT (type) = size_int (POINTER_SIZE / BITS_PER_UNIT);
      /* A pointer might be MODE_PARTIAL_INT,
         but ptrdiff_t must be integral.  */
      TYPE_MODE (type) = mode_for_size (POINTER_SIZE, MODE_INT, 0);
      break;

    case FUNCTION_TYPE:
    case METHOD_TYPE:
      /* It's hard to see what the mode and size of a function ought to
         be, but we do know the alignment is FUNCTION_BOUNDARY, so
         make it consistent with that.  */
      TYPE_MODE (type) = mode_for_size (FUNCTION_BOUNDARY, MODE_INT, 0);
      TYPE_SIZE (type) = bitsize_int (FUNCTION_BOUNDARY);
      TYPE_SIZE_UNIT (type) = size_int (FUNCTION_BOUNDARY / BITS_PER_UNIT);
      break;

    case POINTER_TYPE:
    case REFERENCE_TYPE:
      {
        enum machine_mode mode = ((TREE_CODE (type) == REFERENCE_TYPE
                                   && reference_types_internal)
                                  ? Pmode : TYPE_MODE (type));

        int nbits = GET_MODE_BITSIZE (mode);

        TYPE_SIZE (type) = bitsize_int (nbits);
        TYPE_SIZE_UNIT (type) = size_int (GET_MODE_SIZE (mode));
        TYPE_UNSIGNED (type) = 1;
        TYPE_PRECISION (type) = nbits;
      }
      break;

    case ARRAY_TYPE:
      {
        tree index = TYPE_DOMAIN (type);
        tree element = TREE_TYPE (type);

        build_pointer_type (element);

        /* We need to know both bounds in order to compute the size.  */
        if (index && TYPE_MAX_VALUE (index) && TYPE_MIN_VALUE (index)
            && TYPE_SIZE (element))
          {
            tree ub = TYPE_MAX_VALUE (index);
            tree lb = TYPE_MIN_VALUE (index);
            tree length;
            tree element_size;

            /* The initial subtraction should happen in the original type so
               that (possible) negative values are handled appropriately.  */
            length = size_binop (PLUS_EXPR, size_one_node,
                                 fold_convert (sizetype,
                                               fold_build2 (MINUS_EXPR,
                                                            TREE_TYPE (lb),
                                                            ub, lb)));

            /* Special handling for arrays of bits (for Chill).  */
            element_size = TYPE_SIZE (element);
            if (TYPE_PACKED (type) && INTEGRAL_TYPE_P (element)
                && (integer_zerop (TYPE_MAX_VALUE (element))
                    || integer_onep (TYPE_MAX_VALUE (element)))
                && host_integerp (TYPE_MIN_VALUE (element), 1))
              {
                HOST_WIDE_INT maxvalue
                  = tree_low_cst (TYPE_MAX_VALUE (element), 1);
                HOST_WIDE_INT minvalue
                  = tree_low_cst (TYPE_MIN_VALUE (element), 1);

                if (maxvalue - minvalue == 1
                    && (maxvalue == 1 || maxvalue == 0))
                  element_size = integer_one_node;
              }

            /* If neither bound is a constant and sizetype is signed, make
               sure the size is never negative.  We should really do this
               if *either* bound is non-constant, but this is the best
               compromise between C and Ada.  */
            if (!TYPE_UNSIGNED (sizetype)
                && TREE_CODE (TYPE_MIN_VALUE (index)) != INTEGER_CST
                && TREE_CODE (TYPE_MAX_VALUE (index)) != INTEGER_CST)
              length = size_binop (MAX_EXPR, length, size_zero_node);

            TYPE_SIZE (type) = size_binop (MULT_EXPR, element_size,
                                           fold_convert (bitsizetype,
                                                         length));

            /* If we know the size of the element, calculate the total
               size directly, rather than do some division thing below.
               This optimization helps Fortran assumed-size arrays
               (where the size of the array is determined at runtime)
               substantially.
               Note that we can't do this in the case where the size of
               the elements is one bit since TYPE_SIZE_UNIT cannot be
               set correctly in that case.  */
            if (TYPE_SIZE_UNIT (element) != 0 && ! integer_onep (element_size))
              TYPE_SIZE_UNIT (type)
                = size_binop (MULT_EXPR, TYPE_SIZE_UNIT (element), length);
          }

        /* Now round the alignment and size,
           using machine-dependent criteria if any.  */

#ifdef ROUND_TYPE_ALIGN
        TYPE_ALIGN (type)
          = ROUND_TYPE_ALIGN (type, TYPE_ALIGN (element), BITS_PER_UNIT);
#else
        TYPE_ALIGN (type) = MAX (TYPE_ALIGN (element), BITS_PER_UNIT);
#endif
        TYPE_USER_ALIGN (type) = TYPE_USER_ALIGN (element);
        TYPE_MODE (type) = BLKmode;
        if (TYPE_SIZE (type) != 0
#ifdef MEMBER_TYPE_FORCES_BLK
            && ! MEMBER_TYPE_FORCES_BLK (type, VOIDmode)
#endif
            /* BLKmode elements force BLKmode aggregate;
               else extract/store fields may lose.  */
            && (TYPE_MODE (TREE_TYPE (type)) != BLKmode
                || TYPE_NO_FORCE_BLK (TREE_TYPE (type))))
          {
            /* One-element arrays get the component type's mode.  */
            if (simple_cst_equal (TYPE_SIZE (type),
                                  TYPE_SIZE (TREE_TYPE (type))))
              TYPE_MODE (type) = TYPE_MODE (TREE_TYPE (type));
            else
              TYPE_MODE (type)
                = mode_for_size_tree (TYPE_SIZE (type), MODE_INT, 1);

            if (TYPE_MODE (type) != BLKmode
                && STRICT_ALIGNMENT && TYPE_ALIGN (type) < BIGGEST_ALIGNMENT
                && TYPE_ALIGN (type) < GET_MODE_ALIGNMENT (TYPE_MODE (type))
                && TYPE_MODE (type) != BLKmode)
              {
                TYPE_NO_FORCE_BLK (type) = 1;
                TYPE_MODE (type) = BLKmode;
              }
          }
        /* When the element size is constant, check that it is at least as
           large as the element alignment.  */
        if (TYPE_SIZE_UNIT (element)
            && TREE_CODE (TYPE_SIZE_UNIT (element)) == INTEGER_CST
            /* If TYPE_SIZE_UNIT overflowed, then it is certainly larger than
               TYPE_ALIGN_UNIT.  */
            && !TREE_CONSTANT_OVERFLOW (TYPE_SIZE_UNIT (element))
            && !integer_zerop (TYPE_SIZE_UNIT (element))
            && compare_tree_int (TYPE_SIZE_UNIT (element),
                                 TYPE_ALIGN_UNIT (element)) < 0)
          error ("alignment of array elements is greater than element size");
        break;
      }
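
    /* Worked example (illustrative, not part of the original
       source): for 'int a[10]' with a 0-based domain and 32-bit
       ints, length = (9 - 0) + 1 = 10, so TYPE_SIZE = 32 * 10 = 320
       bits and, via the direct multiplication above,
       TYPE_SIZE_UNIT = 4 * 10 = 40 bytes; TYPE_ALIGN is inherited
       from the element type.  */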

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      {
        tree field;
        record_layout_info rli;

        /* Initialize the layout information.  */
        rli = start_record_layout (type);

        /* If this is a QUAL_UNION_TYPE, we want to process the fields
           in the reverse order in building the COND_EXPR that denotes
           its size.  We reverse them again later.  */
        if (TREE_CODE (type) == QUAL_UNION_TYPE)
          TYPE_FIELDS (type) = nreverse (TYPE_FIELDS (type));

        /* Place all the fields.  */
        for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
          place_field (rli, field);

        if (TREE_CODE (type) == QUAL_UNION_TYPE)
          TYPE_FIELDS (type) = nreverse (TYPE_FIELDS (type));

        if (lang_adjust_rli)
          (*lang_adjust_rli) (rli);

        /* Finish laying out the record.  */
        finish_record_layout (rli, /*free_p=*/true);
      }
      break;

    default:
      gcc_unreachable ();
    }

  /* Compute the final TYPE_SIZE, TYPE_ALIGN, etc. for TYPE.  For
     records and unions, finish_record_layout already called this
     function.  */
  if (TREE_CODE (type) != RECORD_TYPE
      && TREE_CODE (type) != UNION_TYPE
      && TREE_CODE (type) != QUAL_UNION_TYPE)
    finalize_type_size (type);

  /* If an alias set has been set for this aggregate when it was incomplete,
     force it into alias set 0.
     This is too conservative, but we cannot call record_component_aliases
     here because some frontends still change the aggregates after
     layout_type.  */
  if (AGGREGATE_TYPE_P (type) && TYPE_ALIAS_SET_KNOWN_P (type))
    TYPE_ALIAS_SET (type) = 0;
}

/* Create and return a type for signed integers of PRECISION bits.  */

tree
make_signed_type (int precision)
{
  tree type = make_node (INTEGER_TYPE);

  TYPE_PRECISION (type) = precision;

  fixup_signed_type (type);
  return type;
}

/* Create and return a type for unsigned integers of PRECISION bits.  */

tree
make_unsigned_type (int precision)
{
  tree type = make_node (INTEGER_TYPE);

  TYPE_PRECISION (type) = precision;

  fixup_unsigned_type (type);
  return type;
}

/* Initialize sizetype and bitsizetype to a reasonable and temporary
   value to enable integer types to be created.  */

void
initialize_sizetypes (bool signed_p)
{
  tree t = make_node (INTEGER_TYPE);

  TYPE_MODE (t) = SImode;
  TYPE_ALIGN (t) = GET_MODE_ALIGNMENT (SImode);
  TYPE_USER_ALIGN (t) = 0;
  TYPE_IS_SIZETYPE (t) = 1;
  TYPE_UNSIGNED (t) = !signed_p;
  TYPE_SIZE (t) = build_int_cst (t, GET_MODE_BITSIZE (SImode));
  TYPE_SIZE_UNIT (t) = build_int_cst (t, GET_MODE_SIZE (SImode));
  TYPE_PRECISION (t) = GET_MODE_BITSIZE (SImode);
  TYPE_MIN_VALUE (t) = build_int_cst (t, 0);

  /* 1000 avoids problems with possible overflow and is certainly
     larger than any size value we'd want to be storing.  */
  TYPE_MAX_VALUE (t) = build_int_cst (t, 1000);

  sizetype = t;
  bitsizetype = build_distinct_type_copy (t);
}

/* Make sizetype a version of TYPE, and initialize *sizetype
   accordingly.  We do this by overwriting the stub sizetype and
   bitsizetype nodes created by initialize_sizetypes.  This makes sure
   that (a) anything stubby about them no longer exists and (b) any
   INTEGER_CSTs created with such a type remain valid.  */

void
set_sizetype (tree type)
{
  int oprecision = TYPE_PRECISION (type);
  /* The *bitsizetype types use a precision that avoids overflows when
     calculating signed sizes / offsets in bits.  However, when
     cross-compiling from a 32 bit to a 64 bit host, we are limited to 64 bit
     precision.  */
  int precision = MIN (oprecision + BITS_PER_UNIT_LOG + 1,
                       2 * HOST_BITS_PER_WIDE_INT);
  tree t;

  gcc_assert (TYPE_UNSIGNED (type) == TYPE_UNSIGNED (sizetype));

  t = build_distinct_type_copy (type);
  /* We do want to use sizetype's cache, as we will be replacing that
     type.  */
  TYPE_CACHED_VALUES (t) = TYPE_CACHED_VALUES (sizetype);
  TYPE_CACHED_VALUES_P (t) = TYPE_CACHED_VALUES_P (sizetype);
  TREE_TYPE (TYPE_CACHED_VALUES (t)) = type;
  TYPE_UID (t) = TYPE_UID (sizetype);
  TYPE_IS_SIZETYPE (t) = 1;

  /* Replace our original stub sizetype.  */
  memcpy (sizetype, t, tree_size (sizetype));
  TYPE_MAIN_VARIANT (sizetype) = sizetype;

  t = make_node (INTEGER_TYPE);
  TYPE_NAME (t) = get_identifier ("bit_size_type");
  /* We do want to use bitsizetype's cache, as we will be replacing that
     type.  */
  TYPE_CACHED_VALUES (t) = TYPE_CACHED_VALUES (bitsizetype);
  TYPE_CACHED_VALUES_P (t) = TYPE_CACHED_VALUES_P (bitsizetype);
  TYPE_PRECISION (t) = precision;
  TYPE_UID (t) = TYPE_UID (bitsizetype);
  TYPE_IS_SIZETYPE (t) = 1;

  /* Replace our original stub bitsizetype.  */
  memcpy (bitsizetype, t, tree_size (bitsizetype));
  TYPE_MAIN_VARIANT (bitsizetype) = bitsizetype;

  if (TYPE_UNSIGNED (type))
    {
      fixup_unsigned_type (bitsizetype);
      ssizetype = build_distinct_type_copy (make_signed_type (oprecision));
      TYPE_IS_SIZETYPE (ssizetype) = 1;
      sbitsizetype = build_distinct_type_copy (make_signed_type (precision));
      TYPE_IS_SIZETYPE (sbitsizetype) = 1;
    }
  else
    {
      fixup_signed_type (bitsizetype);
      ssizetype = sizetype;
      sbitsizetype = bitsizetype;
    }
}
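
/* Worked example (illustrative, assuming a 64-bit HOST_WIDE_INT): if
   the front end passes a 32-bit type, oprecision == 32 and, with
   BITS_PER_UNIT_LOG == 3, bitsizetype receives
   MIN (32 + 3 + 1, 128) == 36 bits of precision -- enough to express
   the bit size of any object in a 32-bit address space without
   overflow.  */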

/* TYPE is an integral type, i.e., an INTEGER_TYPE, ENUMERAL_TYPE,
   BOOLEAN_TYPE, or CHAR_TYPE.  Set TYPE_MIN_VALUE and TYPE_MAX_VALUE
   for TYPE, based on the PRECISION and whether or not the TYPE
   IS_UNSIGNED.  PRECISION need not correspond to a width supported
   natively by the hardware; for example, on a machine with 8-bit,
   16-bit, and 32-bit register modes, PRECISION might be 7, 23, or
   61.  */

void
set_min_and_max_values_for_integral_type (tree type,
                                          int precision,
                                          bool is_unsigned)
{
  tree min_value;
  tree max_value;

  if (is_unsigned)
    {
      min_value = build_int_cst (type, 0);
      max_value
        = build_int_cst_wide (type, precision - HOST_BITS_PER_WIDE_INT >= 0
                              ? -1
                              : ((HOST_WIDE_INT) 1 << precision) - 1,
                              precision - HOST_BITS_PER_WIDE_INT > 0
                              ? ((unsigned HOST_WIDE_INT) ~0
                                 >> (HOST_BITS_PER_WIDE_INT
                                     - (precision - HOST_BITS_PER_WIDE_INT)))
                              : 0);
    }
  else
    {
      min_value
        = build_int_cst_wide (type,
                              (precision - HOST_BITS_PER_WIDE_INT > 0
                               ? 0
                               : (HOST_WIDE_INT) (-1) << (precision - 1)),
                              (((HOST_WIDE_INT) (-1)
                                << (precision - HOST_BITS_PER_WIDE_INT - 1 > 0
                                    ? precision - HOST_BITS_PER_WIDE_INT - 1
                                    : 0))));
      max_value
        = build_int_cst_wide (type,
                              (precision - HOST_BITS_PER_WIDE_INT > 0
                               ? -1
                               : ((HOST_WIDE_INT) 1 << (precision - 1)) - 1),
                              (precision - HOST_BITS_PER_WIDE_INT - 1 > 0
                               ? (((HOST_WIDE_INT) 1
                                   << (precision - HOST_BITS_PER_WIDE_INT - 1))) - 1
                               : 0));
    }

  TYPE_MIN_VALUE (type) = min_value;
  TYPE_MAX_VALUE (type) = max_value;
}
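
/* Worked example (illustrative, not part of the original source):
   with precision == 7, is_unsigned gives the range [0, 127] and
   !is_unsigned gives [-64, 63]; once precision reaches
   HOST_BITS_PER_WIDE_INT, the unsigned maximum takes the -1 ("all
   low bits set") branch above.  */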

/* Set the extreme values of TYPE based on its precision in bits,
   then lay it out.  Used when make_signed_type won't do
   because the tree code is not INTEGER_TYPE.
   E.g. for Pascal, when the -fsigned-char option is given.  */

void
fixup_signed_type (tree type)
{
  int precision = TYPE_PRECISION (type);

  /* We cannot properly represent constants wider than
     2 * HOST_BITS_PER_WIDE_INT bits, but we still need such types,
     as they are used by the i386 vector extensions and friends.  */
  if (precision > HOST_BITS_PER_WIDE_INT * 2)
    precision = HOST_BITS_PER_WIDE_INT * 2;

  set_min_and_max_values_for_integral_type (type, precision,
                                            /*is_unsigned=*/false);

  /* Lay out the type: set its alignment, size, etc.  */
  layout_type (type);
}

/* Set the extreme values of TYPE based on its precision in bits,
   then lay it out.  This is used both in `make_unsigned_type'
   and for enumeral types.  */

void
fixup_unsigned_type (tree type)
{
  int precision = TYPE_PRECISION (type);

  /* We cannot properly represent constants wider than
     2 * HOST_BITS_PER_WIDE_INT bits, but we still need such types,
     as they are used by the i386 vector extensions and friends.  */
  if (precision > HOST_BITS_PER_WIDE_INT * 2)
    precision = HOST_BITS_PER_WIDE_INT * 2;

  TYPE_UNSIGNED (type) = 1;

  set_min_and_max_values_for_integral_type (type, precision,
                                            /*is_unsigned=*/true);

  /* Lay out the type: set its alignment, size, etc.  */
  layout_type (type);
}

/* Find the best machine mode to use when referencing a bit field of length
   BITSIZE bits starting at BITPOS.

   The underlying object is known to be aligned to a boundary of ALIGN bits.
   If LARGEST_MODE is not VOIDmode, it means that we should not use a mode
   larger than LARGEST_MODE (usually SImode).

   If no mode meets all these conditions, we return VOIDmode.  Otherwise, if
   VOLATILEP is true or SLOW_BYTE_ACCESS is false, we return the smallest
   mode meeting these conditions.

   Otherwise (VOLATILEP is false and SLOW_BYTE_ACCESS is true), we return
   the largest mode (but a mode no wider than UNITS_PER_WORD) that meets
   all the conditions.  */

enum machine_mode
get_best_mode (int bitsize, int bitpos, unsigned int align,
               enum machine_mode largest_mode, int volatilep)
{
  enum machine_mode mode;
  unsigned int unit = 0;

  /* Find the narrowest integer mode that contains the bit field.  */
  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      unit = GET_MODE_BITSIZE (mode);
      if ((bitpos % unit) + bitsize <= unit)
        break;
    }

  if (mode == VOIDmode
      /* It is tempting to omit the following line
         if STRICT_ALIGNMENT is true.
         But that is incorrect, since if the bit field uses part of 3 bytes
         and we use a 4-byte mode, we could get a spurious segv
         if the extra 4th byte is past the end of memory.
         (Though at least one Unix compiler ignores this problem:
         that on the Sequent 386 machine.)  */
      || MIN (unit, BIGGEST_ALIGNMENT) > align
      || (largest_mode != VOIDmode && unit > GET_MODE_BITSIZE (largest_mode)))
    return VOIDmode;

  if (SLOW_BYTE_ACCESS && ! volatilep)
    {
      enum machine_mode wide_mode = VOIDmode, tmode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT); tmode != VOIDmode;
           tmode = GET_MODE_WIDER_MODE (tmode))
        {
          unit = GET_MODE_BITSIZE (tmode);
          if (bitpos / unit == (bitpos + bitsize - 1) / unit
              && unit <= BITS_PER_WORD
              && unit <= MIN (align, BIGGEST_ALIGNMENT)
              && (largest_mode == VOIDmode
                  || unit <= GET_MODE_BITSIZE (largest_mode)))
            wide_mode = tmode;
        }

      if (wide_mode != VOIDmode)
        return wide_mode;
    }

  return mode;
}
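
/* Worked example (illustrative, not part of the original source):
   for bitsize == 9 and bitpos == 7 in an object aligned to 32 bits,
   QImode fails the containment test ((7 % 8) + 9 > 8) but HImode
   passes ((7 % 16) + 9 == 16), so HImode is returned -- unless
   SLOW_BYTE_ACCESS and !volatilep widen the choice toward a
   word-sized mode in the second loop.  */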

/* Gets minimal and maximal values for MODE (signed or unsigned depending on
   SIGN).  The returned constants are made to be usable in TARGET_MODE.  */

void
get_mode_bounds (enum machine_mode mode, int sign,
                 enum machine_mode target_mode,
                 rtx *mmin, rtx *mmax)
{
  unsigned size = GET_MODE_BITSIZE (mode);
  unsigned HOST_WIDE_INT min_val, max_val;

  gcc_assert (size <= HOST_BITS_PER_WIDE_INT);

  if (sign)
    {
      min_val = -((unsigned HOST_WIDE_INT) 1 << (size - 1));
      max_val = ((unsigned HOST_WIDE_INT) 1 << (size - 1)) - 1;
    }
  else
    {
      min_val = 0;
      max_val = ((unsigned HOST_WIDE_INT) 1 << (size - 1) << 1) - 1;
    }

  *mmin = gen_int_mode (min_val, target_mode);
  *mmax = gen_int_mode (max_val, target_mode);
}
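
/* Example (illustrative, not part of the original source): for
   QImode (8 bits), sign != 0 yields the bounds [-128, 127] and
   sign == 0 yields [0, 255], each returned as an rtx constant valid
   in TARGET_MODE via gen_int_mode.  */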

#include "gt-stor-layout.h"
