openrisc/trunk/gnu-src/gcc-4.5.1/gcc/tree-affine.c (openrisc_me repository, rev 280)

/* Operations with affine combinations of trees.
   Copyright (C) 2005, 2007, 2008 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "tm_p.h"
#include "hard-reg-set.h"
#include "output.h"
#include "diagnostic.h"
#include "tree-dump.h"
#include "pointer-set.h"
#include "tree-affine.h"
#include "gimple.h"
#include "flags.h"
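
/* Editorial overview, derived from the code below: an affine
   combination COMB represents the value

     COMB.offset + COMB.elts[0].coef * COMB.elts[0].val
                 + ...
                 + COMB.elts[COMB.n - 1].coef * COMB.elts[COMB.n - 1].val
                 + COMB.rest

   where the constants are double_ints interpreted in the precision of
   COMB.type, and REST (with an implicit coefficient of one) holds
   whatever did not fit into the MAX_AFF_ELTS elements.  For example,
   the expression x + 4 * y - 7 corresponds to offset = -7,
   elts = {{x, 1}, {y, 4}}, rest = NULL_TREE.  */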

/* Extends CST as appropriate for the affine combination COMB.  */

double_int
double_int_ext_for_comb (double_int cst, aff_tree *comb)
{
  return double_int_sext (cst, TYPE_PRECISION (comb->type));
}

/* Initializes affine combination COMB so that its value is zero in TYPE.  */

static void
aff_combination_zero (aff_tree *comb, tree type)
{
  comb->type = type;
  comb->offset = double_int_zero;
  comb->n = 0;
  comb->rest = NULL_TREE;
}

/* Sets COMB to CST.  */

void
aff_combination_const (aff_tree *comb, tree type, double_int cst)
{
  aff_combination_zero (comb, type);
  comb->offset = double_int_ext_for_comb (cst, comb);
}

/* Sets COMB to single element ELT.  */

void
aff_combination_elt (aff_tree *comb, tree type, tree elt)
{
  aff_combination_zero (comb, type);

  comb->n = 1;
  comb->elts[0].val = elt;
  comb->elts[0].coef = double_int_one;
}

/* Scales COMB by SCALE.  */

void
aff_combination_scale (aff_tree *comb, double_int scale)
{
  unsigned i, j;

  scale = double_int_ext_for_comb (scale, comb);
  if (double_int_one_p (scale))
    return;

  if (double_int_zero_p (scale))
    {
      aff_combination_zero (comb, comb->type);
      return;
    }

  comb->offset
    = double_int_ext_for_comb (double_int_mul (scale, comb->offset), comb);
  for (i = 0, j = 0; i < comb->n; i++)
    {
      double_int new_coef;

      new_coef
        = double_int_ext_for_comb (double_int_mul (scale, comb->elts[i].coef),
                                   comb);
      /* A coefficient may become zero due to overflow.  Remove the zero
         elements.  */
      if (double_int_zero_p (new_coef))
        continue;
      comb->elts[j].coef = new_coef;
      comb->elts[j].val = comb->elts[i].val;
      j++;
    }
  comb->n = j;

  if (comb->rest)
    {
      tree type = comb->type;
      if (POINTER_TYPE_P (type))
        type = sizetype;
      if (comb->n < MAX_AFF_ELTS)
        {
          comb->elts[comb->n].coef = scale;
          comb->elts[comb->n].val = comb->rest;
          comb->rest = NULL_TREE;
          comb->n++;
        }
      else
        comb->rest = fold_build2 (MULT_EXPR, type, comb->rest,
                                  double_int_to_tree (type, scale));
    }
}
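
/* Usage sketch (editorial, assuming an integer-typed tree X): the
   combination 3*x + 5 can be built and then scaled by 2 as

     aff_tree c, five;
     aff_combination_elt (&c, integer_type_node, x);
     aff_combination_scale (&c, shwi_to_double_int (3));
     aff_combination_const (&five, integer_type_node, shwi_to_double_int (5));
     aff_combination_add (&c, &five);
     aff_combination_scale (&c, shwi_to_double_int (2));

   leaving c.offset equal to 10 and c.elts[0].coef equal to 6.  Scaling
   by zero instead resets C to the zero combination.  */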

/* Adds ELT * SCALE to COMB.  */

void
aff_combination_add_elt (aff_tree *comb, tree elt, double_int scale)
{
  unsigned i;
  tree type;

  scale = double_int_ext_for_comb (scale, comb);
  if (double_int_zero_p (scale))
    return;

  for (i = 0; i < comb->n; i++)
    if (operand_equal_p (comb->elts[i].val, elt, 0))
      {
        double_int new_coef;

        new_coef = double_int_add (comb->elts[i].coef, scale);
        new_coef = double_int_ext_for_comb (new_coef, comb);
        if (!double_int_zero_p (new_coef))
          {
            comb->elts[i].coef = new_coef;
            return;
          }

        comb->n--;
        comb->elts[i] = comb->elts[comb->n];

        if (comb->rest)
          {
            gcc_assert (comb->n == MAX_AFF_ELTS - 1);
            comb->elts[comb->n].coef = double_int_one;
            comb->elts[comb->n].val = comb->rest;
            comb->rest = NULL_TREE;
            comb->n++;
          }
        return;
      }
  if (comb->n < MAX_AFF_ELTS)
    {
      comb->elts[comb->n].coef = scale;
      comb->elts[comb->n].val = elt;
      comb->n++;
      return;
    }

  type = comb->type;
  if (POINTER_TYPE_P (type))
    type = sizetype;

  if (double_int_one_p (scale))
    elt = fold_convert (type, elt);
  else
    elt = fold_build2 (MULT_EXPR, type,
                       fold_convert (type, elt),
                       double_int_to_tree (type, scale));

  if (comb->rest)
    comb->rest = fold_build2 (PLUS_EXPR, type, comb->rest, elt);
  else
    comb->rest = elt;
}

/* Adds CST to C.  */

static void
aff_combination_add_cst (aff_tree *c, double_int cst)
{
  c->offset = double_int_ext_for_comb (double_int_add (c->offset, cst), c);
}

/* Adds COMB2 to COMB1.  */

void
aff_combination_add (aff_tree *comb1, aff_tree *comb2)
{
  unsigned i;

  aff_combination_add_cst (comb1, comb2->offset);
  for (i = 0; i < comb2->n; i++)
    aff_combination_add_elt (comb1, comb2->elts[i].val, comb2->elts[i].coef);
  if (comb2->rest)
    aff_combination_add_elt (comb1, comb2->rest, double_int_one);
}
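
/* For instance (editorial example), if A holds 2*x + 3 and B holds
   1 - x for some shared tree X, then after

     aff_combination_add (&a, &b);

   A holds x + 4: equal elements are merged by aff_combination_add_elt,
   and an element whose coefficient cancels to zero is dropped.  */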

/* Converts affine combination COMB to TYPE.  */

void
aff_combination_convert (aff_tree *comb, tree type)
{
  unsigned i, j;
  tree comb_type = comb->type;

  if (TYPE_PRECISION (type) > TYPE_PRECISION (comb_type))
    {
      tree val = fold_convert (type, aff_combination_to_tree (comb));
      tree_to_aff_combination (val, type, comb);
      return;
    }

  comb->type = type;
  if (comb->rest && !POINTER_TYPE_P (type))
    comb->rest = fold_convert (type, comb->rest);

  if (TYPE_PRECISION (type) == TYPE_PRECISION (comb_type))
    return;

  comb->offset = double_int_ext_for_comb (comb->offset, comb);
  for (i = j = 0; i < comb->n; i++)
    {
      double_int new_coef = double_int_ext_for_comb (comb->elts[i].coef, comb);
      if (double_int_zero_p (new_coef))
        continue;
      comb->elts[j].coef = new_coef;
      comb->elts[j].val = fold_convert (type, comb->elts[i].val);
      j++;
    }

  comb->n = j;
  if (comb->n < MAX_AFF_ELTS && comb->rest)
    {
      comb->elts[comb->n].coef = double_int_one;
      comb->elts[comb->n].val = comb->rest;
      comb->rest = NULL_TREE;
      comb->n++;
    }
}

/* Splits EXPR into an affine combination of parts.  */

void
tree_to_aff_combination (tree expr, tree type, aff_tree *comb)
{
  aff_tree tmp;
  enum tree_code code;
  tree cst, core, toffset;
  HOST_WIDE_INT bitpos, bitsize;
  enum machine_mode mode;
  int unsignedp, volatilep;

  STRIP_NOPS (expr);

  code = TREE_CODE (expr);
  switch (code)
    {
    case INTEGER_CST:
      aff_combination_const (comb, type, tree_to_double_int (expr));
      return;

    case POINTER_PLUS_EXPR:
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
      aff_combination_add (comb, &tmp);
      return;

    case PLUS_EXPR:
    case MINUS_EXPR:
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      tree_to_aff_combination (TREE_OPERAND (expr, 1), type, &tmp);
      if (code == MINUS_EXPR)
        aff_combination_scale (&tmp, double_int_minus_one);
      aff_combination_add (comb, &tmp);
      return;

    case MULT_EXPR:
      cst = TREE_OPERAND (expr, 1);
      if (TREE_CODE (cst) != INTEGER_CST)
        break;
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, tree_to_double_int (cst));
      return;

    case NEGATE_EXPR:
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, double_int_minus_one);
      return;

    case BIT_NOT_EXPR:
      /* ~x = -x - 1 */
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, double_int_minus_one);
      aff_combination_add_cst (comb, double_int_minus_one);
      return;

    case ADDR_EXPR:
      core = get_inner_reference (TREE_OPERAND (expr, 0), &bitsize, &bitpos,
                                  &toffset, &mode, &unsignedp, &volatilep,
                                  false);
      if (bitpos % BITS_PER_UNIT != 0)
        break;
      aff_combination_const (comb, type,
                             uhwi_to_double_int (bitpos / BITS_PER_UNIT));
      core = build_fold_addr_expr (core);
      if (TREE_CODE (core) == ADDR_EXPR)
        aff_combination_add_elt (comb, core, double_int_one);
      else
        {
          tree_to_aff_combination (core, type, &tmp);
          aff_combination_add (comb, &tmp);
        }
      if (toffset)
        {
          tree_to_aff_combination (toffset, type, &tmp);
          aff_combination_add (comb, &tmp);
        }
      return;

    default:
      break;
    }

  aff_combination_elt (comb, type, expr);
}
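
/* For example, an expression (x + 3) * 4 produces the combination
   4*x + 12: the PLUS_EXPR case yields x + 3 and the MULT_EXPR case
   scales it by 4.  Likewise ~x becomes -1*x - 1 via the BIT_NOT_EXPR
   case.  Anything unrecognized, e.g. a bare SSA name or a division,
   ends up as a single element with coefficient one.  */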

/* Creates EXPR + ELT * SCALE in TYPE.  EXPR is taken from affine
   combination COMB.  */

static tree
add_elt_to_tree (tree expr, tree type, tree elt, double_int scale,
                 aff_tree *comb)
{
  enum tree_code code;
  tree type1 = type;
  if (POINTER_TYPE_P (type))
    type1 = sizetype;

  scale = double_int_ext_for_comb (scale, comb);
  elt = fold_convert (type1, elt);

  if (double_int_one_p (scale))
    {
      if (!expr)
        return fold_convert (type, elt);

      if (POINTER_TYPE_P (type))
        return fold_build2 (POINTER_PLUS_EXPR, type, expr, elt);
      return fold_build2 (PLUS_EXPR, type, expr, elt);
    }

  if (double_int_minus_one_p (scale))
    {
      if (!expr)
        return fold_convert (type, fold_build1 (NEGATE_EXPR, type1, elt));

      if (POINTER_TYPE_P (type))
        {
          elt = fold_build1 (NEGATE_EXPR, type1, elt);
          return fold_build2 (POINTER_PLUS_EXPR, type, expr, elt);
        }
      return fold_build2 (MINUS_EXPR, type, expr, elt);
    }

  if (!expr)
    return fold_convert (type,
                         fold_build2 (MULT_EXPR, type1, elt,
                                      double_int_to_tree (type1, scale)));

  if (double_int_negative_p (scale))
    {
      code = MINUS_EXPR;
      scale = double_int_neg (scale);
    }
  else
    code = PLUS_EXPR;

  elt = fold_build2 (MULT_EXPR, type1, elt,
                     double_int_to_tree (type1, scale));
  if (POINTER_TYPE_P (type))
    {
      if (code == MINUS_EXPR)
        elt = fold_build1 (NEGATE_EXPR, type1, elt);
      return fold_build2 (POINTER_PLUS_EXPR, type, expr, elt);
    }
  return fold_build2 (code, type, expr, elt);
}

/* Makes tree from the affine combination COMB.  */

tree
aff_combination_to_tree (aff_tree *comb)
{
  tree type = comb->type;
  tree expr = comb->rest;
  unsigned i;
  double_int off, sgn;
  tree type1 = type;
  if (POINTER_TYPE_P (type))
    type1 = sizetype;

  gcc_assert (comb->n == MAX_AFF_ELTS || comb->rest == NULL_TREE);

  for (i = 0; i < comb->n; i++)
    expr = add_elt_to_tree (expr, type, comb->elts[i].val, comb->elts[i].coef,
                            comb);

  /* Ensure that we get x - 1, not x + (-1) or x + 0xff..f if x is
     unsigned.  */
  if (double_int_negative_p (comb->offset))
    {
      off = double_int_neg (comb->offset);
      sgn = double_int_minus_one;
    }
  else
    {
      off = comb->offset;
      sgn = double_int_one;
    }
  return add_elt_to_tree (expr, type, double_int_to_tree (type1, off), sgn,
                          comb);
}
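
/* A caller can round-trip an expression through the affine form, e.g.

     aff_tree c;
     tree_to_aff_combination (expr, type, &c);
     ... transform C ...
     expr = aff_combination_to_tree (&c);

   Note that the offset is emitted last with an explicit sign, so an
   unsigned x - 1 is not rendered as x + 0xff..f.  */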

/* Copies the tree elements of COMB to ensure that they are not shared.  */

void
unshare_aff_combination (aff_tree *comb)
{
  unsigned i;

  for (i = 0; i < comb->n; i++)
    comb->elts[i].val = unshare_expr (comb->elts[i].val);
  if (comb->rest)
    comb->rest = unshare_expr (comb->rest);
}

/* Removes the M-th element from COMB.  */

void
aff_combination_remove_elt (aff_tree *comb, unsigned m)
{
  comb->n--;
  if (m <= comb->n)
    comb->elts[m] = comb->elts[comb->n];
  if (comb->rest)
    {
      comb->elts[comb->n].coef = double_int_one;
      comb->elts[comb->n].val = comb->rest;
      comb->rest = NULL_TREE;
      comb->n++;
    }
}

/* Adds C * COEF * VAL to R.  VAL may be NULL, in which case only
   C * COEF is added to R.  */

static void
aff_combination_add_product (aff_tree *c, double_int coef, tree val,
                             aff_tree *r)
{
  unsigned i;
  tree aval, type;

  for (i = 0; i < c->n; i++)
    {
      aval = c->elts[i].val;
      if (val)
        {
          type = TREE_TYPE (aval);
          aval = fold_build2 (MULT_EXPR, type, aval,
                              fold_convert (type, val));
        }

      aff_combination_add_elt (r, aval,
                               double_int_mul (coef, c->elts[i].coef));
    }

  if (c->rest)
    {
      aval = c->rest;
      if (val)
        {
          type = TREE_TYPE (aval);
          aval = fold_build2 (MULT_EXPR, type, aval,
                              fold_convert (type, val));
        }

      aff_combination_add_elt (r, aval, coef);
    }

  if (val)
    aff_combination_add_elt (r, val,
                             double_int_mul (coef, c->offset));
  else
    aff_combination_add_cst (r, double_int_mul (coef, c->offset));
}

/* Multiplies C1 by C2, storing the result to R.  */

void
aff_combination_mult (aff_tree *c1, aff_tree *c2, aff_tree *r)
{
  unsigned i;
  gcc_assert (TYPE_PRECISION (c1->type) == TYPE_PRECISION (c2->type));

  aff_combination_zero (r, c1->type);

  for (i = 0; i < c2->n; i++)
    aff_combination_add_product (c1, c2->elts[i].coef, c2->elts[i].val, r);
  if (c2->rest)
    aff_combination_add_product (c1, double_int_one, c2->rest, r);
  aff_combination_add_product (c1, c2->offset, NULL, r);
}
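
/* For example, multiplying C1 = x + 1 by C2 = y + 2 yields
   R = x*y + 2*x + y + 2; the x*y term becomes a new element whose value
   is the tree x * y built by aff_combination_add_product.  Since R is
   zeroed before the products are accumulated, R must be distinct from
   both C1 and C2.  */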

/* Returns the element of COMB whose value is VAL, or NULL if no such
   element exists.  If IDX is not NULL, it is set to the index of VAL in
   COMB.  */

static struct aff_comb_elt *
aff_combination_find_elt (aff_tree *comb, tree val, unsigned *idx)
{
  unsigned i;

  for (i = 0; i < comb->n; i++)
    if (operand_equal_p (comb->elts[i].val, val, 0))
      {
        if (idx)
          *idx = i;

        return &comb->elts[i];
      }

  return NULL;
}

/* Element of the cache that maps ssa name NAME to its expanded form
   as an affine expression EXPANSION.  */

struct name_expansion
{
  aff_tree expansion;

  /* True if the expansion for the name is just being generated.  */
  unsigned in_progress : 1;
};

/* Expands SSA names in COMB recursively.  CACHE is used to cache the
   results.  */

void
aff_combination_expand (aff_tree *comb ATTRIBUTE_UNUSED,
                        struct pointer_map_t **cache ATTRIBUTE_UNUSED)
{
  unsigned i;
  aff_tree to_add, current, curre;
  tree e, rhs;
  gimple def;
  double_int scale;
  void **slot;
  struct name_expansion *exp;

  aff_combination_zero (&to_add, comb->type);
  for (i = 0; i < comb->n; i++)
    {
      tree type, name;
      enum tree_code code;

      e = comb->elts[i].val;
      type = TREE_TYPE (e);
      name = e;
      /* Look through some conversions.  */
      if (TREE_CODE (e) == NOP_EXPR
          && (TYPE_PRECISION (type)
              >= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (e, 0)))))
        name = TREE_OPERAND (e, 0);
      if (TREE_CODE (name) != SSA_NAME)
        continue;
      def = SSA_NAME_DEF_STMT (name);
      if (!is_gimple_assign (def) || gimple_assign_lhs (def) != name)
        continue;

      code = gimple_assign_rhs_code (def);
      if (code != SSA_NAME
          && !IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code))
          && (get_gimple_rhs_class (code) != GIMPLE_SINGLE_RHS
              || !is_gimple_min_invariant (gimple_assign_rhs1 (def))))
        continue;

      /* We do not know whether the reference retains its value at the
         place where the expansion is used.  */
      if (TREE_CODE_CLASS (code) == tcc_reference)
        continue;

      if (!*cache)
        *cache = pointer_map_create ();
      slot = pointer_map_insert (*cache, e);
      exp = (struct name_expansion *) *slot;

      if (!exp)
        {
          exp = XNEW (struct name_expansion);
          exp->in_progress = 1;
          *slot = exp;
          /* In principle this is a generally valid folding, but
             it is not unconditionally an optimization, so do it
             here and not in fold_unary.  */
          /* Convert (T1)(X *+- CST) into (T1)X *+- (T1)CST if T1 is wider
             than the type of X and overflow for the type of X is
             undefined.  */
          if (e != name
              && INTEGRAL_TYPE_P (type)
              && INTEGRAL_TYPE_P (TREE_TYPE (name))
              && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (name))
              && TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (name))
              && (code == PLUS_EXPR || code == MINUS_EXPR || code == MULT_EXPR)
              && TREE_CODE (gimple_assign_rhs2 (def)) == INTEGER_CST)
            rhs = fold_build2 (code, type,
                               fold_convert (type, gimple_assign_rhs1 (def)),
                               fold_convert (type, gimple_assign_rhs2 (def)));
          else
            {
              rhs = gimple_assign_rhs_to_tree (def);
              if (e != name)
                rhs = fold_convert (type, rhs);
            }
          tree_to_aff_combination_expand (rhs, comb->type, &current, cache);
          exp->expansion = current;
          exp->in_progress = 0;
        }
      else
        {
          /* Since we follow the definitions in the SSA form, we should not
             enter a cycle unless we pass through a phi node.  */
          gcc_assert (!exp->in_progress);
          current = exp->expansion;
        }

      /* Accumulate the new terms to TO_ADD, so that we do not modify
         COMB while traversing it; include the term -coef * E, to remove
         it from COMB.  */
      scale = comb->elts[i].coef;
      aff_combination_zero (&curre, comb->type);
      aff_combination_add_elt (&curre, e, double_int_neg (scale));
      aff_combination_scale (&current, scale);
      aff_combination_add (&to_add, &current);
      aff_combination_add (&to_add, &curre);
    }
  aff_combination_add (comb, &to_add);
}
669
 
670
/* Similar to tree_to_aff_combination, but follows SSA name definitions
671
   and expands them recursively.  CACHE is used to cache the expansions
672
   of the ssa names, to avoid exponential time complexity for cases
673
   like
674
 
675
   a1 = a0 + a0;
676
   a2 = a1 + a1;
677
   a3 = a2 + a2;
678
   ...  */
679
 
680
void
681
tree_to_aff_combination_expand (tree expr, tree type, aff_tree *comb,
682
                                struct pointer_map_t **cache)
683
{
684
  tree_to_aff_combination (expr, type, comb);
685
  aff_combination_expand (comb, cache);
686
}
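
/* A typical use initializes the cache to NULL and releases it with
   free_affine_expand_cache when done, e.g.

     struct pointer_map_t *cache = NULL;
     aff_tree c;

     tree_to_aff_combination_expand (expr, type, &c, &cache);
     ...
     free_affine_expand_cache (&cache);

   The map itself is only allocated (by aff_combination_expand) the
   first time an expansion is actually cached.  */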

/* Frees memory occupied by struct name_expansion in *VALUE.  Callback for
   pointer_map_traverse.  */

static bool
free_name_expansion (const void *key ATTRIBUTE_UNUSED, void **value,
                     void *data ATTRIBUTE_UNUSED)
{
  struct name_expansion *const exp = (struct name_expansion *) *value;

  free (exp);
  return true;
}

/* Frees memory allocated for the CACHE used by
   tree_to_aff_combination_expand.  */

void
free_affine_expand_cache (struct pointer_map_t **cache)
{
  if (!*cache)
    return;

  pointer_map_traverse (*cache, free_name_expansion, NULL);
  pointer_map_destroy (*cache);
  *cache = NULL;
}

/* If VAL != CST * DIV for any constant CST, returns false.
   Otherwise, if VAL != 0 (and hence CST != 0) and *MULT_SET is true,
   additionally compares CST and MULT, and if they are different,
   returns false.  Finally, if neither of these two cases occurs,
   true is returned, and if CST != 0, CST is stored to MULT and
   MULT_SET is set to true.  */

static bool
double_int_constant_multiple_p (double_int val, double_int div,
                                bool *mult_set, double_int *mult)
{
  double_int rem, cst;

  if (double_int_zero_p (val))
    return true;

  if (double_int_zero_p (div))
    return false;

  cst = double_int_sdivmod (val, div, FLOOR_DIV_EXPR, &rem);
  if (!double_int_zero_p (rem))
    return false;

  if (*mult_set && !double_int_equal_p (*mult, cst))
    return false;

  *mult_set = true;
  *mult = cst;
  return true;
}

/* Returns true if VAL = X * DIV for some constant X.  If this is the case,
   X is stored to MULT.  */

bool
aff_combination_constant_multiple_p (aff_tree *val, aff_tree *div,
                                     double_int *mult)
{
  bool mult_set = false;
  unsigned i;

  if (val->n == 0 && double_int_zero_p (val->offset))
    {
      *mult = double_int_zero;
      return true;
    }
  if (val->n != div->n)
    return false;

  if (val->rest || div->rest)
    return false;

  if (!double_int_constant_multiple_p (val->offset, div->offset,
                                       &mult_set, mult))
    return false;

  for (i = 0; i < div->n; i++)
    {
      struct aff_comb_elt *elt
        = aff_combination_find_elt (val, div->elts[i].val, NULL);
      if (!elt)
        return false;
      if (!double_int_constant_multiple_p (elt->coef, div->elts[i].coef,
                                           &mult_set, mult))
        return false;
    }

  gcc_assert (mult_set);
  return true;
}
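
/* For example, with VAL = 8*x + 24 and DIV = 2*x + 6 this returns true
   and stores 4 to MULT, since the offsets and the coefficients of x
   divide with the same quotient; with VAL = 8*x + 20 it returns false,
   because 20 is not a multiple of 6.  */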

/* Prints the affine VAL to the FILE.  */

void
print_aff (FILE *file, aff_tree *val)
{
  unsigned i;
  bool uns = TYPE_UNSIGNED (val->type);
  if (POINTER_TYPE_P (val->type))
    uns = false;
  fprintf (file, "{\n  type = ");
  print_generic_expr (file, val->type, TDF_VOPS|TDF_MEMSYMS);
  fprintf (file, "\n  offset = ");
  dump_double_int (file, val->offset, uns);
  if (val->n > 0)
    {
      fprintf (file, "\n  elements = {\n");
      for (i = 0; i < val->n; i++)
        {
          fprintf (file, "    [%d] = ", i);
          print_generic_expr (file, val->elts[i].val, TDF_VOPS|TDF_MEMSYMS);

          fprintf (file, " * ");
          dump_double_int (file, val->elts[i].coef, uns);
          if (i != val->n - 1)
            fprintf (file, ", \n");
        }
      fprintf (file, "\n  }");
    }
  if (val->rest)
    {
      fprintf (file, "\n  rest = ");
      print_generic_expr (file, val->rest, TDF_VOPS|TDF_MEMSYMS);
    }
  fprintf (file, "\n}");
}

/* Prints the affine VAL to the standard error, used for debugging.  */

void
debug_aff (aff_tree *val)
{
  print_aff (stderr, val);
  fprintf (stderr, "\n");
}
830
 
831
/* Returns address of the reference REF in ADDR.  The size of the accessed
832
   location is stored to SIZE.  */
833
 
834
void
835
get_inner_reference_aff (tree ref, aff_tree *addr, double_int *size)
836
{
837
  HOST_WIDE_INT bitsize, bitpos;
838
  tree toff;
839
  enum machine_mode mode;
840
  int uns, vol;
841
  aff_tree tmp;
842
  tree base = get_inner_reference (ref, &bitsize, &bitpos, &toff, &mode,
843
                                   &uns, &vol, false);
844
  tree base_addr = build_fold_addr_expr (base);
845
 
846
  /* ADDR = &BASE + TOFF + BITPOS / BITS_PER_UNIT.  */
847
 
848
  tree_to_aff_combination (base_addr, sizetype, addr);
849
 
850
  if (toff)
851
    {
852
      tree_to_aff_combination (toff, sizetype, &tmp);
853
      aff_combination_add (addr, &tmp);
854
    }
855
 
856
  aff_combination_const (&tmp, sizetype,
857
                         shwi_to_double_int (bitpos / BITS_PER_UNIT));
858
  aff_combination_add (addr, &tmp);
859
 
860
  *size = shwi_to_double_int ((bitsize + BITS_PER_UNIT - 1) / BITS_PER_UNIT);
861
}
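
/* For example (editorial sketch), for an access like a.f[i] with 4-byte
   array elements, ADDR ends up as the sizetype combination
   &a + <byte offset of f within a> + 4*i, and SIZE as 4.  BITSIZE is
   rounded up to a whole number of bytes, so a bit-field access yields
   the number of bytes its bits span.  */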