OpenCores
URL https://opencores.org/ocsvn/scarts/scarts/trunk

Subversion Repositories scarts

scarts/trunk/toolchain/scarts-gcc/gcc-4.1.1/gcc/tree-eh.c (blame for rev 16; all lines rev 12, author jlechner)
/* Exception handling semantics and decomposition for trees.
   Copyright (C) 2003, 2004, 2005 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to
the Free Software Foundation, 51 Franklin Street, Fifth Floor,
Boston, MA 02110-1301, USA.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "tm_p.h"
#include "flags.h"
#include "function.h"
#include "except.h"
#include "tree-flow.h"
#include "tree-dump.h"
#include "tree-inline.h"
#include "tree-iterator.h"
#include "tree-pass.h"
#include "timevar.h"
#include "langhooks.h"
#include "ggc.h"
#include "toplev.h"

/* Nonzero if we are using EH to handle cleanups.  */
static int using_eh_for_cleanups_p = 0;

void
using_eh_for_cleanups (void)
{
  using_eh_for_cleanups_p = 1;
}

/* Misc functions used in this file.  */

/* Compare and hash for any structure which begins with a canonical
   pointer.  Assumes all pointers are interchangeable, which is sort
   of already assumed by gcc elsewhere IIRC.  */

static int
struct_ptr_eq (const void *a, const void *b)
{
  const void * const * x = a;
  const void * const * y = b;
  return *x == *y;
}

static hashval_t
struct_ptr_hash (const void *a)
{
  const void * const * x = a;
  return (size_t)*x >> 4;
}
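
/* For illustration (a sketch, not from the original file): these two
   callbacks follow the libiberty htab interface, so a table keyed on
   such pointer-headed structures can be created as

     htab_t t = htab_create (31, struct_ptr_hash, struct_ptr_eq, free);

   exactly as lower_eh_constructs does below.  The ">> 4" in
   struct_ptr_hash discards the low pointer bits, which are normally
   zero for allocator-aligned objects and so carry no information.  */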

/* Remember and lookup EH region data for arbitrary statements.
   Really this means any statement that could_throw_p.  We could
   stuff this information into the stmt_ann data structure, but:

   (1) We absolutely rely on this information being kept until
   we get to rtl.  Once we're done with lowering here, if we lose
   the information there's no way to recover it!

   (2) There are many more statements that *cannot* throw as
   compared to those that can.  We should be saving some amount
   of space by only allocating memory for those that can throw.  */

static void
record_stmt_eh_region (struct eh_region *region, tree t)
{
  if (!region)
    return;

  add_stmt_to_eh_region (t, get_eh_region_number (region));
}

void
add_stmt_to_eh_region_fn (struct function *ifun, tree t, int num)
{
  struct throw_stmt_node *n;
  void **slot;

  gcc_assert (num >= 0);
  gcc_assert (TREE_CODE (t) != RESX_EXPR);

  n = ggc_alloc (sizeof (*n));
  n->stmt = t;
  n->region_nr = num;

  if (!get_eh_throw_stmt_table (ifun))
    set_eh_throw_stmt_table (ifun, htab_create_ggc (31, struct_ptr_hash,
                                                    struct_ptr_eq,
                                                    ggc_free));

  slot = htab_find_slot (get_eh_throw_stmt_table (ifun), n, INSERT);
  gcc_assert (!*slot);
  *slot = n;
  /* ??? For the benefit of calls.c, converting all this to rtl,
     we need to record the call expression, not just the outer
     modify statement.  */
  if (TREE_CODE (t) == MODIFY_EXPR
      && (t = get_call_expr_in (t)))
    add_stmt_to_eh_region_fn (ifun, t, num);
}

void
add_stmt_to_eh_region (tree t, int num)
{
  add_stmt_to_eh_region_fn (cfun, t, num);
}

bool
remove_stmt_from_eh_region_fn (struct function *ifun, tree t)
{
  struct throw_stmt_node dummy;
  void **slot;

  if (!get_eh_throw_stmt_table (ifun))
    return false;

  dummy.stmt = t;
  slot = htab_find_slot (get_eh_throw_stmt_table (ifun), &dummy,
                         NO_INSERT);
  if (slot)
    {
      htab_clear_slot (get_eh_throw_stmt_table (ifun), slot);
      /* ??? For the benefit of calls.c, converting all this to rtl,
         we need to record the call expression, not just the outer
         modify statement.  */
      if (TREE_CODE (t) == MODIFY_EXPR
          && (t = get_call_expr_in (t)))
        remove_stmt_from_eh_region_fn (ifun, t);
      return true;
    }
  else
    return false;
}

bool
remove_stmt_from_eh_region (tree t)
{
  return remove_stmt_from_eh_region_fn (cfun, t);
}

int
lookup_stmt_eh_region_fn (struct function *ifun, tree t)
{
  struct throw_stmt_node *p, n;

  if (!get_eh_throw_stmt_table (ifun))
    return -2;

  n.stmt = t;
  p = htab_find (get_eh_throw_stmt_table (ifun), &n);

  return (p ? p->region_nr : -1);
}

int
lookup_stmt_eh_region (tree t)
{
  /* We can get called from initialized data when -fnon-call-exceptions
     is on; prevent crash.  */
  if (!cfun)
    return -1;
  return lookup_stmt_eh_region_fn (cfun, t);
}

/* First pass of EH node decomposition.  Build up a tree of TRY_FINALLY_EXPR
   nodes and LABEL_DECL nodes.  We will use this during the second phase to
   determine if a goto leaves the body of a TRY_FINALLY_EXPR node.  */

struct finally_tree_node
{
  tree child, parent;
};

/* Note that this table is *not* marked GTY.  It is short-lived.  */
static htab_t finally_tree;

static void
record_in_finally_tree (tree child, tree parent)
{
  struct finally_tree_node *n;
  void **slot;

  n = xmalloc (sizeof (*n));
  n->child = child;
  n->parent = parent;

  slot = htab_find_slot (finally_tree, n, INSERT);
  gcc_assert (!*slot);
  *slot = n;
}

static void
collect_finally_tree (tree t, tree region)
{
 tailrecurse:
  switch (TREE_CODE (t))
    {
    case LABEL_EXPR:
      record_in_finally_tree (LABEL_EXPR_LABEL (t), region);
      break;

    case TRY_FINALLY_EXPR:
      record_in_finally_tree (t, region);
      collect_finally_tree (TREE_OPERAND (t, 0), t);
      t = TREE_OPERAND (t, 1);
      goto tailrecurse;

    case TRY_CATCH_EXPR:
      collect_finally_tree (TREE_OPERAND (t, 0), region);
      t = TREE_OPERAND (t, 1);
      goto tailrecurse;

    case CATCH_EXPR:
      t = CATCH_BODY (t);
      goto tailrecurse;

    case EH_FILTER_EXPR:
      t = EH_FILTER_FAILURE (t);
      goto tailrecurse;

    case STATEMENT_LIST:
      {
        tree_stmt_iterator i;
        for (i = tsi_start (t); !tsi_end_p (i); tsi_next (&i))
          collect_finally_tree (tsi_stmt (i), region);
      }
      break;

    default:
      /* A type, a decl, or some kind of statement that we're not
         interested in.  Don't walk them.  */
      break;
    }
}

/* Use the finally tree to determine if a jump from START to TARGET
   would leave the try_finally node that START lives in.  */

static bool
outside_finally_tree (tree start, tree target)
{
  struct finally_tree_node n, *p;

  do
    {
      n.child = start;
      p = htab_find (finally_tree, &n);
      if (!p)
        return true;
      start = p->parent;
    }
  while (start != target);

  return false;
}
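
/* For illustration (a hypothetical example, not from the original
   file): given

     try { try { goto done; } finally { A; } } finally { B; }
     done:;

   collect_finally_tree records the inner TRY_FINALLY_EXPR with the
   outer one as its parent, and records "done" against the region it
   lives in (here the outermost, i.e. NULL).  outside_finally_tree
   ("done", inner) then walks child->parent links without ever
   reaching the inner node, so it returns true: the goto escapes both
   cleanups and must be queued for redirection.  */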

/* Second pass of EH node decomposition.  Actually transform the TRY_FINALLY
   and TRY_CATCH nodes into a set of gotos, magic labels, and eh regions.
   The eh region creation is straightforward, but frobbing all the gotos
   and such into shape isn't.  */

/* State of the world while lowering.  */

struct leh_state
{
  /* What's "current" while constructing the eh region tree.  These
     correspond to variables of the same name in cfun->eh, which we
     don't have easy access to.  */
  struct eh_region *cur_region;
  struct eh_region *prev_try;

  /* Processing of TRY_FINALLY requires a bit more state.  This is
     split out into a separate structure so that we don't have to
     copy so much when processing other nodes.  */
  struct leh_tf_state *tf;
};

struct leh_tf_state
{
  /* Pointer to the TRY_FINALLY node under discussion.  The try_finally_expr
     is the original TRY_FINALLY_EXPR.  We need to retain this so that
     outside_finally_tree can reliably reference the tree used in the
     collect_finally_tree data structures.  */
  tree try_finally_expr;
  tree *top_p;

  /* The state outside this try_finally node.  */
  struct leh_state *outer;

  /* The exception region created for it.  */
  struct eh_region *region;

  /* The GOTO_QUEUE is an array of GOTO_EXPR and RETURN_EXPR statements
     that are seen to escape this TRY_FINALLY_EXPR node.  */
  struct goto_queue_node {
    tree stmt;
    tree repl_stmt;
    tree cont_stmt;
    int index;
  } *goto_queue;
  size_t goto_queue_size;
  size_t goto_queue_active;

  /* The set of unique labels seen as entries in the goto queue.  */
  VEC(tree,heap) *dest_array;

  /* A label to be added at the end of the completed transformed
     sequence.  It will be set if may_fallthru was true *at one time*,
     though subsequent transformations may have cleared that flag.  */
  tree fallthru_label;

  /* A label that has been registered with except.c to be the
     landing pad for this try block.  */
  tree eh_label;

  /* True if it is possible to fall out the bottom of the try block.
     Cleared if the fallthru is converted to a goto.  */
  bool may_fallthru;

  /* True if any entry in goto_queue is a RETURN_EXPR.  */
  bool may_return;

  /* True if the finally block can receive an exception edge.
     Cleared if the exception case is handled by code duplication.  */
  bool may_throw;
};

static void lower_eh_filter (struct leh_state *, tree *);
static void lower_eh_constructs_1 (struct leh_state *, tree *);

/* Comparison function for qsort/bsearch.  We're interested in
   searching goto queue elements for source statements.  */

static int
goto_queue_cmp (const void *x, const void *y)
{
  tree a = ((const struct goto_queue_node *)x)->stmt;
  tree b = ((const struct goto_queue_node *)y)->stmt;
  return (a == b ? 0 : a < b ? -1 : 1);
}

/* Search for STMT in the goto queue.  Return the replacement,
   or null if the statement isn't in the queue.  */

static tree
find_goto_replacement (struct leh_tf_state *tf, tree stmt)
{
  struct goto_queue_node tmp, *ret;
  tmp.stmt = stmt;
  ret = bsearch (&tmp, tf->goto_queue, tf->goto_queue_active,
                 sizeof (struct goto_queue_node), goto_queue_cmp);
  return (ret ? ret->repl_stmt : NULL);
}

/* A subroutine of replace_goto_queue_1.  Handles the sub-clauses of a
   lowered COND_EXPR.  If, by chance, the replacement is a simple goto,
   then we can just splat it in, otherwise we add the new stmts immediately
   after the COND_EXPR and redirect.  */

static void
replace_goto_queue_cond_clause (tree *tp, struct leh_tf_state *tf,
                                tree_stmt_iterator *tsi)
{
  tree new, one, label;

  new = find_goto_replacement (tf, *tp);
  if (!new)
    return;

  one = expr_only (new);
  if (one && TREE_CODE (one) == GOTO_EXPR)
    {
      *tp = one;
      return;
    }

  label = build1 (LABEL_EXPR, void_type_node, NULL_TREE);
  *tp = build_and_jump (&LABEL_EXPR_LABEL (label));

  tsi_link_after (tsi, label, TSI_CONTINUE_LINKING);
  tsi_link_after (tsi, new, TSI_CONTINUE_LINKING);
}

/* The real work of replace_goto_queue.  Returns with TSI updated to
   point to the next statement.  */

static void replace_goto_queue_stmt_list (tree, struct leh_tf_state *);

static void
replace_goto_queue_1 (tree t, struct leh_tf_state *tf, tree_stmt_iterator *tsi)
{
  switch (TREE_CODE (t))
    {
    case GOTO_EXPR:
    case RETURN_EXPR:
      t = find_goto_replacement (tf, t);
      if (t)
        {
          tsi_link_before (tsi, t, TSI_SAME_STMT);
          tsi_delink (tsi);
          return;
        }
      break;

    case COND_EXPR:
      replace_goto_queue_cond_clause (&COND_EXPR_THEN (t), tf, tsi);
      replace_goto_queue_cond_clause (&COND_EXPR_ELSE (t), tf, tsi);
      break;

    case TRY_FINALLY_EXPR:
    case TRY_CATCH_EXPR:
      replace_goto_queue_stmt_list (TREE_OPERAND (t, 0), tf);
      replace_goto_queue_stmt_list (TREE_OPERAND (t, 1), tf);
      break;
    case CATCH_EXPR:
      replace_goto_queue_stmt_list (CATCH_BODY (t), tf);
      break;
    case EH_FILTER_EXPR:
      replace_goto_queue_stmt_list (EH_FILTER_FAILURE (t), tf);
      break;

    case STATEMENT_LIST:
      gcc_unreachable ();

    default:
      /* These won't have gotos in them.  */
      break;
    }

  tsi_next (tsi);
}

/* A subroutine of replace_goto_queue.  Handles STATEMENT_LISTs.  */

static void
replace_goto_queue_stmt_list (tree t, struct leh_tf_state *tf)
{
  tree_stmt_iterator i = tsi_start (t);
  while (!tsi_end_p (i))
    replace_goto_queue_1 (tsi_stmt (i), tf, &i);
}

/* Replace all goto queue members.  */

static void
replace_goto_queue (struct leh_tf_state *tf)
{
  if (tf->goto_queue_active == 0)
    return;
  replace_goto_queue_stmt_list (*tf->top_p, tf);
}

/* For any GOTO_EXPR or RETURN_EXPR, decide whether it leaves a try_finally
   node, and if so record that fact in the goto queue associated with that
   try_finally node.  */

static void
maybe_record_in_goto_queue (struct leh_state *state, tree stmt)
{
  struct leh_tf_state *tf = state->tf;
  struct goto_queue_node *q;
  size_t active, size;
  int index;

  if (!tf)
    return;

  switch (TREE_CODE (stmt))
    {
    case GOTO_EXPR:
      {
        tree lab = GOTO_DESTINATION (stmt);

        /* Computed and non-local gotos do not get processed.  Given
           their nature we can neither tell whether we've escaped the
           finally block nor redirect them if we knew.  */
        if (TREE_CODE (lab) != LABEL_DECL)
          return;

        /* No need to record gotos that don't leave the try block.  */
        if (! outside_finally_tree (lab, tf->try_finally_expr))
          return;

        if (! tf->dest_array)
          {
            tf->dest_array = VEC_alloc (tree, heap, 10);
            VEC_quick_push (tree, tf->dest_array, lab);
            index = 0;
          }
        else
          {
            int n = VEC_length (tree, tf->dest_array);
            for (index = 0; index < n; ++index)
              if (VEC_index (tree, tf->dest_array, index) == lab)
                break;
            if (index == n)
              VEC_safe_push (tree, heap, tf->dest_array, lab);
          }
      }
      break;

    case RETURN_EXPR:
      tf->may_return = true;
      index = -1;
      break;

    default:
      gcc_unreachable ();
    }

  active = tf->goto_queue_active;
  size = tf->goto_queue_size;
  if (active >= size)
    {
      size = (size ? size * 2 : 32);
      tf->goto_queue_size = size;
      tf->goto_queue
        = xrealloc (tf->goto_queue, size * sizeof (struct goto_queue_node));
    }

  q = &tf->goto_queue[active];
  tf->goto_queue_active = active + 1;

  memset (q, 0, sizeof (*q));
  q->stmt = stmt;
  q->index = index;
}
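
/* For illustration (a hypothetical trace, not from the original
   file): lowering

     try { if (p) goto out; return; } finally { F; }

   records two queue entries: the GOTO_EXPR gets dest_array[0] == out
   and q->index == 0, while the RETURN_EXPR gets q->index == -1 and
   sets may_return.  The do_*_redirection routines below then fill in
   repl_stmt (the branch into the finally block) and cont_stmt (the
   original jump, re-emitted after the finally code has run).  */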

#ifdef ENABLE_CHECKING
/* We do not process SWITCH_EXPRs for now.  As long as the original source
   was in fact structured, and we've not yet done jump threading, then none
   of the labels will leave outer TRY_FINALLY_EXPRs.  Verify this.  */

static void
verify_norecord_switch_expr (struct leh_state *state, tree switch_expr)
{
  struct leh_tf_state *tf = state->tf;
  size_t i, n;
  tree vec;

  if (!tf)
    return;

  vec = SWITCH_LABELS (switch_expr);
  n = TREE_VEC_LENGTH (vec);

  for (i = 0; i < n; ++i)
    {
      tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
      gcc_assert (!outside_finally_tree (lab, tf->try_finally_expr));
    }
}
#else
#define verify_norecord_switch_expr(state, switch_expr)
#endif

/* Redirect a RETURN_EXPR pointed to by STMT_P to FINLAB.  Place in CONT_P
   whatever is needed to finish the return.  If MOD is non-null, insert it
   before the new branch.  RETURN_VALUE_P is a cache containing a temporary
   variable to be used in manipulating the value returned from the function.  */

static void
do_return_redirection (struct goto_queue_node *q, tree finlab, tree mod,
                       tree *return_value_p)
{
  tree ret_expr = TREE_OPERAND (q->stmt, 0);
  tree x;

  if (ret_expr)
    {
      /* The nasty part about redirecting the return value is that the
         return value itself is to be computed before the FINALLY block
         is executed.  e.g.

                int x;
                int foo (void)
                {
                  x = 0;
                  try {
                    return x;
                  } finally {
                    x++;
                  }
                }

          should return 0, not 1.  Arrange for this to happen by copying
          the computed return value into a local temporary.  This also
          allows us to redirect multiple return statements through the
          same destination block; whether this is a net win or not really
          depends, I guess, but it does make generation of the switch in
          lower_try_finally_switch easier.  */

      switch (TREE_CODE (ret_expr))
        {
        case RESULT_DECL:
          if (!*return_value_p)
            *return_value_p = ret_expr;
          else
            gcc_assert (*return_value_p == ret_expr);
          q->cont_stmt = q->stmt;
          break;

        case MODIFY_EXPR:
          {
            tree result = TREE_OPERAND (ret_expr, 0);
            tree new, old = TREE_OPERAND (ret_expr, 1);

            if (!*return_value_p)
              {
                if (aggregate_value_p (TREE_TYPE (result),
                                       TREE_TYPE (current_function_decl)))
                  /* If this function returns in memory, copy the argument
                     into the return slot now.  Otherwise, we might need to
                     worry about magic return semantics, so we need to use a
                     temporary to hold the value until we're actually ready
                     to return.  */
                  new = result;
                else
                  new = create_tmp_var (TREE_TYPE (old), "rettmp");
                *return_value_p = new;
              }
            else
              new = *return_value_p;

            x = build (MODIFY_EXPR, TREE_TYPE (new), new, old);
            append_to_statement_list (x, &q->repl_stmt);

            if (new == result)
              x = result;
            else
              x = build (MODIFY_EXPR, TREE_TYPE (result), result, new);
            q->cont_stmt = build1 (RETURN_EXPR, void_type_node, x);
          }
          break;

        default:
          gcc_unreachable ();
        }
    }
  else
    {
      /* If we don't return a value, all return statements are the same.  */
      q->cont_stmt = q->stmt;
    }

  if (mod)
    append_to_statement_list (mod, &q->repl_stmt);

  x = build1 (GOTO_EXPR, void_type_node, finlab);
  append_to_statement_list (x, &q->repl_stmt);
}

/* Similar, but easier, for GOTO_EXPR.  */

static void
do_goto_redirection (struct goto_queue_node *q, tree finlab, tree mod)
{
  tree x;

  q->cont_stmt = q->stmt;
  if (mod)
    append_to_statement_list (mod, &q->repl_stmt);

  x = build1 (GOTO_EXPR, void_type_node, finlab);
  append_to_statement_list (x, &q->repl_stmt);
}

/* We want to transform
        try { body; } catch { stuff; }
   to
        body; goto over; lab: stuff; over:

   T is a TRY_FINALLY or TRY_CATCH node.  LAB is the label that
   should be placed before the second operand, or NULL.  OVER is
   an existing label that should be put at the exit, or NULL.  */

static void
frob_into_branch_around (tree *tp, tree lab, tree over)
{
  tree x, op1;

  op1 = TREE_OPERAND (*tp, 1);
  *tp = TREE_OPERAND (*tp, 0);

  if (block_may_fallthru (*tp))
    {
      if (!over)
        over = create_artificial_label ();
      x = build1 (GOTO_EXPR, void_type_node, over);
      append_to_statement_list (x, tp);
    }

  if (lab)
    {
      x = build1 (LABEL_EXPR, void_type_node, lab);
      append_to_statement_list (x, tp);
    }

  append_to_statement_list (op1, tp);

  if (over)
    {
      x = build1 (LABEL_EXPR, void_type_node, over);
      append_to_statement_list (x, tp);
    }
}

/* A subroutine of lower_try_finally.  Duplicate the tree rooted at T.
   Make sure to record all new labels found.  */

static tree
lower_try_finally_dup_block (tree t, struct leh_state *outer_state)
{
  tree region = NULL;

  t = unsave_expr_now (t);

  if (outer_state->tf)
    region = outer_state->tf->try_finally_expr;
  collect_finally_tree (t, region);

  return t;
}

/* A subroutine of lower_try_finally.  Create a fallthru label for
   the given try_finally state.  The only tricky bit here is that
   we have to make sure to record the label in our outer context.  */

static tree
lower_try_finally_fallthru_label (struct leh_tf_state *tf)
{
  tree label = tf->fallthru_label;
  if (!label)
    {
      label = create_artificial_label ();
      tf->fallthru_label = label;
      if (tf->outer->tf)
        record_in_finally_tree (label, tf->outer->tf->try_finally_expr);
    }
  return label;
}

/* A subroutine of lower_try_finally.  If lang_protect_cleanup_actions
   returns non-null, then the language requires that the exception path out
   of a try_finally be treated specially.  To wit: the code within the
   finally block may not itself throw an exception.  We have two choices here.
   First we can duplicate the finally block and wrap it in a must_not_throw
   region.  Second, we can generate code like

        try {
          finally_block;
        } catch {
          if (fintmp == eh_edge)
            protect_cleanup_actions;
        }

   where "fintmp" is the temporary used in the switch statement generation
   alternative considered below.  For the nonce, we always choose the first
   option.

   THIS_STATE may be null if this is a try-cleanup, not a try-finally.  */

static void
honor_protect_cleanup_actions (struct leh_state *outer_state,
                               struct leh_state *this_state,
                               struct leh_tf_state *tf)
{
  tree protect_cleanup_actions, finally, x;
  tree_stmt_iterator i;
  bool finally_may_fallthru;

  /* First check for nothing to do.  */
  if (lang_protect_cleanup_actions)
    protect_cleanup_actions = lang_protect_cleanup_actions ();
  else
    protect_cleanup_actions = NULL;

  finally = TREE_OPERAND (*tf->top_p, 1);

  /* If the EH case of the finally block can fall through, this may be a
     structure of the form
        try {
          try {
            throw ...;
          } cleanup {
            try {
              throw ...;
            } catch (...) {
            }
          }
        } catch (...) {
          yyy;
        }
    E.g. with an inline destructor with an embedded try block.  In this
    case we must save the runtime EH data around the nested exception.

    This complication means that any time the previous runtime data might
    be used (via fallthru from the finally) we handle the eh case here,
    whether or not protect_cleanup_actions is active.  */

  finally_may_fallthru = block_may_fallthru (finally);
  if (!finally_may_fallthru && !protect_cleanup_actions)
    return;

  /* Duplicate the FINALLY block.  Only need to do this for try-finally,
     and not for cleanups.  */
  if (this_state)
    finally = lower_try_finally_dup_block (finally, outer_state);

  /* Resume execution after the exception.  Adding this now lets
     lower_eh_filter not add unnecessary gotos, as it is clear that
     we never fallthru from this copy of the finally block.  */
  if (finally_may_fallthru)
    {
      tree save_eptr, save_filt;

      save_eptr = create_tmp_var (ptr_type_node, "save_eptr");
      save_filt = create_tmp_var (integer_type_node, "save_filt");

      i = tsi_start (finally);
      x = build (EXC_PTR_EXPR, ptr_type_node);
      x = build (MODIFY_EXPR, void_type_node, save_eptr, x);
      tsi_link_before (&i, x, TSI_CONTINUE_LINKING);

      x = build (FILTER_EXPR, integer_type_node);
      x = build (MODIFY_EXPR, void_type_node, save_filt, x);
      tsi_link_before (&i, x, TSI_CONTINUE_LINKING);

      i = tsi_last (finally);
      x = build (EXC_PTR_EXPR, ptr_type_node);
      x = build (MODIFY_EXPR, void_type_node, x, save_eptr);
      tsi_link_after (&i, x, TSI_CONTINUE_LINKING);

      x = build (FILTER_EXPR, integer_type_node);
      x = build (MODIFY_EXPR, void_type_node, x, save_filt);
      tsi_link_after (&i, x, TSI_CONTINUE_LINKING);

      x = build_resx (get_eh_region_number (tf->region));
      tsi_link_after (&i, x, TSI_CONTINUE_LINKING);
    }

  /* Wrap the block with protect_cleanup_actions as the action.  */
  if (protect_cleanup_actions)
    {
      x = build (EH_FILTER_EXPR, void_type_node, NULL, NULL);
      append_to_statement_list (protect_cleanup_actions, &EH_FILTER_FAILURE (x));
      EH_FILTER_MUST_NOT_THROW (x) = 1;
      finally = build (TRY_CATCH_EXPR, void_type_node, finally, x);
      lower_eh_filter (outer_state, &finally);
    }
  else
    lower_eh_constructs_1 (outer_state, &finally);

  /* Hook this up to the end of the existing try block.  If we
     previously fell through the end, we'll have to branch around.
     This means adding a new goto, and adding it to the queue.  */

  i = tsi_last (TREE_OPERAND (*tf->top_p, 0));

  if (tf->may_fallthru)
    {
      x = lower_try_finally_fallthru_label (tf);
      x = build1 (GOTO_EXPR, void_type_node, x);
      tsi_link_after (&i, x, TSI_CONTINUE_LINKING);

      if (this_state)
        maybe_record_in_goto_queue (this_state, x);

      tf->may_fallthru = false;
    }

  x = build1 (LABEL_EXPR, void_type_node, tf->eh_label);
  tsi_link_after (&i, x, TSI_CONTINUE_LINKING);
  tsi_link_after (&i, finally, TSI_CONTINUE_LINKING);

  /* Having now been handled, EH isn't to be considered with
     the rest of the outgoing edges.  */
  tf->may_throw = false;
}
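
/* For illustration (a rough sketch in tree-dump notation, not from
   the original file): when the finally block may fall through, the EH
   copy assembled above has roughly the shape

     eh_label:
       save_eptr = <<<exception object>>>;
       save_filt = <<<filter object>>>;
       <finally body>
       <<<exception object>>> = save_eptr;
       <<<filter object>>> = save_filt;
       resx;

   so a nested throw/catch inside the finally body cannot clobber the
   EH runtime data of the exception being propagated.  */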

/* A subroutine of lower_try_finally.  We have determined that there is
   no fallthru edge out of the finally block.  This means that there is
   no outgoing edge corresponding to any incoming edge.  Restructure the
   try_finally node for this special case.  */

static void
lower_try_finally_nofallthru (struct leh_state *state, struct leh_tf_state *tf)
{
  tree x, finally, lab, return_val;
  struct goto_queue_node *q, *qe;

  if (tf->may_throw)
    lab = tf->eh_label;
  else
    lab = create_artificial_label ();

  finally = TREE_OPERAND (*tf->top_p, 1);
  *tf->top_p = TREE_OPERAND (*tf->top_p, 0);

  x = build1 (LABEL_EXPR, void_type_node, lab);
  append_to_statement_list (x, tf->top_p);

  return_val = NULL;
  q = tf->goto_queue;
  qe = q + tf->goto_queue_active;
  for (; q < qe; ++q)
    if (q->index < 0)
      do_return_redirection (q, lab, NULL, &return_val);
    else
      do_goto_redirection (q, lab, NULL);

  replace_goto_queue (tf);

  lower_eh_constructs_1 (state, &finally);
  append_to_statement_list (finally, tf->top_p);
}

/* A subroutine of lower_try_finally.  We have determined that there is
   exactly one destination of the finally block.  Restructure the
   try_finally node for this special case.  */

static void
lower_try_finally_onedest (struct leh_state *state, struct leh_tf_state *tf)
{
  struct goto_queue_node *q, *qe;
  tree x, finally, finally_label;

  finally = TREE_OPERAND (*tf->top_p, 1);
  *tf->top_p = TREE_OPERAND (*tf->top_p, 0);

  lower_eh_constructs_1 (state, &finally);

  if (tf->may_throw)
    {
      /* Only reachable via the exception edge.  Add the given label to
         the head of the FINALLY block.  Append a RESX at the end.  */

      x = build1 (LABEL_EXPR, void_type_node, tf->eh_label);
      append_to_statement_list (x, tf->top_p);

      append_to_statement_list (finally, tf->top_p);

      x = build_resx (get_eh_region_number (tf->region));

      append_to_statement_list (x, tf->top_p);

      return;
    }

  if (tf->may_fallthru)
    {
      /* Only reachable via the fallthru edge.  Do nothing but let
         the two blocks run together; we'll fall out the bottom.  */
      append_to_statement_list (finally, tf->top_p);
      return;
    }

  finally_label = create_artificial_label ();
  x = build1 (LABEL_EXPR, void_type_node, finally_label);
  append_to_statement_list (x, tf->top_p);

  append_to_statement_list (finally, tf->top_p);

  q = tf->goto_queue;
  qe = q + tf->goto_queue_active;

  if (tf->may_return)
    {
      /* Reachable by return expressions only.  Redirect them.  */
      tree return_val = NULL;
      for (; q < qe; ++q)
        do_return_redirection (q, finally_label, NULL, &return_val);
      replace_goto_queue (tf);
    }
  else
    {
      /* Reachable by goto expressions only.  Redirect them.  */
      for (; q < qe; ++q)
        do_goto_redirection (q, finally_label, NULL);
      replace_goto_queue (tf);

      if (VEC_index (tree, tf->dest_array, 0) == tf->fallthru_label)
        {
          /* Reachable by goto to fallthru label only.  Redirect it
             to the new label (already created, sadly), and do not
             emit the final branch out, or the fallthru label.  */
          tf->fallthru_label = NULL;
          return;
        }
    }

  append_to_statement_list (tf->goto_queue[0].cont_stmt, tf->top_p);
  maybe_record_in_goto_queue (state, tf->goto_queue[0].cont_stmt);
}

/* A subroutine of lower_try_finally.  There are multiple edges incoming
   and outgoing from the finally block.  Implement this by duplicating the
   finally block for every destination.  */

static void
lower_try_finally_copy (struct leh_state *state, struct leh_tf_state *tf)
{
  tree finally, new_stmt;
  tree x;

  finally = TREE_OPERAND (*tf->top_p, 1);
  *tf->top_p = TREE_OPERAND (*tf->top_p, 0);

  new_stmt = NULL_TREE;

  if (tf->may_fallthru)
    {
      x = lower_try_finally_dup_block (finally, state);
      lower_eh_constructs_1 (state, &x);
      append_to_statement_list (x, &new_stmt);

      x = lower_try_finally_fallthru_label (tf);
      x = build1 (GOTO_EXPR, void_type_node, x);
      append_to_statement_list (x, &new_stmt);
    }

  if (tf->may_throw)
    {
      x = build1 (LABEL_EXPR, void_type_node, tf->eh_label);
      append_to_statement_list (x, &new_stmt);

      x = lower_try_finally_dup_block (finally, state);
      lower_eh_constructs_1 (state, &x);
      append_to_statement_list (x, &new_stmt);

      x = build_resx (get_eh_region_number (tf->region));
      append_to_statement_list (x, &new_stmt);
    }

  if (tf->goto_queue)
    {
      struct goto_queue_node *q, *qe;
      tree return_val = NULL;
      int return_index, index;
      struct
      {
        struct goto_queue_node *q;
        tree label;
      } *labels;

      return_index = VEC_length (tree, tf->dest_array);
      labels = xcalloc (sizeof (*labels), return_index + 1);

      q = tf->goto_queue;
      qe = q + tf->goto_queue_active;
      for (; q < qe; q++)
        {
          index = q->index < 0 ? return_index : q->index;

          if (!labels[index].q)
            labels[index].q = q;
        }

      for (index = 0; index < return_index + 1; index++)
        {
          tree lab;

          q = labels[index].q;
          if (! q)
            continue;

          lab = labels[index].label = create_artificial_label ();

          if (index == return_index)
            do_return_redirection (q, lab, NULL, &return_val);
          else
            do_goto_redirection (q, lab, NULL);

          x = build1 (LABEL_EXPR, void_type_node, lab);
          append_to_statement_list (x, &new_stmt);

          x = lower_try_finally_dup_block (finally, state);
          lower_eh_constructs_1 (state, &x);
          append_to_statement_list (x, &new_stmt);

          append_to_statement_list (q->cont_stmt, &new_stmt);
          maybe_record_in_goto_queue (state, q->cont_stmt);
        }

      for (q = tf->goto_queue; q < qe; q++)
        {
          tree lab;

          index = q->index < 0 ? return_index : q->index;

          if (labels[index].q == q)
            continue;

          lab = labels[index].label;

          if (index == return_index)
            do_return_redirection (q, lab, NULL, &return_val);
          else
            do_goto_redirection (q, lab, NULL);
        }

      replace_goto_queue (tf);
      free (labels);
    }

  /* Need to link new stmts after running replace_goto_queue due
     to not wanting to process the same goto stmts twice.  */
  append_to_statement_list (new_stmt, tf->top_p);
}
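
/* For illustration (a hypothetical example, not from the original
   file): given

     try { if (p) goto out; } finally { F; }

   with both a fallthru edge and the escaping goto, the copy strategy
   emits roughly

     <try body, with the goto rewritten to "goto lab_out">
     F; goto fallthru_label;
     lab_out: F; goto out;

   i.e. one copy of F per destination, each followed by that
   destination's original jump (the queued cont_stmt).  */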

/* A subroutine of lower_try_finally.  There are multiple edges incoming
   and outgoing from the finally block.  Implement this by instrumenting
   each incoming edge and creating a switch statement at the end of the
   finally block that branches to the appropriate destination.  */

static void
lower_try_finally_switch (struct leh_state *state, struct leh_tf_state *tf)
{
  struct goto_queue_node *q, *qe;
  tree return_val = NULL;
  tree finally, finally_tmp, finally_label;
  int return_index, eh_index, fallthru_index;
  int nlabels, ndests, j, last_case_index;
  tree case_label_vec, switch_stmt, last_case, switch_body;
  tree x;

  /* Mash the TRY block to the head of the chain.  */
  finally = TREE_OPERAND (*tf->top_p, 1);
  *tf->top_p = TREE_OPERAND (*tf->top_p, 0);

  /* Lower the finally block itself.  */
  lower_eh_constructs_1 (state, &finally);

  /* Prepare for switch statement generation.  */
  nlabels = VEC_length (tree, tf->dest_array);
  return_index = nlabels;
  eh_index = return_index + tf->may_return;
  fallthru_index = eh_index + tf->may_throw;
  ndests = fallthru_index + tf->may_fallthru;

  finally_tmp = create_tmp_var (integer_type_node, "finally_tmp");
  finally_label = create_artificial_label ();

  case_label_vec = make_tree_vec (ndests);
  switch_stmt = build (SWITCH_EXPR, integer_type_node, finally_tmp,
                       NULL_TREE, case_label_vec);
  switch_body = NULL;
  last_case = NULL;
  last_case_index = 0;

  /* Begin inserting code for getting to the finally block.  Things
     are done in this order to correspond to the sequence the code is
     laid out.  */

  if (tf->may_fallthru)
    {
      x = build (MODIFY_EXPR, void_type_node, finally_tmp,
                 build_int_cst (NULL_TREE, fallthru_index));
      append_to_statement_list (x, tf->top_p);

      if (tf->may_throw)
        {
          x = build1 (GOTO_EXPR, void_type_node, finally_label);
          append_to_statement_list (x, tf->top_p);
        }

      last_case = build (CASE_LABEL_EXPR, void_type_node,
                         build_int_cst (NULL_TREE, fallthru_index), NULL,
                         create_artificial_label ());
      TREE_VEC_ELT (case_label_vec, last_case_index) = last_case;
      last_case_index++;

      x = build (LABEL_EXPR, void_type_node, CASE_LABEL (last_case));
      append_to_statement_list (x, &switch_body);

      x = lower_try_finally_fallthru_label (tf);
      x = build1 (GOTO_EXPR, void_type_node, x);
      append_to_statement_list (x, &switch_body);
    }

  if (tf->may_throw)
    {
      x = build1 (LABEL_EXPR, void_type_node, tf->eh_label);
      append_to_statement_list (x, tf->top_p);

      x = build (MODIFY_EXPR, void_type_node, finally_tmp,
                 build_int_cst (NULL_TREE, eh_index));
      append_to_statement_list (x, tf->top_p);

      last_case = build (CASE_LABEL_EXPR, void_type_node,
                         build_int_cst (NULL_TREE, eh_index), NULL,
                         create_artificial_label ());
      TREE_VEC_ELT (case_label_vec, last_case_index) = last_case;
      last_case_index++;

      x = build (LABEL_EXPR, void_type_node, CASE_LABEL (last_case));
      append_to_statement_list (x, &switch_body);
      x = build_resx (get_eh_region_number (tf->region));
      append_to_statement_list (x, &switch_body);
    }

  x = build1 (LABEL_EXPR, void_type_node, finally_label);
  append_to_statement_list (x, tf->top_p);

  append_to_statement_list (finally, tf->top_p);

  /* Redirect each incoming goto edge.  */
  q = tf->goto_queue;
  qe = q + tf->goto_queue_active;
  j = last_case_index + tf->may_return;
  for (; q < qe; ++q)
    {
      tree mod;
      int switch_id, case_index;

      if (q->index < 0)
        {
          mod = build (MODIFY_EXPR, void_type_node, finally_tmp,
                       build_int_cst (NULL_TREE, return_index));
          do_return_redirection (q, finally_label, mod, &return_val);
          switch_id = return_index;
        }
      else
        {
          mod = build (MODIFY_EXPR, void_type_node, finally_tmp,
                       build_int_cst (NULL_TREE, q->index));
          do_goto_redirection (q, finally_label, mod);
          switch_id = q->index;
        }

      case_index = j + q->index;
      if (!TREE_VEC_ELT (case_label_vec, case_index))
        TREE_VEC_ELT (case_label_vec, case_index)
          = build (CASE_LABEL_EXPR, void_type_node,
                   build_int_cst (NULL_TREE, switch_id), NULL,
                   /* We store the cont_stmt in the
                      CASE_LABEL, so that we can recover it
                      in the loop below.  We don't create
                      the new label while walking the
                      goto_queue because pointers don't
                      offer a stable order.  */
                   q->cont_stmt);
    }
  for (j = last_case_index; j < last_case_index + nlabels; j++)
    {
      tree label;
      tree cont_stmt;

      last_case = TREE_VEC_ELT (case_label_vec, j);

      gcc_assert (last_case);

      cont_stmt = CASE_LABEL (last_case);

      label = create_artificial_label ();
      CASE_LABEL (last_case) = label;

      x = build (LABEL_EXPR, void_type_node, label);
      append_to_statement_list (x, &switch_body);
      append_to_statement_list (cont_stmt, &switch_body);
      maybe_record_in_goto_queue (state, cont_stmt);
    }
  replace_goto_queue (tf);

  /* Make sure that the last case is the default label, as one is required.
     Then sort the labels, which is also required in GIMPLE.  */
  CASE_LOW (last_case) = NULL;
  sort_case_labels (case_label_vec);

  /* Need to link switch_stmt after running replace_goto_queue due
     to not wanting to process the same goto stmts twice.  */
  append_to_statement_list (switch_stmt, tf->top_p);
  append_to_statement_list (switch_body, tf->top_p);
}
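
/* For illustration (a hypothetical shape, not from the original
   file): for the same "try { if (p) goto out; } finally { F; }"
   example, the switch strategy instead emits roughly

     <try body, with each exit rewritten to
        "finally_tmp = <destination index>; goto finally_label">
     finally_label:
     F;
     switch (finally_tmp)
       {
       case <fallthru_index>: goto fallthru_label;
       default: goto out;
       }

   so F appears exactly once no matter how many destinations exist.  */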

/* Decide whether or not we are going to duplicate the finally block.
   There are several considerations.

   First, if this is Java, then the finally block contains code
   written by the user.  It has line numbers associated with it,
   so duplicating the block means it's difficult to set a breakpoint.
   Since controlling code generation via -g is verboten, we simply
   never duplicate code without optimization.

   Second, we'd like to prevent egregious code growth.  One way to
   do this is to estimate the size of the finally block, multiply
   that by the number of copies we'd need to make, and compare against
   the estimate of the size of the switch machinery we'd have to add.  */

static bool
decide_copy_try_finally (int ndests, tree finally)
{
  int f_estimate, sw_estimate;

  if (!optimize)
    return false;

  /* Finally estimate N times, plus N gotos.  */
  f_estimate = estimate_num_insns (finally);
  f_estimate = (f_estimate + 1) * ndests;

  /* Switch statement (cost 10), N variable assignments, N gotos.  */
  sw_estimate = 10 + 2 * ndests;

  /* Optimize for size clearly wants our best guess.  */
  if (optimize_size)
    return f_estimate < sw_estimate;

  /* ??? These numbers are completely made up so far.  */
  if (optimize > 1)
    return f_estimate < 100 || f_estimate < sw_estimate * 2;
  else
    return f_estimate < 40 || f_estimate * 2 < sw_estimate * 3;
}
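
/* For illustration (the numbers here are hypothetical): with
   ndests == 3 and a FINALLY body estimated at 12 insns,
   f_estimate = (12 + 1) * 3 = 39 and sw_estimate = 10 + 2 * 3 = 16.
   Under -Os the switch wins (39 >= 16); at -O2 the copy strategy wins
   via f_estimate < 100; at -O1 it wins only via f_estimate < 40.  */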

/* A subroutine of lower_eh_constructs_1.  Lower a TRY_FINALLY_EXPR node
   to a sequence of labels and blocks, plus the exception region trees
   that record all the magic.  This is complicated by the need to
   arrange for the FINALLY block to be executed on all exits.  */

static void
lower_try_finally (struct leh_state *state, tree *tp)
{
  struct leh_tf_state this_tf;
  struct leh_state this_state;
  int ndests;

  /* Process the try block.  */

  memset (&this_tf, 0, sizeof (this_tf));
  this_tf.try_finally_expr = *tp;
  this_tf.top_p = tp;
  this_tf.outer = state;
  if (using_eh_for_cleanups_p)
    this_tf.region
      = gen_eh_region_cleanup (state->cur_region, state->prev_try);
  else
    this_tf.region = NULL;

  this_state.cur_region = this_tf.region;
  this_state.prev_try = state->prev_try;
  this_state.tf = &this_tf;

  lower_eh_constructs_1 (&this_state, &TREE_OPERAND (*tp, 0));

  /* Determine if the try block is escaped through the bottom.  */
  this_tf.may_fallthru = block_may_fallthru (TREE_OPERAND (*tp, 0));

  /* Determine if any exceptions are possible within the try block.  */
  if (using_eh_for_cleanups_p)
    this_tf.may_throw = get_eh_region_may_contain_throw (this_tf.region);
  if (this_tf.may_throw)
    {
      this_tf.eh_label = create_artificial_label ();
      set_eh_region_tree_label (this_tf.region, this_tf.eh_label);
      honor_protect_cleanup_actions (state, &this_state, &this_tf);
    }

  /* Sort the goto queue for efficient searching later.  */
  if (this_tf.goto_queue_active > 1)
    qsort (this_tf.goto_queue, this_tf.goto_queue_active,
           sizeof (struct goto_queue_node), goto_queue_cmp);

  /* Determine how many edges (still) reach the finally block.  Or rather,
     how many destinations are reached by the finally block.  Use this to
     determine how we process the finally block itself.  */

  ndests = VEC_length (tree, this_tf.dest_array);
  ndests += this_tf.may_fallthru;
  ndests += this_tf.may_return;
  ndests += this_tf.may_throw;

  /* If the FINALLY block is not reachable, dike it out.  */
  if (ndests == 0)
    *tp = TREE_OPERAND (*tp, 0);

  /* If the finally block doesn't fall through, then any destination
     we might try to impose there isn't reached either.  There may be
     some minor amount of cleanup and redirection still needed.  */
  else if (!block_may_fallthru (TREE_OPERAND (*tp, 1)))
    lower_try_finally_nofallthru (state, &this_tf);

  /* We can easily special-case redirection to a single destination.  */
  else if (ndests == 1)
    lower_try_finally_onedest (state, &this_tf);

  else if (decide_copy_try_finally (ndests, TREE_OPERAND (*tp, 1)))
    lower_try_finally_copy (state, &this_tf);
  else
    lower_try_finally_switch (state, &this_tf);

  /* If someone requested we add a label at the end of the transformed
     block, do so.  */
  if (this_tf.fallthru_label)
    {
      tree x = build1 (LABEL_EXPR, void_type_node, this_tf.fallthru_label);
      append_to_statement_list (x, tp);
    }

  VEC_free (tree, heap, this_tf.dest_array);
  if (this_tf.goto_queue)
    free (this_tf.goto_queue);
}

/* A subroutine of lower_eh_constructs_1.  Lower a TRY_CATCH_EXPR with a
   list of CATCH_EXPR nodes to a sequence of labels and blocks, plus the
   exception region trees that record all the magic.  */

static void
lower_catch (struct leh_state *state, tree *tp)
{
  struct eh_region *try_region;
  struct leh_state this_state;
  tree_stmt_iterator i;
  tree out_label;

  try_region = gen_eh_region_try (state->cur_region);
  this_state.cur_region = try_region;
  this_state.prev_try = try_region;
  this_state.tf = state->tf;

  lower_eh_constructs_1 (&this_state, &TREE_OPERAND (*tp, 0));

  if (!get_eh_region_may_contain_throw (try_region))
    {
      *tp = TREE_OPERAND (*tp, 0);
      return;
    }

  out_label = NULL;
  for (i = tsi_start (TREE_OPERAND (*tp, 1)); !tsi_end_p (i); )
    {
      struct eh_region *catch_region;
      tree catch, x, eh_label;

      catch = tsi_stmt (i);
      catch_region = gen_eh_region_catch (try_region, CATCH_TYPES (catch));

      this_state.cur_region = catch_region;
      this_state.prev_try = state->prev_try;
      lower_eh_constructs_1 (&this_state, &CATCH_BODY (catch));

      eh_label = create_artificial_label ();
      set_eh_region_tree_label (catch_region, eh_label);

      x = build1 (LABEL_EXPR, void_type_node, eh_label);
      tsi_link_before (&i, x, TSI_SAME_STMT);

      if (block_may_fallthru (CATCH_BODY (catch)))
        {
          if (!out_label)
            out_label = create_artificial_label ();

          x = build1 (GOTO_EXPR, void_type_node, out_label);
          append_to_statement_list (x, &CATCH_BODY (catch));
        }

      tsi_link_before (&i, CATCH_BODY (catch), TSI_SAME_STMT);
      tsi_delink (&i);
    }

  frob_into_branch_around (tp, NULL, out_label);
}

/* A subroutine of lower_eh_constructs_1.  Lower a TRY_CATCH_EXPR with an
   EH_FILTER_EXPR to a sequence of labels and blocks, plus the exception
   region trees that record all the magic.  */

static void
lower_eh_filter (struct leh_state *state, tree *tp)
{
  struct leh_state this_state;
  struct eh_region *this_region;
  tree inner = expr_first (TREE_OPERAND (*tp, 1));
  tree eh_label;

  if (EH_FILTER_MUST_NOT_THROW (inner))
    this_region = gen_eh_region_must_not_throw (state->cur_region);
  else
    this_region = gen_eh_region_allowed (state->cur_region,
                                         EH_FILTER_TYPES (inner));
  this_state = *state;
  this_state.cur_region = this_region;

  lower_eh_constructs_1 (&this_state, &TREE_OPERAND (*tp, 0));

  if (!get_eh_region_may_contain_throw (this_region))
    {
      *tp = TREE_OPERAND (*tp, 0);
      return;
    }

  lower_eh_constructs_1 (state, &EH_FILTER_FAILURE (inner));
  TREE_OPERAND (*tp, 1) = EH_FILTER_FAILURE (inner);

  eh_label = create_artificial_label ();
  set_eh_region_tree_label (this_region, eh_label);

  frob_into_branch_around (tp, eh_label, NULL);
}

/* Implement a cleanup expression.  This is similar to try-finally,
   except that we only execute the cleanup block for exception edges.  */

static void
lower_cleanup (struct leh_state *state, tree *tp)
{
  struct leh_state this_state;
  struct eh_region *this_region;
  struct leh_tf_state fake_tf;

  /* If not using eh, then exception-only cleanups are no-ops.  */
  if (!flag_exceptions)
    {
      *tp = TREE_OPERAND (*tp, 0);
      lower_eh_constructs_1 (state, tp);
      return;
    }

  this_region = gen_eh_region_cleanup (state->cur_region, state->prev_try);
  this_state = *state;
  this_state.cur_region = this_region;

  lower_eh_constructs_1 (&this_state, &TREE_OPERAND (*tp, 0));

  if (!get_eh_region_may_contain_throw (this_region))
    {
      *tp = TREE_OPERAND (*tp, 0);
      return;
    }

  /* Build enough of a try-finally state so that we can reuse
     honor_protect_cleanup_actions.  */
  memset (&fake_tf, 0, sizeof (fake_tf));
  fake_tf.top_p = tp;
  fake_tf.outer = state;
  fake_tf.region = this_region;
  fake_tf.may_fallthru = block_may_fallthru (TREE_OPERAND (*tp, 0));
  fake_tf.may_throw = true;

  fake_tf.eh_label = create_artificial_label ();
  set_eh_region_tree_label (this_region, fake_tf.eh_label);

  honor_protect_cleanup_actions (state, NULL, &fake_tf);

  if (fake_tf.may_throw)
    {
      /* In this case honor_protect_cleanup_actions had nothing to do,
         and we should process this normally.  */
      lower_eh_constructs_1 (state, &TREE_OPERAND (*tp, 1));
      frob_into_branch_around (tp, fake_tf.eh_label, fake_tf.fallthru_label);
    }
  else
    {
      /* In this case honor_protect_cleanup_actions did nearly all of
         the work.  All we have left is to append the fallthru_label.  */

      *tp = TREE_OPERAND (*tp, 0);
      if (fake_tf.fallthru_label)
        {
          tree x = build1 (LABEL_EXPR, void_type_node, fake_tf.fallthru_label);
          append_to_statement_list (x, tp);
        }
    }
}

/* Main loop for lowering eh constructs.  */

static void
lower_eh_constructs_1 (struct leh_state *state, tree *tp)
{
  tree_stmt_iterator i;
  tree t = *tp;

  switch (TREE_CODE (t))
    {
    case COND_EXPR:
      lower_eh_constructs_1 (state, &COND_EXPR_THEN (t));
      lower_eh_constructs_1 (state, &COND_EXPR_ELSE (t));
      break;

    case CALL_EXPR:
      /* Look for things that can throw exceptions, and record them.  */
      if (state->cur_region && tree_could_throw_p (t))
        {
          record_stmt_eh_region (state->cur_region, t);
          note_eh_region_may_contain_throw (state->cur_region);
        }
      break;

    case MODIFY_EXPR:
      /* Look for things that can throw exceptions, and record them.  */
      if (state->cur_region && tree_could_throw_p (t))
        {
          record_stmt_eh_region (state->cur_region, t);
          note_eh_region_may_contain_throw (state->cur_region);
        }
      break;

    case GOTO_EXPR:
    case RETURN_EXPR:
      maybe_record_in_goto_queue (state, t);
      break;

    case SWITCH_EXPR:
      verify_norecord_switch_expr (state, t);
      break;

    case TRY_FINALLY_EXPR:
      lower_try_finally (state, tp);
      break;

    case TRY_CATCH_EXPR:
      i = tsi_start (TREE_OPERAND (t, 1));
      switch (TREE_CODE (tsi_stmt (i)))
        {
        case CATCH_EXPR:
          lower_catch (state, tp);
          break;
        case EH_FILTER_EXPR:
          lower_eh_filter (state, tp);
          break;
        default:
          lower_cleanup (state, tp);
          break;
        }
      break;

    case STATEMENT_LIST:
      for (i = tsi_start (t); !tsi_end_p (i); )
        {
          lower_eh_constructs_1 (state, tsi_stmt_ptr (i));
          t = tsi_stmt (i);
          if (TREE_CODE (t) == STATEMENT_LIST)
            {
              tsi_link_before (&i, t, TSI_SAME_STMT);
              tsi_delink (&i);
            }
          else
            tsi_next (&i);
        }
      break;

    default:
      /* A type, a decl, or some kind of statement that we're not
         interested in.  Don't walk them.  */
      break;
    }
}
1671
 
1672
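
/* Editor's note (illustrative, not from the original source): lowering a
   TRY_FINALLY_EXPR in place can leave a STATEMENT_LIST nested inside the
   list being walked, e.g.

       { a; TRY_FINALLY_EXPR; b; }
   ==> { a; { body'; finally'; }; b; }     after lower_try_finally
   ==> { a; body'; finally'; b; }          after the splice above

   which is why the STATEMENT_LIST case re-links the inner list in place
   before advancing the iterator.  */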

static void
lower_eh_constructs (void)
{
  struct leh_state null_state;
  tree *tp = &DECL_SAVED_TREE (current_function_decl);

  finally_tree = htab_create (31, struct_ptr_hash, struct_ptr_eq, free);

  collect_finally_tree (*tp, NULL);

  memset (&null_state, 0, sizeof (null_state));
  lower_eh_constructs_1 (&null_state, tp);

  htab_delete (finally_tree);

  collect_eh_region_array ();
}

struct tree_opt_pass pass_lower_eh =
{
  "eh",                                 /* name */
  NULL,                                 /* gate */
  lower_eh_constructs,                  /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_TREE_EH,                           /* tv_id */
  PROP_gimple_lcf,                      /* properties_required */
  PROP_gimple_leh,                      /* properties_provided */
  PROP_gimple_lcf,                      /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func,                       /* todo_flags_finish */
  0                                     /* letter */
};
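
/* Editor's note (illustrative): per the flags above, this pass consumes
   the lowered-control-flow form (PROP_gimple_lcf) and guarantees the
   lowered-EH form (PROP_gimple_leh); once it has run, no TRY_FINALLY_EXPR
   or TRY_CATCH_EXPR remains for later passes such as CFG construction
   to see.  */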
 
/* Construct EH edges for STMT.  */

static void
make_eh_edge (struct eh_region *region, void *data)
{
  tree stmt, lab;
  basic_block src, dst;

  stmt = data;
  lab = get_eh_region_tree_label (region);

  src = bb_for_stmt (stmt);
  dst = label_to_block (lab);

  make_edge (src, dst, EDGE_ABNORMAL | EDGE_EH);
}

void
make_eh_edges (tree stmt)
{
  int region_nr;
  bool is_resx;

  if (TREE_CODE (stmt) == RESX_EXPR)
    {
      region_nr = TREE_INT_CST_LOW (TREE_OPERAND (stmt, 0));
      is_resx = true;
    }
  else
    {
      region_nr = lookup_stmt_eh_region (stmt);
      if (region_nr < 0)
        return;
      is_resx = false;
    }

  foreach_reachable_handler (region_nr, is_resx, make_eh_edge, stmt);
}
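
/* Editor's sketch (hypothetical caller; last_stmt and
   tree_can_throw_internal are assumed from GCC's own headers): CFG
   construction would invoke this for every statement that can throw
   internally, roughly

       tree last = last_stmt (bb);
       if (last && tree_can_throw_internal (last))
         make_eh_edges (last);

   so that each potential throw point gets an EDGE_ABNORMAL | EDGE_EH
   edge to every reachable handler.  */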
 
static bool mark_eh_edge_found_error;

/* Mark the edge that make_eh_edge would create for the given region by
   setting its aux field; report an error if something goes wrong.  */
static void
mark_eh_edge (struct eh_region *region, void *data)
{
  tree stmt, lab;
  basic_block src, dst;
  edge e;

  stmt = data;
  lab = get_eh_region_tree_label (region);

  src = bb_for_stmt (stmt);
  dst = label_to_block (lab);

  e = find_edge (src, dst);
  if (!e)
    {
      error ("EH edge %i->%i is missing", src->index, dst->index);
      mark_eh_edge_found_error = true;
    }
  else if (!(e->flags & EDGE_EH))
    {
      error ("EH edge %i->%i is missing its EH flag", src->index, dst->index);
      mark_eh_edge_found_error = true;
    }
  else if (e->aux)
    {
      /* ??? This might not be a mistake.  */
      error ("EH edge %i->%i has duplicated regions", src->index, dst->index);
      mark_eh_edge_found_error = true;
    }
  else
    e->aux = (void *)1;
}
 
/* Verify that the BB containing STMT as its last statement has exactly
   the edges that make_eh_edges would create.  */
bool
verify_eh_edges (tree stmt)
{
  int region_nr;
  bool is_resx;
  basic_block bb = bb_for_stmt (stmt);
  edge_iterator ei;
  edge e;

  FOR_EACH_EDGE (e, ei, bb->succs)
    gcc_assert (!e->aux);
  mark_eh_edge_found_error = false;
  if (TREE_CODE (stmt) == RESX_EXPR)
    {
      region_nr = TREE_INT_CST_LOW (TREE_OPERAND (stmt, 0));
      is_resx = true;
    }
  else
    {
      region_nr = lookup_stmt_eh_region (stmt);
      if (region_nr < 0)
        {
          FOR_EACH_EDGE (e, ei, bb->succs)
            if (e->flags & EDGE_EH)
              {
                error ("BB %i cannot throw but has EH edges", bb->index);
                return true;
              }
          return false;
        }
      if (!tree_could_throw_p (stmt))
        {
          error ("BB %i last statement has incorrectly set region", bb->index);
          return true;
        }
      is_resx = false;
    }

  foreach_reachable_handler (region_nr, is_resx, mark_eh_edge, stmt);
  FOR_EACH_EDGE (e, ei, bb->succs)
    {
      if ((e->flags & EDGE_EH) && !e->aux)
        {
          error ("unnecessary EH edge %i->%i", bb->index, e->dest->index);
          mark_eh_edge_found_error = true;
          return true;
        }
      e->aux = NULL;
    }
  return mark_eh_edge_found_error;
}
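
/* Editor's sketch (hypothetical verifier loop; FOR_EACH_BB, last_stmt and
   internal_error are assumed from GCC's own headers): a whole-function
   check might drive the routine above as

       FOR_EACH_BB (bb)
         {
           tree stmt = last_stmt (bb);
           if (stmt && verify_eh_edges (stmt))
             internal_error ("EH edge verification failed");
         }

   relying on mark_eh_edge to tick off each expected edge via e->aux and
   on the final loop above to catch any stale EDGE_EH successor.  */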
 
/* Return true if the expr can trap, as in dereferencing an invalid pointer
   location or floating point arithmetic.  Cf. the RTL version, may_trap_p.
   This routine expects only GIMPLE lhs or rhs input.  */

bool
tree_could_trap_p (tree expr)
{
  enum tree_code code = TREE_CODE (expr);
  bool honor_nans = false;
  bool honor_snans = false;
  bool fp_operation = false;
  bool honor_trapv = false;
  tree t, base;

  if (TREE_CODE_CLASS (code) == tcc_comparison
      || TREE_CODE_CLASS (code) == tcc_unary
      || TREE_CODE_CLASS (code) == tcc_binary)
    {
      t = TREE_TYPE (expr);
      fp_operation = FLOAT_TYPE_P (t);
      if (fp_operation)
        {
          honor_nans = flag_trapping_math && !flag_finite_math_only;
          honor_snans = flag_signaling_nans != 0;
        }
      else if (INTEGRAL_TYPE_P (t) && TYPE_TRAP_SIGNED (t))
        honor_trapv = true;
    }

 restart:
  switch (code)
    {
    case TARGET_MEM_REF:
      /* For TARGET_MEM_REFs use the information based on the original
         reference.  */
      expr = TMR_ORIGINAL (expr);
      code = TREE_CODE (expr);
      goto restart;

    case COMPONENT_REF:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case BIT_FIELD_REF:
    case WITH_SIZE_EXPR:
      expr = TREE_OPERAND (expr, 0);
      code = TREE_CODE (expr);
      goto restart;

    case ARRAY_RANGE_REF:
      /* Be conservative here for now.  We could check the bounds of
         the access similarly to the ARRAY_REF case below.  */
      if (!TREE_THIS_NOTRAP (expr))
        return true;

      base = TREE_OPERAND (expr, 0);
      return tree_could_trap_p (base);

    case ARRAY_REF:
      base = TREE_OPERAND (expr, 0);
      if (tree_could_trap_p (base))
        return true;

      if (TREE_THIS_NOTRAP (expr))
        return false;

      return !in_array_bounds_p (expr);

    case INDIRECT_REF:
    case ALIGN_INDIRECT_REF:
    case MISALIGNED_INDIRECT_REF:
      return !TREE_THIS_NOTRAP (expr);

    case ASM_EXPR:
      return TREE_THIS_VOLATILE (expr);

    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case TRUNC_MOD_EXPR:
    case RDIV_EXPR:
      if (honor_snans || honor_trapv)
        return true;
      if (fp_operation)
        return flag_trapping_math;
      t = TREE_OPERAND (expr, 1);
      if (!TREE_CONSTANT (t) || integer_zerop (t))
        return true;
      return false;

    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case LTGT_EXPR:
      /* Some floating point comparisons may trap.  */
      return honor_nans;

    case EQ_EXPR:
    case NE_EXPR:
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
      return honor_snans;

    case CONVERT_EXPR:
    case FIX_TRUNC_EXPR:
    case FIX_CEIL_EXPR:
    case FIX_FLOOR_EXPR:
    case FIX_ROUND_EXPR:
      /* Conversion of floating point might trap.  */
      return honor_nans;

    case NEGATE_EXPR:
    case ABS_EXPR:
    case CONJ_EXPR:
      /* These operations don't trap with floating point.  */
      if (honor_trapv)
        return true;
      return false;

    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
      /* Any floating-point arithmetic may trap.  */
      if (fp_operation && flag_trapping_math)
        return true;
      if (honor_trapv)
        return true;
      return false;

    case CALL_EXPR:
      t = get_callee_fndecl (expr);
      /* Assume that calls to weak functions may trap.  */
      if (!t || !DECL_P (t) || DECL_WEAK (t))
        return true;
      return false;

    default:
      /* Any floating-point arithmetic may trap.  */
      if (fp_operation && flag_trapping_math)
        return true;
      return false;
    }
}
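
/* Editor's note -- illustrative examples of the classification above
   (added for exposition, not part of the original source):

       *p        traps unless TREE_THIS_NOTRAP is set on the reference
       a[i]      traps when the index may be out of bounds
       x / y     traps unless Y is a nonzero constant; for floats it
                 traps exactly when -ftrapping-math is in effect
       f < g     traps only when NaNs must be honored
       -n        traps only for trapping signed arithmetic (-ftrapv)  */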
 
bool
tree_could_throw_p (tree t)
{
  if (!flag_exceptions)
    return false;
  if (TREE_CODE (t) == MODIFY_EXPR)
    {
      if (flag_non_call_exceptions
          && tree_could_trap_p (TREE_OPERAND (t, 0)))
        return true;
      t = TREE_OPERAND (t, 1);
    }

  if (TREE_CODE (t) == WITH_SIZE_EXPR)
    t = TREE_OPERAND (t, 0);
  if (TREE_CODE (t) == CALL_EXPR)
    return (call_expr_flags (t) & ECF_NOTHROW) == 0;
  if (flag_non_call_exceptions)
    return tree_could_trap_p (t);
  return false;
}
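
/* Editor's note (illustrative): with -fexceptions alone, only a call not
   marked ECF_NOTHROW can throw; with -fnon-call-exceptions any trapping
   expression can.  For example

       foo ();        could throw, unless foo is known nothrow
       x = *p;        could throw only under -fnon-call-exceptions

   and since the MODIFY_EXPR case above also checks the lhs, a trapping
   store such as *q = v can throw even when its rhs cannot.  */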
 
bool
tree_can_throw_internal (tree stmt)
{
  int region_nr;
  bool is_resx = false;

  if (TREE_CODE (stmt) == RESX_EXPR)
    region_nr = TREE_INT_CST_LOW (TREE_OPERAND (stmt, 0)), is_resx = true;
  else
    region_nr = lookup_stmt_eh_region (stmt);
  if (region_nr < 0)
    return false;
  return can_throw_internal_1 (region_nr, is_resx);
}

bool
tree_can_throw_external (tree stmt)
{
  int region_nr;
  bool is_resx = false;

  if (TREE_CODE (stmt) == RESX_EXPR)
    region_nr = TREE_INT_CST_LOW (TREE_OPERAND (stmt, 0)), is_resx = true;
  else
    region_nr = lookup_stmt_eh_region (stmt);
  if (region_nr < 0)
    return tree_could_throw_p (stmt);
  else
    return can_throw_external_1 (region_nr, is_resx);
}
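
/* Editor's sketch (hypothetical use, not from the original source):
   optimizers combine the two predicates to tell local from escaping
   exceptions, roughly

       if (tree_can_throw_internal (stmt))
         ...   stmt needs EH edges inside this function
       else if (tree_can_throw_external (stmt))
         ...   stmt may throw, but only out to our caller

   which is why the internal variant returns false for a statement in no
   region, while the external one falls back to tree_could_throw_p.  */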
 
/* Given a statement OLD_STMT and a new statement NEW_STMT that has replaced
   OLD_STMT in the function, remove OLD_STMT from the EH table and put NEW_STMT
   in the table if it should be in there.  Return TRUE if a replacement was
   done that may require an EH edge purge.  */

bool
maybe_clean_or_replace_eh_stmt (tree old_stmt, tree new_stmt)
{
  int region_nr = lookup_stmt_eh_region (old_stmt);

  if (region_nr >= 0)
    {
      bool new_stmt_could_throw = tree_could_throw_p (new_stmt);

      if (new_stmt == old_stmt && new_stmt_could_throw)
        return false;

      remove_stmt_from_eh_region (old_stmt);
      if (new_stmt_could_throw)
        {
          add_stmt_to_eh_region (new_stmt, region_nr);
          return false;
        }
      else
        return true;
    }

  return false;
}
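
/* Editor's sketch (hypothetical caller): a pass that simplifies OLD_STMT
   into NEW_STMT would typically follow a true return from the function
   above with an EH edge purge, e.g.

       if (maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt)
           && tree_purge_dead_eh_edges (bb_for_stmt (new_stmt)))
         cleanup_needed = true;

   tree_purge_dead_eh_edges and bb_for_stmt are assumed from tree-flow.h;
   cleanup_needed is a made-up flag for the sketch.  */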
