/* Exception handling semantics and decomposition for trees.
   Copyright (C) 2003, 2004, 2005, 2006, 2007 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "tm_p.h"
#include "flags.h"
#include "function.h"
#include "except.h"
#include "tree-flow.h"
#include "tree-dump.h"
#include "tree-inline.h"
#include "tree-iterator.h"
#include "tree-pass.h"
#include "timevar.h"
#include "langhooks.h"
#include "ggc.h"
#include "toplev.h"


/* Nonzero if we are using EH to handle cleanups.  */
static int using_eh_for_cleanups_p = 0;

void
using_eh_for_cleanups (void)
{
  using_eh_for_cleanups_p = 1;
}

/* Misc functions used in this file.  */

/* Compare and hash for any structure which begins with a canonical
   pointer.  Assumes all pointers are interchangeable, which is sort
   of already assumed by gcc elsewhere IIRC.  */

static int
struct_ptr_eq (const void *a, const void *b)
{
  const void * const * x = (const void * const *) a;
  const void * const * y = (const void * const *) b;
  return *x == *y;
}

static hashval_t
struct_ptr_hash (const void *a)
{
  const void * const * x = (const void * const *) a;
  return (size_t)*x >> 4;
}
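
/* A note on the hash above: the low-order bits of these pointers are
   almost always zero because of alignment, so the shift by 4 keeps
   aligned allocations from all colliding on their zero low bits.
   E.g., 16-byte-aligned pointers ending in 0x40, 0x50, 0x60 hash to
   the distinct values 4, 5, 6.  */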


/* Remember and lookup EH region data for arbitrary statements.
   Really this means any statement that could_throw_p.  We could
   stuff this information into the stmt_ann data structure, but:

   (1) We absolutely rely on this information being kept until
   we get to rtl.  Once we're done with lowering here, if we lose
   the information there's no way to recover it!

   (2) There are many more statements that *cannot* throw as
   compared to those that can.  We should be saving some amount
   of space by only allocating memory for those that can throw.  */

static void
record_stmt_eh_region (struct eh_region *region, tree t)
{
  if (!region)
    return;

  add_stmt_to_eh_region (t, get_eh_region_number (region));
}

void
add_stmt_to_eh_region_fn (struct function *ifun, tree t, int num)
{
  struct throw_stmt_node *n;
  void **slot;

  gcc_assert (num >= 0);
  gcc_assert (TREE_CODE (t) != RESX_EXPR);

  n = GGC_NEW (struct throw_stmt_node);
  n->stmt = t;
  n->region_nr = num;

  if (!get_eh_throw_stmt_table (ifun))
    set_eh_throw_stmt_table (ifun, htab_create_ggc (31, struct_ptr_hash,
                                                    struct_ptr_eq,
                                                    ggc_free));

  slot = htab_find_slot (get_eh_throw_stmt_table (ifun), n, INSERT);
  gcc_assert (!*slot);
  *slot = n;
  /* ??? For the benefit of calls.c, converting all this to rtl,
     we need to record the call expression, not just the outer
     modify statement.  */
  if (TREE_CODE (t) == MODIFY_EXPR
      && (t = get_call_expr_in (t)))
    add_stmt_to_eh_region_fn (ifun, t, num);
}

void
add_stmt_to_eh_region (tree t, int num)
{
  add_stmt_to_eh_region_fn (cfun, t, num);
}

bool
remove_stmt_from_eh_region_fn (struct function *ifun, tree t)
{
  struct throw_stmt_node dummy;
  void **slot;

  if (!get_eh_throw_stmt_table (ifun))
    return false;

  dummy.stmt = t;
  slot = htab_find_slot (get_eh_throw_stmt_table (ifun), &dummy,
                         NO_INSERT);
  if (slot)
    {
      htab_clear_slot (get_eh_throw_stmt_table (ifun), slot);
      /* ??? For the benefit of calls.c, converting all this to rtl,
         we need to record the call expression, not just the outer
         modify statement.  */
      if (TREE_CODE (t) == MODIFY_EXPR
          && (t = get_call_expr_in (t)))
        remove_stmt_from_eh_region_fn (ifun, t);
      return true;
    }
  else
    return false;
}

bool
remove_stmt_from_eh_region (tree t)
{
  return remove_stmt_from_eh_region_fn (cfun, t);
}

int
lookup_stmt_eh_region_fn (struct function *ifun, tree t)
{
  struct throw_stmt_node *p, n;

  if (!get_eh_throw_stmt_table (ifun))
    return -2;

  n.stmt = t;
  p = (struct throw_stmt_node *) htab_find (get_eh_throw_stmt_table (ifun),
                                            &n);

  return (p ? p->region_nr : -1);
}

int
lookup_stmt_eh_region (tree t)
{
  /* We can get called from initialized data when -fnon-call-exceptions
     is on; prevent crash.  */
  if (!cfun)
    return -1;
  return lookup_stmt_eh_region_fn (cfun, t);
}
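
/* A usage sketch for the table above (illustrative only, not code that
   appears elsewhere in this file):

     int rn = lookup_stmt_eh_region (stmt);
     if (rn >= 0)
       ...stmt may throw, and rn names its EH region...

   Per the conventions above, -1 means no region was recorded for the
   statement and -2 means the function has no throw-stmt table at all.  */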


/* First pass of EH node decomposition.  Build up a tree of TRY_FINALLY_EXPR
   nodes and LABEL_DECL nodes.  We will use this during the second phase to
   determine if a goto leaves the body of a TRY_FINALLY_EXPR node.  */

struct finally_tree_node
{
  tree child, parent;
};

/* Note that this table is *not* marked GTY.  It is short-lived.  */
static htab_t finally_tree;

static void
record_in_finally_tree (tree child, tree parent)
{
  struct finally_tree_node *n;
  void **slot;

  n = XNEW (struct finally_tree_node);
  n->child = child;
  n->parent = parent;

  slot = htab_find_slot (finally_tree, n, INSERT);
  gcc_assert (!*slot);
  *slot = n;
}

static void
collect_finally_tree (tree t, tree region)
{
 tailrecurse:
  switch (TREE_CODE (t))
    {
    case LABEL_EXPR:
      record_in_finally_tree (LABEL_EXPR_LABEL (t), region);
      break;

    case TRY_FINALLY_EXPR:
      record_in_finally_tree (t, region);
      collect_finally_tree (TREE_OPERAND (t, 0), t);
      t = TREE_OPERAND (t, 1);
      goto tailrecurse;

    case TRY_CATCH_EXPR:
      collect_finally_tree (TREE_OPERAND (t, 0), region);
      t = TREE_OPERAND (t, 1);
      goto tailrecurse;

    case CATCH_EXPR:
      t = CATCH_BODY (t);
      goto tailrecurse;

    case EH_FILTER_EXPR:
      t = EH_FILTER_FAILURE (t);
      goto tailrecurse;

    case STATEMENT_LIST:
      {
        tree_stmt_iterator i;
        for (i = tsi_start (t); !tsi_end_p (i); tsi_next (&i))
          collect_finally_tree (tsi_stmt (i), region);
      }
      break;

    default:
      /* A type, a decl, or some kind of statement that we're not
         interested in.  Don't walk them.  */
      break;
    }
}

/* Use the finally tree to determine if a jump from START to TARGET
   would leave the try_finally node that START lives in.  */

static bool
outside_finally_tree (tree start, tree target)
{
  struct finally_tree_node n, *p;

  do
    {
      n.child = start;
      p = (struct finally_tree_node *) htab_find (finally_tree, &n);
      if (!p)
        return true;
      start = p->parent;
    }
  while (start != target);

  return false;
}
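
/* As a hypothetical example of the walk above, given

     try { try { goto L; } finally { A; } } finally { B; }
     L:;

   the label L was recorded with a parent outside both TRY_FINALLY_EXPRs,
   so outside_finally_tree (L, <inner try_finally>) follows child->parent
   links from L, never meets the inner node, and returns true: the goto
   escapes the inner finally and must be queued for redirection.  */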

/* Second pass of EH node decomposition.  Actually transform the TRY_FINALLY
   and TRY_CATCH nodes into a set of gotos, magic labels, and eh regions.
   The eh region creation is straightforward, but frobbing all the gotos
   and such into shape isn't.  */

/* State of the world while lowering.  */

struct leh_state
{
  /* What's "current" while constructing the eh region tree.  These
     correspond to variables of the same name in cfun->eh, which we
     don't have easy access to.  */
  struct eh_region *cur_region;
  struct eh_region *prev_try;

  /* Processing of TRY_FINALLY requires a bit more state.  This is
     split out into a separate structure so that we don't have to
     copy so much when processing other nodes.  */
  struct leh_tf_state *tf;
};

struct leh_tf_state
{
  /* Pointer to the TRY_FINALLY node under discussion.  The try_finally_expr
     is the original TRY_FINALLY_EXPR.  We need to retain this so that
     outside_finally_tree can reliably reference the tree used in the
     collect_finally_tree data structures.  */
  tree try_finally_expr;
  tree *top_p;

  /* The state outside this try_finally node.  */
  struct leh_state *outer;

  /* The exception region created for it.  */
  struct eh_region *region;

  /* The GOTO_QUEUE is an array of GOTO_EXPR and RETURN_EXPR statements
     that are seen to escape this TRY_FINALLY_EXPR node.  */
  struct goto_queue_node {
    tree stmt;
    tree repl_stmt;
    tree cont_stmt;
    int index;
  } *goto_queue;
  size_t goto_queue_size;
  size_t goto_queue_active;

  /* The set of unique labels seen as entries in the goto queue.  */
  VEC(tree,heap) *dest_array;

  /* A label to be added at the end of the completed transformed
     sequence.  It will be set if may_fallthru was true *at one time*,
     though subsequent transformations may have cleared that flag.  */
  tree fallthru_label;

  /* A label that has been registered with except.c to be the
     landing pad for this try block.  */
  tree eh_label;

  /* True if it is possible to fall out the bottom of the try block.
     Cleared if the fallthru is converted to a goto.  */
  bool may_fallthru;

  /* True if any entry in goto_queue is a RETURN_EXPR.  */
  bool may_return;

  /* True if the finally block can receive an exception edge.
     Cleared if the exception case is handled by code duplication.  */
  bool may_throw;
};

static void lower_eh_filter (struct leh_state *, tree *);
static void lower_eh_constructs_1 (struct leh_state *, tree *);

/* Comparison function for qsort/bsearch.  We're interested in
   searching goto queue elements for source statements.  */

static int
goto_queue_cmp (const void *x, const void *y)
{
  tree a = ((const struct goto_queue_node *)x)->stmt;
  tree b = ((const struct goto_queue_node *)y)->stmt;
  return (a == b ? 0 : a < b ? -1 : 1);
}
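
/* Note that this comparison orders the queue by raw pointer value.
   That is sufficient for the qsort/bsearch pairing used here, but the
   order is not otherwise meaningful; see the comment inside
   lower_try_finally_switch about pointers not offering a stable
   order.  */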

/* Search for STMT in the goto queue.  Return the replacement,
   or null if the statement isn't in the queue.  */

static tree
find_goto_replacement (struct leh_tf_state *tf, tree stmt)
{
  struct goto_queue_node tmp, *ret;
  tmp.stmt = stmt;
  ret = (struct goto_queue_node *)
     bsearch (&tmp, tf->goto_queue, tf->goto_queue_active,
              sizeof (struct goto_queue_node), goto_queue_cmp);
  return (ret ? ret->repl_stmt : NULL);
}

/* A subroutine of replace_goto_queue_1.  Handles the sub-clauses of a
   lowered COND_EXPR.  If, by chance, the replacement is a simple goto,
   then we can just splat it in, otherwise we add the new stmts immediately
   after the COND_EXPR and redirect.  */

static void
replace_goto_queue_cond_clause (tree *tp, struct leh_tf_state *tf,
                                tree_stmt_iterator *tsi)
{
  tree new, one, label;

  new = find_goto_replacement (tf, *tp);
  if (!new)
    return;

  one = expr_only (new);
  if (one && TREE_CODE (one) == GOTO_EXPR)
    {
      *tp = one;
      return;
    }

  label = build1 (LABEL_EXPR, void_type_node, NULL_TREE);
  *tp = build_and_jump (&LABEL_EXPR_LABEL (label));

  tsi_link_after (tsi, label, TSI_CONTINUE_LINKING);
  tsi_link_after (tsi, new, TSI_CONTINUE_LINKING);
}

/* The real work of replace_goto_queue.  Returns with TSI updated to
   point to the next statement.  */

static void replace_goto_queue_stmt_list (tree, struct leh_tf_state *);

static void
replace_goto_queue_1 (tree t, struct leh_tf_state *tf, tree_stmt_iterator *tsi)
{
  switch (TREE_CODE (t))
    {
    case GOTO_EXPR:
    case RETURN_EXPR:
      t = find_goto_replacement (tf, t);
      if (t)
        {
          tsi_link_before (tsi, t, TSI_SAME_STMT);
          tsi_delink (tsi);
          return;
        }
      break;

    case COND_EXPR:
      replace_goto_queue_cond_clause (&COND_EXPR_THEN (t), tf, tsi);
      replace_goto_queue_cond_clause (&COND_EXPR_ELSE (t), tf, tsi);
      break;

    case TRY_FINALLY_EXPR:
    case TRY_CATCH_EXPR:
      replace_goto_queue_stmt_list (TREE_OPERAND (t, 0), tf);
      replace_goto_queue_stmt_list (TREE_OPERAND (t, 1), tf);
      break;
    case CATCH_EXPR:
      replace_goto_queue_stmt_list (CATCH_BODY (t), tf);
      break;
    case EH_FILTER_EXPR:
      replace_goto_queue_stmt_list (EH_FILTER_FAILURE (t), tf);
      break;

    case STATEMENT_LIST:
      gcc_unreachable ();

    default:
      /* These won't have gotos in them.  */
      break;
    }

  tsi_next (tsi);
}

/* A subroutine of replace_goto_queue.  Handles STATEMENT_LISTs.  */

static void
replace_goto_queue_stmt_list (tree t, struct leh_tf_state *tf)
{
  tree_stmt_iterator i = tsi_start (t);
  while (!tsi_end_p (i))
    replace_goto_queue_1 (tsi_stmt (i), tf, &i);
}

/* Replace all goto queue members.  */

static void
replace_goto_queue (struct leh_tf_state *tf)
{
  if (tf->goto_queue_active == 0)
    return;
  replace_goto_queue_stmt_list (*tf->top_p, tf);
}

/* For any GOTO_EXPR or RETURN_EXPR, decide whether it leaves a try_finally
   node, and if so record that fact in the goto queue associated with that
   try_finally node.  */

static void
maybe_record_in_goto_queue (struct leh_state *state, tree stmt)
{
  struct leh_tf_state *tf = state->tf;
  struct goto_queue_node *q;
  size_t active, size;
  int index;

  if (!tf)
    return;

  switch (TREE_CODE (stmt))
    {
    case GOTO_EXPR:
      {
        tree lab = GOTO_DESTINATION (stmt);

        /* Computed and non-local gotos do not get processed.  Given
           their nature we can neither tell whether we've escaped the
           finally block nor redirect them if we knew.  */
        if (TREE_CODE (lab) != LABEL_DECL)
          return;

        /* No need to record gotos that don't leave the try block.  */
        if (! outside_finally_tree (lab, tf->try_finally_expr))
          return;

        if (! tf->dest_array)
          {
            tf->dest_array = VEC_alloc (tree, heap, 10);
            VEC_quick_push (tree, tf->dest_array, lab);
            index = 0;
          }
        else
          {
            int n = VEC_length (tree, tf->dest_array);
            for (index = 0; index < n; ++index)
              if (VEC_index (tree, tf->dest_array, index) == lab)
                break;
            if (index == n)
              VEC_safe_push (tree, heap, tf->dest_array, lab);
          }
      }
      break;

    case RETURN_EXPR:
      tf->may_return = true;
      index = -1;
      break;

    default:
      gcc_unreachable ();
    }

  active = tf->goto_queue_active;
  size = tf->goto_queue_size;
  if (active >= size)
    {
      size = (size ? size * 2 : 32);
      tf->goto_queue_size = size;
      tf->goto_queue
        = XRESIZEVEC (struct goto_queue_node, tf->goto_queue, size);
    }

  q = &tf->goto_queue[active];
  tf->goto_queue_active = active + 1;

  memset (q, 0, sizeof (*q));
  q->stmt = stmt;
  q->index = index;
}
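
/* A worked example: in "try { ... goto L; ... return x; ... }
   finally { F; }", with L outside the try, the queue receives one node
   for the goto (INDEX is L's slot in DEST_ARRAY, here 0) and one for
   the return (INDEX == -1).  The queue array starts at 32 entries and
   doubles whenever it fills.  */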

#ifdef ENABLE_CHECKING
/* We do not process SWITCH_EXPRs for now.  As long as the original source
   was in fact structured, and we've not yet done jump threading, then none
   of the labels will leave outer TRY_FINALLY_EXPRs.  Verify this.  */

static void
verify_norecord_switch_expr (struct leh_state *state, tree switch_expr)
{
  struct leh_tf_state *tf = state->tf;
  size_t i, n;
  tree vec;

  if (!tf)
    return;

  vec = SWITCH_LABELS (switch_expr);
  n = TREE_VEC_LENGTH (vec);

  for (i = 0; i < n; ++i)
    {
      tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
      gcc_assert (!outside_finally_tree (lab, tf->try_finally_expr));
    }
}
#else
#define verify_norecord_switch_expr(state, switch_expr)
#endif

/* Redirect the RETURN_EXPR recorded in goto queue node Q to FINLAB, placing
   the replacement statements in Q->REPL_STMT and whatever is needed to
   finish the return in Q->CONT_STMT.  If MOD is non-null, insert it before
   the new branch.  RETURN_VALUE_P is a cache containing a temporary variable
   to be used in manipulating the value returned from the function.  */

static void
do_return_redirection (struct goto_queue_node *q, tree finlab, tree mod,
                       tree *return_value_p)
{
  tree ret_expr = TREE_OPERAND (q->stmt, 0);
  tree x;

  if (ret_expr)
    {
      /* The nasty part about redirecting the return value is that the
         return value itself is to be computed before the FINALLY block
         is executed.  e.g.

                int x;
                int foo (void)
                {
                  x = 0;
                  try {
                    return x;
                  } finally {
                    x++;
                  }
                }

          should return 0, not 1.  Arrange for this to happen by copying
          the computed return value into a local temporary.  This also
          allows us to redirect multiple return statements through the
          same destination block; whether this is a net win or not really
          depends, I guess, but it does make generation of the switch in
          lower_try_finally_switch easier.  */

      switch (TREE_CODE (ret_expr))
        {
        case RESULT_DECL:
          if (!*return_value_p)
            *return_value_p = ret_expr;
          else
            gcc_assert (*return_value_p == ret_expr);
          q->cont_stmt = q->stmt;
          break;

        case MODIFY_EXPR:
          {
            tree result = TREE_OPERAND (ret_expr, 0);
            tree new, old = TREE_OPERAND (ret_expr, 1);

            if (!*return_value_p)
              {
                if (aggregate_value_p (TREE_TYPE (result),
                                       TREE_TYPE (current_function_decl)))
                  /* If this function returns in memory, copy the argument
                     into the return slot now.  Otherwise, we might need to
                     worry about magic return semantics, so we need to use a
                     temporary to hold the value until we're actually ready
                     to return.  */
                  new = result;
                else
                  new = create_tmp_var (TREE_TYPE (old), "rettmp");
                *return_value_p = new;
              }
            else
              new = *return_value_p;

            x = build2 (MODIFY_EXPR, TREE_TYPE (new), new, old);
            append_to_statement_list (x, &q->repl_stmt);

            if (new == result)
              x = result;
            else
              x = build2 (MODIFY_EXPR, TREE_TYPE (result), result, new);
            q->cont_stmt = build1 (RETURN_EXPR, void_type_node, x);
          }
          break;

        default:
          gcc_unreachable ();
        }
    }
  else
    {
      /* If we don't return a value, all return statements are the same.  */
      q->cont_stmt = q->stmt;
    }

  if (mod)
    append_to_statement_list (mod, &q->repl_stmt);

  x = build1 (GOTO_EXPR, void_type_node, finlab);
  append_to_statement_list (x, &q->repl_stmt);
}
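
/* In sketch form, the redirection above turns "return <val>;" into

     REPL_STMT:  rettmp = <val>;  goto FINLAB;
     CONT_STMT:  return rettmp;

   where rettmp is the temporary cached in *RETURN_VALUE_P (or the
   RESULT_DECL itself when the function returns in memory).  CONT_STMT
   is not emitted here; it is placed after the finally block by the
   lower_try_finally_* routines.  */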

/* Similar, but easier, for GOTO_EXPR.  */

static void
do_goto_redirection (struct goto_queue_node *q, tree finlab, tree mod)
{
  tree x;

  q->cont_stmt = q->stmt;
  if (mod)
    append_to_statement_list (mod, &q->repl_stmt);

  x = build1 (GOTO_EXPR, void_type_node, finlab);
  append_to_statement_list (x, &q->repl_stmt);
}

/* We want to transform
        try { body; } catch { stuff; }
   to
        body; goto over; lab: stuff; over:

   T is a TRY_FINALLY or TRY_CATCH node.  LAB is the label that
   should be placed before the second operand, or NULL.  OVER is
   an existing label that should be put at the exit, or NULL.  */

static void
frob_into_branch_around (tree *tp, tree lab, tree over)
{
  tree x, op1;

  op1 = TREE_OPERAND (*tp, 1);
  *tp = TREE_OPERAND (*tp, 0);

  if (block_may_fallthru (*tp))
    {
      if (!over)
        over = create_artificial_label ();
      x = build1 (GOTO_EXPR, void_type_node, over);
      append_to_statement_list (x, tp);
    }

  if (lab)
    {
      x = build1 (LABEL_EXPR, void_type_node, lab);
      append_to_statement_list (x, tp);
    }

  append_to_statement_list (op1, tp);

  if (over)
    {
      x = build1 (LABEL_EXPR, void_type_node, over);
      append_to_statement_list (x, tp);
    }
}

/* A subroutine of lower_try_finally.  Duplicate the tree rooted at T.
   Make sure to record all new labels found.  */

static tree
lower_try_finally_dup_block (tree t, struct leh_state *outer_state)
{
  tree region = NULL;

  t = unsave_expr_now (t);

  if (outer_state->tf)
    region = outer_state->tf->try_finally_expr;
  collect_finally_tree (t, region);

  return t;
}

/* A subroutine of lower_try_finally.  Create a fallthru label for
   the given try_finally state.  The only tricky bit here is that
   we have to make sure to record the label in our outer context.  */

static tree
lower_try_finally_fallthru_label (struct leh_tf_state *tf)
{
  tree label = tf->fallthru_label;
  if (!label)
    {
      label = create_artificial_label ();
      tf->fallthru_label = label;
      if (tf->outer->tf)
        record_in_finally_tree (label, tf->outer->tf->try_finally_expr);
    }
  return label;
}

/* A subroutine of lower_try_finally.  If lang_protect_cleanup_actions
   returns non-null, then the language requires that the exception path out
   of a try_finally be treated specially.  To wit: the code within the
   finally block may not itself throw an exception.  We have two choices here.
   First we can duplicate the finally block and wrap it in a must_not_throw
   region.  Second, we can generate code like

        try {
          finally_block;
        } catch {
          if (fintmp == eh_edge)
            protect_cleanup_actions;
        }

   where "fintmp" is the temporary used in the switch statement generation
   alternative considered below.  For the nonce, we always choose the first
   option.

   THIS_STATE may be null if this is a try-cleanup, not a try-finally.  */

static void
honor_protect_cleanup_actions (struct leh_state *outer_state,
                               struct leh_state *this_state,
                               struct leh_tf_state *tf)
{
  tree protect_cleanup_actions, finally, x;
  tree_stmt_iterator i;
  bool finally_may_fallthru;

  /* First check for nothing to do.  */
  if (lang_protect_cleanup_actions)
    protect_cleanup_actions = lang_protect_cleanup_actions ();
  else
    protect_cleanup_actions = NULL;

  finally = TREE_OPERAND (*tf->top_p, 1);

  /* If the EH case of the finally block can fall through, this may be a
     structure of the form
        try {
          try {
            throw ...;
          } cleanup {
            try {
              throw ...;
            } catch (...) {
            }
          }
        } catch (...) {
          yyy;
        }
    E.g. with an inline destructor with an embedded try block.  In this
    case we must save the runtime EH data around the nested exception.

    This complication means that any time the previous runtime data might
    be used (via fallthru from the finally) we handle the eh case here,
    whether or not protect_cleanup_actions is active.  */

  finally_may_fallthru = block_may_fallthru (finally);
  if (!finally_may_fallthru && !protect_cleanup_actions)
    return;

  /* Duplicate the FINALLY block.  Only need to do this for try-finally,
     and not for cleanups.  */
  if (this_state)
    finally = lower_try_finally_dup_block (finally, outer_state);

  /* Resume execution after the exception.  Adding this now lets
     lower_eh_filter not add unnecessary gotos, as it is clear that
     we never fallthru from this copy of the finally block.  */
  if (finally_may_fallthru)
    {
      tree save_eptr, save_filt;

      save_eptr = create_tmp_var (ptr_type_node, "save_eptr");
      save_filt = create_tmp_var (integer_type_node, "save_filt");

      i = tsi_start (finally);
      x = build0 (EXC_PTR_EXPR, ptr_type_node);
      x = build2 (MODIFY_EXPR, void_type_node, save_eptr, x);
      tsi_link_before (&i, x, TSI_CONTINUE_LINKING);

      x = build0 (FILTER_EXPR, integer_type_node);
      x = build2 (MODIFY_EXPR, void_type_node, save_filt, x);
      tsi_link_before (&i, x, TSI_CONTINUE_LINKING);

      i = tsi_last (finally);
      x = build0 (EXC_PTR_EXPR, ptr_type_node);
      x = build2 (MODIFY_EXPR, void_type_node, x, save_eptr);
      tsi_link_after (&i, x, TSI_CONTINUE_LINKING);

      x = build0 (FILTER_EXPR, integer_type_node);
      x = build2 (MODIFY_EXPR, void_type_node, x, save_filt);
      tsi_link_after (&i, x, TSI_CONTINUE_LINKING);

      x = build_resx (get_eh_region_number (tf->region));
      tsi_link_after (&i, x, TSI_CONTINUE_LINKING);
    }

  /* Wrap the block with protect_cleanup_actions as the action.  */
  if (protect_cleanup_actions)
    {
      x = build2 (EH_FILTER_EXPR, void_type_node, NULL, NULL);
      append_to_statement_list (protect_cleanup_actions, &EH_FILTER_FAILURE (x));
      EH_FILTER_MUST_NOT_THROW (x) = 1;
      finally = build2 (TRY_CATCH_EXPR, void_type_node, finally, x);
      lower_eh_filter (outer_state, &finally);
    }
  else
    lower_eh_constructs_1 (outer_state, &finally);

  /* Hook this up to the end of the existing try block.  If we
     previously fell through the end, we'll have to branch around.
     This means adding a new goto, and adding it to the queue.  */

  i = tsi_last (TREE_OPERAND (*tf->top_p, 0));

  if (tf->may_fallthru)
    {
      x = lower_try_finally_fallthru_label (tf);
      x = build1 (GOTO_EXPR, void_type_node, x);
      tsi_link_after (&i, x, TSI_CONTINUE_LINKING);

      if (this_state)
        maybe_record_in_goto_queue (this_state, x);

      tf->may_fallthru = false;
    }

  x = build1 (LABEL_EXPR, void_type_node, tf->eh_label);
  tsi_link_after (&i, x, TSI_CONTINUE_LINKING);
  tsi_link_after (&i, finally, TSI_CONTINUE_LINKING);

  /* Having now been handled, EH isn't to be considered with
     the rest of the outgoing edges.  */
  tf->may_throw = false;
}
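
/* For reference, when the finally block F may fall through, the EH path
   built above has the shape

     eh_label:
       save_eptr = <exception pointer>;  save_filt = <filter value>;
       F;
       <exception pointer> = save_eptr;  <filter value> = save_filt;
       resx;

   i.e. the runtime EH data is saved around this copy of F so that a
   nested throw and catch inside F cannot clobber it.  */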

/* A subroutine of lower_try_finally.  We have determined that there is
   no fallthru edge out of the finally block.  This means that there is
   no outgoing edge corresponding to any incoming edge.  Restructure the
   try_finally node for this special case.  */

static void
lower_try_finally_nofallthru (struct leh_state *state, struct leh_tf_state *tf)
{
  tree x, finally, lab, return_val;
  struct goto_queue_node *q, *qe;

  if (tf->may_throw)
    lab = tf->eh_label;
  else
    lab = create_artificial_label ();

  finally = TREE_OPERAND (*tf->top_p, 1);
  *tf->top_p = TREE_OPERAND (*tf->top_p, 0);

  x = build1 (LABEL_EXPR, void_type_node, lab);
  append_to_statement_list (x, tf->top_p);

  return_val = NULL;
  q = tf->goto_queue;
  qe = q + tf->goto_queue_active;
  for (; q < qe; ++q)
    if (q->index < 0)
      do_return_redirection (q, lab, NULL, &return_val);
    else
      do_goto_redirection (q, lab, NULL);

  replace_goto_queue (tf);

  lower_eh_constructs_1 (state, &finally);
  append_to_statement_list (finally, tf->top_p);
}

/* A subroutine of lower_try_finally.  We have determined that there is
   exactly one destination of the finally block.  Restructure the
   try_finally node for this special case.  */

static void
lower_try_finally_onedest (struct leh_state *state, struct leh_tf_state *tf)
{
  struct goto_queue_node *q, *qe;
  tree x, finally, finally_label;

  finally = TREE_OPERAND (*tf->top_p, 1);
  *tf->top_p = TREE_OPERAND (*tf->top_p, 0);

  lower_eh_constructs_1 (state, &finally);

  if (tf->may_throw)
    {
      /* Only reachable via the exception edge.  Add the given label to
         the head of the FINALLY block.  Append a RESX at the end.  */

      x = build1 (LABEL_EXPR, void_type_node, tf->eh_label);
      append_to_statement_list (x, tf->top_p);

      append_to_statement_list (finally, tf->top_p);

      x = build_resx (get_eh_region_number (tf->region));

      append_to_statement_list (x, tf->top_p);

      return;
    }

  if (tf->may_fallthru)
    {
      /* Only reachable via the fallthru edge.  Do nothing but let
         the two blocks run together; we'll fall out the bottom.  */
      append_to_statement_list (finally, tf->top_p);
      return;
    }

  finally_label = create_artificial_label ();
  x = build1 (LABEL_EXPR, void_type_node, finally_label);
  append_to_statement_list (x, tf->top_p);

  append_to_statement_list (finally, tf->top_p);

  q = tf->goto_queue;
  qe = q + tf->goto_queue_active;

  if (tf->may_return)
    {
      /* Reachable by return expressions only.  Redirect them.  */
      tree return_val = NULL;
      for (; q < qe; ++q)
        do_return_redirection (q, finally_label, NULL, &return_val);
      replace_goto_queue (tf);
    }
  else
    {
      /* Reachable by goto expressions only.  Redirect them.  */
      for (; q < qe; ++q)
        do_goto_redirection (q, finally_label, NULL);
      replace_goto_queue (tf);

      if (VEC_index (tree, tf->dest_array, 0) == tf->fallthru_label)
        {
          /* Reachable by goto to fallthru label only.  Redirect it
             to the new label (already created, sadly), and do not
             emit the final branch out, or the fallthru label.  */
          tf->fallthru_label = NULL;
          return;
        }
    }

  append_to_statement_list (tf->goto_queue[0].cont_stmt, tf->top_p);
  maybe_record_in_goto_queue (state, tf->goto_queue[0].cont_stmt);
}

/* A subroutine of lower_try_finally.  There are multiple edges incoming
   and outgoing from the finally block.  Implement this by duplicating the
   finally block for every destination.  */

static void
lower_try_finally_copy (struct leh_state *state, struct leh_tf_state *tf)
{
  tree finally, new_stmt;
  tree x;

  finally = TREE_OPERAND (*tf->top_p, 1);
  *tf->top_p = TREE_OPERAND (*tf->top_p, 0);

  new_stmt = NULL_TREE;

  if (tf->may_fallthru)
    {
      x = lower_try_finally_dup_block (finally, state);
      lower_eh_constructs_1 (state, &x);
      append_to_statement_list (x, &new_stmt);

      x = lower_try_finally_fallthru_label (tf);
      x = build1 (GOTO_EXPR, void_type_node, x);
      append_to_statement_list (x, &new_stmt);
    }

  if (tf->may_throw)
    {
      x = build1 (LABEL_EXPR, void_type_node, tf->eh_label);
      append_to_statement_list (x, &new_stmt);

      x = lower_try_finally_dup_block (finally, state);
      lower_eh_constructs_1 (state, &x);
      append_to_statement_list (x, &new_stmt);

      x = build_resx (get_eh_region_number (tf->region));
      append_to_statement_list (x, &new_stmt);
    }

  if (tf->goto_queue)
    {
      struct goto_queue_node *q, *qe;
      tree return_val = NULL;
      int return_index, index;
      struct labels_s
      {
        struct goto_queue_node *q;
        tree label;
      } *labels;

      return_index = VEC_length (tree, tf->dest_array);
      labels = XCNEWVEC (struct labels_s, return_index + 1);

      q = tf->goto_queue;
      qe = q + tf->goto_queue_active;
      for (; q < qe; q++)
        {
          index = q->index < 0 ? return_index : q->index;

          if (!labels[index].q)
            labels[index].q = q;
        }

      for (index = 0; index < return_index + 1; index++)
        {
          tree lab;

          q = labels[index].q;
          if (! q)
            continue;

          lab = labels[index].label = create_artificial_label ();

          if (index == return_index)
            do_return_redirection (q, lab, NULL, &return_val);
          else
            do_goto_redirection (q, lab, NULL);

          x = build1 (LABEL_EXPR, void_type_node, lab);
          append_to_statement_list (x, &new_stmt);

          x = lower_try_finally_dup_block (finally, state);
          lower_eh_constructs_1 (state, &x);
          append_to_statement_list (x, &new_stmt);

          append_to_statement_list (q->cont_stmt, &new_stmt);
          maybe_record_in_goto_queue (state, q->cont_stmt);
        }

      for (q = tf->goto_queue; q < qe; q++)
        {
          tree lab;

          index = q->index < 0 ? return_index : q->index;

          if (labels[index].q == q)
            continue;

          lab = labels[index].label;

          if (index == return_index)
            do_return_redirection (q, lab, NULL, &return_val);
          else
            do_goto_redirection (q, lab, NULL);
        }

      replace_goto_queue (tf);
      free (labels);
    }

  /* Need to link new stmts after running replace_goto_queue due
     to not wanting to process the same goto stmts twice.  */
  append_to_statement_list (new_stmt, tf->top_p);
}
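
/* In outline, the copy strategy above produces

     <try block>
     finally-copy; goto <fallthru label>;        (if may_fallthru)
     eh_label:  finally-copy; resx;              (if may_throw)
     lab_D:     finally-copy; <goto or return>;  (one per destination D)

   with every escaping goto or return in the try block redirected to
   the lab_D owning its destination.  */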

/* A subroutine of lower_try_finally.  There are multiple edges incoming
   and outgoing from the finally block.  Implement this by instrumenting
   each incoming edge and creating a switch statement at the end of the
   finally block that branches to the appropriate destination.  */

static void
lower_try_finally_switch (struct leh_state *state, struct leh_tf_state *tf)
{
  struct goto_queue_node *q, *qe;
  tree return_val = NULL;
  tree finally, finally_tmp, finally_label;
  int return_index, eh_index, fallthru_index;
  int nlabels, ndests, j, last_case_index;
  tree case_label_vec, switch_stmt, last_case, switch_body;
  tree x;

  /* Mash the TRY block to the head of the chain.  */
  finally = TREE_OPERAND (*tf->top_p, 1);
  *tf->top_p = TREE_OPERAND (*tf->top_p, 0);

  /* Lower the finally block itself.  */
  lower_eh_constructs_1 (state, &finally);

  /* Prepare for switch statement generation.  */
  nlabels = VEC_length (tree, tf->dest_array);
  return_index = nlabels;
  eh_index = return_index + tf->may_return;
  fallthru_index = eh_index + tf->may_throw;
  ndests = fallthru_index + tf->may_fallthru;

  finally_tmp = create_tmp_var (integer_type_node, "finally_tmp");
  finally_label = create_artificial_label ();

  case_label_vec = make_tree_vec (ndests);
  switch_stmt = build3 (SWITCH_EXPR, integer_type_node, finally_tmp,
                        NULL_TREE, case_label_vec);
  switch_body = NULL;
  last_case = NULL;
  last_case_index = 0;

  /* Begin inserting code for getting to the finally block.  Things
     are done in this order to correspond to the sequence the code is
     laid out.  */

  if (tf->may_fallthru)
    {
      x = build2 (MODIFY_EXPR, void_type_node, finally_tmp,
                  build_int_cst (NULL_TREE, fallthru_index));
      append_to_statement_list (x, tf->top_p);

      if (tf->may_throw)
        {
          x = build1 (GOTO_EXPR, void_type_node, finally_label);
          append_to_statement_list (x, tf->top_p);
        }

      last_case = build3 (CASE_LABEL_EXPR, void_type_node,
                          build_int_cst (NULL_TREE, fallthru_index), NULL,
                          create_artificial_label ());
      TREE_VEC_ELT (case_label_vec, last_case_index) = last_case;
      last_case_index++;

      x = build1 (LABEL_EXPR, void_type_node, CASE_LABEL (last_case));
      append_to_statement_list (x, &switch_body);

      x = lower_try_finally_fallthru_label (tf);
      x = build1 (GOTO_EXPR, void_type_node, x);
      append_to_statement_list (x, &switch_body);
    }

  if (tf->may_throw)
    {
      x = build1 (LABEL_EXPR, void_type_node, tf->eh_label);
      append_to_statement_list (x, tf->top_p);

      x = build2 (MODIFY_EXPR, void_type_node, finally_tmp,
                  build_int_cst (NULL_TREE, eh_index));
      append_to_statement_list (x, tf->top_p);

      last_case = build3 (CASE_LABEL_EXPR, void_type_node,
                          build_int_cst (NULL_TREE, eh_index), NULL,
                          create_artificial_label ());
      TREE_VEC_ELT (case_label_vec, last_case_index) = last_case;
      last_case_index++;

      x = build1 (LABEL_EXPR, void_type_node, CASE_LABEL (last_case));
      append_to_statement_list (x, &switch_body);
      x = build_resx (get_eh_region_number (tf->region));
      append_to_statement_list (x, &switch_body);
    }

  x = build1 (LABEL_EXPR, void_type_node, finally_label);
  append_to_statement_list (x, tf->top_p);

  append_to_statement_list (finally, tf->top_p);

  /* Redirect each incoming goto edge.  */
  q = tf->goto_queue;
  qe = q + tf->goto_queue_active;
  j = last_case_index + tf->may_return;
  for (; q < qe; ++q)
    {
      tree mod;
      int switch_id, case_index;

      if (q->index < 0)
        {
          mod = build2 (MODIFY_EXPR, void_type_node, finally_tmp,
                        build_int_cst (NULL_TREE, return_index));
          do_return_redirection (q, finally_label, mod, &return_val);
          switch_id = return_index;
        }
      else
        {
          mod = build2 (MODIFY_EXPR, void_type_node, finally_tmp,
                        build_int_cst (NULL_TREE, q->index));
          do_goto_redirection (q, finally_label, mod);
          switch_id = q->index;
        }

      case_index = j + q->index;
      if (!TREE_VEC_ELT (case_label_vec, case_index))
        TREE_VEC_ELT (case_label_vec, case_index)
          = build3 (CASE_LABEL_EXPR, void_type_node,
                    build_int_cst (NULL_TREE, switch_id), NULL,
                    /* We store the cont_stmt in the
                       CASE_LABEL, so that we can recover it
                       in the loop below.  We don't create
                       the new label while walking the
                       goto_queue because pointers don't
                       offer a stable order.  */
                    q->cont_stmt);
    }
  for (j = last_case_index; j < last_case_index + nlabels; j++)
    {
      tree label;
      tree cont_stmt;

      last_case = TREE_VEC_ELT (case_label_vec, j);

      gcc_assert (last_case);

      cont_stmt = CASE_LABEL (last_case);

      label = create_artificial_label ();
      CASE_LABEL (last_case) = label;

      x = build1 (LABEL_EXPR, void_type_node, label);
      append_to_statement_list (x, &switch_body);
      append_to_statement_list (cont_stmt, &switch_body);
      maybe_record_in_goto_queue (state, cont_stmt);
    }
  replace_goto_queue (tf);

  /* Make sure that the last case is the default label, as one is required.
     Then sort the labels, which is also required in GIMPLE.  */
  CASE_LOW (last_case) = NULL;
  sort_case_labels (case_label_vec);

  /* Need to link switch_stmt after running replace_goto_queue due
     to not wanting to process the same goto stmts twice.  */
  append_to_statement_list (switch_stmt, tf->top_p);
  append_to_statement_list (switch_body, tf->top_p);
}
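
/* In outline, the switch strategy above produces

     <try block>
     finally_tmp = <fallthru index>; goto finally_label;
     eh_label: finally_tmp = <eh index>;
     finally_label:
       <finally block>
       switch (finally_tmp)
         {
         case <fallthru index>: goto <fallthru label>;
         case <eh index>: resx;
         case <i>: goto <destination i>;   (one per goto/return
                                            destination; the last
                                            case is made the default)
         }

   so the finally block is emitted exactly once, at the cost of one
   temporary and a computed branch.  */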

/* Decide whether or not we are going to duplicate the finally block.
   There are several considerations.

   First, if this is Java, then the finally block contains code
   written by the user.  It has line numbers associated with it,
   so duplicating the block means it's difficult to set a breakpoint.
   Since controlling code generation via -g is verboten, we simply
   never duplicate code without optimization.

   Second, we'd like to prevent egregious code growth.  One way to
   do this is to estimate the size of the finally block, multiply
   that by the number of copies we'd need to make, and compare against
   the estimate of the size of the switch machinery we'd have to add.  */

static bool
decide_copy_try_finally (int ndests, tree finally)
{
  int f_estimate, sw_estimate;

  if (!optimize)
    return false;

  /* Finally estimate N times, plus N gotos.  */
  f_estimate = estimate_num_insns (finally);
  f_estimate = (f_estimate + 1) * ndests;

  /* Switch statement (cost 10), N variable assignments, N gotos.  */
  sw_estimate = 10 + 2 * ndests;

  /* Optimize for size clearly wants our best guess.  */
  if (optimize_size)
    return f_estimate < sw_estimate;

  /* ??? These numbers are completely made up so far.  */
  if (optimize > 1)
    return f_estimate < 100 || f_estimate < sw_estimate * 2;
  else
    return f_estimate < 40 || f_estimate * 2 < sw_estimate * 3;
}
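
/* Plugging in numbers: for a finally block of roughly 10 insns and
   ndests == 3, f_estimate is (10 + 1) * 3 == 33 and sw_estimate is
   10 + 2 * 3 == 16.  Under -Os we would emit the switch (33 >= 16);
   at -O2 we would duplicate, since 33 < 100.  */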

/* A subroutine of lower_eh_constructs_1.  Lower a TRY_FINALLY_EXPR node
   to a sequence of labels and blocks, plus the exception region trees
   that record all the magic.  This is complicated by the need to
   arrange for the FINALLY block to be executed on all exits.  */

static void
lower_try_finally (struct leh_state *state, tree *tp)
{
  struct leh_tf_state this_tf;
  struct leh_state this_state;
  int ndests;

  /* Process the try block.  */

  memset (&this_tf, 0, sizeof (this_tf));
  this_tf.try_finally_expr = *tp;
  this_tf.top_p = tp;
  this_tf.outer = state;
  if (using_eh_for_cleanups_p)
    this_tf.region
      = gen_eh_region_cleanup (state->cur_region, state->prev_try);
  else
    this_tf.region = NULL;

  this_state.cur_region = this_tf.region;
  this_state.prev_try = state->prev_try;
  this_state.tf = &this_tf;

  lower_eh_constructs_1 (&this_state, &TREE_OPERAND (*tp, 0));

  /* Determine if the try block is escaped through the bottom.  */
  this_tf.may_fallthru = block_may_fallthru (TREE_OPERAND (*tp, 0));

  /* Determine if any exceptions are possible within the try block.  */
  if (using_eh_for_cleanups_p)
    this_tf.may_throw = get_eh_region_may_contain_throw (this_tf.region);
  if (this_tf.may_throw)
    {
      this_tf.eh_label = create_artificial_label ();
      set_eh_region_tree_label (this_tf.region, this_tf.eh_label);
      honor_protect_cleanup_actions (state, &this_state, &this_tf);
    }

  /* Sort the goto queue for efficient searching later.  */
  if (this_tf.goto_queue_active > 1)
    qsort (this_tf.goto_queue, this_tf.goto_queue_active,
           sizeof (struct goto_queue_node), goto_queue_cmp);

  /* Determine how many edges (still) reach the finally block.  Or rather,
     how many destinations are reached by the finally block.  Use this to
     determine how we process the finally block itself.  */

  ndests = VEC_length (tree, this_tf.dest_array);
  ndests += this_tf.may_fallthru;
  ndests += this_tf.may_return;
  ndests += this_tf.may_throw;

  /* If the FINALLY block is not reachable, dike it out.  */
  if (ndests == 0)
    *tp = TREE_OPERAND (*tp, 0);

  /* If the finally block doesn't fall through, then any destination
     we might try to impose there isn't reached either.  There may be
     some minor amount of cleanup and redirection still needed.  */
  else if (!block_may_fallthru (TREE_OPERAND (*tp, 1)))
    lower_try_finally_nofallthru (state, &this_tf);

  /* We can easily special-case redirection to a single destination.  */
  else if (ndests == 1)
    lower_try_finally_onedest (state, &this_tf);

  else if (decide_copy_try_finally (ndests, TREE_OPERAND (*tp, 1)))
    lower_try_finally_copy (state, &this_tf);
  else
    lower_try_finally_switch (state, &this_tf);

  /* If someone requested we add a label at the end of the transformed
     block, do so.  */
  if (this_tf.fallthru_label)
    {
      tree x = build1 (LABEL_EXPR, void_type_node, this_tf.fallthru_label);
      append_to_statement_list (x, tp);
    }

  VEC_free (tree, heap, this_tf.dest_array);
  if (this_tf.goto_queue)
    free (this_tf.goto_queue);
}

/* A subroutine of lower_eh_constructs_1.  Lower a TRY_CATCH_EXPR with a
   list of CATCH_EXPR nodes to a sequence of labels and blocks, plus the
   exception region trees that record all the magic.  */

static void
lower_catch (struct leh_state *state, tree *tp)
{
  struct eh_region *try_region;
  struct leh_state this_state;
  tree_stmt_iterator i;
  tree out_label;

  try_region = gen_eh_region_try (state->cur_region);
  this_state.cur_region = try_region;
  this_state.prev_try = try_region;
  this_state.tf = state->tf;

  lower_eh_constructs_1 (&this_state, &TREE_OPERAND (*tp, 0));

  if (!get_eh_region_may_contain_throw (try_region))
    {
      *tp = TREE_OPERAND (*tp, 0);
      return;
    }

  out_label = NULL;
  for (i = tsi_start (TREE_OPERAND (*tp, 1)); !tsi_end_p (i); )
    {
      struct eh_region *catch_region;
      tree catch, x, eh_label;

      catch = tsi_stmt (i);
      catch_region = gen_eh_region_catch (try_region, CATCH_TYPES (catch));

      this_state.cur_region = catch_region;
      this_state.prev_try = state->prev_try;
      lower_eh_constructs_1 (&this_state, &CATCH_BODY (catch));

      eh_label = create_artificial_label ();
      set_eh_region_tree_label (catch_region, eh_label);

      x = build1 (LABEL_EXPR, void_type_node, eh_label);
      tsi_link_before (&i, x, TSI_SAME_STMT);

      if (block_may_fallthru (CATCH_BODY (catch)))
        {
          if (!out_label)
            out_label = create_artificial_label ();

          x = build1 (GOTO_EXPR, void_type_node, out_label);
          append_to_statement_list (x, &CATCH_BODY (catch));
        }

      tsi_link_before (&i, CATCH_BODY (catch), TSI_SAME_STMT);
      tsi_delink (&i);
    }

  frob_into_branch_around (tp, NULL, out_label);
}
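
/* So, assuming the try block may throw and each handler body may fall
   through, the lowered form looks like

     <try body>
     goto over;
     <eh label 1>:  <handler 1 body>  goto over;
     <eh label 2>:  <handler 2 body>  goto over;
     over:

   with one catch EH region per handler, labeled by its eh_label.  */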

/* A subroutine of lower_eh_constructs_1.  Lower a TRY_CATCH_EXPR with an
   EH_FILTER_EXPR to a sequence of labels and blocks, plus the exception
   region trees that record all the magic.  */

static void
lower_eh_filter (struct leh_state *state, tree *tp)
{
  struct leh_state this_state;
  struct eh_region *this_region;
  tree inner = expr_first (TREE_OPERAND (*tp, 1));
  tree eh_label;

  if (EH_FILTER_MUST_NOT_THROW (inner))
    this_region = gen_eh_region_must_not_throw (state->cur_region);
  else
    this_region = gen_eh_region_allowed (state->cur_region,
                                         EH_FILTER_TYPES (inner));
  this_state = *state;
  this_state.cur_region = this_region;
  /* For a must-not-throw region, any cleanup regions inside it
     can't reach outer catch regions.  */
  if (EH_FILTER_MUST_NOT_THROW (inner))
    this_state.prev_try = NULL;

  lower_eh_constructs_1 (&this_state, &TREE_OPERAND (*tp, 0));

  if (!get_eh_region_may_contain_throw (this_region))
    {
      *tp = TREE_OPERAND (*tp, 0);
      return;
    }

  lower_eh_constructs_1 (state, &EH_FILTER_FAILURE (inner));
  TREE_OPERAND (*tp, 1) = EH_FILTER_FAILURE (inner);

  eh_label = create_artificial_label ();
  set_eh_region_tree_label (this_region, eh_label);

  frob_into_branch_around (tp, eh_label, NULL);
}

/* Implement a cleanup expression.  This is similar to try-finally,
   except that we only execute the cleanup block for exception edges.  */

static void
lower_cleanup (struct leh_state *state, tree *tp)
{
  struct leh_state this_state;
  struct eh_region *this_region;
  struct leh_tf_state fake_tf;

  /* If not using eh, then exception-only cleanups are no-ops.  */
  if (!flag_exceptions)
    {
      *tp = TREE_OPERAND (*tp, 0);
      lower_eh_constructs_1 (state, tp);
      return;
    }

  this_region = gen_eh_region_cleanup (state->cur_region, state->prev_try);
  this_state = *state;
  this_state.cur_region = this_region;

  lower_eh_constructs_1 (&this_state, &TREE_OPERAND (*tp, 0));

  if (!get_eh_region_may_contain_throw (this_region))
    {
      *tp = TREE_OPERAND (*tp, 0);
      return;
    }

  /* Build enough of a try-finally state so that we can reuse
     honor_protect_cleanup_actions.  */
  memset (&fake_tf, 0, sizeof (fake_tf));
  fake_tf.top_p = tp;
  fake_tf.outer = state;
  fake_tf.region = this_region;
  fake_tf.may_fallthru = block_may_fallthru (TREE_OPERAND (*tp, 0));
  fake_tf.may_throw = true;

  fake_tf.eh_label = create_artificial_label ();
  set_eh_region_tree_label (this_region, fake_tf.eh_label);

  honor_protect_cleanup_actions (state, NULL, &fake_tf);

  if (fake_tf.may_throw)
    {
      /* In this case honor_protect_cleanup_actions had nothing to do,
         and we should process this normally.  */
      lower_eh_constructs_1 (state, &TREE_OPERAND (*tp, 1));
      frob_into_branch_around (tp, fake_tf.eh_label, fake_tf.fallthru_label);
    }
  else
    {
      /* In this case honor_protect_cleanup_actions did nearly all of
         the work.  All we have left is to append the fallthru_label.  */

      *tp = TREE_OPERAND (*tp, 0);
      if (fake_tf.fallthru_label)
        {
          tree x = build1 (LABEL_EXPR, void_type_node, fake_tf.fallthru_label);
          append_to_statement_list (x, tp);
        }
    }
}

/* Main loop for lowering eh constructs.  */

static void
lower_eh_constructs_1 (struct leh_state *state, tree *tp)
{
  tree_stmt_iterator i;
  tree t = *tp;

  switch (TREE_CODE (t))
    {
    case COND_EXPR:
      lower_eh_constructs_1 (state, &COND_EXPR_THEN (t));
      lower_eh_constructs_1 (state, &COND_EXPR_ELSE (t));
      break;

    case CALL_EXPR:
      /* Look for things that can throw exceptions, and record them.  */
      if (state->cur_region && tree_could_throw_p (t))
        {
          record_stmt_eh_region (state->cur_region, t);
          note_eh_region_may_contain_throw (state->cur_region);
        }
      break;

    case MODIFY_EXPR:
      /* Look for things that can throw exceptions, and record them.  */
      if (state->cur_region && tree_could_throw_p (t))
        {
          record_stmt_eh_region (state->cur_region, t);
          note_eh_region_may_contain_throw (state->cur_region);
        }
      break;

    case GOTO_EXPR:
    case RETURN_EXPR:
      maybe_record_in_goto_queue (state, t);
      break;
    case SWITCH_EXPR:
      verify_norecord_switch_expr (state, t);
      break;

    case TRY_FINALLY_EXPR:
      lower_try_finally (state, tp);
      break;

    case TRY_CATCH_EXPR:
      i = tsi_start (TREE_OPERAND (t, 1));
      switch (TREE_CODE (tsi_stmt (i)))
        {
        case CATCH_EXPR:
          lower_catch (state, tp);
          break;
        case EH_FILTER_EXPR:
          lower_eh_filter (state, tp);
          break;
        default:
          lower_cleanup (state, tp);
          break;
        }
      break;

    case STATEMENT_LIST:
      for (i = tsi_start (t); !tsi_end_p (i); )
        {
          lower_eh_constructs_1 (state, tsi_stmt_ptr (i));
          t = tsi_stmt (i);
          if (TREE_CODE (t) == STATEMENT_LIST)
            {
              tsi_link_before (&i, t, TSI_SAME_STMT);
              tsi_delink (&i);
            }
          else
            tsi_next (&i);
        }
      break;

    default:
      /* A type, a decl, or some kind of statement that we're not
         interested in.  Don't walk them.  */
      break;
    }
}
1676
 
1677
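/* Note on the STATEMENT_LIST case above: lowering a construct such as
   TRY_FINALLY_EXPR can leave a whole STATEMENT_LIST in place of the
   original statement.  Its contents were already lowered by the
   recursive call, so the list is simply spliced inline and iteration
   resumes after it, keeping statement lists flat.  */
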
static unsigned int
lower_eh_constructs (void)
{
  struct leh_state null_state;
  tree *tp = &DECL_SAVED_TREE (current_function_decl);

  finally_tree = htab_create (31, struct_ptr_hash, struct_ptr_eq, free);

  collect_finally_tree (*tp, NULL);

  memset (&null_state, 0, sizeof (null_state));
  lower_eh_constructs_1 (&null_state, tp);

  htab_delete (finally_tree);

  collect_eh_region_array ();
  return 0;
}

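/* Note: finally_tree lives only for the duration of this pass.  It
   records, roughly, which TRY_FINALLY_EXPR directly contains each
   statement (as built by collect_finally_tree), which is what lets the
   goto queue redirect transfers of control out of try blocks.  */
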
struct tree_opt_pass pass_lower_eh =
{
  "eh",                                 /* name */
  NULL,                                 /* gate */
  lower_eh_constructs,                  /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_TREE_EH,                           /* tv_id */
  PROP_gimple_lcf,                      /* properties_required */
  PROP_gimple_leh,                      /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func,                       /* todo_flags_finish */
  0                                     /* letter */
};


/* Construct EH edges for STMT.  */

static void
make_eh_edge (struct eh_region *region, void *data)
{
  tree stmt, lab;
  basic_block src, dst;

  stmt = (tree) data;
  lab = get_eh_region_tree_label (region);

  src = bb_for_stmt (stmt);
  dst = label_to_block (lab);

  make_edge (src, dst, EDGE_ABNORMAL | EDGE_EH);
}

void
make_eh_edges (tree stmt)
{
  int region_nr;
  bool is_resx;

  if (TREE_CODE (stmt) == RESX_EXPR)
    {
      region_nr = TREE_INT_CST_LOW (TREE_OPERAND (stmt, 0));
      is_resx = true;
    }
  else
    {
      region_nr = lookup_stmt_eh_region (stmt);
      if (region_nr < 0)
        return;
      is_resx = false;
    }

  foreach_reachable_handler (region_nr, is_resx, make_eh_edge, stmt);
}

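/* Typical use (a sketch; the real caller is the CFG builder in
   tree-cfg.c): when constructing edges for a basic block whose last
   statement may throw,

       if (tree_can_throw_internal (last))
         make_eh_edges (last);

   adds an EDGE_ABNORMAL | EDGE_EH edge to each reachable handler.  */
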
static bool mark_eh_edge_found_error;

/* Mark the edge that make_eh_edge would create for the given region by
   setting its aux field; output an error if something goes wrong.  */
static void
mark_eh_edge (struct eh_region *region, void *data)
{
  tree stmt, lab;
  basic_block src, dst;
  edge e;

  stmt = (tree) data;
  lab = get_eh_region_tree_label (region);

  src = bb_for_stmt (stmt);
  dst = label_to_block (lab);

  e = find_edge (src, dst);
  if (!e)
    {
      error ("EH edge %i->%i is missing", src->index, dst->index);
      mark_eh_edge_found_error = true;
    }
  else if (!(e->flags & EDGE_EH))
    {
      error ("EH edge %i->%i is missing its EH flag", src->index, dst->index);
      mark_eh_edge_found_error = true;
    }
  else if (e->aux)
    {
      /* ??? This might not be a mistake.  */
      error ("EH edge %i->%i has duplicated regions", src->index, dst->index);
      mark_eh_edge_found_error = true;
    }
  else
    e->aux = (void *)1;
}

/* Verify that the BB containing STMT as its last statement has exactly
   the edges that make_eh_edges would create.  */
bool
verify_eh_edges (tree stmt)
{
  int region_nr;
  bool is_resx;
  basic_block bb = bb_for_stmt (stmt);
  edge_iterator ei;
  edge e;

  FOR_EACH_EDGE (e, ei, bb->succs)
    gcc_assert (!e->aux);
  mark_eh_edge_found_error = false;
  if (TREE_CODE (stmt) == RESX_EXPR)
    {
      region_nr = TREE_INT_CST_LOW (TREE_OPERAND (stmt, 0));
      is_resx = true;
    }
  else
    {
      region_nr = lookup_stmt_eh_region (stmt);
      if (region_nr < 0)
        {
          FOR_EACH_EDGE (e, ei, bb->succs)
            if (e->flags & EDGE_EH)
              {
                error ("BB %i cannot throw but has EH edges", bb->index);
                return true;
              }
          return false;
        }
      if (!tree_could_throw_p (stmt))
        {
          error ("BB %i last statement has incorrectly set region", bb->index);
          return true;
        }
      is_resx = false;
    }

  foreach_reachable_handler (region_nr, is_resx, mark_eh_edge, stmt);
  FOR_EACH_EDGE (e, ei, bb->succs)
    {
      if ((e->flags & EDGE_EH) && !e->aux)
        {
          error ("unnecessary EH edge %i->%i", bb->index, e->dest->index);
          mark_eh_edge_found_error = true;
          return true;
        }
      e->aux = NULL;
    }
  return mark_eh_edge_found_error;
}


/* Return true if the expr can trap, as in dereferencing an invalid pointer
   location or floating point arithmetic.  Cf. the rtl version, may_trap_p.
   This routine expects only GIMPLE lhs or rhs input.  */

bool
tree_could_trap_p (tree expr)
{
  enum tree_code code = TREE_CODE (expr);
  bool honor_nans = false;
  bool honor_snans = false;
  bool fp_operation = false;
  bool honor_trapv = false;
  tree t, base;

  if (TREE_CODE_CLASS (code) == tcc_comparison
      || TREE_CODE_CLASS (code) == tcc_unary
      || TREE_CODE_CLASS (code) == tcc_binary)
    {
      t = TREE_TYPE (expr);
      fp_operation = FLOAT_TYPE_P (t);
      if (fp_operation)
        {
          honor_nans = flag_trapping_math && !flag_finite_math_only;
          honor_snans = flag_signaling_nans != 0;
        }
      else if (INTEGRAL_TYPE_P (t) && TYPE_OVERFLOW_TRAPS (t))
        honor_trapv = true;
    }

 restart:
  switch (code)
    {
    case TARGET_MEM_REF:
      /* For TARGET_MEM_REFs use the information based on the original
         reference.  */
      expr = TMR_ORIGINAL (expr);
      code = TREE_CODE (expr);
      goto restart;

    case COMPONENT_REF:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case BIT_FIELD_REF:
    case VIEW_CONVERT_EXPR:
    case WITH_SIZE_EXPR:
      expr = TREE_OPERAND (expr, 0);
      code = TREE_CODE (expr);
      goto restart;

    case ARRAY_RANGE_REF:
      base = TREE_OPERAND (expr, 0);
      if (tree_could_trap_p (base))
        return true;

      if (TREE_THIS_NOTRAP (expr))
        return false;

      return !range_in_array_bounds_p (expr);

    case ARRAY_REF:
      base = TREE_OPERAND (expr, 0);
      if (tree_could_trap_p (base))
        return true;

      if (TREE_THIS_NOTRAP (expr))
        return false;

      return !in_array_bounds_p (expr);

    case INDIRECT_REF:
    case ALIGN_INDIRECT_REF:
    case MISALIGNED_INDIRECT_REF:
      return !TREE_THIS_NOTRAP (expr);

    case ASM_EXPR:
      return TREE_THIS_VOLATILE (expr);

    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case TRUNC_MOD_EXPR:
    case RDIV_EXPR:
      if (honor_snans || honor_trapv)
        return true;
      if (fp_operation)
        return flag_trapping_math;
      t = TREE_OPERAND (expr, 1);
      if (!TREE_CONSTANT (t) || integer_zerop (t))
        return true;
      return false;

    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case LTGT_EXPR:
      /* Some floating point comparisons may trap.  */
      return honor_nans;

    case EQ_EXPR:
    case NE_EXPR:
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
      return honor_snans;

    case CONVERT_EXPR:
    case FIX_TRUNC_EXPR:
    case FIX_CEIL_EXPR:
    case FIX_FLOOR_EXPR:
    case FIX_ROUND_EXPR:
      /* Conversion of floating point might trap.  */
      return honor_nans;

    case NEGATE_EXPR:
    case ABS_EXPR:
    case CONJ_EXPR:
      /* These operations don't trap with floating point.  */
      if (honor_trapv)
        return true;
      return false;

    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
      /* Any floating arithmetic may trap.  */
      if (fp_operation && flag_trapping_math)
        return true;
      if (honor_trapv)
        return true;
      return false;

    case CALL_EXPR:
      t = get_callee_fndecl (expr);
      /* Assume that calls to weak functions may trap.  */
      if (!t || !DECL_P (t) || DECL_WEAK (t))
        return true;
      return false;

    default:
      /* Any floating arithmetic may trap.  */
      if (fp_operation && flag_trapping_math)
        return true;
      return false;
    }
}

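/* Examples (illustrative, not exhaustive): tree_could_trap_p returns
   true for *p unless TREE_THIS_NOTRAP is set, for a[i] when the index
   is not provably in bounds, and for x / y when the divisor is not a
   nonzero constant; it returns false for integer x + y without -ftrapv
   and for floating comparisons when NaNs need not be honored.  */
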
bool
tree_could_throw_p (tree t)
{
  if (!flag_exceptions)
    return false;
  if (TREE_CODE (t) == MODIFY_EXPR)
    {
      if (flag_non_call_exceptions
          && tree_could_trap_p (TREE_OPERAND (t, 0)))
        return true;
      t = TREE_OPERAND (t, 1);
    }

  if (TREE_CODE (t) == WITH_SIZE_EXPR)
    t = TREE_OPERAND (t, 0);
  if (TREE_CODE (t) == CALL_EXPR)
    return (call_expr_flags (t) & ECF_NOTHROW) == 0;
  if (flag_non_call_exceptions)
    return tree_could_trap_p (t);
  return false;
}

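/* Examples (illustrative): a CALL_EXPR could throw unless the callee is
   known to be ECF_NOTHROW; with -fnon-call-exceptions, a MODIFY_EXPR
   such as x = *p could also throw because the load may trap.  With
   exceptions disabled entirely, nothing could throw.  */
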
bool
tree_can_throw_internal (tree stmt)
{
  int region_nr;
  bool is_resx = false;

  if (TREE_CODE (stmt) == RESX_EXPR)
    {
      region_nr = TREE_INT_CST_LOW (TREE_OPERAND (stmt, 0));
      is_resx = true;
    }
  else
    region_nr = lookup_stmt_eh_region (stmt);
  if (region_nr < 0)
    return false;
  return can_throw_internal_1 (region_nr, is_resx);
}

bool
tree_can_throw_external (tree stmt)
{
  int region_nr;
  bool is_resx = false;

  if (TREE_CODE (stmt) == RESX_EXPR)
    {
      region_nr = TREE_INT_CST_LOW (TREE_OPERAND (stmt, 0));
      is_resx = true;
    }
  else
    region_nr = lookup_stmt_eh_region (stmt);
  if (region_nr < 0)
    return tree_could_throw_p (stmt);
  else
    return can_throw_external_1 (region_nr, is_resx);
}

/* Given a statement OLD_STMT and a new statement NEW_STMT that has replaced
   OLD_STMT in the function, remove OLD_STMT from the EH table and put NEW_STMT
   in the table if it should be in there.  Return TRUE if a replacement was
   done that may require an EH edge purge.  */

bool
maybe_clean_or_replace_eh_stmt (tree old_stmt, tree new_stmt)
{
  int region_nr = lookup_stmt_eh_region (old_stmt);

  if (region_nr >= 0)
    {
      bool new_stmt_could_throw = tree_could_throw_p (new_stmt);

      if (new_stmt == old_stmt && new_stmt_could_throw)
        return false;

      remove_stmt_from_eh_region (old_stmt);
      if (new_stmt_could_throw)
        {
          add_stmt_to_eh_region (new_stmt, region_nr);
          return false;
        }
      else
        return true;
    }

  return false;
}

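/* Typical use (a sketch; real callers live in the folding and
   propagation passes): after possibly rewriting a statement,

       if (maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt))
         ... purge now-dead EH edges from the statement's block ...

   since a TRUE return means the new statement can no longer throw.  */
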
#ifdef ENABLE_CHECKING
static int
verify_eh_throw_stmt_node (void **slot, void *data ATTRIBUTE_UNUSED)
{
  struct throw_stmt_node *node = (struct throw_stmt_node *)*slot;

  gcc_assert (node->stmt->common.ann == NULL);
  return 1;
}

void
verify_eh_throw_table_statements (void)
{
  if (!get_eh_throw_stmt_table (cfun))
    return;
  htab_traverse (get_eh_throw_stmt_table (cfun),
                 verify_eh_throw_stmt_node,
                 NULL);
}

#endif
