OpenCores Subversion repository openrisc_2011-10-31
URL https://opencores.org/ocsvn/openrisc_2011-10-31/openrisc_2011-10-31/trunk
openrisc/trunk/gnu-src/gcc-4.5.1/gcc/tree-eh.c (rev 280, jeremybenn)
/* Exception handling semantics and decomposition for trees.
   Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "tm_p.h"
#include "flags.h"
#include "function.h"
#include "except.h"
#include "pointer-set.h"
#include "tree-flow.h"
#include "tree-dump.h"
#include "tree-inline.h"
#include "tree-iterator.h"
#include "tree-pass.h"
#include "timevar.h"
#include "langhooks.h"
#include "ggc.h"
#include "toplev.h"
#include "gimple.h"
#include "target.h"

/* In some instances a tree and a gimple need to be stored in the same
   table, i.e. in hash tables.  This union makes that possible.  */
typedef union {tree *tp; tree t; gimple g;} treemple;
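
/* Usage sketch (illustrative; mirrors collect_finally_tree below): the
   active member is implied by context rather than tagged, e.g.

       treemple temp;
       temp.t = gimple_label_label (stmt);
       record_in_finally_tree (temp, region);

   stores a LABEL_DECL through the tree member of the union.  */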

/* Nonzero if we are using EH to handle cleanups.  */
static int using_eh_for_cleanups_p = 0;

void
using_eh_for_cleanups (void)
{
  using_eh_for_cleanups_p = 1;
}

/* Misc functions used in this file.  */

/* Compare and hash for any structure which begins with a canonical
   pointer.  Assumes all pointers are interchangeable, which is sort
   of already assumed by gcc elsewhere IIRC.  */

static int
struct_ptr_eq (const void *a, const void *b)
{
  const void * const * x = (const void * const *) a;
  const void * const * y = (const void * const *) b;
  return *x == *y;
}

static hashval_t
struct_ptr_hash (const void *a)
{
  const void * const * x = (const void * const *) a;
  return (size_t)*x >> 4;
}
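
/* The shift by 4 (this rationale is a gloss, not from the sources)
   discards the low-order bits of the pointer, which are almost always
   zero because allocated structures are aligned; without it, most
   pointers would land in a handful of hash buckets.  */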


/* Remember and lookup EH landing pad data for arbitrary statements.
   Really this means any statement that could_throw_p.  We could
   stuff this information into the stmt_ann data structure, but:

   (1) We absolutely rely on this information being kept until
   we get to rtl.  Once we're done with lowering here, if we lose
   the information there's no way to recover it!

   (2) There are many more statements that *cannot* throw as
   compared to those that can.  We should be saving some amount
   of space by only allocating memory for those that can throw.  */

/* Add statement T in function IFUN to landing pad NUM.  */

void
add_stmt_to_eh_lp_fn (struct function *ifun, gimple t, int num)
{
  struct throw_stmt_node *n;
  void **slot;

  gcc_assert (num != 0);

  n = GGC_NEW (struct throw_stmt_node);
  n->stmt = t;
  n->lp_nr = num;

  if (!get_eh_throw_stmt_table (ifun))
    set_eh_throw_stmt_table (ifun, htab_create_ggc (31, struct_ptr_hash,
                                                    struct_ptr_eq,
                                                    ggc_free));

  slot = htab_find_slot (get_eh_throw_stmt_table (ifun), n, INSERT);
  gcc_assert (!*slot);
  *slot = n;
}

/* Add statement T in the current function (cfun) to EH landing pad NUM.  */

void
add_stmt_to_eh_lp (gimple t, int num)
{
  add_stmt_to_eh_lp_fn (cfun, t, num);
}

/* Add statement T to the single EH landing pad in REGION.  */

static void
record_stmt_eh_region (eh_region region, gimple t)
{
  if (region == NULL)
    return;
  if (region->type == ERT_MUST_NOT_THROW)
    add_stmt_to_eh_lp_fn (cfun, t, -region->index);
  else
    {
      eh_landing_pad lp = region->landing_pads;
      if (lp == NULL)
        lp = gen_eh_landing_pad (region);
      else
        gcc_assert (lp->next_lp == NULL);
      add_stmt_to_eh_lp_fn (cfun, t, lp->index);
    }
}


/* Remove statement T in function IFUN from its EH landing pad.  */

bool
remove_stmt_from_eh_lp_fn (struct function *ifun, gimple t)
{
  struct throw_stmt_node dummy;
  void **slot;

  if (!get_eh_throw_stmt_table (ifun))
    return false;

  dummy.stmt = t;
  slot = htab_find_slot (get_eh_throw_stmt_table (ifun), &dummy,
                        NO_INSERT);
  if (slot)
    {
      htab_clear_slot (get_eh_throw_stmt_table (ifun), slot);
      return true;
    }
  else
    return false;
}


/* Remove statement T in the current function (cfun) from its
   EH landing pad.  */

bool
remove_stmt_from_eh_lp (gimple t)
{
  return remove_stmt_from_eh_lp_fn (cfun, t);
}

/* Determine if statement T is inside an EH region in function IFUN.
   Positive numbers indicate a landing pad index; negative numbers
   indicate a MUST_NOT_THROW region index; zero indicates that the
   statement is not recorded in the region table.  */
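
/* For instance (illustrative): a result of 2 means the statement can
   throw to landing pad 2, -3 means it lies in MUST_NOT_THROW region 3
   (see record_stmt_eh_region above), and 0 means it either cannot
   throw or was never recorded.  */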

int
lookup_stmt_eh_lp_fn (struct function *ifun, gimple t)
{
  struct throw_stmt_node *p, n;

  if (ifun->eh->throw_stmt_table == NULL)
    return 0;

  n.stmt = t;
  p = (struct throw_stmt_node *) htab_find (ifun->eh->throw_stmt_table, &n);
  return p ? p->lp_nr : 0;
}

/* Likewise, but always use the current function.  */

int
lookup_stmt_eh_lp (gimple t)
{
  /* We can get called from initialized data when -fnon-call-exceptions
     is on; prevent crash.  */
  if (!cfun)
    return 0;
  return lookup_stmt_eh_lp_fn (cfun, t);
}

/* First pass of EH node decomposition.  Build up a tree of GIMPLE_TRY_FINALLY
   nodes and LABEL_DECL nodes.  We will use this during the second phase to
   determine if a goto leaves the body of a TRY_FINALLY_EXPR node.  */

struct finally_tree_node
{
  /* When storing a GIMPLE_TRY, we have to record a gimple.  However
     when deciding whether a GOTO to a certain LABEL_DECL (which is a
     tree) leaves the TRY block, it's necessary to record a tree in
     this field.  Thus a treemple is used.  */
  treemple child;
  gimple parent;
};

/* Note that this table is *not* marked GTY.  It is short-lived.  */
static htab_t finally_tree;

static void
record_in_finally_tree (treemple child, gimple parent)
{
  struct finally_tree_node *n;
  void **slot;

  n = XNEW (struct finally_tree_node);
  n->child = child;
  n->parent = parent;

  slot = htab_find_slot (finally_tree, n, INSERT);
  gcc_assert (!*slot);
  *slot = n;
}

static void
collect_finally_tree (gimple stmt, gimple region);

/* Go through the gimple sequence.  Works with collect_finally_tree to
   record all GIMPLE_LABEL and GIMPLE_TRY statements.  */

static void
collect_finally_tree_1 (gimple_seq seq, gimple region)
{
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start (seq); !gsi_end_p (gsi); gsi_next (&gsi))
    collect_finally_tree (gsi_stmt (gsi), region);
}

static void
collect_finally_tree (gimple stmt, gimple region)
{
  treemple temp;

  switch (gimple_code (stmt))
    {
    case GIMPLE_LABEL:
      temp.t = gimple_label_label (stmt);
      record_in_finally_tree (temp, region);
      break;

    case GIMPLE_TRY:
      if (gimple_try_kind (stmt) == GIMPLE_TRY_FINALLY)
        {
          temp.g = stmt;
          record_in_finally_tree (temp, region);
          collect_finally_tree_1 (gimple_try_eval (stmt), stmt);
          collect_finally_tree_1 (gimple_try_cleanup (stmt), region);
        }
      else if (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH)
        {
          collect_finally_tree_1 (gimple_try_eval (stmt), region);
          collect_finally_tree_1 (gimple_try_cleanup (stmt), region);
        }
      break;

    case GIMPLE_CATCH:
      collect_finally_tree_1 (gimple_catch_handler (stmt), region);
      break;

    case GIMPLE_EH_FILTER:
      collect_finally_tree_1 (gimple_eh_filter_failure (stmt), region);
      break;

    default:
      /* A type, a decl, or some kind of statement that we're not
         interested in.  Don't walk them.  */
      break;
    }
}


/* Use the finally tree to determine if a jump from START to TARGET
   would leave the try_finally node that START lives in.  */

static bool
outside_finally_tree (treemple start, gimple target)
{
  struct finally_tree_node n, *p;

  do
    {
      n.child = start;
      p = (struct finally_tree_node *) htab_find (finally_tree, &n);
      if (!p)
        return true;
      start.g = p->parent;
    }
  while (start.g != target);

  return false;
}
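
/* For example (an illustrative sketch, not from the sources), given

       try { try { goto out; } finally { f1 (); } } finally { f2 (); }
     out:;

   the label "out" has no recorded parent inside either GIMPLE_TRY, so
   the walk runs out of finally_tree entries before reaching the TRY
   given as TARGET and returns true for both nests: the goto escapes
   both cleanups and each goto queue must reroute it.  */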

/* Second pass of EH node decomposition.  Actually transform the GIMPLE_TRY
   nodes into a set of gotos, magic labels, and eh regions.
   The eh region creation is straightforward, but frobbing all the gotos
   and such into shape isn't.  */

/* The sequence into which we record all EH stuff.  This will be
   placed at the end of the function when we're all done.  */
static gimple_seq eh_seq;

/* Record whether an EH region contains something that can throw,
   indexed by EH region number.  */
static bitmap eh_region_may_contain_throw_map;

/* The GOTO_QUEUE is an array of GIMPLE_GOTO and GIMPLE_RETURN
   statements that are seen to escape this GIMPLE_TRY_FINALLY node.
   The idea is to record a gimple statement for everything except for
   the conditionals, which get their labels recorded.  Since labels are
   of type 'tree', we need this node to store both gimple and tree
   objects.  REPL_STMT is the sequence used to replace the goto/return
   statement.  CONT_STMT is used to store the statement that allows
   the return/goto to jump to the original destination.  */

struct goto_queue_node
{
  treemple stmt;
  gimple_seq repl_stmt;
  gimple cont_stmt;
  int index;
  /* This is used when index >= 0 to indicate that stmt is a label (as
     opposed to a goto stmt).  */
  int is_label;
};
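
/* Illustrative example (a gloss, not from the sources): in

       try { if (p) goto out; return; } finally { f (); }
     out:;

   both jumps escape the TRY_FINALLY, so each gets a goto_queue_node.
   The conditional goto is recorded through the GIMPLE_COND's label
   slot (a tree, hence is_label and index >= 0 into dest_array), while
   the return is recorded as a gimple statement with index -1.  */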

/* State of the world while lowering.  */

struct leh_state
{
  /* What's "current" while constructing the eh region tree.  These
     correspond to variables of the same name in cfun->eh, which we
     don't have easy access to.  */
  eh_region cur_region;

  /* What's "current" for the purposes of __builtin_eh_pointer.  For
     a CATCH, this is the associated TRY.  For an EH_FILTER, this is
     the associated ALLOWED_EXCEPTIONS, etc.  */
  eh_region ehp_region;

  /* Processing of TRY_FINALLY requires a bit more state.  This is
     split out into a separate structure so that we don't have to
     copy so much when processing other nodes.  */
  struct leh_tf_state *tf;
};

struct leh_tf_state
{
  /* Pointer to the GIMPLE_TRY_FINALLY node under discussion.  The
     try_finally_expr is the original GIMPLE_TRY_FINALLY.  We need to retain
     this so that outside_finally_tree can reliably reference the tree used
     in the collect_finally_tree data structures.  */
  gimple try_finally_expr;
  gimple top_p;

  /* While lowering, top_p is usually expanded into multiple statements;
     the following field stores them.  */
  gimple_seq top_p_seq;

  /* The state outside this try_finally node.  */
  struct leh_state *outer;

  /* The exception region created for it.  */
  eh_region region;

  /* The goto queue.  */
  struct goto_queue_node *goto_queue;
  size_t goto_queue_size;
  size_t goto_queue_active;

  /* Pointer map to help in searching goto_queue when it is large.  */
  struct pointer_map_t *goto_queue_map;

  /* The set of unique labels seen as entries in the goto queue.  */
  VEC(tree,heap) *dest_array;

  /* A label to be added at the end of the completed transformed
     sequence.  It will be set if may_fallthru was true *at one time*,
     though subsequent transformations may have cleared that flag.  */
  tree fallthru_label;

  /* True if it is possible to fall out the bottom of the try block.
     Cleared if the fallthru is converted to a goto.  */
  bool may_fallthru;

  /* True if any entry in goto_queue is a GIMPLE_RETURN.  */
  bool may_return;

  /* True if the finally block can receive an exception edge.
     Cleared if the exception case is handled by code duplication.  */
  bool may_throw;
};

static gimple_seq lower_eh_must_not_throw (struct leh_state *, gimple);

/* Search for STMT in the goto queue.  Return the replacement,
   or null if the statement isn't in the queue.  */

#define LARGE_GOTO_QUEUE 20

static void lower_eh_constructs_1 (struct leh_state *state, gimple_seq seq);

static gimple_seq
find_goto_replacement (struct leh_tf_state *tf, treemple stmt)
{
  unsigned int i;
  void **slot;

  if (tf->goto_queue_active < LARGE_GOTO_QUEUE)
    {
      for (i = 0; i < tf->goto_queue_active; i++)
        if (tf->goto_queue[i].stmt.g == stmt.g)
          return tf->goto_queue[i].repl_stmt;
      return NULL;
    }

  /* If we have a large number of entries in the goto_queue, create a
     pointer map and use that for searching.  */

  if (!tf->goto_queue_map)
    {
      tf->goto_queue_map = pointer_map_create ();
      for (i = 0; i < tf->goto_queue_active; i++)
        {
          slot = pointer_map_insert (tf->goto_queue_map,
                                     tf->goto_queue[i].stmt.g);
          gcc_assert (*slot == NULL);
          *slot = &tf->goto_queue[i];
        }
    }

  slot = pointer_map_contains (tf->goto_queue_map, stmt.g);
  if (slot != NULL)
    return (((struct goto_queue_node *) *slot)->repl_stmt);

  return NULL;
}

/* A subroutine of replace_goto_queue_1.  Handles the sub-clauses of a
   lowered GIMPLE_COND.  If, by chance, the replacement is a simple goto,
   then we can just splat it in, otherwise we add the new stmts immediately
   after the GIMPLE_COND and redirect.  */

static void
replace_goto_queue_cond_clause (tree *tp, struct leh_tf_state *tf,
                                gimple_stmt_iterator *gsi)
{
  tree label;
  gimple_seq new_seq;
  treemple temp;
  location_t loc = gimple_location (gsi_stmt (*gsi));

  temp.tp = tp;
  new_seq = find_goto_replacement (tf, temp);
  if (!new_seq)
    return;

  if (gimple_seq_singleton_p (new_seq)
      && gimple_code (gimple_seq_first_stmt (new_seq)) == GIMPLE_GOTO)
    {
      *tp = gimple_goto_dest (gimple_seq_first_stmt (new_seq));
      return;
    }

  label = create_artificial_label (loc);
  /* Set the new label for the GIMPLE_COND.  */
  *tp = label;

  gsi_insert_after (gsi, gimple_build_label (label), GSI_CONTINUE_LINKING);
  gsi_insert_seq_after (gsi, gimple_seq_copy (new_seq), GSI_CONTINUE_LINKING);
}

/* The real work of replace_goto_queue.  Returns with TSI updated to
   point to the next statement.  */

static void replace_goto_queue_stmt_list (gimple_seq, struct leh_tf_state *);

static void
replace_goto_queue_1 (gimple stmt, struct leh_tf_state *tf,
                      gimple_stmt_iterator *gsi)
{
  gimple_seq seq;
  treemple temp;
  temp.g = NULL;

  switch (gimple_code (stmt))
    {
    case GIMPLE_GOTO:
    case GIMPLE_RETURN:
      temp.g = stmt;
      seq = find_goto_replacement (tf, temp);
      if (seq)
        {
          gsi_insert_seq_before (gsi, gimple_seq_copy (seq), GSI_SAME_STMT);
          gsi_remove (gsi, false);
          return;
        }
      break;

    case GIMPLE_COND:
      replace_goto_queue_cond_clause (gimple_op_ptr (stmt, 2), tf, gsi);
      replace_goto_queue_cond_clause (gimple_op_ptr (stmt, 3), tf, gsi);
      break;

    case GIMPLE_TRY:
      replace_goto_queue_stmt_list (gimple_try_eval (stmt), tf);
      replace_goto_queue_stmt_list (gimple_try_cleanup (stmt), tf);
      break;
    case GIMPLE_CATCH:
      replace_goto_queue_stmt_list (gimple_catch_handler (stmt), tf);
      break;
    case GIMPLE_EH_FILTER:
      replace_goto_queue_stmt_list (gimple_eh_filter_failure (stmt), tf);
      break;

    default:
      /* These won't have gotos in them.  */
      break;
    }

  gsi_next (gsi);
}

/* A subroutine of replace_goto_queue.  Handles GIMPLE_SEQ.  */

static void
replace_goto_queue_stmt_list (gimple_seq seq, struct leh_tf_state *tf)
{
  gimple_stmt_iterator gsi = gsi_start (seq);

  while (!gsi_end_p (gsi))
    replace_goto_queue_1 (gsi_stmt (gsi), tf, &gsi);
}

/* Replace all goto queue members.  */

static void
replace_goto_queue (struct leh_tf_state *tf)
{
  if (tf->goto_queue_active == 0)
    return;
  replace_goto_queue_stmt_list (tf->top_p_seq, tf);
  replace_goto_queue_stmt_list (eh_seq, tf);
}

/* Add a new record to the goto queue contained in TF.  NEW_STMT is the
   data to be added; IS_LABEL indicates whether NEW_STMT is a label or
   a gimple return.  */

static void
record_in_goto_queue (struct leh_tf_state *tf,
                      treemple new_stmt,
                      int index,
                      bool is_label)
{
  size_t active, size;
  struct goto_queue_node *q;

  gcc_assert (!tf->goto_queue_map);

  active = tf->goto_queue_active;
  size = tf->goto_queue_size;
  if (active >= size)
    {
      size = (size ? size * 2 : 32);
      tf->goto_queue_size = size;
      tf->goto_queue
         = XRESIZEVEC (struct goto_queue_node, tf->goto_queue, size);
    }

  q = &tf->goto_queue[active];
  tf->goto_queue_active = active + 1;

  memset (q, 0, sizeof (*q));
  q->stmt = new_stmt;
  q->index = index;
  q->is_label = is_label;
}

/* Record the label LABEL in the goto queue contained in TF.
   TF is not null.  */

static void
record_in_goto_queue_label (struct leh_tf_state *tf, treemple stmt, tree label)
{
  int index;
  treemple temp, new_stmt;

  if (!label)
    return;

  /* Computed and non-local gotos do not get processed.  Given
     their nature we can neither tell whether we've escaped the
     finally block nor redirect them if we knew.  */
  if (TREE_CODE (label) != LABEL_DECL)
    return;

  /* No need to record gotos that don't leave the try block.  */
  temp.t = label;
  if (!outside_finally_tree (temp, tf->try_finally_expr))
    return;

  if (! tf->dest_array)
    {
      tf->dest_array = VEC_alloc (tree, heap, 10);
      VEC_quick_push (tree, tf->dest_array, label);
      index = 0;
    }
  else
    {
      int n = VEC_length (tree, tf->dest_array);
      for (index = 0; index < n; ++index)
        if (VEC_index (tree, tf->dest_array, index) == label)
          break;
      if (index == n)
        VEC_safe_push (tree, heap, tf->dest_array, label);
    }

  /* In the case of a GOTO we want to record the destination label,
     since with a GIMPLE_COND we have easy access to the then/else
     labels.  */
  new_stmt = stmt;
  record_in_goto_queue (tf, new_stmt, index, true);
}

/* For any GIMPLE_GOTO or GIMPLE_RETURN, decide whether it leaves a try_finally
   node, and if so record that fact in the goto queue associated with that
   try_finally node.  */

static void
maybe_record_in_goto_queue (struct leh_state *state, gimple stmt)
{
  struct leh_tf_state *tf = state->tf;
  treemple new_stmt;

  if (!tf)
    return;

  switch (gimple_code (stmt))
    {
    case GIMPLE_COND:
      new_stmt.tp = gimple_op_ptr (stmt, 2);
      record_in_goto_queue_label (tf, new_stmt, gimple_cond_true_label (stmt));
      new_stmt.tp = gimple_op_ptr (stmt, 3);
      record_in_goto_queue_label (tf, new_stmt, gimple_cond_false_label (stmt));
      break;
    case GIMPLE_GOTO:
      new_stmt.g = stmt;
      record_in_goto_queue_label (tf, new_stmt, gimple_goto_dest (stmt));
      break;

    case GIMPLE_RETURN:
      tf->may_return = true;
      new_stmt.g = stmt;
      record_in_goto_queue (tf, new_stmt, -1, false);
      break;

    default:
      gcc_unreachable ();
    }
}
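
/* Note (a gloss, not from the sources): for a GIMPLE_COND the queue
   records the *address* of the label operand (treemple.tp) rather than
   the statement, which is what lets replace_goto_queue_cond_clause
   overwrite the label in place when the replacement turns out to be a
   simple goto.  */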


#ifdef ENABLE_CHECKING
/* We do not process GIMPLE_SWITCHes for now.  As long as the original source
   was in fact structured, and we've not yet done jump threading, then none
   of the labels will leave outer GIMPLE_TRY_FINALLY nodes.  Verify this.  */

static void
verify_norecord_switch_expr (struct leh_state *state, gimple switch_expr)
{
  struct leh_tf_state *tf = state->tf;
  size_t i, n;

  if (!tf)
    return;

  n = gimple_switch_num_labels (switch_expr);

  for (i = 0; i < n; ++i)
    {
      treemple temp;
      tree lab = CASE_LABEL (gimple_switch_label (switch_expr, i));
      temp.t = lab;
      gcc_assert (!outside_finally_tree (temp, tf->try_finally_expr));
    }
}
#else
#define verify_norecord_switch_expr(state, switch_expr)
#endif

/* Redirect a RETURN_EXPR pointed to by STMT_P to FINLAB.  Place in CONT_P
   whatever is needed to finish the return.  If MOD is non-null, insert it
   before the new branch.  RETURN_VALUE_P is a cache containing a temporary
   variable to be used in manipulating the value returned from the function.  */

static void
do_return_redirection (struct goto_queue_node *q, tree finlab, gimple_seq mod,
                       tree *return_value_p)
{
  tree ret_expr;
  gimple x;

  /* In the case of a return, the queue node must be a gimple statement.  */
  gcc_assert (!q->is_label);

  ret_expr = gimple_return_retval (q->stmt.g);

  if (ret_expr)
    {
      if (!*return_value_p)
        *return_value_p = ret_expr;
      else
        gcc_assert (*return_value_p == ret_expr);
      q->cont_stmt = q->stmt.g;
      /* The nasty part about redirecting the return value is that the
         return value itself is to be computed before the FINALLY block
         is executed.  e.g.

                int x;
                int foo (void)
                {
                  x = 0;
                  try {
                    return x;
                  } finally {
                    x++;
                  }
                }

          should return 0, not 1.  Arrange for this to happen by copying
          the computed return value into a local temporary.  This also
          allows us to redirect multiple return statements through the
          same destination block; whether this is a net win or not really
          depends, I guess, but it does make generation of the switch in
          lower_try_finally_switch easier.  */

      if (TREE_CODE (ret_expr) == RESULT_DECL)
        {
          if (!*return_value_p)
            *return_value_p = ret_expr;
          else
            gcc_assert (*return_value_p == ret_expr);
          q->cont_stmt = q->stmt.g;
        }
      else
          gcc_unreachable ();
    }
  else
      /* If we don't return a value, all return statements are the same.  */
      q->cont_stmt = q->stmt.g;

  if (!q->repl_stmt)
    q->repl_stmt = gimple_seq_alloc ();

  if (mod)
    gimple_seq_add_seq (&q->repl_stmt, mod);

  x = gimple_build_goto (finlab);
  gimple_seq_add_stmt (&q->repl_stmt, x);
}

/* Similar, but easier, for GIMPLE_GOTO.  */

static void
do_goto_redirection (struct goto_queue_node *q, tree finlab, gimple_seq mod,
                     struct leh_tf_state *tf)
{
  gimple x;

  gcc_assert (q->is_label);
  if (!q->repl_stmt)
    q->repl_stmt = gimple_seq_alloc ();

  q->cont_stmt = gimple_build_goto (VEC_index (tree, tf->dest_array, q->index));

  if (mod)
    gimple_seq_add_seq (&q->repl_stmt, mod);

  x = gimple_build_goto (finlab);
  gimple_seq_add_stmt (&q->repl_stmt, x);
}

/* Emit a standard landing pad sequence into SEQ for REGION.  */

static void
emit_post_landing_pad (gimple_seq *seq, eh_region region)
{
  eh_landing_pad lp = region->landing_pads;
  gimple x;

  if (lp == NULL)
    lp = gen_eh_landing_pad (region);

  lp->post_landing_pad = create_artificial_label (UNKNOWN_LOCATION);
  EH_LANDING_PAD_NR (lp->post_landing_pad) = lp->index;

  x = gimple_build_label (lp->post_landing_pad);
  gimple_seq_add_stmt (seq, x);
}

/* Emit a RESX statement into SEQ for REGION.  */

static void
emit_resx (gimple_seq *seq, eh_region region)
{
  gimple x = gimple_build_resx (region->index);
  gimple_seq_add_stmt (seq, x);
  if (region->outer)
    record_stmt_eh_region (region->outer, x);
}

/* Emit an EH_DISPATCH statement into SEQ for REGION.  */

static void
emit_eh_dispatch (gimple_seq *seq, eh_region region)
{
  gimple x = gimple_build_eh_dispatch (region->index);
  gimple_seq_add_stmt (seq, x);
}

/* Note that the current EH region may contain a throw, or a
   call to a function which itself may contain a throw.  */

static void
note_eh_region_may_contain_throw (eh_region region)
{
  while (!bitmap_bit_p (eh_region_may_contain_throw_map, region->index))
    {
      bitmap_set_bit (eh_region_may_contain_throw_map, region->index);
      region = region->outer;
      if (region == NULL)
        break;
    }
}

/* Check if REGION has been marked as containing a throw.  If REGION is
   NULL, this predicate is false.  */

static inline bool
eh_region_may_contain_throw (eh_region r)
{
  return r && bitmap_bit_p (eh_region_may_contain_throw_map, r->index);
}
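
/* Note (a gloss, not from the sources): the upward marking loop in
   note_eh_region_may_contain_throw stops at the first region already
   marked, since that region's ancestors were marked when it was; the
   total marking work is thus linear in the number of regions.  */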

/* We want to transform
        try { body; } catch { stuff; }
   to
        normal_sequence:
          body;
          over:
        eh_sequence:
          landing_pad:
          stuff;
          goto over;

   TP is a GIMPLE_TRY node.  REGION is the region whose post_landing_pad
   should be placed before the second operand, or NULL.  OVER is
   an existing label that should be put at the exit, or NULL.  */

static gimple_seq
frob_into_branch_around (gimple tp, eh_region region, tree over)
{
  gimple x;
  gimple_seq cleanup, result;
  location_t loc = gimple_location (tp);

  cleanup = gimple_try_cleanup (tp);
  result = gimple_try_eval (tp);

  if (region)
    emit_post_landing_pad (&eh_seq, region);

  if (gimple_seq_may_fallthru (cleanup))
    {
      if (!over)
        over = create_artificial_label (loc);
      x = gimple_build_goto (over);
      gimple_seq_add_stmt (&cleanup, x);
    }
  gimple_seq_add_seq (&eh_seq, cleanup);

  if (over)
    {
      x = gimple_build_label (over);
      gimple_seq_add_stmt (&result, x);
    }
  return result;
}

/* A subroutine of lower_try_finally.  Duplicate the tree rooted at T.
   Make sure to record all new labels found.  */

static gimple_seq
lower_try_finally_dup_block (gimple_seq seq, struct leh_state *outer_state)
{
  gimple region = NULL;
  gimple_seq new_seq;

  new_seq = copy_gimple_seq_and_replace_locals (seq);

  if (outer_state->tf)
    region = outer_state->tf->try_finally_expr;
  collect_finally_tree_1 (new_seq, region);

  return new_seq;
}

/* A subroutine of lower_try_finally.  Create a fallthru label for
   the given try_finally state.  The only tricky bit here is that
   we have to make sure to record the label in our outer context.  */

static tree
lower_try_finally_fallthru_label (struct leh_tf_state *tf)
{
  tree label = tf->fallthru_label;
  treemple temp;

  if (!label)
    {
      label = create_artificial_label (gimple_location (tf->try_finally_expr));
      tf->fallthru_label = label;
      if (tf->outer->tf)
        {
          temp.t = label;
          record_in_finally_tree (temp, tf->outer->tf->try_finally_expr);
        }
    }
  return label;
}

/* A subroutine of lower_try_finally.  If lang_protect_cleanup_actions
   returns non-null, then the language requires that the exception path out
   of a try_finally be treated specially.  To wit: the code within the
   finally block may not itself throw an exception.  We have two choices here.
   First we can duplicate the finally block and wrap it in a must_not_throw
   region.  Second, we can generate code like

        try {
          finally_block;
        } catch {
          if (fintmp == eh_edge)
            protect_cleanup_actions;
        }

   where "fintmp" is the temporary used in the switch statement generation
   alternative considered below.  For the nonce, we always choose the first
   option.

   THIS_STATE may be null if this is a try-cleanup, not a try-finally.  */

static void
honor_protect_cleanup_actions (struct leh_state *outer_state,
                               struct leh_state *this_state,
                               struct leh_tf_state *tf)
{
  tree protect_cleanup_actions;
  gimple_stmt_iterator gsi;
  bool finally_may_fallthru;
  gimple_seq finally;
  gimple x;

  /* First check for nothing to do.  */
  if (lang_protect_cleanup_actions == NULL)
    return;
  protect_cleanup_actions = lang_protect_cleanup_actions ();
  if (protect_cleanup_actions == NULL)
    return;

  finally = gimple_try_cleanup (tf->top_p);
  finally_may_fallthru = gimple_seq_may_fallthru (finally);

  /* Duplicate the FINALLY block.  Only need to do this for try-finally,
     and not for cleanups.  */
  if (this_state)
    finally = lower_try_finally_dup_block (finally, outer_state);

  /* If this cleanup consists of a TRY_CATCH_EXPR with TRY_CATCH_IS_CLEANUP
     set, the handler of the TRY_CATCH_EXPR is another cleanup which ought
     to be in an enclosing scope, but needs to be implemented at this level
     to avoid a nesting violation (see wrap_temporary_cleanups in
     cp/decl.c).  Since it's logically at an outer level, we should call
     terminate before we get to it, so strip it away before adding the
     MUST_NOT_THROW filter.  */
  gsi = gsi_start (finally);
  x = gsi_stmt (gsi);
  if (gimple_code (x) == GIMPLE_TRY
      && gimple_try_kind (x) == GIMPLE_TRY_CATCH
      && gimple_try_catch_is_cleanup (x))
    {
      gsi_insert_seq_before (&gsi, gimple_try_eval (x), GSI_SAME_STMT);
      gsi_remove (&gsi, false);
    }

  /* Wrap the block with protect_cleanup_actions as the action.  */
  x = gimple_build_eh_must_not_throw (protect_cleanup_actions);
  x = gimple_build_try (finally, gimple_seq_alloc_with_stmt (x),
                        GIMPLE_TRY_CATCH);
  finally = lower_eh_must_not_throw (outer_state, x);

  /* Drop all of this into the exception sequence.  */
  emit_post_landing_pad (&eh_seq, tf->region);
  gimple_seq_add_seq (&eh_seq, finally);
  if (finally_may_fallthru)
    emit_resx (&eh_seq, tf->region);

  /* Having now been handled, EH isn't to be considered with
     the rest of the outgoing edges.  */
  tf->may_throw = false;
}

/* A subroutine of lower_try_finally.  We have determined that there is
   no fallthru edge out of the finally block.  This means that there is
   no outgoing edge corresponding to any incoming edge.  Restructure the
   try_finally node for this special case.  */

static void
lower_try_finally_nofallthru (struct leh_state *state,
                              struct leh_tf_state *tf)
{
  tree lab, return_val;
  gimple x;
  gimple_seq finally;
  struct goto_queue_node *q, *qe;

  lab = create_artificial_label (gimple_location (tf->try_finally_expr));

  /* We expect that tf->top_p is a GIMPLE_TRY.  */
  finally = gimple_try_cleanup (tf->top_p);
  tf->top_p_seq = gimple_try_eval (tf->top_p);

  x = gimple_build_label (lab);
  gimple_seq_add_stmt (&tf->top_p_seq, x);

  return_val = NULL;
  q = tf->goto_queue;
  qe = q + tf->goto_queue_active;
  for (; q < qe; ++q)
    if (q->index < 0)
      do_return_redirection (q, lab, NULL, &return_val);
    else
      do_goto_redirection (q, lab, NULL, tf);

  replace_goto_queue (tf);

  lower_eh_constructs_1 (state, finally);
  gimple_seq_add_seq (&tf->top_p_seq, finally);

  if (tf->may_throw)
    {
      emit_post_landing_pad (&eh_seq, tf->region);

      x = gimple_build_goto (lab);
      gimple_seq_add_stmt (&eh_seq, x);
    }
}

/* A subroutine of lower_try_finally.  We have determined that there is
   exactly one destination of the finally block.  Restructure the
   try_finally node for this special case.  */

static void
lower_try_finally_onedest (struct leh_state *state, struct leh_tf_state *tf)
{
  struct goto_queue_node *q, *qe;
  gimple x;
  gimple_seq finally;
  tree finally_label;
  location_t loc = gimple_location (tf->try_finally_expr);

  finally = gimple_try_cleanup (tf->top_p);
  tf->top_p_seq = gimple_try_eval (tf->top_p);

  lower_eh_constructs_1 (state, finally);

  if (tf->may_throw)
    {
      /* Only reachable via the exception edge.  Add the given label to
         the head of the FINALLY block.  Append a RESX at the end.  */
      emit_post_landing_pad (&eh_seq, tf->region);
      gimple_seq_add_seq (&eh_seq, finally);
      emit_resx (&eh_seq, tf->region);
      return;
    }

  if (tf->may_fallthru)
    {
      /* Only reachable via the fallthru edge.  Do nothing but let
         the two blocks run together; we'll fall out the bottom.  */
      gimple_seq_add_seq (&tf->top_p_seq, finally);
      return;
    }

  finally_label = create_artificial_label (loc);
  x = gimple_build_label (finally_label);
  gimple_seq_add_stmt (&tf->top_p_seq, x);

  gimple_seq_add_seq (&tf->top_p_seq, finally);

  q = tf->goto_queue;
  qe = q + tf->goto_queue_active;

  if (tf->may_return)
    {
      /* Reachable by return expressions only.  Redirect them.  */
      tree return_val = NULL;
      for (; q < qe; ++q)
        do_return_redirection (q, finally_label, NULL, &return_val);
      replace_goto_queue (tf);
    }
  else
    {
      /* Reachable by goto expressions only.  Redirect them.  */
      for (; q < qe; ++q)
        do_goto_redirection (q, finally_label, NULL, tf);
      replace_goto_queue (tf);

      if (VEC_index (tree, tf->dest_array, 0) == tf->fallthru_label)
        {
          /* Reachable by goto to fallthru label only.  Redirect it
             to the new label (already created, sadly), and do not
             emit the final branch out, or the fallthru label.  */
          tf->fallthru_label = NULL;
          return;
        }
    }

  /* Place the original return/goto to the original destination
     immediately after the finally block.  */
  x = tf->goto_queue[0].cont_stmt;
  gimple_seq_add_stmt (&tf->top_p_seq, x);
  maybe_record_in_goto_queue (state, x);
}

/* A subroutine of lower_try_finally.  There are multiple edges incoming
   and outgoing from the finally block.  Implement this by duplicating the
   finally block for every destination.  */
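
/* For instance (an illustrative sketch, not from the sources),

       try { A } finally { F }

   with a fallthru exit and one escaping "goto L" becomes roughly

       A
       F                  <- copy for the fallthru edge
       goto <fallthru>;
     <L1>:
       F                  <- copy for the goto edge
       goto L;

   plus, when the region may throw, one more copy of F in the EH
   sequence followed by a RESX.  */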

static void
lower_try_finally_copy (struct leh_state *state, struct leh_tf_state *tf)
{
  gimple_seq finally;
  gimple_seq new_stmt;
  gimple_seq seq;
  gimple x;
  tree tmp;
  location_t tf_loc = gimple_location (tf->try_finally_expr);

  finally = gimple_try_cleanup (tf->top_p);
  tf->top_p_seq = gimple_try_eval (tf->top_p);
  new_stmt = NULL;

  if (tf->may_fallthru)
    {
      seq = lower_try_finally_dup_block (finally, state);
      lower_eh_constructs_1 (state, seq);
      gimple_seq_add_seq (&new_stmt, seq);

      tmp = lower_try_finally_fallthru_label (tf);
      x = gimple_build_goto (tmp);
      gimple_seq_add_stmt (&new_stmt, x);
    }

  if (tf->may_throw)
    {
      seq = lower_try_finally_dup_block (finally, state);
      lower_eh_constructs_1 (state, seq);

      emit_post_landing_pad (&eh_seq, tf->region);
      gimple_seq_add_seq (&eh_seq, seq);
      emit_resx (&eh_seq, tf->region);
    }

  if (tf->goto_queue)
    {
      struct goto_queue_node *q, *qe;
      tree return_val = NULL;
      int return_index, index;
      struct labels_s
      {
        struct goto_queue_node *q;
        tree label;
      } *labels;

      return_index = VEC_length (tree, tf->dest_array);
      labels = XCNEWVEC (struct labels_s, return_index + 1);

      q = tf->goto_queue;
      qe = q + tf->goto_queue_active;
      for (; q < qe; q++)
        {
          index = q->index < 0 ? return_index : q->index;

          if (!labels[index].q)
            labels[index].q = q;
        }

      for (index = 0; index < return_index + 1; index++)
        {
          tree lab;

          q = labels[index].q;
          if (! q)
            continue;

          lab = labels[index].label
            = create_artificial_label (tf_loc);

          if (index == return_index)
            do_return_redirection (q, lab, NULL, &return_val);
          else
            do_goto_redirection (q, lab, NULL, tf);

          x = gimple_build_label (lab);
          gimple_seq_add_stmt (&new_stmt, x);

          seq = lower_try_finally_dup_block (finally, state);
          lower_eh_constructs_1 (state, seq);
          gimple_seq_add_seq (&new_stmt, seq);

          gimple_seq_add_stmt (&new_stmt, q->cont_stmt);
          maybe_record_in_goto_queue (state, q->cont_stmt);
        }

      for (q = tf->goto_queue; q < qe; q++)
        {
          tree lab;

          index = q->index < 0 ? return_index : q->index;

          if (labels[index].q == q)
            continue;

          lab = labels[index].label;

          if (index == return_index)
            do_return_redirection (q, lab, NULL, &return_val);
          else
            do_goto_redirection (q, lab, NULL, tf);
        }

      replace_goto_queue (tf);
      free (labels);
    }

  /* Need to link new stmts after running replace_goto_queue due
     to not wanting to process the same goto stmts twice.  */
  gimple_seq_add_seq (&tf->top_p_seq, new_stmt);
}

/* A subroutine of lower_try_finally.  There are multiple edges incoming
   and outgoing from the finally block.  Implement this by instrumenting
   each incoming edge and creating a switch statement at the end of the
   finally block that branches to the appropriate destination.  */
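
/* Illustrative shape of the result (a sketch, not from the sources):

       A
       finally_tmp = <fallthru_index>;
     <finally_label>:
       F
       switch (finally_tmp)
         {
         case <fallthru_index>: goto <fallthru>;
         case 0: goto L;        <- one case per dest_array entry
         }

   with the EH edge, when present, entering via the landing pad with
   its own finally_tmp value and a RESX case.  */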

static void
lower_try_finally_switch (struct leh_state *state, struct leh_tf_state *tf)
{
  struct goto_queue_node *q, *qe;
  tree return_val = NULL;
  tree finally_tmp, finally_label;
  int return_index, eh_index, fallthru_index;
  int nlabels, ndests, j, last_case_index;
  tree last_case;
  VEC (tree,heap) *case_label_vec;
  gimple_seq switch_body;
  gimple x;
  tree tmp;
  gimple switch_stmt;
  gimple_seq finally;
  struct pointer_map_t *cont_map = NULL;
  /* The location of the TRY_FINALLY stmt.  */
  location_t tf_loc = gimple_location (tf->try_finally_expr);
  /* The location of the finally block.  */
  location_t finally_loc;

  switch_body = gimple_seq_alloc ();

  /* Mash the TRY block to the head of the chain.  */
  finally = gimple_try_cleanup (tf->top_p);
  tf->top_p_seq = gimple_try_eval (tf->top_p);

  /* The location of the finally is either the last stmt in the finally
     block or the location of the TRY_FINALLY itself.  */
  finally_loc = gimple_seq_last_stmt (tf->top_p_seq) != NULL ?
    gimple_location (gimple_seq_last_stmt (tf->top_p_seq))
    : tf_loc;

  /* Lower the finally block itself.  */
  lower_eh_constructs_1 (state, finally);

  /* Prepare for switch statement generation.  */
  nlabels = VEC_length (tree, tf->dest_array);
  return_index = nlabels;
  eh_index = return_index + tf->may_return;
  fallthru_index = eh_index + tf->may_throw;
  ndests = fallthru_index + tf->may_fallthru;

  finally_tmp = create_tmp_var (integer_type_node, "finally_tmp");
  finally_label = create_artificial_label (finally_loc);

  /* We use VEC_quick_push on case_label_vec throughout this function,
     since we know the size in advance and allocate precisely as much
     space as needed.  */
  case_label_vec = VEC_alloc (tree, heap, ndests);
  last_case = NULL;
  last_case_index = 0;

  /* Begin inserting code for getting to the finally block.  Things
     are done in this order to correspond to the sequence the code is
     laid out.  */

  if (tf->may_fallthru)
    {
      x = gimple_build_assign (finally_tmp,
                               build_int_cst (NULL, fallthru_index));
      gimple_seq_add_stmt (&tf->top_p_seq, x);

      last_case = build3 (CASE_LABEL_EXPR, void_type_node,
                          build_int_cst (NULL, fallthru_index),
                          NULL, create_artificial_label (tf_loc));
      VEC_quick_push (tree, case_label_vec, last_case);
      last_case_index++;

      x = gimple_build_label (CASE_LABEL (last_case));
      gimple_seq_add_stmt (&switch_body, x);

      tmp = lower_try_finally_fallthru_label (tf);
      x = gimple_build_goto (tmp);
      gimple_seq_add_stmt (&switch_body, x);
    }

  if (tf->may_throw)
    {
      emit_post_landing_pad (&eh_seq, tf->region);

      x = gimple_build_assign (finally_tmp,
                               build_int_cst (NULL, eh_index));
      gimple_seq_add_stmt (&eh_seq, x);

      x = gimple_build_goto (finally_label);
      gimple_seq_add_stmt (&eh_seq, x);

      last_case = build3 (CASE_LABEL_EXPR, void_type_node,
                          build_int_cst (NULL, eh_index),
                          NULL, create_artificial_label (tf_loc));
      VEC_quick_push (tree, case_label_vec, last_case);
      last_case_index++;

      x = gimple_build_label (CASE_LABEL (last_case));
      gimple_seq_add_stmt (&eh_seq, x);
      emit_resx (&eh_seq, tf->region);
    }

  x = gimple_build_label (finally_label);
  gimple_seq_add_stmt (&tf->top_p_seq, x);

  gimple_seq_add_seq (&tf->top_p_seq, finally);

  /* Redirect each incoming goto edge.  */
  q = tf->goto_queue;
  qe = q + tf->goto_queue_active;
  j = last_case_index + tf->may_return;
  /* Prepare the assignments to finally_tmp that are executed upon the
     entrance through a particular edge.  */
  for (; q < qe; ++q)
    {
      gimple_seq mod;
      int switch_id;
      unsigned int case_index;

      mod = gimple_seq_alloc ();

      if (q->index < 0)
        {
          x = gimple_build_assign (finally_tmp,
                                   build_int_cst (NULL, return_index));
          gimple_seq_add_stmt (&mod, x);
          do_return_redirection (q, finally_label, mod, &return_val);
          switch_id = return_index;
        }
      else
        {
          x = gimple_build_assign (finally_tmp,
                                   build_int_cst (NULL, q->index));
          gimple_seq_add_stmt (&mod, x);
          do_goto_redirection (q, finally_label, mod, tf);
          switch_id = q->index;
        }

      case_index = j + q->index;
      if (VEC_length (tree, case_label_vec) <= case_index
          || !VEC_index (tree, case_label_vec, case_index))
        {
          tree case_lab;
          void **slot;
          case_lab = build3 (CASE_LABEL_EXPR, void_type_node,
                             build_int_cst (NULL, switch_id),
                             NULL, NULL);
          /* We store the cont_stmt in the pointer map, so that we can recover
             it in the loop below.  We don't create the new label while
             walking the goto_queue because pointers don't offer a stable
             order.  */
          if (!cont_map)
            cont_map = pointer_map_create ();
          slot = pointer_map_insert (cont_map, case_lab);
          *slot = q->cont_stmt;
          VEC_quick_push (tree, case_label_vec, case_lab);
        }
    }
  for (j = last_case_index; j < last_case_index + nlabels; j++)
    {
      tree label;
      gimple cont_stmt;
      void **slot;

      last_case = VEC_index (tree, case_label_vec, j);

      gcc_assert (last_case);
      gcc_assert (cont_map);

      slot = pointer_map_contains (cont_map, last_case);
      /* As the comment above suggests, CASE_LABEL (last_case) was just a
         placeholder, it does not store an actual label, yet.  */
      gcc_assert (slot);
      cont_stmt = *(gimple *) slot;

      label = create_artificial_label (tf_loc);
      CASE_LABEL (last_case) = label;

      x = gimple_build_label (label);
      gimple_seq_add_stmt (&switch_body, x);
      gimple_seq_add_stmt (&switch_body, cont_stmt);
      maybe_record_in_goto_queue (state, cont_stmt);
    }
  if (cont_map)
    pointer_map_destroy (cont_map);

  replace_goto_queue (tf);

  /* Make sure that the last case is the default label, as one is required.
     Then sort the labels, which is also required in GIMPLE.  */
  CASE_LOW (last_case) = NULL;
  sort_case_labels (case_label_vec);

  /* Build the switch statement, setting last_case to be the default
     label.  */
  switch_stmt = gimple_build_switch_vec (finally_tmp, last_case,
                                         case_label_vec);
  gimple_set_location (switch_stmt, finally_loc);

  /* Need to link SWITCH_STMT after running replace_goto_queue
     due to not wanting to process the same goto stmts twice.  */
  gimple_seq_add_stmt (&tf->top_p_seq, switch_stmt);
  gimple_seq_add_seq (&tf->top_p_seq, switch_body);
}
1481
 
1482
/* Decide whether or not we are going to duplicate the finally block.
1483
   There are several considerations.
1484
 
1485
   First, if this is Java, then the finally block contains code
1486
   written by the user.  It has line numbers associated with it,
1487
   so duplicating the block means it's difficult to set a breakpoint.
1488
   Since controlling code generation via -g is verboten, we simply
1489
   never duplicate code without optimization.
1490
 
1491
   Second, we'd like to prevent egregious code growth.  One way to
1492
   do this is to estimate the size of the finally block, multiply
1493
   that by the number of copies we'd need to make, and compare against
1494
   the estimate of the size of the switch machinery we'd have to add.  */
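
/* Worked example (illustrative): with ndests == 3 and a finally block
   weighing 12 insns, f_estimate = (12 + 1) * 3 = 39 and
   sw_estimate = 10 + 2 * 3 = 16.  When optimizing for size, 39 < 16
   fails and the switch form is used; at -O2 the cutoff
   f_estimate < 100 holds and the block is duplicated instead.  */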

static bool
decide_copy_try_finally (int ndests, gimple_seq finally)
{
  int f_estimate, sw_estimate;

  if (!optimize)
    return false;

  /* Finally estimate N times, plus N gotos.  */
  f_estimate = count_insns_seq (finally, &eni_size_weights);
  f_estimate = (f_estimate + 1) * ndests;

  /* Switch statement (cost 10), N variable assignments, N gotos.  */
  sw_estimate = 10 + 2 * ndests;

  /* Optimize for size clearly wants our best guess.  */
  if (optimize_function_for_size_p (cfun))
    return f_estimate < sw_estimate;

  /* ??? These numbers are completely made up so far.  */
  if (optimize > 1)
    return f_estimate < 100 || f_estimate < sw_estimate * 2;
  else
    return f_estimate < 40 || f_estimate * 2 < sw_estimate * 3;
}
1521
 
1522
 
1523
/* A subroutine of lower_eh_constructs_1.  Lower a GIMPLE_TRY_FINALLY nodes
1524
   to a sequence of labels and blocks, plus the exception region trees
1525
   that record all the magic.  This is complicated by the need to
1526
   arrange for the FINALLY block to be executed on all exits.  */
1527
 
1528
static gimple_seq
1529
lower_try_finally (struct leh_state *state, gimple tp)
1530
{
1531
  struct leh_tf_state this_tf;
1532
  struct leh_state this_state;
1533
  int ndests;
1534
  gimple_seq old_eh_seq;
1535
 
1536
  /* Process the try block.  */
1537
 
1538
  memset (&this_tf, 0, sizeof (this_tf));
1539
  this_tf.try_finally_expr = tp;
1540
  this_tf.top_p = tp;
1541
  this_tf.outer = state;
1542
  if (using_eh_for_cleanups_p)
1543
    this_tf.region = gen_eh_region_cleanup (state->cur_region);
1544
  else
1545
    this_tf.region = NULL;
1546
 
1547
  this_state.cur_region = this_tf.region;
1548
  this_state.ehp_region = state->ehp_region;
1549
  this_state.tf = &this_tf;
1550
 
1551
  old_eh_seq = eh_seq;
1552
  eh_seq = NULL;
1553
 
1554
  lower_eh_constructs_1 (&this_state, gimple_try_eval(tp));
1555
 
1556
  /* Determine if the try block is escaped through the bottom.  */
1557
  this_tf.may_fallthru = gimple_seq_may_fallthru (gimple_try_eval (tp));
1558
 
1559
  /* Determine if any exceptions are possible within the try block.  */
1560
  if (using_eh_for_cleanups_p)
1561
    this_tf.may_throw = eh_region_may_contain_throw (this_tf.region);
1562
  if (this_tf.may_throw)
1563
    honor_protect_cleanup_actions (state, &this_state, &this_tf);
1564
 
1565
  /* Determine how many edges (still) reach the finally block.  Or rather,
1566
     how many destinations are reached by the finally block.  Use this to
1567
     determine how we process the finally block itself.  */
1568
 
1569
  ndests = VEC_length (tree, this_tf.dest_array);
1570
  ndests += this_tf.may_fallthru;
1571
  ndests += this_tf.may_return;
1572
  ndests += this_tf.may_throw;
1573
 
1574
  /* If the FINALLY block is not reachable, dike it out.  */
1575
  if (ndests == 0)
1576
    {
1577
      gimple_seq_add_seq (&this_tf.top_p_seq, gimple_try_eval (tp));
1578
      gimple_try_set_cleanup (tp, NULL);
1579
    }
1580
  /* If the finally block doesn't fall through, then any destination
1581
     we might try to impose there isn't reached either.  There may be
1582
     some minor amount of cleanup and redirection still needed.  */
1583
  else if (!gimple_seq_may_fallthru (gimple_try_cleanup (tp)))
1584
    lower_try_finally_nofallthru (state, &this_tf);
1585
 
1586
  /* We can easily special-case redirection to a single destination.  */
1587
  else if (ndests == 1)
1588
    lower_try_finally_onedest (state, &this_tf);
1589
  else if (decide_copy_try_finally (ndests, gimple_try_cleanup (tp)))
1590
    lower_try_finally_copy (state, &this_tf);
1591
  else
1592
    lower_try_finally_switch (state, &this_tf);
1593
 
1594
  /* If someone requested we add a label at the end of the transformed
1595
     block, do so.  */
1596
  if (this_tf.fallthru_label)
1597
    {
1598
      /* This must be reached only if ndests == 0. */
1599
      gimple x = gimple_build_label (this_tf.fallthru_label);
1600
      gimple_seq_add_stmt (&this_tf.top_p_seq, x);
1601
    }
1602
 
1603
  VEC_free (tree, heap, this_tf.dest_array);
1604
  if (this_tf.goto_queue)
1605
    free (this_tf.goto_queue);
1606
  if (this_tf.goto_queue_map)
1607
    pointer_map_destroy (this_tf.goto_queue_map);
1608
 
1609
  /* If there was an old (aka outer) eh_seq, append the current eh_seq.
1610
     If there was no old eh_seq, then the append is trivially already done.  */
1611
  if (old_eh_seq)
1612
    {
1613
      if (eh_seq == NULL)
1614
        eh_seq = old_eh_seq;
1615
      else
1616
        {
1617
          gimple_seq new_eh_seq = eh_seq;
1618
          eh_seq = old_eh_seq;
1619
          gimple_seq_add_seq(&eh_seq, new_eh_seq);
1620
        }
1621
    }
1622
 
1623
  return this_tf.top_p_seq;
1624
}
1625
 
1626
/* A subroutine of lower_eh_constructs_1.  Lower a GIMPLE_TRY_CATCH with a
1627
   list of GIMPLE_CATCH to a sequence of labels and blocks, plus the
1628
   exception region trees that records all the magic.  */
1629
 
1630
static gimple_seq
1631
lower_catch (struct leh_state *state, gimple tp)
1632
{
1633
  eh_region try_region = NULL;
1634
  struct leh_state this_state = *state;
1635
  gimple_stmt_iterator gsi;
1636
  tree out_label;
1637
  gimple_seq new_seq;
1638
  gimple x;
1639
  location_t try_catch_loc = gimple_location (tp);
1640
 
1641
  if (flag_exceptions)
1642
    {
1643
      try_region = gen_eh_region_try (state->cur_region);
1644
      this_state.cur_region = try_region;
1645
    }
1646
 
1647
  lower_eh_constructs_1 (&this_state, gimple_try_eval (tp));
1648
 
1649
  if (!eh_region_may_contain_throw (try_region))
1650
    return gimple_try_eval (tp);
1651
 
1652
  new_seq = NULL;
1653
  emit_eh_dispatch (&new_seq, try_region);
1654
  emit_resx (&new_seq, try_region);
1655
 
1656
  this_state.cur_region = state->cur_region;
1657
  this_state.ehp_region = try_region;
1658
 
1659
  out_label = NULL;
1660
  for (gsi = gsi_start (gimple_try_cleanup (tp));
1661
       !gsi_end_p (gsi);
1662
       gsi_next (&gsi))
1663
    {
1664
      eh_catch c;
1665
      gimple gcatch;
1666
      gimple_seq handler;
1667
 
1668
      gcatch = gsi_stmt (gsi);
1669
      c = gen_eh_region_catch (try_region, gimple_catch_types (gcatch));
1670
 
1671
      handler = gimple_catch_handler (gcatch);
1672
      lower_eh_constructs_1 (&this_state, handler);
1673
 
1674
      c->label = create_artificial_label (UNKNOWN_LOCATION);
1675
      x = gimple_build_label (c->label);
1676
      gimple_seq_add_stmt (&new_seq, x);
1677
 
1678
      gimple_seq_add_seq (&new_seq, handler);
1679
 
1680
      if (gimple_seq_may_fallthru (new_seq))
1681
        {
1682
          if (!out_label)
1683
            out_label = create_artificial_label (try_catch_loc);
1684
 
1685
          x = gimple_build_goto (out_label);
1686
          gimple_seq_add_stmt (&new_seq, x);
1687
        }
1688
      if (!c->type_list)
1689
        break;
1690
    }
1691
 
1692
  gimple_try_set_cleanup (tp, new_seq);
1693
 
1694
  return frob_into_branch_around (tp, try_region, out_label);
1695
}
1696
 
1697
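/* Illustratively (hypothetical labels L_T and L_out, not literal
   output): for try { body; } catch (T) { h; }, the cleanup sequence
   built above is

       <eh_dispatch try_region>
       resx
     L_T:
       h;  goto L_out;

   and frob_into_branch_around wraps BODY so that its normal exit
   branches around the handlers to L_out.  */
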
/* A subroutine of lower_eh_constructs_1.  Lower a GIMPLE_TRY with a
   GIMPLE_EH_FILTER to a sequence of labels and blocks, plus the exception
   region trees that record all the magic.  */

static gimple_seq
lower_eh_filter (struct leh_state *state, gimple tp)
{
  struct leh_state this_state = *state;
  eh_region this_region = NULL;
  gimple inner, x;
  gimple_seq new_seq;

  inner = gimple_seq_first_stmt (gimple_try_cleanup (tp));

  if (flag_exceptions)
    {
      this_region = gen_eh_region_allowed (state->cur_region,
                                           gimple_eh_filter_types (inner));
      this_state.cur_region = this_region;
    }

  lower_eh_constructs_1 (&this_state, gimple_try_eval (tp));

  if (!eh_region_may_contain_throw (this_region))
    return gimple_try_eval (tp);

  new_seq = NULL;
  this_state.cur_region = state->cur_region;
  this_state.ehp_region = this_region;

  emit_eh_dispatch (&new_seq, this_region);
  emit_resx (&new_seq, this_region);

  this_region->u.allowed.label = create_artificial_label (UNKNOWN_LOCATION);
  x = gimple_build_label (this_region->u.allowed.label);
  gimple_seq_add_stmt (&new_seq, x);

  lower_eh_constructs_1 (&this_state, gimple_eh_filter_failure (inner));
  gimple_seq_add_seq (&new_seq, gimple_eh_filter_failure (inner));

  gimple_try_set_cleanup (tp, new_seq);

  return frob_into_branch_around (tp, this_region, NULL);
}

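/* For context (typical, not the only source): a GIMPLE_EH_FILTER
   usually comes from a C++ dynamic exception specification such as
   "void f () throw (A)".  The filter failure sequence is what runs
   when an exception outside the allowed list escapes; for C++ that
   is normally a call into the unexpected-handler machinery.  */
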
/* A subroutine of lower_eh_constructs_1.  Lower a GIMPLE_TRY with
   a GIMPLE_EH_MUST_NOT_THROW to a sequence of labels and blocks,
   plus the exception region trees that record all the magic.  */

static gimple_seq
lower_eh_must_not_throw (struct leh_state *state, gimple tp)
{
  struct leh_state this_state = *state;

  if (flag_exceptions)
    {
      gimple inner = gimple_seq_first_stmt (gimple_try_cleanup (tp));
      eh_region this_region;

      this_region = gen_eh_region_must_not_throw (state->cur_region);
      this_region->u.must_not_throw.failure_decl
        = gimple_eh_must_not_throw_fndecl (inner);
      this_region->u.must_not_throw.failure_loc = gimple_location (tp);

      /* In order to get mangling applied to this decl, we must mark it
         used now.  Otherwise, pass_ipa_free_lang_data won't think it
         needs to happen.  */
      TREE_USED (this_region->u.must_not_throw.failure_decl) = 1;

      this_state.cur_region = this_region;
    }

  lower_eh_constructs_1 (&this_state, gimple_try_eval (tp));

  return gimple_try_eval (tp);
}

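/* Typical instance (front-end dependent): the C++ front end wraps
   cleanups that must not throw, such as a destructor run during
   unwinding, in a region like this with std::terminate as the
   failure decl, so a throw from inside the region aborts the
   program instead of propagating.  */
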
/* Implement a cleanup expression.  This is similar to try-finally,
   except that we only execute the cleanup block for exception edges.  */

static gimple_seq
lower_cleanup (struct leh_state *state, gimple tp)
{
  struct leh_state this_state = *state;
  eh_region this_region = NULL;
  struct leh_tf_state fake_tf;
  gimple_seq result;

  if (flag_exceptions)
    {
      this_region = gen_eh_region_cleanup (state->cur_region);
      this_state.cur_region = this_region;
    }

  lower_eh_constructs_1 (&this_state, gimple_try_eval (tp));

  if (!eh_region_may_contain_throw (this_region))
    return gimple_try_eval (tp);

  /* Build enough of a try-finally state so that we can reuse
     honor_protect_cleanup_actions.  */
  memset (&fake_tf, 0, sizeof (fake_tf));
  fake_tf.top_p = fake_tf.try_finally_expr = tp;
  fake_tf.outer = state;
  fake_tf.region = this_region;
  fake_tf.may_fallthru = gimple_seq_may_fallthru (gimple_try_eval (tp));
  fake_tf.may_throw = true;

  honor_protect_cleanup_actions (state, NULL, &fake_tf);

  if (fake_tf.may_throw)
    {
      /* In this case honor_protect_cleanup_actions had nothing to do,
         and we should process this normally.  */
      lower_eh_constructs_1 (state, gimple_try_cleanup (tp));
      result = frob_into_branch_around (tp, this_region,
                                        fake_tf.fallthru_label);
    }
  else
    {
      /* In this case honor_protect_cleanup_actions did nearly all of
         the work.  All we have left is to append the fallthru_label.  */

      result = gimple_try_eval (tp);
      if (fake_tf.fallthru_label)
        {
          gimple x = gimple_build_label (fake_tf.fallthru_label);
          gimple_seq_add_stmt (&result, x);
        }
    }
  return result;
}

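/* Contrast with try-finally (sketch, hypothetical GIMPLE): for
   try { body; } cleanup { d; }, the cleanup D is reached only via
   the EH edge -- the fallthru path of BODY skips it entirely --
   whereas a finally block would run on both paths.  */
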
/* Main loop for lowering eh constructs.  Also moves gsi to the next
   statement.  */

static void
lower_eh_constructs_2 (struct leh_state *state, gimple_stmt_iterator *gsi)
{
  gimple_seq replace;
  gimple x;
  gimple stmt = gsi_stmt (*gsi);

  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
      {
        tree fndecl = gimple_call_fndecl (stmt);
        tree rhs, lhs;

        if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
          switch (DECL_FUNCTION_CODE (fndecl))
            {
            case BUILT_IN_EH_POINTER:
              /* The front end may have generated a call to
                 __builtin_eh_pointer (0) within a catch region.  Replace
                 this zero argument with the current catch region number.  */
              if (state->ehp_region)
                {
                  tree nr = build_int_cst (NULL, state->ehp_region->index);
                  gimple_call_set_arg (stmt, 0, nr);
                }
              else
                {
                  /* The user has done something silly.  Remove it.  */
                  rhs = build_int_cst (ptr_type_node, 0);
                  goto do_replace;
                }
              break;

            case BUILT_IN_EH_FILTER:
              /* ??? This should never appear, but since it's a builtin it
                 is accessible to abuse by users.  Just remove it and
                 replace the use with the arbitrary value zero.  */
              rhs = build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
            do_replace:
              lhs = gimple_call_lhs (stmt);
              x = gimple_build_assign (lhs, rhs);
              gsi_insert_before (gsi, x, GSI_SAME_STMT);
              /* FALLTHRU */

            case BUILT_IN_EH_COPY_VALUES:
              /* Likewise this should not appear.  Remove it.  */
              gsi_remove (gsi, true);
              return;

            default:
              break;
            }
      }
      /* FALLTHRU */

    case GIMPLE_ASSIGN:
      /* If the stmt can throw use a new temporary for the assignment
         to a LHS.  This makes sure the old value of the LHS is
         available on the EH edge.  Only do so for statements that
         potentially fall thru (no noreturn calls e.g.), otherwise
         this new assignment might create fake fallthru regions.  */
      if (stmt_could_throw_p (stmt)
          && gimple_has_lhs (stmt)
          && gimple_stmt_may_fallthru (stmt)
          && !tree_could_throw_p (gimple_get_lhs (stmt))
          && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt))))
        {
          tree lhs = gimple_get_lhs (stmt);
          tree tmp = create_tmp_var (TREE_TYPE (lhs), NULL);
          gimple s = gimple_build_assign (lhs, tmp);
          gimple_set_location (s, gimple_location (stmt));
          gimple_set_block (s, gimple_block (stmt));
          gimple_set_lhs (stmt, tmp);
          if (TREE_CODE (TREE_TYPE (tmp)) == COMPLEX_TYPE
              || TREE_CODE (TREE_TYPE (tmp)) == VECTOR_TYPE)
            DECL_GIMPLE_REG_P (tmp) = 1;
          gsi_insert_after (gsi, s, GSI_SAME_STMT);
        }
      /* Look for things that can throw exceptions, and record them.  */
      if (state->cur_region && stmt_could_throw_p (stmt))
        {
          record_stmt_eh_region (state->cur_region, stmt);
          note_eh_region_may_contain_throw (state->cur_region);
        }
      break;

    case GIMPLE_COND:
    case GIMPLE_GOTO:
    case GIMPLE_RETURN:
      maybe_record_in_goto_queue (state, stmt);
      break;

    case GIMPLE_SWITCH:
      verify_norecord_switch_expr (state, stmt);
      break;

    case GIMPLE_TRY:
      if (gimple_try_kind (stmt) == GIMPLE_TRY_FINALLY)
        replace = lower_try_finally (state, stmt);
      else
        {
          x = gimple_seq_first_stmt (gimple_try_cleanup (stmt));
          if (!x)
            {
              replace = gimple_try_eval (stmt);
              lower_eh_constructs_1 (state, replace);
            }
          else
            switch (gimple_code (x))
              {
                case GIMPLE_CATCH:
                    replace = lower_catch (state, stmt);
                    break;
                case GIMPLE_EH_FILTER:
                    replace = lower_eh_filter (state, stmt);
                    break;
                case GIMPLE_EH_MUST_NOT_THROW:
                    replace = lower_eh_must_not_throw (state, stmt);
                    break;
                default:
                    replace = lower_cleanup (state, stmt);
                    break;
              }
        }

      /* Remove the old stmt and insert the transformed sequence
         instead.  */
      gsi_insert_seq_before (gsi, replace, GSI_SAME_STMT);
      gsi_remove (gsi, true);

      /* Return since we don't want gsi_next ().  */
      return;

    default:
      /* A type, a decl, or some kind of statement that we're not
         interested in.  Don't walk them.  */
      break;
    }

  gsi_next (gsi);
}

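/* Sketch of the GIMPLE_ASSIGN rewrite above (D.tmp is a hypothetical
   temporary):  a potentially-throwing "x = y / z" becomes

       D.tmp = y / z;   <-- stays recorded in the EH region
       x = D.tmp;       <-- runs only on the fallthru path

   so that on the EH edge the old value of X is still available.  */
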
/* A helper to unwrap a gimple_seq and feed stmts to lower_eh_constructs_2.  */

static void
lower_eh_constructs_1 (struct leh_state *state, gimple_seq seq)
{
  gimple_stmt_iterator gsi;
  for (gsi = gsi_start (seq); !gsi_end_p (gsi);)
    lower_eh_constructs_2 (state, &gsi);
}

static unsigned int
lower_eh_constructs (void)
{
  struct leh_state null_state;
  gimple_seq bodyp;

  bodyp = gimple_body (current_function_decl);
  if (bodyp == NULL)
    return 0;

  finally_tree = htab_create (31, struct_ptr_hash, struct_ptr_eq, free);
  eh_region_may_contain_throw_map = BITMAP_ALLOC (NULL);
  memset (&null_state, 0, sizeof (null_state));

  collect_finally_tree_1 (bodyp, NULL);
  lower_eh_constructs_1 (&null_state, bodyp);

  /* We assume there's a return statement, or something, at the end of
     the function, and thus plopping the EH sequence afterward won't
     change anything.  */
  gcc_assert (!gimple_seq_may_fallthru (bodyp));
  gimple_seq_add_seq (&bodyp, eh_seq);

  /* We assume that since BODYP already existed, adding EH_SEQ to it
     didn't change its value, and we don't have to re-set the function.  */
  gcc_assert (bodyp == gimple_body (current_function_decl));

  htab_delete (finally_tree);
  BITMAP_FREE (eh_region_may_contain_throw_map);
  eh_seq = NULL;

  /* If this function needs a language specific EH personality routine
     and the frontend didn't already set one, do so now.  */
  if (function_needs_eh_personality (cfun) == eh_personality_lang
      && !DECL_FUNCTION_PERSONALITY (current_function_decl))
    DECL_FUNCTION_PERSONALITY (current_function_decl)
      = lang_hooks.eh_personality ();

  return 0;
}

struct gimple_opt_pass pass_lower_eh =
{
 {
  GIMPLE_PASS,
  "eh",                                 /* name */
  NULL,                                 /* gate */
  lower_eh_constructs,                  /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_TREE_EH,                           /* tv_id */
  PROP_gimple_lcf,                      /* properties_required */
  PROP_gimple_leh,                      /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func                        /* todo_flags_finish */
 }
};

/* Create the multiple edges from an EH_DISPATCH statement to all of
   the possible handlers for its EH region.  Return true if there's
   no fallthru edge; false if there is.  */

bool
make_eh_dispatch_edges (gimple stmt)
{
  eh_region r;
  eh_catch c;
  basic_block src, dst;

  r = get_eh_region_from_number (gimple_eh_dispatch_region (stmt));
  src = gimple_bb (stmt);

  switch (r->type)
    {
    case ERT_TRY:
      for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
        {
          dst = label_to_block (c->label);
          make_edge (src, dst, 0);

          /* A catch-all handler doesn't have a fallthru.  */
          if (c->type_list == NULL)
            return false;
        }
      break;

    case ERT_ALLOWED_EXCEPTIONS:
      dst = label_to_block (r->u.allowed.label);
      make_edge (src, dst, 0);
      break;

    default:
      gcc_unreachable ();
    }

  return true;
}

/* Create the single EH edge from STMT to its nearest landing pad,
   if there is such a landing pad within the current function.  */

void
make_eh_edges (gimple stmt)
{
  basic_block src, dst;
  eh_landing_pad lp;
  int lp_nr;

  lp_nr = lookup_stmt_eh_lp (stmt);
  if (lp_nr <= 0)
    return;

  lp = get_eh_landing_pad_from_number (lp_nr);
  gcc_assert (lp != NULL);

  src = gimple_bb (stmt);
  dst = label_to_block (lp->post_landing_pad);
  make_edge (src, dst, EDGE_EH);
}

/* Do the work in redirecting EDGE_IN to NEW_BB within the EH region tree;
   do not actually perform the final edge redirection.

   CHANGE_REGION is true when we're being called from cleanup_empty_eh and
   we intend to change the destination EH region as well; this means
   EH_LANDING_PAD_NR must already be set on the destination block label.
   If false, we're being called from generic cfg manipulation code and we
   should preserve our place within the region tree.  */

static void
redirect_eh_edge_1 (edge edge_in, basic_block new_bb, bool change_region)
{
  eh_landing_pad old_lp, new_lp;
  basic_block old_bb;
  gimple throw_stmt;
  int old_lp_nr, new_lp_nr;
  tree old_label, new_label;
  edge_iterator ei;
  edge e;

  old_bb = edge_in->dest;
  old_label = gimple_block_label (old_bb);
  old_lp_nr = EH_LANDING_PAD_NR (old_label);
  gcc_assert (old_lp_nr > 0);
  old_lp = get_eh_landing_pad_from_number (old_lp_nr);

  throw_stmt = last_stmt (edge_in->src);
  gcc_assert (lookup_stmt_eh_lp (throw_stmt) == old_lp_nr);

  new_label = gimple_block_label (new_bb);

  /* Look for an existing region that might be using NEW_BB already.  */
  new_lp_nr = EH_LANDING_PAD_NR (new_label);
  if (new_lp_nr)
    {
      new_lp = get_eh_landing_pad_from_number (new_lp_nr);
      gcc_assert (new_lp);

      /* Unless CHANGE_REGION is true, the new and old landing pad
         had better be associated with the same EH region.  */
      gcc_assert (change_region || new_lp->region == old_lp->region);
    }
  else
    {
      new_lp = NULL;
      gcc_assert (!change_region);
    }

  /* Notice when we redirect the last EH edge away from OLD_BB.  */
  FOR_EACH_EDGE (e, ei, old_bb->preds)
    if (e != edge_in && (e->flags & EDGE_EH))
      break;

  if (new_lp)
    {
      /* NEW_LP already exists.  If there are still edges into OLD_LP,
         there's nothing to do with the EH tree.  If there are no more
         edges into OLD_LP, then we want to remove OLD_LP as it is unused.
         If CHANGE_REGION is true, then our caller is expecting to remove
         the landing pad.  */
      if (e == NULL && !change_region)
        remove_eh_landing_pad (old_lp);
    }
  else
    {
      /* No correct landing pad exists.  If there are no more edges
         into OLD_LP, then we can simply re-use the existing landing pad.
         Otherwise, we have to create a new landing pad.  */
      if (e == NULL)
        {
          EH_LANDING_PAD_NR (old_lp->post_landing_pad) = 0;
          new_lp = old_lp;
        }
      else
        new_lp = gen_eh_landing_pad (old_lp->region);
      new_lp->post_landing_pad = new_label;
      EH_LANDING_PAD_NR (new_label) = new_lp->index;
    }

  /* Maybe move the throwing statement to the new region.  */
  if (old_lp != new_lp)
    {
      remove_stmt_from_eh_lp (throw_stmt);
      add_stmt_to_eh_lp (throw_stmt, new_lp->index);
    }
}

/* Redirect EH edge EDGE_IN to NEW_BB.  */

edge
redirect_eh_edge (edge edge_in, basic_block new_bb)
{
  redirect_eh_edge_1 (edge_in, new_bb, false);
  return ssa_redirect_edge (edge_in, new_bb);
}

/* This is a subroutine of gimple_redirect_edge_and_branch.  Update the
   labels for redirecting a non-fallthru EH_DISPATCH edge E to NEW_BB.
   The actual edge update will happen in the caller.  */

void
redirect_eh_dispatch_edge (gimple stmt, edge e, basic_block new_bb)
{
  tree new_lab = gimple_block_label (new_bb);
  bool any_changed = false;
  basic_block old_bb;
  eh_region r;
  eh_catch c;

  r = get_eh_region_from_number (gimple_eh_dispatch_region (stmt));
  switch (r->type)
    {
    case ERT_TRY:
      for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
        {
          old_bb = label_to_block (c->label);
          if (old_bb == e->dest)
            {
              c->label = new_lab;
              any_changed = true;
            }
        }
      break;

    case ERT_ALLOWED_EXCEPTIONS:
      old_bb = label_to_block (r->u.allowed.label);
      gcc_assert (old_bb == e->dest);
      r->u.allowed.label = new_lab;
      any_changed = true;
      break;

    default:
      gcc_unreachable ();
    }

  gcc_assert (any_changed);
}

/* Helper function for operation_could_trap_p and stmt_could_throw_p.  */

bool
operation_could_trap_helper_p (enum tree_code op,
                               bool fp_operation,
                               bool honor_trapv,
                               bool honor_nans,
                               bool honor_snans,
                               tree divisor,
                               bool *handled)
{
  *handled = true;
  switch (op)
    {
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case TRUNC_MOD_EXPR:
    case RDIV_EXPR:
      if (honor_snans || honor_trapv)
        return true;
      if (fp_operation)
        return flag_trapping_math;
      if (!TREE_CONSTANT (divisor) || integer_zerop (divisor))
        return true;
      return false;

    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case LTGT_EXPR:
      /* Some floating point comparisons may trap.  */
      return honor_nans;

    case EQ_EXPR:
    case NE_EXPR:
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
      return honor_snans;

    case CONVERT_EXPR:
    case FIX_TRUNC_EXPR:
      /* Conversion of floating point might trap.  */
      return honor_nans;

    case NEGATE_EXPR:
    case ABS_EXPR:
    case CONJ_EXPR:
      /* These operations don't trap with floating point.  */
      if (honor_trapv)
        return true;
      return false;

    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
      /* Any floating arithmetic may trap.  */
      if (fp_operation && flag_trapping_math)
        return true;
      if (honor_trapv)
        return true;
      return false;

    default:
      /* Any floating arithmetic may trap.  */
      if (fp_operation && flag_trapping_math)
        return true;

      *handled = false;
      return false;
    }
}

/* Return true if operation OP may trap.  FP_OPERATION is true if OP is applied
   on floating-point values.  HONOR_TRAPV is true if OP is applied on integer
   type operands that may trap.  If OP is a division operator, DIVISOR contains
   the value of the divisor.  */

bool
operation_could_trap_p (enum tree_code op, bool fp_operation, bool honor_trapv,
                        tree divisor)
{
  bool honor_nans = (fp_operation && flag_trapping_math
                     && !flag_finite_math_only);
  bool honor_snans = fp_operation && flag_signaling_nans != 0;
  bool handled;

  if (TREE_CODE_CLASS (op) != tcc_comparison
      && TREE_CODE_CLASS (op) != tcc_unary
      && TREE_CODE_CLASS (op) != tcc_binary)
    return false;

  return operation_could_trap_helper_p (op, fp_operation, honor_trapv,
                                        honor_nans, honor_snans, divisor,
                                        &handled);
}

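/* Worked example: for a signed integer division "a / b" compiled
   with -ftrapv, operation_could_trap_p (TRUNC_DIV_EXPR, false, true,
   b) returns true immediately via honor_trapv in the helper above;
   without -ftrapv it still returns true unless B is a nonzero
   integer constant, because of the possible division by zero.  */
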
/* Return true if EXPR can trap, as in dereferencing an invalid pointer
   location or floating point arithmetic.  C.f. the rtl version, may_trap_p.
   This routine expects only GIMPLE lhs or rhs input.  */

bool
tree_could_trap_p (tree expr)
{
  enum tree_code code;
  bool fp_operation = false;
  bool honor_trapv = false;
  tree t, base, div = NULL_TREE;

  if (!expr)
    return false;

  code = TREE_CODE (expr);
  t = TREE_TYPE (expr);

  if (t)
    {
      if (COMPARISON_CLASS_P (expr))
        fp_operation = FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 0)));
      else
        fp_operation = FLOAT_TYPE_P (t);
      honor_trapv = INTEGRAL_TYPE_P (t) && TYPE_OVERFLOW_TRAPS (t);
    }

  if (TREE_CODE_CLASS (code) == tcc_binary)
    div = TREE_OPERAND (expr, 1);
  if (operation_could_trap_p (code, fp_operation, honor_trapv, div))
    return true;

 restart:
  switch (code)
    {
    case TARGET_MEM_REF:
      /* For TARGET_MEM_REFs use the information based on the original
         reference.  */
      expr = TMR_ORIGINAL (expr);
      code = TREE_CODE (expr);
      goto restart;

    case COMPONENT_REF:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case BIT_FIELD_REF:
    case VIEW_CONVERT_EXPR:
    case WITH_SIZE_EXPR:
      expr = TREE_OPERAND (expr, 0);
      code = TREE_CODE (expr);
      goto restart;

    case ARRAY_RANGE_REF:
      base = TREE_OPERAND (expr, 0);
      if (tree_could_trap_p (base))
        return true;
      if (TREE_THIS_NOTRAP (expr))
        return false;
      return !range_in_array_bounds_p (expr);

    case ARRAY_REF:
      base = TREE_OPERAND (expr, 0);
      if (tree_could_trap_p (base))
        return true;
      if (TREE_THIS_NOTRAP (expr))
        return false;
      return !in_array_bounds_p (expr);

    case INDIRECT_REF:
    case ALIGN_INDIRECT_REF:
    case MISALIGNED_INDIRECT_REF:
      return !TREE_THIS_NOTRAP (expr);

    case ASM_EXPR:
      return TREE_THIS_VOLATILE (expr);

    case CALL_EXPR:
      t = get_callee_fndecl (expr);
      /* Assume that calls to weak functions may trap.  */
      if (!t || !DECL_P (t) || DECL_WEAK (t))
        return true;
      return false;

    default:
      return false;
    }
}

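/* Examples of the reference cases above (hypothetical C sources):
   for "int a[10]; ... a[3]" the ARRAY_REF is provably in bounds, so
   in_array_bounds_p makes this return false; for "*p" with an
   arbitrary pointer, the INDIRECT_REF case returns true unless the
   access has been marked TREE_THIS_NOTRAP.  */
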
/* Helper for stmt_could_throw_p.  Return true if STMT (assumed to be
   an assignment or a conditional) may throw.  */

static bool
stmt_could_throw_1_p (gimple stmt)
{
  enum tree_code code = gimple_expr_code (stmt);
  bool honor_nans = false;
  bool honor_snans = false;
  bool fp_operation = false;
  bool honor_trapv = false;
  tree t;
  size_t i;
  bool handled, ret;

  if (TREE_CODE_CLASS (code) == tcc_comparison
      || TREE_CODE_CLASS (code) == tcc_unary
      || TREE_CODE_CLASS (code) == tcc_binary)
    {
      t = gimple_expr_type (stmt);
      fp_operation = FLOAT_TYPE_P (t);
      if (fp_operation)
        {
          honor_nans = flag_trapping_math && !flag_finite_math_only;
          honor_snans = flag_signaling_nans != 0;
        }
      else if (INTEGRAL_TYPE_P (t) && TYPE_OVERFLOW_TRAPS (t))
        honor_trapv = true;
    }

  /* Check if the main expression may trap.  */
  t = is_gimple_assign (stmt) ? gimple_assign_rhs2 (stmt) : NULL;
  ret = operation_could_trap_helper_p (code, fp_operation, honor_trapv,
                                       honor_nans, honor_snans, t,
                                       &handled);
  if (handled)
    return ret;

  /* If the expression does not trap, see if any of the individual operands may
     trap.  */
  for (i = 0; i < gimple_num_ops (stmt); i++)
    if (tree_could_trap_p (gimple_op (stmt, i)))
      return true;

  return false;
}

/* Return true if statement STMT could throw an exception.  */

bool
stmt_could_throw_p (gimple stmt)
{
  if (!flag_exceptions)
    return false;

  /* The only statements that can throw an exception are assignments,
     conditionals, calls, resx, and asms.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_RESX:
      return true;

    case GIMPLE_CALL:
      return !gimple_call_nothrow_p (stmt);

    case GIMPLE_ASSIGN:
    case GIMPLE_COND:
      if (!flag_non_call_exceptions)
        return false;
      return stmt_could_throw_1_p (stmt);

    case GIMPLE_ASM:
      if (!flag_non_call_exceptions)
        return false;
      return gimple_asm_volatile_p (stmt);

    default:
      return false;
    }
}

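/* Example: without -fnon-call-exceptions only calls and resx count
   here, so an assignment "x = *p" is nothrow; with
   -fnon-call-exceptions the same load may throw, because the
   dereference can trap and the trap can be turned into an
   exception.  */
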
/* Return true if expression T could throw an exception.  */

bool
tree_could_throw_p (tree t)
{
  if (!flag_exceptions)
    return false;
  if (TREE_CODE (t) == MODIFY_EXPR)
    {
      if (flag_non_call_exceptions
          && tree_could_trap_p (TREE_OPERAND (t, 0)))
        return true;
      t = TREE_OPERAND (t, 1);
    }

  if (TREE_CODE (t) == WITH_SIZE_EXPR)
    t = TREE_OPERAND (t, 0);
  if (TREE_CODE (t) == CALL_EXPR)
    return (call_expr_flags (t) & ECF_NOTHROW) == 0;
  if (flag_non_call_exceptions)
    return tree_could_trap_p (t);
  return false;
}

/* Return true if STMT can throw an exception that is not caught within
   the current function (CFUN).  */

bool
stmt_can_throw_external (gimple stmt)
{
  int lp_nr;

  if (!stmt_could_throw_p (stmt))
    return false;

  lp_nr = lookup_stmt_eh_lp (stmt);
  return lp_nr == 0;
}

/* Return true if STMT can throw an exception that is caught within
   the current function (CFUN).  */

bool
stmt_can_throw_internal (gimple stmt)
{
  int lp_nr;

  if (!stmt_could_throw_p (stmt))
    return false;

  lp_nr = lookup_stmt_eh_lp (stmt);
  return lp_nr > 0;
}

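/* Landing-pad number convention used by the two predicates above:
   lp_nr > 0 names a real landing pad (the throw is caught in this
   function), lp_nr == 0 means no landing pad (the throw escapes the
   function), and lp_nr < 0 names a MUST_NOT_THROW region.  */
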
/* Given a statement STMT in IFUN, if STMT can no longer throw, then
   remove any entry it might have from the EH table.  Return true if
   any change was made.  */

bool
maybe_clean_eh_stmt_fn (struct function *ifun, gimple stmt)
{
  if (stmt_could_throw_p (stmt))
    return false;
  return remove_stmt_from_eh_lp_fn (ifun, stmt);
}

/* Likewise, but always use the current function.  */

bool
maybe_clean_eh_stmt (gimple stmt)
{
  return maybe_clean_eh_stmt_fn (cfun, stmt);
}

/* Given a statement OLD_STMT and a new statement NEW_STMT that has replaced
   OLD_STMT in the function, remove OLD_STMT from the EH table and put NEW_STMT
   in the table if it should be in there.  Return TRUE if a replacement was
   done that may require an EH edge purge.  */

bool
maybe_clean_or_replace_eh_stmt (gimple old_stmt, gimple new_stmt)
{
  int lp_nr = lookup_stmt_eh_lp (old_stmt);

  if (lp_nr != 0)
    {
      bool new_stmt_could_throw = stmt_could_throw_p (new_stmt);

      if (new_stmt == old_stmt && new_stmt_could_throw)
        return false;

      remove_stmt_from_eh_lp (old_stmt);
      if (new_stmt_could_throw)
        {
          add_stmt_to_eh_lp (new_stmt, lp_nr);
          return false;
        }
      else
        return true;
    }

  return false;
}

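/* Typical caller pattern (sketch; OLD_STMT and NEW_STMT are
   hypothetical): after folding replaces a throwing call with a
   nothrow statement, a caller does something like

       if (maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt))
         gimple_purge_dead_eh_edges (gimple_bb (new_stmt));

   since a true return means the statement left the EH table and its
   now-dead EH edges must be purged.  */
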
/* Given a statement OLD_STMT in OLD_FUN and a duplicate statement NEW_STMT
   in NEW_FUN, copy the EH table data from OLD_STMT to NEW_STMT.  The MAP
   operand is the return value of duplicate_eh_regions.  */

bool
maybe_duplicate_eh_stmt_fn (struct function *new_fun, gimple new_stmt,
                            struct function *old_fun, gimple old_stmt,
                            struct pointer_map_t *map, int default_lp_nr)
{
  int old_lp_nr, new_lp_nr;
  void **slot;

  if (!stmt_could_throw_p (new_stmt))
    return false;

  old_lp_nr = lookup_stmt_eh_lp_fn (old_fun, old_stmt);
  if (old_lp_nr == 0)
    {
      if (default_lp_nr == 0)
        return false;
      new_lp_nr = default_lp_nr;
    }
  else if (old_lp_nr > 0)
    {
      eh_landing_pad old_lp, new_lp;

      old_lp = VEC_index (eh_landing_pad, old_fun->eh->lp_array, old_lp_nr);
      slot = pointer_map_contains (map, old_lp);
      new_lp = (eh_landing_pad) *slot;
      new_lp_nr = new_lp->index;
    }
  else
    {
      eh_region old_r, new_r;

      old_r = VEC_index (eh_region, old_fun->eh->region_array, -old_lp_nr);
      slot = pointer_map_contains (map, old_r);
      new_r = (eh_region) *slot;
      new_lp_nr = -new_r->index;
    }

  add_stmt_to_eh_lp_fn (new_fun, new_stmt, new_lp_nr);
  return true;
}

/* Similar, but both OLD_STMT and NEW_STMT are within the current function,
   and thus no remapping is required.  */

bool
maybe_duplicate_eh_stmt (gimple new_stmt, gimple old_stmt)
{
  int lp_nr;

  if (!stmt_could_throw_p (new_stmt))
    return false;

  lp_nr = lookup_stmt_eh_lp (old_stmt);
  if (lp_nr == 0)
    return false;

  add_stmt_to_eh_lp (new_stmt, lp_nr);
  return true;
}

/* Returns TRUE if ONEH and TWOH are exception handlers (gimple_try_cleanup of
   GIMPLE_TRY) that are similar enough to be considered the same.  Currently
   this only handles handlers consisting of a single call, as that's the
   important case for C++: a destructor call for a particular object showing
   up in multiple handlers.  */

static bool
same_handler_p (gimple_seq oneh, gimple_seq twoh)
{
  gimple_stmt_iterator gsi;
  gimple ones, twos;
  unsigned int ai;

  gsi = gsi_start (oneh);
  if (!gsi_one_before_end_p (gsi))
    return false;
  ones = gsi_stmt (gsi);

  gsi = gsi_start (twoh);
  if (!gsi_one_before_end_p (gsi))
    return false;
  twos = gsi_stmt (gsi);

  if (!is_gimple_call (ones)
      || !is_gimple_call (twos)
      || gimple_call_lhs (ones)
      || gimple_call_lhs (twos)
      || gimple_call_chain (ones)
      || gimple_call_chain (twos)
      || !operand_equal_p (gimple_call_fn (ones), gimple_call_fn (twos), 0)
      || gimple_call_num_args (ones) != gimple_call_num_args (twos))
    return false;

  for (ai = 0; ai < gimple_call_num_args (ones); ++ai)
    if (!operand_equal_p (gimple_call_arg (ones, ai),
                          gimple_call_arg (twos, ai), 0))
      return false;

  return true;
}

/* Optimize
    try { A() } finally { try { ~B() } catch { ~A() } }
    try { ... } finally { ~A() }
   into
    try { A() } catch { ~B() }
    try { ~B() ... } finally { ~A() }

   This occurs frequently in C++, where A is a local variable and B is a
   temporary used in the initializer for A.  */

static void
optimize_double_finally (gimple one, gimple two)
{
  gimple oneh;
  gimple_stmt_iterator gsi;

  gsi = gsi_start (gimple_try_cleanup (one));
  if (!gsi_one_before_end_p (gsi))
    return;

  oneh = gsi_stmt (gsi);
  if (gimple_code (oneh) != GIMPLE_TRY
      || gimple_try_kind (oneh) != GIMPLE_TRY_CATCH)
    return;

  if (same_handler_p (gimple_try_cleanup (oneh), gimple_try_cleanup (two)))
    {
      gimple_seq seq = gimple_try_eval (oneh);

      gimple_try_set_cleanup (one, seq);
      gimple_try_set_kind (one, GIMPLE_TRY_CATCH);
      seq = copy_gimple_seq_and_replace_locals (seq);
      gimple_seq_add_seq (&seq, gimple_try_eval (two));
      gimple_try_set_eval (two, seq);
    }
}

/* Perform EH refactoring optimizations that are simpler to do when code
   flow has been lowered but EH structures haven't.  */

static void
refactor_eh_r (gimple_seq seq)
{
  gimple_stmt_iterator gsi;
  gimple one, two;

  one = NULL;
  two = NULL;
  gsi = gsi_start (seq);
  while (1)
    {
      one = two;
      if (gsi_end_p (gsi))
        two = NULL;
      else
        two = gsi_stmt (gsi);
      if (one
          && two
          && gimple_code (one) == GIMPLE_TRY
          && gimple_code (two) == GIMPLE_TRY
          && gimple_try_kind (one) == GIMPLE_TRY_FINALLY
          && gimple_try_kind (two) == GIMPLE_TRY_FINALLY)
        optimize_double_finally (one, two);
      if (one)
        switch (gimple_code (one))
          {
          case GIMPLE_TRY:
            refactor_eh_r (gimple_try_eval (one));
            refactor_eh_r (gimple_try_cleanup (one));
            break;
          case GIMPLE_CATCH:
            refactor_eh_r (gimple_catch_handler (one));
            break;
          case GIMPLE_EH_FILTER:
            refactor_eh_r (gimple_eh_filter_failure (one));
            break;
          default:
            break;
          }
      if (two)
        gsi_next (&gsi);
      else
        break;
    }
}

static unsigned
refactor_eh (void)
{
  refactor_eh_r (gimple_body (current_function_decl));
  return 0;
}

static bool
gate_refactor_eh (void)
{
  return flag_exceptions != 0;
}

struct gimple_opt_pass pass_refactor_eh =
{
 {
  GIMPLE_PASS,
  "ehopt",                              /* name */
  gate_refactor_eh,                     /* gate */
  refactor_eh,                          /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_TREE_EH,                           /* tv_id */
  PROP_gimple_lcf,                      /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func                        /* todo_flags_finish */
 }
};

/* At the end of gimple optimization, we can lower RESX.  */

static bool
lower_resx (basic_block bb, gimple stmt, struct pointer_map_t *mnt_map)
{
  int lp_nr;
  eh_region src_r, dst_r;
  gimple_stmt_iterator gsi;
  gimple x;
  tree fn, src_nr;
  bool ret = false;

  lp_nr = lookup_stmt_eh_lp (stmt);
  if (lp_nr != 0)
    dst_r = get_eh_region_from_lp_number (lp_nr);
  else
    dst_r = NULL;

  src_r = get_eh_region_from_number (gimple_resx_region (stmt));
  gsi = gsi_last_bb (bb);

  if (src_r == NULL)
    {
      /* We can wind up with no source region when pass_cleanup_eh shows
         that there are no entries into an eh region and deletes it, but
         then the block that contains the resx isn't removed.  This can
         happen without optimization when the switch statement created by
         lower_try_finally_switch isn't simplified to remove the eh case.

         Resolve this by expanding the resx node to an abort.  */

      fn = implicit_built_in_decls[BUILT_IN_TRAP];
      x = gimple_build_call (fn, 0);
      gsi_insert_before (&gsi, x, GSI_SAME_STMT);

      while (EDGE_COUNT (bb->succs) > 0)
        remove_edge (EDGE_SUCC (bb, 0));
    }
  else if (dst_r)
    {
      /* When we have a destination region, we resolve this by copying
         the excptr and filter values into place, and changing the edge
         to immediately after the landing pad.  */
      edge e;

      if (lp_nr < 0)
        {
          basic_block new_bb;
          void **slot;
          tree lab;

          /* We are resuming into a MUST_NOT_THROW region.  Expand a call to
             the failure decl into a new block, if needed.  */
          gcc_assert (dst_r->type == ERT_MUST_NOT_THROW);

          slot = pointer_map_contains (mnt_map, dst_r);
          if (slot == NULL)
            {
              gimple_stmt_iterator gsi2;

              new_bb = create_empty_bb (bb);
              lab = gimple_block_label (new_bb);
              gsi2 = gsi_start_bb (new_bb);

              fn = dst_r->u.must_not_throw.failure_decl;
              x = gimple_build_call (fn, 0);
              gimple_set_location (x, dst_r->u.must_not_throw.failure_loc);
              gsi_insert_after (&gsi2, x, GSI_CONTINUE_LINKING);

              slot = pointer_map_insert (mnt_map, dst_r);
              *slot = lab;
            }
          else
            {
              lab = (tree) *slot;
              new_bb = label_to_block (lab);
            }

          gcc_assert (EDGE_COUNT (bb->succs) == 0);
          e = make_edge (bb, new_bb, EDGE_FALLTHRU);
          e->count = bb->count;
          e->probability = REG_BR_PROB_BASE;
        }
      else
        {
          edge_iterator ei;
          tree dst_nr = build_int_cst (NULL, dst_r->index);

          fn = implicit_built_in_decls[BUILT_IN_EH_COPY_VALUES];
          src_nr = build_int_cst (NULL, src_r->index);
          x = gimple_build_call (fn, 2, dst_nr, src_nr);
          gsi_insert_before (&gsi, x, GSI_SAME_STMT);

          /* Update the flags for the outgoing edge.  */
          e = single_succ_edge (bb);
          gcc_assert (e->flags & EDGE_EH);
          e->flags = (e->flags & ~EDGE_EH) | EDGE_FALLTHRU;

          /* If there are no more EH users of the landing pad, delete it.  */
          FOR_EACH_EDGE (e, ei, e->dest->preds)
            if (e->flags & EDGE_EH)
              break;
          if (e == NULL)
            {
              eh_landing_pad lp = get_eh_landing_pad_from_number (lp_nr);
              remove_eh_landing_pad (lp);
            }
        }

      ret = true;
    }
  else
    {
      tree var;

      /* When we don't have a destination region, this exception escapes
         up the call chain.  We resolve this by generating a call to the
         _Unwind_Resume library function.  */

      /* The ARM EABI redefines _Unwind_Resume as __cxa_end_cleanup
         with no arguments for C++ and Java.  Check for that.  */
      if (src_r->use_cxa_end_cleanup)
        {
          fn = implicit_built_in_decls[BUILT_IN_CXA_END_CLEANUP];
          x = gimple_build_call (fn, 0);
          gsi_insert_before (&gsi, x, GSI_SAME_STMT);
        }
      else
        {
          fn = implicit_built_in_decls[BUILT_IN_EH_POINTER];
          src_nr = build_int_cst (NULL, src_r->index);
          x = gimple_build_call (fn, 1, src_nr);
          var = create_tmp_var (ptr_type_node, NULL);
          var = make_ssa_name (var, x);
          gimple_call_set_lhs (x, var);
          gsi_insert_before (&gsi, x, GSI_SAME_STMT);

          fn = implicit_built_in_decls[BUILT_IN_UNWIND_RESUME];
          x = gimple_build_call (fn, 1, var);
          gsi_insert_before (&gsi, x, GSI_SAME_STMT);
        }

      gcc_assert (EDGE_COUNT (bb->succs) == 0);
    }

  gsi_remove (&gsi, true);

  return ret;
}

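/* Sketch of the escaping case above (D.ptr_1 is a hypothetical SSA
   name): a GIMPLE_RESX in source region N with no destination region
   is replaced by

       D.ptr_1 = __builtin_eh_pointer (N);
       _Unwind_Resume (D.ptr_1);

   or, on ARM EABI targets, by a single __cxa_end_cleanup () call.  */
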
static unsigned
execute_lower_resx (void)
{
  basic_block bb;
  struct pointer_map_t *mnt_map;
  bool dominance_invalidated = false;
  bool any_rewritten = false;

  mnt_map = pointer_map_create ();

  FOR_EACH_BB (bb)
    {
      gimple last = last_stmt (bb);
      if (last && is_gimple_resx (last))
        {
          dominance_invalidated |= lower_resx (bb, last, mnt_map);
          any_rewritten = true;
        }
    }

  pointer_map_destroy (mnt_map);

  if (dominance_invalidated)
    {
      free_dominance_info (CDI_DOMINATORS);
      free_dominance_info (CDI_POST_DOMINATORS);
    }

  return any_rewritten ? TODO_update_ssa_only_virtuals : 0;
}

static bool
gate_lower_resx (void)
{
  return flag_exceptions != 0;
}

struct gimple_opt_pass pass_lower_resx =
{
 {
  GIMPLE_PASS,
  "resx",                               /* name */
  gate_lower_resx,                      /* gate */
  execute_lower_resx,                   /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_TREE_EH,                           /* tv_id */
  PROP_gimple_lcf,                      /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func | TODO_verify_flow     /* todo_flags_finish */
 }
};

/* At the end of inlining, we can lower EH_DISPATCH.  Return true when
   we have found some duplicate labels and removed some edges.  */

static bool
lower_eh_dispatch (basic_block src, gimple stmt)
{
  gimple_stmt_iterator gsi;
  int region_nr;
  eh_region r;
  tree filter, fn;
  gimple x;
  bool redirected = false;

  region_nr = gimple_eh_dispatch_region (stmt);
  r = get_eh_region_from_number (region_nr);

  gsi = gsi_last_bb (src);

  switch (r->type)
    {
    case ERT_TRY:
      {
        VEC (tree, heap) *labels = NULL;
        tree default_label = NULL;
        eh_catch c;
        edge_iterator ei;
        edge e;
        struct pointer_set_t *seen_values = pointer_set_create ();

        /* Collect the labels for a switch.  Zero the post_landing_pad
           field because we'll no longer have anything keeping these labels
           in existence and the optimizer will be free to merge these
           blocks at will.  */
        for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
          {
            tree tp_node, flt_node, lab = c->label;
            bool have_label = false;

            c->label = NULL;
            tp_node = c->type_list;
            flt_node = c->filter_list;

            if (tp_node == NULL)
              {
                default_label = lab;
                break;
              }
            do
              {
                /* Filter out duplicate labels that arise when this handler
                   is shadowed by an earlier one.  When no labels are
                   attached to the handler anymore, we remove
                   the corresponding edge and then we delete unreachable
                   blocks at the end of this pass.  */
                if (! pointer_set_contains (seen_values, TREE_VALUE (flt_node)))
                  {
                    tree t = build3 (CASE_LABEL_EXPR, void_type_node,
                                     TREE_VALUE (flt_node), NULL, lab);
                    VEC_safe_push (tree, heap, labels, t);
                    pointer_set_insert (seen_values, TREE_VALUE (flt_node));
                    have_label = true;
                  }

                tp_node = TREE_CHAIN (tp_node);
                flt_node = TREE_CHAIN (flt_node);
              }
            while (tp_node);
            if (! have_label)
              {
                remove_edge (find_edge (src, label_to_block (lab)));
                redirected = true;
              }
          }

        /* Clean up the edge flags.  */
        FOR_EACH_EDGE (e, ei, src->succs)
          {
            if (e->flags & EDGE_FALLTHRU)
              {
                /* If there was no catch-all, use the fallthru edge.  */
                if (default_label == NULL)
                  default_label = gimple_block_label (e->dest);
                e->flags &= ~EDGE_FALLTHRU;
              }
          }
        gcc_assert (default_label != NULL);

        /* Don't generate a switch if there's only a default case.
           This is common in the form of try { A; } catch (...) { B; }.  */
        if (labels == NULL)
          {
            e = single_succ_edge (src);
            e->flags |= EDGE_FALLTHRU;
          }
        else
          {
            fn = implicit_built_in_decls[BUILT_IN_EH_FILTER];
            x = gimple_build_call (fn, 1, build_int_cst (NULL, region_nr));
            filter = create_tmp_var (TREE_TYPE (TREE_TYPE (fn)), NULL);
            filter = make_ssa_name (filter, x);
            gimple_call_set_lhs (x, filter);
            gsi_insert_before (&gsi, x, GSI_SAME_STMT);

            /* Turn the default label into a default case.  */
            default_label = build3 (CASE_LABEL_EXPR, void_type_node,
                                    NULL, NULL, default_label);
            sort_case_labels (labels);

            x = gimple_build_switch_vec (filter, default_label, labels);
            gsi_insert_before (&gsi, x, GSI_SAME_STMT);

            VEC_free (tree, heap, labels);
          }
        pointer_set_destroy (seen_values);
      }
      break;

    case ERT_ALLOWED_EXCEPTIONS:
      {
        edge b_e = BRANCH_EDGE (src);
        edge f_e = FALLTHRU_EDGE (src);

        fn = implicit_built_in_decls[BUILT_IN_EH_FILTER];
        x = gimple_build_call (fn, 1, build_int_cst (NULL, region_nr));
        filter = create_tmp_var (TREE_TYPE (TREE_TYPE (fn)), NULL);
        filter = make_ssa_name (filter, x);
        gimple_call_set_lhs (x, filter);
        gsi_insert_before (&gsi, x, GSI_SAME_STMT);

        r->u.allowed.label = NULL;
        x = gimple_build_cond (EQ_EXPR, filter,
                               build_int_cst (TREE_TYPE (filter),
                                              r->u.allowed.filter),
                               NULL_TREE, NULL_TREE);
        gsi_insert_before (&gsi, x, GSI_SAME_STMT);

        b_e->flags = b_e->flags | EDGE_TRUE_VALUE;
        f_e->flags = (f_e->flags & ~EDGE_FALLTHRU) | EDGE_FALSE_VALUE;
      }
      break;

    default:
      gcc_unreachable ();
    }

  /* Replace the EH_DISPATCH with the SWITCH or COND generated above.  */
  gsi_remove (&gsi, true);
  return redirected;
}

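/* Sketch of the ERT_TRY lowering above (hypothetical names, not
   literal output):

       <eh_dispatch 1>

   becomes

       D.filter_2 = __builtin_eh_filter (1);
       switch (D.filter_2) { case 1: goto L_catch_A;
                             case 2: goto L_catch_B;
                             default: goto L_default; }

   with one case per distinct filter value still attached to a catch.  */
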
static unsigned
execute_lower_eh_dispatch (void)
{
  basic_block bb;
  bool any_rewritten = false;
  bool redirected = false;

  assign_filter_values ();

  FOR_EACH_BB (bb)
    {
      gimple last = last_stmt (bb);
      if (last && gimple_code (last) == GIMPLE_EH_DISPATCH)
        {
          redirected |= lower_eh_dispatch (bb, last);
          any_rewritten = true;
        }
    }

  if (redirected)
    delete_unreachable_blocks ();
  return any_rewritten ? TODO_update_ssa_only_virtuals : 0;
}

static bool
gate_lower_eh_dispatch (void)
{
  return cfun->eh->region_tree != NULL;
}

struct gimple_opt_pass pass_lower_eh_dispatch =
{
 {
  GIMPLE_PASS,
  "ehdisp",                             /* name */
  gate_lower_eh_dispatch,               /* gate */
  execute_lower_eh_dispatch,            /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_TREE_EH,                           /* tv_id */
  PROP_gimple_lcf,                      /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func | TODO_verify_flow     /* todo_flags_finish */
 }
};

/* Walk statements, see what regions are really referenced and remove
3260
   those that are unused.  */
3261
 
3262
static void
3263
remove_unreachable_handlers (void)
3264
{
3265
  sbitmap r_reachable, lp_reachable;
3266
  eh_region region;
3267
  eh_landing_pad lp;
3268
  basic_block bb;
3269
  int lp_nr, r_nr;
3270
 
3271
  r_reachable = sbitmap_alloc (VEC_length (eh_region, cfun->eh->region_array));
3272
  lp_reachable
3273
    = sbitmap_alloc (VEC_length (eh_landing_pad, cfun->eh->lp_array));
3274
  sbitmap_zero (r_reachable);
3275
  sbitmap_zero (lp_reachable);
3276
 
3277
  FOR_EACH_BB (bb)
3278
    {
3279
      gimple_stmt_iterator gsi = gsi_start_bb (bb);
3280
 
3281
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
3282
        {
3283
          gimple stmt = gsi_stmt (gsi);
3284
          lp_nr = lookup_stmt_eh_lp (stmt);
3285
 
3286
          /* Negative LP numbers are MUST_NOT_THROW regions which
3287
             are not considered BB enders.  */
3288
          if (lp_nr < 0)
3289
            SET_BIT (r_reachable, -lp_nr);
3290
 
3291
          /* Positive LP numbers are real landing pads, are are BB enders.  */
3292
          else if (lp_nr > 0)
3293
            {
3294
              gcc_assert (gsi_one_before_end_p (gsi));
3295
              region = get_eh_region_from_lp_number (lp_nr);
3296
              SET_BIT (r_reachable, region->index);
3297
              SET_BIT (lp_reachable, lp_nr);
3298
            }
3299
        }
3300
    }
3301
 
3302
  if (dump_file)
3303
    {
3304
      fprintf (dump_file, "Before removal of unreachable regions:\n");
3305
      dump_eh_tree (dump_file, cfun);
3306
      fprintf (dump_file, "Reachable regions: ");
3307
      dump_sbitmap_file (dump_file, r_reachable);
3308
      fprintf (dump_file, "Reachable landing pads: ");
3309
      dump_sbitmap_file (dump_file, lp_reachable);
3310
    }
3311
 
3312
  for (r_nr = 1;
3313
       VEC_iterate (eh_region, cfun->eh->region_array, r_nr, region); ++r_nr)
3314
    if (region && !TEST_BIT (r_reachable, r_nr))
3315
      {
3316
        if (dump_file)
3317
          fprintf (dump_file, "Removing unreachable region %d\n", r_nr);
3318
        remove_eh_handler (region);
3319
      }
3320
 
3321
  for (lp_nr = 1;
3322
       VEC_iterate (eh_landing_pad, cfun->eh->lp_array, lp_nr, lp); ++lp_nr)
3323
    if (lp && !TEST_BIT (lp_reachable, lp_nr))
3324
      {
3325
        if (dump_file)
3326
          fprintf (dump_file, "Removing unreachable landing pad %d\n", lp_nr);
3327
        remove_eh_landing_pad (lp);
3328
      }
3329
 
3330
  if (dump_file)
3331
    {
3332
      fprintf (dump_file, "\n\nAfter removal of unreachable regions:\n");
3333
      dump_eh_tree (dump_file, cfun);
3334
      fprintf (dump_file, "\n\n");
3335
    }
3336
 
3337
  sbitmap_free (r_reachable);
3338
  sbitmap_free (lp_reachable);
3339
 
3340
#ifdef ENABLE_CHECKING
3341
  verify_eh_tree (cfun);
3342
#endif
3343
}
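
/* Editor's note (an inference from the loop above, hedged): the sign of
   lookup_stmt_eh_lp encodes two different facts.  For example, a call
   recorded with lp_nr == -3 sits in MUST_NOT_THROW region 3, which is
   marked reachable even though no landing pad exists for it, while a
   call recorded with lp_nr == 2 marks both landing pad 2 and the
   region owning that pad.  The concrete numbers are hypothetical.  */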

/* Remove regions that do not have landing pads.  This assumes
   that remove_unreachable_handlers has already been run, and
   that we've just manipulated the landing pads since then.  */

static void
remove_unreachable_handlers_no_lp (void)
{
  eh_region r;
  int i;

  for (i = 1; VEC_iterate (eh_region, cfun->eh->region_array, i, r); ++i)
    if (r && r->landing_pads == NULL && r->type != ERT_MUST_NOT_THROW)
      {
        if (dump_file)
          fprintf (dump_file, "Removing unreachable region %d\n", i);
        remove_eh_handler (r);
      }
}
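
/* Editor's note (an inference, hedged): MUST_NOT_THROW regions are
   exempted above because statements reference them via negative
   landing-pad numbers without any landing pad ever being created, so
   the absence of a landing pad does not make such a region dead.  */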

/* Undo critical edge splitting on an EH landing pad.  Earlier, we
   optimistically split all sorts of edges, including EH edges.  The
   optimization passes in between may not have needed them; if not,
   we should undo the split.

   Recognize this case by having one EH edge incoming to the BB and
   one normal edge outgoing; BB should be empty apart from the
   post_landing_pad label.

   Note that this is slightly different from the empty handler case
   handled by cleanup_empty_eh, in that the actual handler may yet
   have actual code but the landing pad has been separated from the
   handler.  As such, cleanup_empty_eh relies on this transformation
   having been done first.  */

static bool
unsplit_eh (eh_landing_pad lp)
{
  basic_block bb = label_to_block (lp->post_landing_pad);
  gimple_stmt_iterator gsi;
  edge e_in, e_out;

  /* Quickly check the edge counts on BB for singularity.  */
  if (EDGE_COUNT (bb->preds) != 1 || EDGE_COUNT (bb->succs) != 1)
    return false;
  e_in = EDGE_PRED (bb, 0);
  e_out = EDGE_SUCC (bb, 0);

  /* Input edge must be EH and output edge must be normal.  */
  if ((e_in->flags & EDGE_EH) == 0 || (e_out->flags & EDGE_EH) != 0)
    return false;

  /* The block must be empty except for the labels and debug insns.  */
  gsi = gsi_after_labels (bb);
  if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi)))
    gsi_next_nondebug (&gsi);
  if (!gsi_end_p (gsi))
    return false;

  /* The destination block must not already have a landing pad
     for a different region.  */
  for (gsi = gsi_start_bb (e_out->dest); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple stmt = gsi_stmt (gsi);
      tree lab;
      int lp_nr;

      if (gimple_code (stmt) != GIMPLE_LABEL)
        break;
      lab = gimple_label_label (stmt);
      lp_nr = EH_LANDING_PAD_NR (lab);
      if (lp_nr && get_eh_region_from_lp_number (lp_nr) != lp->region)
        return false;
    }

  /* The new destination block must not already be a destination of
     the source block, lest we merge fallthru and eh edges and get
     all sorts of confusion.  */
  if (find_edge (e_in->src, e_out->dest))
    return false;

  /* ??? We can get degenerate phis due to cfg cleanups.  I would have
     thought this should have been cleaned up by a phicprop pass, but
     that doesn't appear to handle virtuals.  Propagate by hand.  */
  if (!gimple_seq_empty_p (phi_nodes (bb)))
    {
      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); )
        {
          gimple use_stmt, phi = gsi_stmt (gsi);
          tree lhs = gimple_phi_result (phi);
          tree rhs = gimple_phi_arg_def (phi, 0);
          use_operand_p use_p;
          imm_use_iterator iter;

          FOR_EACH_IMM_USE_STMT (use_stmt, iter, lhs)
            {
              FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
                SET_USE (use_p, rhs);
            }

          if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
            SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs) = 1;

          remove_phi_node (&gsi, true);
        }
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Unsplit EH landing pad %d to block %i.\n",
             lp->index, e_out->dest->index);

  /* Redirect the edge.  Since redirect_eh_edge_1 expects to be moving
     a successor edge, humor it.  But do the real CFG change with the
     predecessor of E_OUT in order to preserve the ordering of arguments
     to the PHI nodes in E_OUT->DEST.  */
  redirect_eh_edge_1 (e_in, e_out->dest, false);
  redirect_edge_pred (e_out, e_in->src);
  e_out->flags = e_in->flags;
  e_out->probability = e_in->probability;
  e_out->count = e_in->count;
  remove_edge (e_in);

  return true;
}
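
/* Editor's illustration (a sketch, not part of the original source):
   unsplit_eh rewrites

       BB1: throwing stmt
         |  EH edge
       BB2: post_landing_pad label (otherwise empty)
         |  normal edge
       BB3: actual handler code

   into a direct EH edge from BB1 to BB3, removing the block that an
   earlier critical-edge split had inserted between pad and handler.  */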

/* Examine each landing pad block and see if it matches unsplit_eh.  */

static bool
unsplit_all_eh (void)
{
  bool changed = false;
  eh_landing_pad lp;
  int i;

  for (i = 1; VEC_iterate (eh_landing_pad, cfun->eh->lp_array, i, lp); ++i)
    if (lp)
      changed |= unsplit_eh (lp);

  return changed;
}

/* A subroutine of cleanup_empty_eh.  Redirect all EH edges incoming
   to OLD_BB to NEW_BB; return true on success, false on failure.

   OLD_BB_OUT is the edge into NEW_BB from OLD_BB, so if we miss any
   PHI variables from OLD_BB we can pick them up from OLD_BB_OUT.
   Virtual PHIs may be deleted and marked for renaming.  */

static bool
cleanup_empty_eh_merge_phis (basic_block new_bb, basic_block old_bb,
                             edge old_bb_out, bool change_region)
{
  gimple_stmt_iterator ngsi, ogsi;
  edge_iterator ei;
  edge e;
  bitmap rename_virts;
  bitmap ophi_handled;

  FOR_EACH_EDGE (e, ei, old_bb->preds)
    redirect_edge_var_map_clear (e);

  ophi_handled = BITMAP_ALLOC (NULL);
  rename_virts = BITMAP_ALLOC (NULL);

  /* First, iterate through the PHIs on NEW_BB and set up the edge_var_map
     for the edges we're going to move.  */
  for (ngsi = gsi_start_phis (new_bb); !gsi_end_p (ngsi); gsi_next (&ngsi))
    {
      gimple ophi, nphi = gsi_stmt (ngsi);
      tree nresult, nop;

      nresult = gimple_phi_result (nphi);
      nop = gimple_phi_arg_def (nphi, old_bb_out->dest_idx);

      /* Find the corresponding PHI in OLD_BB so we can forward-propagate
         the source ssa_name.  */
      ophi = NULL;
      for (ogsi = gsi_start_phis (old_bb); !gsi_end_p (ogsi); gsi_next (&ogsi))
        {
          ophi = gsi_stmt (ogsi);
          if (gimple_phi_result (ophi) == nop)
            break;
          ophi = NULL;
        }

      /* If we did find the corresponding PHI, copy those inputs.  */
      if (ophi)
        {
          bitmap_set_bit (ophi_handled, SSA_NAME_VERSION (nop));
          FOR_EACH_EDGE (e, ei, old_bb->preds)
            {
              location_t oloc;
              tree oop;

              if ((e->flags & EDGE_EH) == 0)
                continue;
              oop = gimple_phi_arg_def (ophi, e->dest_idx);
              oloc = gimple_phi_arg_location (ophi, e->dest_idx);
              redirect_edge_var_map_add (e, nresult, oop, oloc);
            }
        }
      /* If we didn't find the PHI, but it's a VOP, remember to rename
         it later, assuming all other tests succeed.  */
      else if (!is_gimple_reg (nresult))
        bitmap_set_bit (rename_virts, SSA_NAME_VERSION (nresult));
      /* If we didn't find the PHI, and it's a real variable, we know
         from the fact that OLD_BB is tree_empty_eh_handler_p that the
         variable is unchanged from input to the block and we can simply
         re-use the input to NEW_BB from the OLD_BB_OUT edge.  */
      else
        {
          location_t nloc
            = gimple_phi_arg_location (nphi, old_bb_out->dest_idx);
          FOR_EACH_EDGE (e, ei, old_bb->preds)
            redirect_edge_var_map_add (e, nresult, nop, nloc);
        }
    }

  /* Second, verify that all PHIs from OLD_BB have been handled.  If not,
     we don't know what values from the other edges into NEW_BB to use.  */
  for (ogsi = gsi_start_phis (old_bb); !gsi_end_p (ogsi); gsi_next (&ogsi))
    {
      gimple ophi = gsi_stmt (ogsi);
      tree oresult = gimple_phi_result (ophi);
      if (!bitmap_bit_p (ophi_handled, SSA_NAME_VERSION (oresult)))
        goto fail;
    }

  /* At this point we know that the merge will succeed.  Remove the PHI
     nodes for the virtuals that we want to rename.  */
  if (!bitmap_empty_p (rename_virts))
    {
      for (ngsi = gsi_start_phis (new_bb); !gsi_end_p (ngsi); )
        {
          gimple nphi = gsi_stmt (ngsi);
          tree nresult = gimple_phi_result (nphi);
          if (bitmap_bit_p (rename_virts, SSA_NAME_VERSION (nresult)))
            {
              mark_virtual_phi_result_for_renaming (nphi);
              remove_phi_node (&ngsi, true);
            }
          else
            gsi_next (&ngsi);
        }
    }

  /* Finally, move the edges and update the PHIs.  */
  for (ei = ei_start (old_bb->preds); (e = ei_safe_edge (ei)); )
    if (e->flags & EDGE_EH)
      {
        redirect_eh_edge_1 (e, new_bb, change_region);
        redirect_edge_succ (e, new_bb);
        flush_pending_stmts (e);
      }
    else
      ei_next (&ei);

  BITMAP_FREE (ophi_handled);
  BITMAP_FREE (rename_virts);
  return true;

 fail:
  FOR_EACH_EDGE (e, ei, old_bb->preds)
    redirect_edge_var_map_clear (e);
  BITMAP_FREE (ophi_handled);
  BITMAP_FREE (rename_virts);
  return false;
}
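
/* Editor's note (hedged, based on the usual GCC edge_var_map contract):
   redirect_edge_var_map_add above only queues (result, argument,
   location) triples on each edge; nothing in the IL changes until
   flush_pending_stmts materializes the queued values as PHI arguments
   after redirect_edge_succ.  That is why the fail path can simply
   clear the per-edge maps and free the bitmaps.  */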

/* A subroutine of cleanup_empty_eh.  Move a landing pad LP from its
   old region to NEW_REGION at BB.  */

static void
cleanup_empty_eh_move_lp (basic_block bb, edge e_out,
                          eh_landing_pad lp, eh_region new_region)
{
  gimple_stmt_iterator gsi;
  eh_landing_pad *pp;

  for (pp = &lp->region->landing_pads; *pp != lp; pp = &(*pp)->next_lp)
    continue;
  *pp = lp->next_lp;

  lp->region = new_region;
  lp->next_lp = new_region->landing_pads;
  new_region->landing_pads = lp;

  /* Delete the RESX that was matched within the empty handler block.  */
  gsi = gsi_last_bb (bb);
  mark_virtual_ops_for_renaming (gsi_stmt (gsi));
  gsi_remove (&gsi, true);

  /* Clean up E_OUT for the fallthru.  */
  e_out->flags = (e_out->flags & ~EDGE_EH) | EDGE_FALLTHRU;
  e_out->probability = REG_BR_PROB_BASE;
}

/* A subroutine of cleanup_empty_eh.  Handle more complex cases of
   unsplitting than unsplit_eh was prepared to handle, e.g. when
   multiple incoming edges and phis are involved.  */

static bool
cleanup_empty_eh_unsplit (basic_block bb, edge e_out, eh_landing_pad lp)
{
  gimple_stmt_iterator gsi;
  tree lab;

  /* We really ought not have totally lost everything following
     a landing pad label.  Given that BB is empty, there had better
     be a successor.  */
  gcc_assert (e_out != NULL);

  /* The destination block must not already have a landing pad
     for a different region.  */
  lab = NULL;
  for (gsi = gsi_start_bb (e_out->dest); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple stmt = gsi_stmt (gsi);
      int lp_nr;

      if (gimple_code (stmt) != GIMPLE_LABEL)
        break;
      lab = gimple_label_label (stmt);
      lp_nr = EH_LANDING_PAD_NR (lab);
      if (lp_nr && get_eh_region_from_lp_number (lp_nr) != lp->region)
        return false;
    }

  /* Attempt to move the PHIs into the successor block.  */
  if (cleanup_empty_eh_merge_phis (e_out->dest, bb, e_out, false))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file,
                 "Unsplit EH landing pad %d to block %i "
                 "(via cleanup_empty_eh).\n",
                 lp->index, e_out->dest->index);
      return true;
    }

  return false;
}

/* Examine the block associated with LP to determine if it's an empty
   handler for its EH region.  If so, attempt to redirect EH edges to
   an outer region.  Return true if the CFG was updated in any way.  This
   is similar to jump forwarding, just across EH edges.  */

static bool
cleanup_empty_eh (eh_landing_pad lp)
{
  basic_block bb = label_to_block (lp->post_landing_pad);
  gimple_stmt_iterator gsi;
  gimple resx;
  eh_region new_region;
  edge_iterator ei;
  edge e, e_out;
  bool has_non_eh_pred;
  int new_lp_nr;

  /* There can be zero or one edges out of BB.  This is the quickest test.  */
  switch (EDGE_COUNT (bb->succs))
    {
    case 0:
      e_out = NULL;
      break;
    case 1:
      e_out = EDGE_SUCC (bb, 0);
      break;
    default:
      return false;
    }
  gsi = gsi_after_labels (bb);

  /* Make sure to skip debug statements.  */
  if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi)))
    gsi_next_nondebug (&gsi);

  /* If the block is totally empty, look for more unsplitting cases.  */
  if (gsi_end_p (gsi))
    return cleanup_empty_eh_unsplit (bb, e_out, lp);

  /* The block should consist only of a single RESX statement.  */
  resx = gsi_stmt (gsi);
  if (!is_gimple_resx (resx))
    return false;
  gcc_assert (gsi_one_before_end_p (gsi));

  /* Determine if there are non-EH edges, or resx edges into the handler.  */
  has_non_eh_pred = false;
  FOR_EACH_EDGE (e, ei, bb->preds)
    if (!(e->flags & EDGE_EH))
      has_non_eh_pred = true;

  /* Find the handler that's outer of the empty handler by looking at
     where the RESX instruction was vectored.  */
  new_lp_nr = lookup_stmt_eh_lp (resx);
  new_region = get_eh_region_from_lp_number (new_lp_nr);

  /* If there's no destination region within the current function,
     redirection is trivial via removing the throwing statements from
     the EH region, removing the EH edges, and allowing the block
     to go unreachable.  */
  if (new_region == NULL)
    {
      gcc_assert (e_out == NULL);
      for (ei = ei_start (bb->preds); (e = ei_safe_edge (ei)); )
        if (e->flags & EDGE_EH)
          {
            gimple stmt = last_stmt (e->src);
            remove_stmt_from_eh_lp (stmt);
            remove_edge (e);
          }
        else
          ei_next (&ei);
      goto succeed;
    }

  /* If the destination region is a MUST_NOT_THROW, allow the runtime
     to handle the abort and allow the blocks to go unreachable.  */
  if (new_region->type == ERT_MUST_NOT_THROW)
    {
      for (ei = ei_start (bb->preds); (e = ei_safe_edge (ei)); )
        if (e->flags & EDGE_EH)
          {
            gimple stmt = last_stmt (e->src);
            remove_stmt_from_eh_lp (stmt);
            add_stmt_to_eh_lp (stmt, new_lp_nr);
            remove_edge (e);
          }
        else
          ei_next (&ei);
      goto succeed;
    }

  /* Try to redirect the EH edges and merge the PHIs into the destination
     landing pad block.  If the merge succeeds, we'll already have redirected
     all the EH edges.  The handler itself will go unreachable if there were
     no normal edges.  */
  if (cleanup_empty_eh_merge_phis (e_out->dest, bb, e_out, true))
    goto succeed;

  /* Finally, if all input edges are EH edges, then we can (potentially)
     reduce the number of transfers from the runtime by moving the landing
     pad from the original region to the new region.  This is a win when
     we remove the last CLEANUP region along a particular exception
     propagation path.  Since nothing changes except for the region with
     which the landing pad is associated, the PHI nodes do not need to be
     adjusted at all.  */
  if (!has_non_eh_pred)
    {
      cleanup_empty_eh_move_lp (bb, e_out, lp, new_region);
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, "Empty EH handler %i moved to EH region %i.\n",
                 lp->index, new_region->index);

      /* ??? The CFG didn't change, but we may have rendered the
         old EH region unreachable.  Trigger a cleanup there.  */
      return true;
    }

  return false;

 succeed:
  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Empty EH handler %i removed.\n", lp->index);
  remove_eh_landing_pad (lp);
  return true;
}
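
/* Editor's illustration (a sketch, not part of the original source): a
   cleanup whose handler does nothing, e.g. the GIMPLE equivalent of

       try { foo (); } finally { }

   leaves a post-landing-pad block containing only a RESX.  The code
   above forwards the incoming EH edges past that block: to the outer
   region's landing pad, to a MUST_NOT_THROW region, or out of the
   function entirely when nothing in it can handle the exception.  */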

/* Do a post-order traversal of the EH region tree.  Examine each
   post_landing_pad block and see if we can eliminate it as empty.  */

static bool
cleanup_all_empty_eh (void)
{
  bool changed = false;
  eh_landing_pad lp;
  int i;

  for (i = 1; VEC_iterate (eh_landing_pad, cfun->eh->lp_array, i, lp); ++i)
    if (lp)
      changed |= cleanup_empty_eh (lp);

  return changed;
}

/* Perform cleanups and lowering of exception handling
    1) cleanup regions with handlers doing nothing are optimized out
    2) MUST_NOT_THROW regions that became dead because of 1) are optimized out
    3) Info about regions containing instructions, and regions
       reachable via local EH edges is collected
    4) EH tree is pruned for regions no longer necessary.

   TODO: Push MUST_NOT_THROW regions to the root of the EH tree.
         Unify those that have the same failure decl and locus.
*/
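
/* Editor's illustration (a hypothetical example, not from the original
   source): for C++ along the lines of

       struct S { ~S () { } };
       void f () { S s; g (); }

   the cleanup region wrapping the call to g () invokes an empty
   destructor; once that is inlined the handler does nothing but
   re-raise, so step 1) can delete the region and its RESX-only
   landing pad block, leaving g () with no EH edge inside f ().  */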

static unsigned int
execute_cleanup_eh (void)
{
  /* Do this first: unsplit_all_eh and cleanup_all_empty_eh can die
     looking up unreachable landing pads.  */
  remove_unreachable_handlers ();

  /* Watch out for the region tree vanishing because every region
     was unreachable.  */
  if (cfun->eh->region_tree && optimize)
    {
      bool changed = false;

      changed |= unsplit_all_eh ();
      changed |= cleanup_all_empty_eh ();

      if (changed)
        {
          free_dominance_info (CDI_DOMINATORS);
          free_dominance_info (CDI_POST_DOMINATORS);

          /* We delayed all basic block deletion, as we may have performed
             cleanups on EH edges while non-EH edges were still present.  */
          delete_unreachable_blocks ();

          /* We manipulated the landing pads.  Remove any region that no
             longer has a landing pad.  */
          remove_unreachable_handlers_no_lp ();

          return TODO_cleanup_cfg | TODO_update_ssa_only_virtuals;
        }
    }

  return 0;
}

static bool
gate_cleanup_eh (void)
{
  return cfun->eh != NULL && cfun->eh->region_tree != NULL;
}

struct gimple_opt_pass pass_cleanup_eh = {
  {
   GIMPLE_PASS,
   "ehcleanup",                 /* name */
   gate_cleanup_eh,             /* gate */
   execute_cleanup_eh,          /* execute */
   NULL,                        /* sub */
   NULL,                        /* next */
   0,                           /* static_pass_number */
   TV_TREE_EH,                  /* tv_id */
   PROP_gimple_lcf,             /* properties_required */
   0,                           /* properties_provided */
   0,                           /* properties_destroyed */
   0,                           /* todo_flags_start */
   TODO_dump_func               /* todo_flags_finish */
   }
};

/* Verify that the BB containing STMT as its last statement has precisely
   the edge that make_eh_edges would create.  */

bool
verify_eh_edges (gimple stmt)
{
  basic_block bb = gimple_bb (stmt);
  eh_landing_pad lp = NULL;
  int lp_nr;
  edge_iterator ei;
  edge e, eh_edge;

  lp_nr = lookup_stmt_eh_lp (stmt);
  if (lp_nr > 0)
    lp = get_eh_landing_pad_from_number (lp_nr);

  eh_edge = NULL;
  FOR_EACH_EDGE (e, ei, bb->succs)
    {
      if (e->flags & EDGE_EH)
        {
          if (eh_edge)
            {
              error ("BB %i has multiple EH edges", bb->index);
              return true;
            }
          else
            eh_edge = e;
        }
    }

  if (lp == NULL)
    {
      if (eh_edge)
        {
          error ("BB %i cannot throw but has an EH edge", bb->index);
          return true;
        }
      return false;
    }

  if (!stmt_could_throw_p (stmt))
    {
      error ("BB %i last statement has incorrectly set lp", bb->index);
      return true;
    }

  if (eh_edge == NULL)
    {
      error ("BB %i is missing an EH edge", bb->index);
      return true;
    }

  if (eh_edge->dest != label_to_block (lp->post_landing_pad))
    {
      error ("Incorrect EH edge %i->%i", bb->index, eh_edge->dest->index);
      return true;
    }

  return false;
}

/* Similarly, but handle GIMPLE_EH_DISPATCH specifically.  */

bool
verify_eh_dispatch_edge (gimple stmt)
{
  eh_region r;
  eh_catch c;
  basic_block src, dst;
  bool want_fallthru = true;
  edge_iterator ei;
  edge e, fall_edge;

  r = get_eh_region_from_number (gimple_eh_dispatch_region (stmt));
  src = gimple_bb (stmt);

  FOR_EACH_EDGE (e, ei, src->succs)
    gcc_assert (e->aux == NULL);

  switch (r->type)
    {
    case ERT_TRY:
      for (c = r->u.eh_try.first_catch; c; c = c->next_catch)
        {
          dst = label_to_block (c->label);
          e = find_edge (src, dst);
          if (e == NULL)
            {
              error ("BB %i is missing an edge", src->index);
              return true;
            }
          e->aux = (void *)e;

          /* A catch-all handler doesn't have a fallthru.  */
          if (c->type_list == NULL)
            {
              want_fallthru = false;
              break;
            }
        }
      break;

    case ERT_ALLOWED_EXCEPTIONS:
      dst = label_to_block (r->u.allowed.label);
      e = find_edge (src, dst);
      if (e == NULL)
        {
          error ("BB %i is missing an edge", src->index);
          return true;
        }
      e->aux = (void *)e;
      break;

    default:
      gcc_unreachable ();
    }

  fall_edge = NULL;
  FOR_EACH_EDGE (e, ei, src->succs)
    {
      if (e->flags & EDGE_FALLTHRU)
        {
          if (fall_edge != NULL)
            {
              error ("BB %i has too many fallthru edges", src->index);
              return true;
            }
          fall_edge = e;
        }
      else if (e->aux)
        e->aux = NULL;
      else
        {
          error ("BB %i has incorrect edge", src->index);
          return true;
        }
    }
  if ((fall_edge != NULL) ^ want_fallthru)
    {
      error ("BB %i has incorrect fallthru edge", src->index);
      return true;
    }

  return false;
}
