/* Implements exception handling.
   Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
   Contributed by Mike Stump <mrs@cygnus.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */


/* An exception is an event that can be signaled from within a
   function. This event can then be "caught" or "trapped" by the
   callers of this function. This potentially allows program flow to
   be transferred to any arbitrary code associated with a function call
   several levels up the stack.

   The intended use for this mechanism is for signaling "exceptional
   events" in an out-of-band fashion, hence its name. The C++ language
   (and many other OO-styled or functional languages) practically
   requires such a mechanism, as otherwise it becomes very difficult
   or even impossible to signal failure conditions in complex
   situations.  The traditional C++ example is when an error occurs in
   the process of constructing an object; without such a mechanism, it
   is impossible to signal that the error occurred without adding global
   state variables and error checks around every object construction.

   The act of causing this event to occur is referred to as "throwing
   an exception". (Alternate terms include "raising an exception" or
   "signaling an exception".) The term "throw" is used because control
   is returned to the callers of the function that is signaling the
   exception, and thus there is the concept of "throwing" the
   exception up the call stack.

   [ Add updated documentation on how to use this.  ]  */
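/* As a concrete illustration of the mechanism described above, consider
   this C++ fragment (an editorial sketch, not part of this file; Widget
   and widget_error are made-up names):

       void make_widget (void)
       {
         Widget w;                  -- construction may fail internally
         if (!w.valid ())
           throw widget_error ();   -- unwinds to a caller's handler
       }

       void caller (void)
       {
         try { make_widget (); }
         catch (widget_error &e) { ... recover ... }
       }

   The code below builds the region trees, landing pads, and dispatch
   tables that let such a throw transfer control several frames up the
   call stack.  */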
 

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "function.h"
#include "expr.h"
#include "libfuncs.h"
#include "insn-config.h"
#include "except.h"
#include "integrate.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "output.h"
#include "dwarf2asm.h"
#include "dwarf2out.h"
#include "dwarf2.h"
#include "toplev.h"
#include "hashtab.h"
#include "intl.h"
#include "ggc.h"
#include "tm_p.h"
#include "target.h"
#include "langhooks.h"
#include "cgraph.h"
#include "diagnostic.h"
#include "tree-pass.h"
#include "timevar.h"

/* Provide defaults for stuff that may not be defined when using
   sjlj exceptions.  */
#ifndef EH_RETURN_DATA_REGNO
#define EH_RETURN_DATA_REGNO(N) INVALID_REGNUM
#endif


/* Protect cleanup actions with must-not-throw regions, with a call
   to the given failure handler.  */
tree (*lang_protect_cleanup_actions) (void);

/* Return true if type A catches type B.  */
int (*lang_eh_type_covers) (tree a, tree b);

/* Map a type to a runtime object to match type.  */
tree (*lang_eh_runtime_type) (tree);

/* A hash table of label to region number.  */

struct ehl_map_entry GTY(())
{
  rtx label;
  struct eh_region *region;
};

static GTY(()) int call_site_base;
static GTY ((param_is (union tree_node)))
  htab_t type_to_runtime_map;

/* Describe the SjLj_Function_Context structure.  */
static GTY(()) tree sjlj_fc_type_node;
static int sjlj_fc_call_site_ofs;
static int sjlj_fc_data_ofs;
static int sjlj_fc_personality_ofs;
static int sjlj_fc_lsda_ofs;
static int sjlj_fc_jbuf_ofs;

/* Describes one exception region.  */
struct eh_region GTY(())
{
  /* The immediately surrounding region.  */
  struct eh_region *outer;

  /* The list of immediately contained regions.  */
  struct eh_region *inner;
  struct eh_region *next_peer;

  /* An identifier for this region.  */
  int region_number;

  /* When a region is deleted, its parents inherit the REG_EH_REGION
     numbers already assigned.  */
  bitmap aka;

  /* Each region does exactly one thing.  */
  enum eh_region_type
  {
    ERT_UNKNOWN = 0,
    ERT_CLEANUP,
    ERT_TRY,
    ERT_CATCH,
    ERT_ALLOWED_EXCEPTIONS,
    ERT_MUST_NOT_THROW,
    ERT_THROW
  } type;

  /* Holds the action to perform based on the preceding type.  */
  union eh_region_u {
    /* A list of catch blocks, a surrounding try block,
       and the label for continuing after a catch.  */
    struct eh_region_u_try {
      struct eh_region *catch;
      struct eh_region *last_catch;
    } GTY ((tag ("ERT_TRY"))) try;

    /* The list through the catch handlers, the list of type objects
       matched, and the list of associated filters.  */
    struct eh_region_u_catch {
      struct eh_region *next_catch;
      struct eh_region *prev_catch;
      tree type_list;
      tree filter_list;
    } GTY ((tag ("ERT_CATCH"))) catch;

    /* A tree_list of allowed types.  */
    struct eh_region_u_allowed {
      tree type_list;
      int filter;
    } GTY ((tag ("ERT_ALLOWED_EXCEPTIONS"))) allowed;

    /* The type given by a call to "throw foo();", or discovered
       for a throw.  */
    struct eh_region_u_throw {
      tree type;
    } GTY ((tag ("ERT_THROW"))) throw;

    /* Retain the cleanup expression even after expansion so that
       we can match up fixup regions.  */
    struct eh_region_u_cleanup {
      struct eh_region *prev_try;
    } GTY ((tag ("ERT_CLEANUP"))) cleanup;
  } GTY ((desc ("%0.type"))) u;

  /* Entry point for this region's handler before landing pads are built.  */
  rtx label;
  tree tree_label;

  /* Entry point for this region's handler from the runtime eh library.  */
  rtx landing_pad;

  /* Entry point for this region's handler from an inner region.  */
  rtx post_landing_pad;

  /* The RESX insn for handing off control to the next outermost handler,
     if appropriate.  */
  rtx resume;

  /* True if something in this region may throw.  */
  unsigned may_contain_throw : 1;
};

typedef struct eh_region *eh_region;

struct call_site_record GTY(())
{
  rtx landing_pad;
  int action;
};

DEF_VEC_P(eh_region);
DEF_VEC_ALLOC_P(eh_region, gc);

/* Used to save exception status for each function.  */
struct eh_status GTY(())
{
  /* The tree of all regions for this function.  */
  struct eh_region *region_tree;

  /* The same information as an indexable array.  */
  VEC(eh_region,gc) *region_array;

  /* The most recently open region.  */
  struct eh_region *cur_region;

  /* This is the region for which we are processing catch blocks.  */
  struct eh_region *try_region;

  rtx filter;
  rtx exc_ptr;

  int built_landing_pads;
  int last_region_number;

  VEC(tree,gc) *ttype_data;
  varray_type ehspec_data;
  varray_type action_record_data;

  htab_t GTY ((param_is (struct ehl_map_entry))) exception_handler_label_map;

  struct call_site_record * GTY ((length ("%h.call_site_data_used")))
    call_site_data;
  int call_site_data_used;
  int call_site_data_size;

  rtx ehr_stackadj;
  rtx ehr_handler;
  rtx ehr_label;

  rtx sjlj_fc;
  rtx sjlj_exit_after;

  htab_t GTY((param_is (struct throw_stmt_node))) throw_stmt_table;
};


static int t2r_eq (const void *, const void *);
static hashval_t t2r_hash (const void *);
static void add_type_for_runtime (tree);
static tree lookup_type_for_runtime (tree);

static void remove_unreachable_regions (rtx);

static int ttypes_filter_eq (const void *, const void *);
static hashval_t ttypes_filter_hash (const void *);
static int ehspec_filter_eq (const void *, const void *);
static hashval_t ehspec_filter_hash (const void *);
static int add_ttypes_entry (htab_t, tree);
static int add_ehspec_entry (htab_t, htab_t, tree);
static void assign_filter_values (void);
static void build_post_landing_pads (void);
static void connect_post_landing_pads (void);
static void dw2_build_landing_pads (void);

struct sjlj_lp_info;
static bool sjlj_find_directly_reachable_regions (struct sjlj_lp_info *);
static void sjlj_assign_call_site_values (rtx, struct sjlj_lp_info *);
static void sjlj_mark_call_sites (struct sjlj_lp_info *);
static void sjlj_emit_function_enter (rtx);
static void sjlj_emit_function_exit (void);
static void sjlj_emit_dispatch_table (rtx, struct sjlj_lp_info *);
static void sjlj_build_landing_pads (void);

static hashval_t ehl_hash (const void *);
static int ehl_eq (const void *, const void *);
static void add_ehl_entry (rtx, struct eh_region *);
static void remove_exception_handler_label (rtx);
static void remove_eh_handler (struct eh_region *);
static int for_each_eh_label_1 (void **, void *);

/* The return value of reachable_next_level.  */
enum reachable_code
{
  /* The given exception is not processed by the given region.  */
  RNL_NOT_CAUGHT,
  /* The given exception may need processing by the given region.  */
  RNL_MAYBE_CAUGHT,
  /* The given exception is completely processed by the given region.  */
  RNL_CAUGHT,
  /* The given exception is completely processed by the runtime.  */
  RNL_BLOCKED
};

struct reachable_info;
static enum reachable_code reachable_next_level (struct eh_region *, tree,
                                                 struct reachable_info *);

static int action_record_eq (const void *, const void *);
static hashval_t action_record_hash (const void *);
static int add_action_record (htab_t, int, int);
static int collect_one_action_chain (htab_t, struct eh_region *);
static int add_call_site (rtx, int);

static void push_uleb128 (varray_type *, unsigned int);
static void push_sleb128 (varray_type *, int);
#ifndef HAVE_AS_LEB128
static int dw2_size_of_call_site_table (void);
static int sjlj_size_of_call_site_table (void);
#endif
static void dw2_output_call_site_table (void);
static void sjlj_output_call_site_table (void);


/* Routine to see if exception handling is turned on.
   DO_WARN is nonzero if we want to inform the user that exception
   handling is turned off.

   This is used to ensure that -fexceptions has been specified if the
   compiler tries to use any exception-specific functions.  */

int
doing_eh (int do_warn)
{
  if (! flag_exceptions)
    {
      static int warned = 0;
      if (! warned && do_warn)
        {
          error ("exception handling disabled, use -fexceptions to enable");
          warned = 1;
        }
      return 0;
    }
  return 1;
}

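/* One-time initialization of the exception handling machinery: set up
   the type-to-runtime-object hash table and, when sjlj exceptions are
   in use, lay out the SjLj_Function_Context record type.  */
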
void
init_eh (void)
{
  if (! flag_exceptions)
    return;

  type_to_runtime_map = htab_create_ggc (31, t2r_hash, t2r_eq, NULL);

  /* Create the SjLj_Function_Context structure.  This should match
     the definition in unwind-sjlj.c.  */
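  /* Roughly, the record laid out below corresponds to this C declaration
     (an editorial sketch; the authoritative definition lives in
     unwind-sjlj.c, and the size and alignment of the jump buffer are
     target-dependent):

         struct SjLj_Function_Context
         {
           struct SjLj_Function_Context *__prev;
           int __call_site;
           _Unwind_Word __data[4];
           void *__personality;
           void *__lsda;
           void *__jbuf[JBUF_WORDS];   -- size chosen per target below
         };  */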
  if (USING_SJLJ_EXCEPTIONS)
    {
      tree f_jbuf, f_per, f_lsda, f_prev, f_cs, f_data, tmp;

      sjlj_fc_type_node = lang_hooks.types.make_type (RECORD_TYPE);

      f_prev = build_decl (FIELD_DECL, get_identifier ("__prev"),
                           build_pointer_type (sjlj_fc_type_node));
      DECL_FIELD_CONTEXT (f_prev) = sjlj_fc_type_node;

      f_cs = build_decl (FIELD_DECL, get_identifier ("__call_site"),
                         integer_type_node);
      DECL_FIELD_CONTEXT (f_cs) = sjlj_fc_type_node;

      tmp = build_index_type (build_int_cst (NULL_TREE, 4 - 1));
      tmp = build_array_type (lang_hooks.types.type_for_mode (word_mode, 1),
                              tmp);
      f_data = build_decl (FIELD_DECL, get_identifier ("__data"), tmp);
      DECL_FIELD_CONTEXT (f_data) = sjlj_fc_type_node;

      f_per = build_decl (FIELD_DECL, get_identifier ("__personality"),
                          ptr_type_node);
      DECL_FIELD_CONTEXT (f_per) = sjlj_fc_type_node;

      f_lsda = build_decl (FIELD_DECL, get_identifier ("__lsda"),
                           ptr_type_node);
      DECL_FIELD_CONTEXT (f_lsda) = sjlj_fc_type_node;

#ifdef DONT_USE_BUILTIN_SETJMP
#ifdef JMP_BUF_SIZE
      tmp = build_int_cst (NULL_TREE, JMP_BUF_SIZE - 1);
#else
      /* This should be large enough for most systems; if it is not,
         JMP_BUF_SIZE should be defined with the proper value.  It will
         also tend to be larger than necessary for most systems; a more
         carefully tuned port will define JMP_BUF_SIZE.  */
      tmp = build_int_cst (NULL_TREE, FIRST_PSEUDO_REGISTER + 2 - 1);
#endif
#else
      /* builtin_setjmp takes a pointer to 5 words.  */
      tmp = build_int_cst (NULL_TREE, 5 * BITS_PER_WORD / POINTER_SIZE - 1);
#endif
      tmp = build_index_type (tmp);
      tmp = build_array_type (ptr_type_node, tmp);
      f_jbuf = build_decl (FIELD_DECL, get_identifier ("__jbuf"), tmp);
#ifdef DONT_USE_BUILTIN_SETJMP
      /* We don't know what alignment requirements the runtime's
         jmp_buf has.  Overestimate.  */
      DECL_ALIGN (f_jbuf) = BIGGEST_ALIGNMENT;
      DECL_USER_ALIGN (f_jbuf) = 1;
#endif
      DECL_FIELD_CONTEXT (f_jbuf) = sjlj_fc_type_node;

      TYPE_FIELDS (sjlj_fc_type_node) = f_prev;
      TREE_CHAIN (f_prev) = f_cs;
      TREE_CHAIN (f_cs) = f_data;
      TREE_CHAIN (f_data) = f_per;
      TREE_CHAIN (f_per) = f_lsda;
      TREE_CHAIN (f_lsda) = f_jbuf;

      layout_type (sjlj_fc_type_node);

      /* Cache the interesting field offsets so that we have
         easy access from rtl.  */
      sjlj_fc_call_site_ofs
        = (tree_low_cst (DECL_FIELD_OFFSET (f_cs), 1)
           + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_cs), 1) / BITS_PER_UNIT);
      sjlj_fc_data_ofs
        = (tree_low_cst (DECL_FIELD_OFFSET (f_data), 1)
           + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_data), 1) / BITS_PER_UNIT);
      sjlj_fc_personality_ofs
        = (tree_low_cst (DECL_FIELD_OFFSET (f_per), 1)
           + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_per), 1) / BITS_PER_UNIT);
      sjlj_fc_lsda_ofs
        = (tree_low_cst (DECL_FIELD_OFFSET (f_lsda), 1)
           + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_lsda), 1) / BITS_PER_UNIT);
      sjlj_fc_jbuf_ofs
        = (tree_low_cst (DECL_FIELD_OFFSET (f_jbuf), 1)
           + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_jbuf), 1) / BITS_PER_UNIT);
    }
}

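/* Allocate and zero the per-function exception handling state.  */
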
void
init_eh_for_function (void)
{
  cfun->eh = ggc_alloc_cleared (sizeof (struct eh_status));
}

/* Routines to generate the exception tree somewhat directly.
   These are used from tree-eh.c when processing exception-related
   nodes during tree optimization.  */

static struct eh_region *
gen_eh_region (enum eh_region_type type, struct eh_region *outer)
{
  struct eh_region *new;

#ifdef ENABLE_CHECKING
  gcc_assert (doing_eh (0));
#endif

  /* Insert a new blank region as a leaf in the tree.  */
  new = ggc_alloc_cleared (sizeof (*new));
  new->type = type;
  new->outer = outer;
  if (outer)
    {
      new->next_peer = outer->inner;
      outer->inner = new;
    }
  else
    {
      new->next_peer = cfun->eh->region_tree;
      cfun->eh->region_tree = new;
    }

  new->region_number = ++cfun->eh->last_region_number;

  return new;
}

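/* Public wrappers around gen_eh_region that create a region of each
   specific type and fill in its type-specific data.  */
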
struct eh_region *
gen_eh_region_cleanup (struct eh_region *outer, struct eh_region *prev_try)
{
  struct eh_region *cleanup = gen_eh_region (ERT_CLEANUP, outer);
  cleanup->u.cleanup.prev_try = prev_try;
  return cleanup;
}

struct eh_region *
gen_eh_region_try (struct eh_region *outer)
{
  return gen_eh_region (ERT_TRY, outer);
}

struct eh_region *
gen_eh_region_catch (struct eh_region *t, tree type_or_list)
{
  struct eh_region *c, *l;
  tree type_list, type_node;

  /* Make sure we always end up with a type list, to normalize further
     processing, then register each type against the runtime types map.  */
  type_list = type_or_list;
  if (type_or_list)
    {
      if (TREE_CODE (type_or_list) != TREE_LIST)
        type_list = tree_cons (NULL_TREE, type_or_list, NULL_TREE);

      type_node = type_list;
      for (; type_node; type_node = TREE_CHAIN (type_node))
        add_type_for_runtime (TREE_VALUE (type_node));
    }

  c = gen_eh_region (ERT_CATCH, t->outer);
  c->u.catch.type_list = type_list;
  l = t->u.try.last_catch;
  c->u.catch.prev_catch = l;
  if (l)
    l->u.catch.next_catch = c;
  else
    t->u.try.catch = c;
  t->u.try.last_catch = c;

  return c;
}

struct eh_region *
gen_eh_region_allowed (struct eh_region *outer, tree allowed)
{
  struct eh_region *region = gen_eh_region (ERT_ALLOWED_EXCEPTIONS, outer);
  region->u.allowed.type_list = allowed;

  for (; allowed ; allowed = TREE_CHAIN (allowed))
    add_type_for_runtime (TREE_VALUE (allowed));

  return region;
}

struct eh_region *
gen_eh_region_must_not_throw (struct eh_region *outer)
{
  return gen_eh_region (ERT_MUST_NOT_THROW, outer);
}

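/* Simple accessors for the region fields used by the tree-level EH code.  */
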
int
get_eh_region_number (struct eh_region *region)
{
  return region->region_number;
}

bool
get_eh_region_may_contain_throw (struct eh_region *region)
{
  return region->may_contain_throw;
}

tree
get_eh_region_tree_label (struct eh_region *region)
{
  return region->tree_label;
}

void
set_eh_region_tree_label (struct eh_region *region, tree lab)
{
  region->tree_label = lab;
}

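/* Expand a RESX_EXPR: emit the RESX jump insn for the region named by
   the expression's operand, and remember it in the region.  */
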
void
expand_resx_expr (tree exp)
{
  int region_nr = TREE_INT_CST_LOW (TREE_OPERAND (exp, 0));
  struct eh_region *reg = VEC_index (eh_region,
                                     cfun->eh->region_array, region_nr);

  gcc_assert (!reg->resume);
  reg->resume = emit_jump_insn (gen_rtx_RESX (VOIDmode, region_nr));
  emit_barrier ();
}

/* Note that the current EH region (if any) may contain a throw, or a
   call to a function which itself may contain a throw.  */

void
note_eh_region_may_contain_throw (struct eh_region *region)
{
  while (region && !region->may_contain_throw)
    {
      region->may_contain_throw = 1;
      region = region->outer;
    }
}

void
note_current_region_may_contain_throw (void)
{
  note_eh_region_may_contain_throw (cfun->eh->cur_region);
}


/* Return an rtl expression for a pointer to the exception object
   within a handler.  */

rtx
get_exception_pointer (struct function *fun)
{
  rtx exc_ptr = fun->eh->exc_ptr;
  if (fun == cfun && ! exc_ptr)
    {
      exc_ptr = gen_reg_rtx (ptr_mode);
      fun->eh->exc_ptr = exc_ptr;
    }
  return exc_ptr;
}

/* Return an rtl expression for the exception dispatch filter
   within a handler.  */

rtx
get_exception_filter (struct function *fun)
{
  rtx filter = fun->eh->filter;
  if (fun == cfun && ! filter)
    {
      filter = gen_reg_rtx (targetm.eh_return_filter_mode ());
      fun->eh->filter = filter;
    }
  return filter;
}

/* This section is for the exception handling specific optimization pass.  */

/* Provide random access into the exception region tree.  */

void
collect_eh_region_array (void)
{
  struct eh_region *i;

  i = cfun->eh->region_tree;
  if (! i)
    return;

  VEC_safe_grow (eh_region, gc, cfun->eh->region_array,
                 cfun->eh->last_region_number + 1);
  VEC_replace (eh_region, cfun->eh->region_array, 0, 0);

  while (1)
    {
      VEC_replace (eh_region, cfun->eh->region_array, i->region_number, i);

      /* If there are sub-regions, process them.  */
      if (i->inner)
        i = i->inner;
      /* If there are peers, process them.  */
      else if (i->next_peer)
        i = i->next_peer;
      /* Otherwise, step back up the tree to the next peer.  */
      else
        {
          do {
            i = i->outer;
            if (i == NULL)
              return;
          } while (i->next_peer == NULL);
          i = i->next_peer;
        }
    }
}

/* Remove all regions whose labels are not reachable from insns.  */

static void
remove_unreachable_regions (rtx insns)
{
  int i, *uid_region_num;
  bool *reachable;
  struct eh_region *r;
  rtx insn;

  uid_region_num = xcalloc (get_max_uid (), sizeof(int));
  reachable = xcalloc (cfun->eh->last_region_number + 1, sizeof(bool));

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      r = VEC_index (eh_region, cfun->eh->region_array, i);
      if (!r || r->region_number != i)
        continue;

      if (r->resume)
        {
          gcc_assert (!uid_region_num[INSN_UID (r->resume)]);
          uid_region_num[INSN_UID (r->resume)] = i;
        }
      if (r->label)
        {
          gcc_assert (!uid_region_num[INSN_UID (r->label)]);
          uid_region_num[INSN_UID (r->label)] = i;
        }
    }

  for (insn = insns; insn; insn = NEXT_INSN (insn))
    reachable[uid_region_num[INSN_UID (insn)]] = true;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      r = VEC_index (eh_region, cfun->eh->region_array, i);
      if (r && r->region_number == i && !reachable[i])
        {
          bool kill_it = true;
          switch (r->type)
            {
            case ERT_THROW:
              /* Don't remove ERT_THROW regions if their outer region
                 is reachable.  */
              if (r->outer && reachable[r->outer->region_number])
                kill_it = false;
              break;

            case ERT_MUST_NOT_THROW:
              /* MUST_NOT_THROW regions are implementable solely in the
                 runtime, but their existence continues to affect calls
                 within that region.  Never delete them here.  */
              kill_it = false;
              break;

            case ERT_TRY:
              {
                /* A TRY region is reachable if any of its CATCH regions
                   are reachable.  */
                struct eh_region *c;
                for (c = r->u.try.catch; c ; c = c->u.catch.next_catch)
                  if (reachable[c->region_number])
                    {
                      kill_it = false;
                      break;
                    }
                break;
              }

            default:
              break;
            }

          if (kill_it)
            remove_eh_handler (r);
        }
    }

  free (reachable);
  free (uid_region_num);
}

/* Set up EH labels for RTL.  */

void
convert_from_eh_region_ranges (void)
{
  rtx insns = get_insns ();
  int i, n = cfun->eh->last_region_number;

  /* Most of the work is already done at the tree level.  All we need
     to do is collect the rtl labels that correspond to the tree labels
     we allocated earlier.  */
  for (i = 1; i <= n; ++i)
    {
      struct eh_region *region;

      region = VEC_index (eh_region, cfun->eh->region_array, i);
      if (region && region->tree_label)
        region->label = DECL_RTL_IF_SET (region->tree_label);
    }

  remove_unreachable_regions (insns);
}

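/* Record LABEL as the handler entry for REGION in the exception handler
   label map, and keep the label from being deleted.  */
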
static void
add_ehl_entry (rtx label, struct eh_region *region)
{
  struct ehl_map_entry **slot, *entry;

  LABEL_PRESERVE_P (label) = 1;

  entry = ggc_alloc (sizeof (*entry));
  entry->label = label;
  entry->region = region;

  slot = (struct ehl_map_entry **)
    htab_find_slot (cfun->eh->exception_handler_label_map, entry, INSERT);

  /* Before landing pad creation, each exception handler has its own
     label.  After landing pad creation, the exception handlers may
     share landing pads.  This is ok, since maybe_remove_eh_handler
     only requires the 1-1 mapping before landing pad creation.  */
  gcc_assert (!*slot || cfun->eh->built_landing_pads);

  *slot = entry;
}

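/* Rebuild cfun->eh->exception_handler_label_map from the region array,
   using the landing pad labels once those have been built.  */
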
void
find_exception_handler_labels (void)
{
  int i;

  if (cfun->eh->exception_handler_label_map)
    htab_empty (cfun->eh->exception_handler_label_map);
  else
    {
      /* ??? The expansion factor here (3/2) must be greater than the htab
         occupancy factor (4/3) to avoid unnecessary resizing.  */
      cfun->eh->exception_handler_label_map
        = htab_create_ggc (cfun->eh->last_region_number * 3 / 2,
                           ehl_hash, ehl_eq, NULL);
    }

  if (cfun->eh->region_tree == NULL)
    return;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region;
      rtx lab;

      region = VEC_index (eh_region, cfun->eh->region_array, i);
      if (! region || region->region_number != i)
        continue;
      if (cfun->eh->built_landing_pads)
        lab = region->landing_pad;
      else
        lab = region->label;

      if (lab)
        add_ehl_entry (lab, region);
    }

  /* For sjlj exceptions, we need the return label to remain live until
     after landing pad generation.  */
  if (USING_SJLJ_EXCEPTIONS && ! cfun->eh->built_landing_pads)
    add_ehl_entry (return_label, NULL);
}

/* Returns true if the current function has exception handling regions.  */

bool
current_function_has_exception_handlers (void)
{
  int i;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region;

      region = VEC_index (eh_region, cfun->eh->region_array, i);
      if (region
          && region->region_number == i
          && region->type != ERT_THROW)
        return true;
    }

  return false;
}

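/* Create a shallow copy of region O, giving the copy a region number
   offset by the current function's last region number, and record it
   in the region array.  */
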
static struct eh_region *
duplicate_eh_region_1 (struct eh_region *o)
{
  struct eh_region *n = ggc_alloc_cleared (sizeof (struct eh_region));

  *n = *o;

  n->region_number = o->region_number + cfun->eh->last_region_number;
  VEC_replace (eh_region, cfun->eh->region_array, n->region_number, n);
  gcc_assert (!o->aka);

  return n;
}

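/* Fix up the intra-tree pointers of the copy of region O made by
   duplicate_eh_region_1, remapping each of them through N_ARRAY.  */
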
static void
duplicate_eh_region_2 (struct eh_region *o, struct eh_region **n_array,
                       struct eh_region *prev_try)
{
  struct eh_region *n = n_array[o->region_number];

  switch (n->type)
    {
    case ERT_TRY:
      if (o->u.try.catch)
        n->u.try.catch = n_array[o->u.try.catch->region_number];
      if (o->u.try.last_catch)
        n->u.try.last_catch = n_array[o->u.try.last_catch->region_number];
      break;

    case ERT_CATCH:
      if (o->u.catch.next_catch)
        n->u.catch.next_catch = n_array[o->u.catch.next_catch->region_number];
      if (o->u.catch.prev_catch)
        n->u.catch.prev_catch = n_array[o->u.catch.prev_catch->region_number];
      break;

    case ERT_CLEANUP:
      if (o->u.cleanup.prev_try)
        n->u.cleanup.prev_try = n_array[o->u.cleanup.prev_try->region_number];
      else
        n->u.cleanup.prev_try = prev_try;
      break;

    default:
      break;
    }

  if (o->outer)
    n->outer = n_array[o->outer->region_number];
  if (o->inner)
    n->inner = n_array[o->inner->region_number];
  if (o->next_peer)
    n->next_peer = n_array[o->next_peer->region_number];
}

/* Duplicate the EH regions of IFUN into the current function, root the
   tree in OUTER_REGION, and remap labels using the MAP callback.  */
int
duplicate_eh_regions (struct function *ifun, duplicate_eh_regions_map map,
                      void *data, int outer_region)
{
  int ifun_last_region_number = ifun->eh->last_region_number;
  struct eh_region **n_array, *root, *cur, *prev_try;
  int i;

  if (ifun_last_region_number == 0 || !ifun->eh->region_tree)
    return 0;

  n_array = xcalloc (ifun_last_region_number + 1, sizeof (*n_array));
  VEC_safe_grow (eh_region, gc, cfun->eh->region_array,
                 cfun->eh->last_region_number + 1 + ifun_last_region_number);

  /* We might have created a new cfun->eh->region_array, so zero out the
     nonexistent region 0.  */
  VEC_replace (eh_region, cfun->eh->region_array, 0, 0);

  for (i = cfun->eh->last_region_number + 1;
       i < cfun->eh->last_region_number + 1 + ifun_last_region_number; i++)
    VEC_replace (eh_region, cfun->eh->region_array, i, 0);

  /* Search for the containing ERT_TRY region to fix up
     the prev_try short-cuts for ERT_CLEANUP regions.  */
  prev_try = NULL;
  if (outer_region > 0)
    for (prev_try = VEC_index (eh_region, cfun->eh->region_array, outer_region);
         prev_try && prev_try->type != ERT_TRY;
         prev_try = prev_try->outer)
      ;

  for (i = 1; i <= ifun_last_region_number; ++i)
    {
      cur = VEC_index (eh_region, ifun->eh->region_array, i);
      if (!cur || cur->region_number != i)
        continue;
      n_array[i] = duplicate_eh_region_1 (cur);
      if (cur->tree_label)
        {
          tree newlabel = map (cur->tree_label, data);
          n_array[i]->tree_label = newlabel;
        }
      else
        n_array[i]->tree_label = NULL;
    }
  for (i = 1; i <= ifun_last_region_number; ++i)
    {
      cur = VEC_index (eh_region, ifun->eh->region_array, i);
      if (!cur || cur->region_number != i)
        continue;
      duplicate_eh_region_2 (cur, n_array, prev_try);
    }

  root = n_array[ifun->eh->region_tree->region_number];
  gcc_assert (root->outer == NULL);
  if (outer_region > 0)
    {
      struct eh_region *cur
        = VEC_index (eh_region, cfun->eh->region_array, outer_region);
      struct eh_region *p = cur->inner;

      if (p)
        {
          while (p->next_peer)
            p = p->next_peer;
          p->next_peer = root;
        }
      else
        cur->inner = root;
      for (i = 1; i <= ifun_last_region_number; ++i)
        if (n_array[i] && n_array[i]->outer == NULL)
          n_array[i]->outer = cur;
    }
  else
    {
      struct eh_region *p = cfun->eh->region_tree;
      if (p)
        {
          while (p->next_peer)
            p = p->next_peer;
          p->next_peer = root;
        }
      else
        cfun->eh->region_tree = root;
    }

  free (n_array);

  i = cfun->eh->last_region_number;
  cfun->eh->last_region_number = i + ifun_last_region_number;

  return i;
}

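/* Hashtable callbacks for type_to_runtime_map.  Each entry is a
   TREE_LIST whose TREE_PURPOSE is the type and whose TREE_VALUE is the
   matching runtime object.  */
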
static int
t2r_eq (const void *pentry, const void *pdata)
{
  tree entry = (tree) pentry;
  tree data = (tree) pdata;

  return TREE_PURPOSE (entry) == data;
}

static hashval_t
t2r_hash (const void *pentry)
{
  tree entry = (tree) pentry;
  return TREE_HASH (TREE_PURPOSE (entry));
}

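/* Enter TYPE into type_to_runtime_map, creating the runtime object via
   the lang_eh_runtime_type hook the first time TYPE is seen.  */
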
static void
add_type_for_runtime (tree type)
{
  tree *slot;

  slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
                                            TREE_HASH (type), INSERT);
  if (*slot == NULL)
    {
      tree runtime = (*lang_eh_runtime_type) (type);
      *slot = tree_cons (type, runtime, NULL_TREE);
    }
}

static tree
lookup_type_for_runtime (tree type)
{
  tree *slot;

  slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
                                            TREE_HASH (type), NO_INSERT);

  /* We should have always inserted the data earlier.  */
  return TREE_VALUE (*slot);
}


/* Represent an entry in @TTypes for either catch actions
   or exception filter actions.  */
struct ttypes_filter GTY(())
{
  tree t;
  int filter;
};

/* Compare ENTRY (a ttypes_filter entry in the hash table) with DATA
   (a tree) for a @TTypes type node we are thinking about adding.  */

static int
ttypes_filter_eq (const void *pentry, const void *pdata)
{
  const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
  tree data = (tree) pdata;

  return entry->t == data;
}

static hashval_t
ttypes_filter_hash (const void *pentry)
{
  const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
  return TREE_HASH (entry->t);
}

/* Compare ENTRY with DATA (both struct ttypes_filter) for a @TTypes
   exception specification list we are thinking about adding.  */
/* ??? Currently we use the type lists in the order given.  Someone
   should put these in some canonical order.  */

static int
ehspec_filter_eq (const void *pentry, const void *pdata)
{
  const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
  const struct ttypes_filter *data = (const struct ttypes_filter *) pdata;

  return type_list_equal (entry->t, data->t);
}

/* Hash function for exception specification lists.  */

static hashval_t
ehspec_filter_hash (const void *pentry)
{
  const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
  hashval_t h = 0;
  tree list;

  for (list = entry->t; list ; list = TREE_CHAIN (list))
    h = (h << 5) + (h >> 27) + TREE_HASH (TREE_VALUE (list));
  return h;
}

/* Add TYPE (which may be NULL) to cfun->eh->ttype_data, using TTYPES_HASH
   to speed up the search.  Return the filter value to be used.  */

static int
add_ttypes_entry (htab_t ttypes_hash, tree type)
{
  struct ttypes_filter **slot, *n;

  slot = (struct ttypes_filter **)
    htab_find_slot_with_hash (ttypes_hash, type, TREE_HASH (type), INSERT);

  if ((n = *slot) == NULL)
    {
      /* Filter value is a 1-based table index.  */

      n = xmalloc (sizeof (*n));
      n->t = type;
      n->filter = VEC_length (tree, cfun->eh->ttype_data) + 1;
      *slot = n;

      VEC_safe_push (tree, gc, cfun->eh->ttype_data, type);
    }

  return n->filter;
}

/* Add LIST to cfun->eh->ehspec_data, using EHSPEC_HASH and TTYPES_HASH
   to speed up the search.  Return the filter value to be used.  */

static int
add_ehspec_entry (htab_t ehspec_hash, htab_t ttypes_hash, tree list)
{
  struct ttypes_filter **slot, *n;
  struct ttypes_filter dummy;

  dummy.t = list;
  slot = (struct ttypes_filter **)
    htab_find_slot (ehspec_hash, &dummy, INSERT);

  if ((n = *slot) == NULL)
    {
      /* Filter value is a -1-based byte index into a uleb128 buffer.  */

      n = xmalloc (sizeof (*n));
      n->t = list;
      n->filter = -(VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data) + 1);
      *slot = n;

      /* Generate a 0-terminated list of filter values.  */
      for (; list ; list = TREE_CHAIN (list))
        {
          if (targetm.arm_eabi_unwinder)
            VARRAY_PUSH_TREE (cfun->eh->ehspec_data, TREE_VALUE (list));
          else
            {
              /* Look up each type in the list and encode its filter
                 value as a uleb128.  */
              push_uleb128 (&cfun->eh->ehspec_data,
                  add_ttypes_entry (ttypes_hash, TREE_VALUE (list)));
            }
        }
      if (targetm.arm_eabi_unwinder)
        VARRAY_PUSH_TREE (cfun->eh->ehspec_data, NULL_TREE);
      else
        VARRAY_PUSH_UCHAR (cfun->eh->ehspec_data, 0);
    }

  return n->filter;
}

/* Generate the action filter values to be used for CATCH and
   ALLOWED_EXCEPTIONS regions.  When using dwarf2 exception regions,
   we use lots of landing pads, and so every type or list can share
   the same filter value, which saves table space.  */

static void
assign_filter_values (void)
{
  int i;
  htab_t ttypes, ehspec;

  cfun->eh->ttype_data = VEC_alloc (tree, gc, 16);
  if (targetm.arm_eabi_unwinder)
    VARRAY_TREE_INIT (cfun->eh->ehspec_data, 64, "ehspec_data");
  else
    VARRAY_UCHAR_INIT (cfun->eh->ehspec_data, 64, "ehspec_data");

  ttypes = htab_create (31, ttypes_filter_hash, ttypes_filter_eq, free);
  ehspec = htab_create (31, ehspec_filter_hash, ehspec_filter_eq, free);

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *r;

      r = VEC_index (eh_region, cfun->eh->region_array, i);

      /* Mind we don't process a region more than once.  */
      if (!r || r->region_number != i)
        continue;

      switch (r->type)
        {
        case ERT_CATCH:
          /* Whatever type_list is (NULL or true list), we build a list
             of filters for the region.  */
          r->u.catch.filter_list = NULL_TREE;

          if (r->u.catch.type_list != NULL)
            {
              /* Get a filter value for each of the types caught and store
                 them in the region's dedicated list.  */
              tree tp_node = r->u.catch.type_list;

              for (; tp_node; tp_node = TREE_CHAIN (tp_node))
                {
                  int flt = add_ttypes_entry (ttypes, TREE_VALUE (tp_node));
                  tree flt_node = build_int_cst (NULL_TREE, flt);

                  r->u.catch.filter_list
                    = tree_cons (NULL_TREE, flt_node, r->u.catch.filter_list);
                }
            }
          else
            {
              /* Get a filter value for the NULL list also since it will need
                 an action record anyway.  */
              int flt = add_ttypes_entry (ttypes, NULL);
              tree flt_node = build_int_cst (NULL_TREE, flt);

              r->u.catch.filter_list
                = tree_cons (NULL_TREE, flt_node, r->u.catch.filter_list);
            }

          break;

        case ERT_ALLOWED_EXCEPTIONS:
          r->u.allowed.filter
            = add_ehspec_entry (ehspec, ttypes, r->u.allowed.type_list);
          break;

        default:
          break;
        }
    }

  htab_delete (ttypes);
  htab_delete (ehspec);
}

/* Emit SEQ into the basic block just before INSN (which is assumed to
   be the first instruction of some existing BB) and return the newly
   produced block.  */
static basic_block
emit_to_new_bb_before (rtx seq, rtx insn)
{
  rtx last;
  basic_block bb;
  edge e;
  edge_iterator ei;

  /* If there happens to be a fallthru edge (possibly created by a
     cleanup_cfg call), we don't want it to go into the newly created
     landing pad or other EH construct.  */
  for (ei = ei_start (BLOCK_FOR_INSN (insn)->preds); (e = ei_safe_edge (ei)); )
    if (e->flags & EDGE_FALLTHRU)
      force_nonfallthru (e);
    else
      ei_next (&ei);
  last = emit_insn_before (seq, insn);
  if (BARRIER_P (last))
    last = PREV_INSN (last);
  bb = create_basic_block (seq, last, BLOCK_FOR_INSN (insn)->prev_bb);
  update_bb_for_insn (bb);
  bb->flags |= BB_SUPERBLOCK;
  return bb;
}

/* Generate the code to actually handle exceptions, which will follow the
   landing pads.  */

static void
build_post_landing_pads (void)
{
  int i;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region;
      rtx seq;

      region = VEC_index (eh_region, cfun->eh->region_array, i);
      /* Mind we don't process a region more than once.  */
      if (!region || region->region_number != i)
        continue;

      switch (region->type)
        {
        case ERT_TRY:
          /* ??? Collect the set of all non-overlapping catch handlers
             all the way up the chain until blocked by a cleanup.  */
          /* ??? Outer try regions can share landing pads with inner
             try regions if the types are completely non-overlapping,
             and there are no intervening cleanups.  */

          region->post_landing_pad = gen_label_rtx ();

          start_sequence ();

          emit_label (region->post_landing_pad);

          /* ??? It is mighty inconvenient to call back into the
             switch statement generation code in expand_end_case.
             Rapid prototyping sez a sequence of ifs.  */
          {
            struct eh_region *c;
            for (c = region->u.try.catch; c ; c = c->u.catch.next_catch)
              {
                if (c->u.catch.type_list == NULL)
                  emit_jump (c->label);
                else
                  {
                    /* We need one cmp/jump per type caught.  Each type
                       list entry has a matching entry in the filter list
                       (see assign_filter_values).  */
                    tree tp_node = c->u.catch.type_list;
                    tree flt_node = c->u.catch.filter_list;

                    for (; tp_node; )
                      {
                        emit_cmp_and_jump_insns
                          (cfun->eh->filter,
                           GEN_INT (tree_low_cst (TREE_VALUE (flt_node), 0)),
                           EQ, NULL_RTX,
                           targetm.eh_return_filter_mode (), 0, c->label);

                        tp_node = TREE_CHAIN (tp_node);
                        flt_node = TREE_CHAIN (flt_node);
                      }
                  }
              }
          }

          /* We delay the generation of the _Unwind_Resume until we generate
             landing pads.  We emit a marker here so as to get good control
             flow data in the meantime.  */
          region->resume
            = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
          emit_barrier ();

          seq = get_insns ();
          end_sequence ();

          emit_to_new_bb_before (seq, region->u.try.catch->label);

          break;

        case ERT_ALLOWED_EXCEPTIONS:
          region->post_landing_pad = gen_label_rtx ();

          start_sequence ();

          emit_label (region->post_landing_pad);

          emit_cmp_and_jump_insns (cfun->eh->filter,
                                   GEN_INT (region->u.allowed.filter),
                                   EQ, NULL_RTX,
                                   targetm.eh_return_filter_mode (), 0, region->label);

          /* We delay the generation of the _Unwind_Resume until we generate
             landing pads.  We emit a marker here so as to get good control
             flow data in the meantime.  */
          region->resume
            = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
          emit_barrier ();

          seq = get_insns ();
          end_sequence ();

          emit_to_new_bb_before (seq, region->label);
          break;

        case ERT_CLEANUP:
        case ERT_MUST_NOT_THROW:
          region->post_landing_pad = region->label;
          break;

        case ERT_CATCH:
        case ERT_THROW:
          /* Nothing to do.  */
          break;

        default:
          gcc_unreachable ();
        }
    }
}

/* Replace RESX patterns with jumps to the next handler if any, or calls to
   _Unwind_Resume otherwise.  */

static void
connect_post_landing_pads (void)
{
  int i;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region;
      struct eh_region *outer;
      rtx seq;
      rtx barrier;

      region = VEC_index (eh_region, cfun->eh->region_array, i);
      /* Mind we don't process a region more than once.  */
      if (!region || region->region_number != i)
        continue;

      /* If there is no RESX, or it has been deleted by flow, there's
         nothing to fix up.  */
      if (! region->resume || INSN_DELETED_P (region->resume))
        continue;

      /* Search for another landing pad in this function.  */
      for (outer = region->outer; outer ; outer = outer->outer)
        if (outer->post_landing_pad)
          break;

      start_sequence ();

      if (outer)
        {
          edge e;
          basic_block src, dest;

          emit_jump (outer->post_landing_pad);
          src = BLOCK_FOR_INSN (region->resume);
          dest = BLOCK_FOR_INSN (outer->post_landing_pad);
          while (EDGE_COUNT (src->succs) > 0)
            remove_edge (EDGE_SUCC (src, 0));
          e = make_edge (src, dest, 0);
          e->probability = REG_BR_PROB_BASE;
          e->count = src->count;
        }
      else
        {
          emit_library_call (unwind_resume_libfunc, LCT_THROW,
                             VOIDmode, 1, cfun->eh->exc_ptr, ptr_mode);

          /* What we just emitted was a throwing libcall, so it got a
             barrier automatically added after it.  If the last insn in
             the libcall sequence isn't the barrier, it's because the
             target emits multiple insns for a call, and there are insns
             after the actual call insn (which are redundant and would be
             optimized away).  The barrier is inserted exactly after the
             call insn, so let's go get that and delete the insns after
             it, because below we need the barrier to be the last insn in
             the sequence.  */
          delete_insns_since (NEXT_INSN (last_call_insn ()));
        }

      seq = get_insns ();
      end_sequence ();
      barrier = emit_insn_before (seq, region->resume);
      /* Avoid a duplicate barrier.  */
      gcc_assert (BARRIER_P (barrier));
      delete_insn (barrier);
      delete_insn (region->resume);

      /* ??? From tree-ssa we can wind up with catch regions whose
         label is not instantiated, but whose resx is present.  Now
         that we've dealt with the resx, kill the region.  */
      if (region->label == NULL && region->type == ERT_CLEANUP)
        remove_eh_handler (region);
    }
}


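/* Emit the dwarf2 landing pads: for each region that needs one, emit
   the landing pad label and receiver pattern, clobber any call-saved
   eh_return data registers, and copy the exception pointer and filter
   value out of the return data registers.  */
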
static void
dw2_build_landing_pads (void)
{
  int i;
  unsigned int j;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region;
      rtx seq;
      basic_block bb;
      bool clobbers_hard_regs = false;
      edge e;

      region = VEC_index (eh_region, cfun->eh->region_array, i);
      /* Mind we don't process a region more than once.  */
      if (!region || region->region_number != i)
        continue;

      if (region->type != ERT_CLEANUP
          && region->type != ERT_TRY
          && region->type != ERT_ALLOWED_EXCEPTIONS)
        continue;

      start_sequence ();

      region->landing_pad = gen_label_rtx ();
      emit_label (region->landing_pad);

#ifdef HAVE_exception_receiver
      if (HAVE_exception_receiver)
        emit_insn (gen_exception_receiver ());
      else
#endif
#ifdef HAVE_nonlocal_goto_receiver
        if (HAVE_nonlocal_goto_receiver)
          emit_insn (gen_nonlocal_goto_receiver ());
        else
#endif
          { /* Nothing */ }

      /* If the eh_return data registers are call-saved, then we
         won't have considered them clobbered from the call that
         threw.  Kill them now.  */
      for (j = 0; ; ++j)
        {
          unsigned r = EH_RETURN_DATA_REGNO (j);
          if (r == INVALID_REGNUM)
            break;
          if (! call_used_regs[r])
            {
              emit_insn (gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, r)));
              clobbers_hard_regs = true;
            }
        }

      if (clobbers_hard_regs)
        {
          /* @@@ This is a kludge.  Not all machine descriptions define a
             blockage insn, but we must not allow the code we just generated
             to be reordered by scheduling.  So emit an ASM_INPUT to act as
             a blockage insn.  */
          emit_insn (gen_rtx_ASM_INPUT (VOIDmode, ""));
        }

      emit_move_insn (cfun->eh->exc_ptr,
                      gen_rtx_REG (ptr_mode, EH_RETURN_DATA_REGNO (0)));
      emit_move_insn (cfun->eh->filter,
                      gen_rtx_REG (targetm.eh_return_filter_mode (),
                                   EH_RETURN_DATA_REGNO (1)));

      seq = get_insns ();
      end_sequence ();

      bb = emit_to_new_bb_before (seq, region->post_landing_pad);
      e = make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
      e->count = bb->count;
      e->probability = REG_BR_PROB_BASE;
    }
}


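/* Per-region bookkeeping gathered while building the sjlj dispatch
   table and landing pad.  */
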
struct sjlj_lp_info
{
  int directly_reachable;
  int action_index;
  int dispatch_index;
  int call_site_index;
};

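/* Walk the insns and, for each one that may throw, find the innermost
   region that might handle the exception and mark it as directly
   reachable.  Return true if any region was marked.  */
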
static bool
sjlj_find_directly_reachable_regions (struct sjlj_lp_info *lp_info)
{
  rtx insn;
  bool found_one = false;

  for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
    {
      struct eh_region *region;
      enum reachable_code rc;
      tree type_thrown;
      rtx note;

      if (! INSN_P (insn))
        continue;

      note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
      if (!note || INTVAL (XEXP (note, 0)) <= 0)
        continue;

      region = VEC_index (eh_region, cfun->eh->region_array, INTVAL (XEXP (note, 0)));

      type_thrown = NULL_TREE;
      if (region->type == ERT_THROW)
        {
          type_thrown = region->u.throw.type;
          region = region->outer;
        }

      /* Find the first containing region that might handle the exception.
         That's the landing pad to which we will transfer control.  */
      rc = RNL_NOT_CAUGHT;
      for (; region; region = region->outer)
        {
          rc = reachable_next_level (region, type_thrown, NULL);
          if (rc != RNL_NOT_CAUGHT)
            break;
        }
      if (rc == RNL_MAYBE_CAUGHT || rc == RNL_CAUGHT)
        {
          lp_info[region->region_number].directly_reachable = 1;
          found_one = true;
        }
    }

  return found_one;
}

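/* Build the action table and assign dispatch indices and call-site
   values for all directly reachable regions, pointing every such
   region's landing pad at DISPATCH_LABEL.  */
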
static void
sjlj_assign_call_site_values (rtx dispatch_label, struct sjlj_lp_info *lp_info)
{
  htab_t ar_hash;
  int i, index;

  /* First task: build the action table.  */

  VARRAY_UCHAR_INIT (cfun->eh->action_record_data, 64, "action_record_data");
  ar_hash = htab_create (31, action_record_hash, action_record_eq, free);

  for (i = cfun->eh->last_region_number; i > 0; --i)
    if (lp_info[i].directly_reachable)
      {
        struct eh_region *r = VEC_index (eh_region, cfun->eh->region_array, i);

        r->landing_pad = dispatch_label;
        lp_info[i].action_index = collect_one_action_chain (ar_hash, r);
        if (lp_info[i].action_index != -1)
          cfun->uses_eh_lsda = 1;
      }

  htab_delete (ar_hash);

  /* Next: assign dispatch values.  In dwarf2 terms, this would be the
     landing pad label for the region.  For sjlj though, there is one
     common landing pad from which we dispatch to the post-landing pads.

     A region receives a dispatch index if it is directly reachable
     and requires in-function processing.  Regions that share post-landing
     pads may share dispatch indices.  */
  /* ??? Post-landing pad sharing doesn't actually happen at the moment
     (see build_post_landing_pads) so we don't bother checking for it.  */

  index = 0;
  for (i = cfun->eh->last_region_number; i > 0; --i)
    if (lp_info[i].directly_reachable)
      lp_info[i].dispatch_index = index++;

  /* Finally: assign call-site values.  In dwarf2 terms, this would be
     the region number assigned by convert_to_eh_region_ranges, but
     handles no-action and must-not-throw differently.  */

  call_site_base = 1;
  for (i = cfun->eh->last_region_number; i > 0; --i)
    if (lp_info[i].directly_reachable)
      {
        int action = lp_info[i].action_index;

        /* Map must-not-throw to otherwise unused call-site index 0.  */
        if (action == -2)
          index = 0;
        /* Map no-action to otherwise unused call-site index -1.  */
        else if (action == -1)
          index = -1;
        /* Otherwise, look it up in the table.  */
        else
          index = add_call_site (GEN_INT (lp_info[i].dispatch_index), action);

        lp_info[i].call_site_index = index;
      }
}

static void
sjlj_mark_call_sites (struct sjlj_lp_info *lp_info)
{
  int last_call_site = -2;
  rtx insn, mem;

  for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
    {
      struct eh_region *region;
      int this_call_site;
      rtx note, before, p;

      /* Reset value tracking at extended basic block boundaries.  */
      if (LABEL_P (insn))
        last_call_site = -2;

      if (! INSN_P (insn))
        continue;

      note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
      if (!note)
        {
          /* Calls (and trapping insns) without notes are outside any
             exception handling region in this function.  Mark them as
             no action.  */
          if (CALL_P (insn)
              || (flag_non_call_exceptions
                  && may_trap_p (PATTERN (insn))))
            this_call_site = -1;
          else
            continue;
        }
      else
        {
          /* Calls that are known not to throw need not be marked.  */
          if (INTVAL (XEXP (note, 0)) <= 0)
            continue;

          region = VEC_index (eh_region, cfun->eh->region_array, INTVAL (XEXP (note, 0)));
          this_call_site = lp_info[region->region_number].call_site_index;
        }

      if (this_call_site == last_call_site)
        continue;

      /* Don't separate a call from its argument loads.  */
      before = insn;
      if (CALL_P (insn))
        before = find_first_parameter_load (insn, NULL_RTX);

      start_sequence ();
      mem = adjust_address (cfun->eh->sjlj_fc, TYPE_MODE (integer_type_node),
                            sjlj_fc_call_site_ofs);
      emit_move_insn (mem, GEN_INT (this_call_site));
      p = get_insns ();
      end_sequence ();

      emit_insn_before (p, before);
      last_call_site = this_call_site;
    }
}

/* Construct the SjLj_Function_Context.  */

static void
sjlj_emit_function_enter (rtx dispatch_label)
{
  rtx fn_begin, fc, mem, seq;
  bool fn_begin_outside_block;

  fc = cfun->eh->sjlj_fc;

  start_sequence ();

  /* We're storing this libcall's address into memory instead of
     calling it directly.  Thus, we must call assemble_external_libcall
     here, as we cannot depend on emit_library_call to do it for us.  */
  assemble_external_libcall (eh_personality_libfunc);
  mem = adjust_address (fc, Pmode, sjlj_fc_personality_ofs);
  emit_move_insn (mem, eh_personality_libfunc);

  mem = adjust_address (fc, Pmode, sjlj_fc_lsda_ofs);
  if (cfun->uses_eh_lsda)
    {
      char buf[20];
      rtx sym;

      ASM_GENERATE_INTERNAL_LABEL (buf, "LLSDA", current_function_funcdef_no);
      sym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
      SYMBOL_REF_FLAGS (sym) = SYMBOL_FLAG_LOCAL;
      emit_move_insn (mem, sym);
    }
  else
    emit_move_insn (mem, const0_rtx);

#ifdef DONT_USE_BUILTIN_SETJMP
  {
    rtx x, note;
    x = emit_library_call_value (setjmp_libfunc, NULL_RTX, LCT_RETURNS_TWICE,
                                 TYPE_MODE (integer_type_node), 1,
                                 plus_constant (XEXP (fc, 0),
                                                sjlj_fc_jbuf_ofs), Pmode);

    note = emit_note (NOTE_INSN_EXPECTED_VALUE);
    NOTE_EXPECTED_VALUE (note) = gen_rtx_EQ (VOIDmode, x, const0_rtx);

    emit_cmp_and_jump_insns (x, const0_rtx, NE, 0,
                             TYPE_MODE (integer_type_node), 0, dispatch_label);
  }
#else
  expand_builtin_setjmp_setup (plus_constant (XEXP (fc, 0), sjlj_fc_jbuf_ofs),
                               dispatch_label);
#endif

  emit_library_call (unwind_sjlj_register_libfunc, LCT_NORMAL, VOIDmode,
                     1, XEXP (fc, 0), Pmode);

  seq = get_insns ();
  end_sequence ();

  /* ??? Instead of doing this at the beginning of the function,
     do this in a block that is at loop level 0 and dominates all
     can_throw_internal instructions.  */

  fn_begin_outside_block = true;
  for (fn_begin = get_insns (); ; fn_begin = NEXT_INSN (fn_begin))
    if (NOTE_P (fn_begin))
      {
        if (NOTE_LINE_NUMBER (fn_begin) == NOTE_INSN_FUNCTION_BEG)
          break;
        else if (NOTE_LINE_NUMBER (fn_begin) == NOTE_INSN_BASIC_BLOCK)
          fn_begin_outside_block = false;
      }

  if (fn_begin_outside_block)
    insert_insn_on_edge (seq, single_succ_edge (ENTRY_BLOCK_PTR));
  else
    emit_insn_after (seq, fn_begin);
}

/* Call back from expand_function_end to know where we should put
   the call to unwind_sjlj_unregister_libfunc if needed.  */

void
sjlj_emit_function_exit_after (rtx after)
{
  cfun->eh->sjlj_exit_after = after;
}

static void
sjlj_emit_function_exit (void)
{
  rtx seq;
  edge e;
  edge_iterator ei;

  start_sequence ();

  emit_library_call (unwind_sjlj_unregister_libfunc, LCT_NORMAL, VOIDmode,
                     1, XEXP (cfun->eh->sjlj_fc, 0), Pmode);

  seq = get_insns ();
  end_sequence ();

  /* ??? Really this can be done in any block at loop level 0 that
     post-dominates all can_throw_internal instructions.  This is
     the last possible moment.  */

  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
    if (e->flags & EDGE_FALLTHRU)
      break;
  if (e)
    {
      rtx insn;

      /* Figure out whether the place we are supposed to insert the
         libcall is inside the last basic block or after it.  In the
         latter case we need to emit the insns on the edge.  */
      gcc_assert (e->src->next_bb == EXIT_BLOCK_PTR);
      for (insn = BB_HEAD (e->src); ; insn = NEXT_INSN (insn))
        {
          if (insn == cfun->eh->sjlj_exit_after)
            {
              if (LABEL_P (insn))
                insn = NEXT_INSN (insn);
              emit_insn_after (seq, insn);
              return;
            }
          if (insn == BB_END (e->src))
            break;
        }
      insert_insn_on_edge (seq, e);
    }
}

static void
sjlj_emit_dispatch_table (rtx dispatch_label, struct sjlj_lp_info *lp_info)
{
  int i, first_reachable;
  rtx mem, dispatch, seq, fc;
  rtx before;
  basic_block bb;
  edge e;

  fc = cfun->eh->sjlj_fc;

  start_sequence ();

  emit_label (dispatch_label);

#ifndef DONT_USE_BUILTIN_SETJMP
  expand_builtin_setjmp_receiver (dispatch_label);
#endif

  /* Load up dispatch index, exc_ptr and filter values from the
     function context.  */
  mem = adjust_address (fc, TYPE_MODE (integer_type_node),
                        sjlj_fc_call_site_ofs);
  dispatch = copy_to_reg (mem);

  mem = adjust_address (fc, word_mode, sjlj_fc_data_ofs);
  if (word_mode != ptr_mode)
    {
#ifdef POINTERS_EXTEND_UNSIGNED
      mem = convert_memory_address (ptr_mode, mem);
#else
      mem = convert_to_mode (ptr_mode, mem, 0);
#endif
    }
  emit_move_insn (cfun->eh->exc_ptr, mem);

  mem = adjust_address (fc, word_mode, sjlj_fc_data_ofs + UNITS_PER_WORD);
  emit_move_insn (cfun->eh->filter, mem);

  /* Jump to one of the directly reachable regions.  */
  /* ??? This really ought to be using a switch statement.  */

  first_reachable = 0;
  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      if (! lp_info[i].directly_reachable)
        continue;

      if (! first_reachable)
        {
          first_reachable = i;
          continue;
        }

      emit_cmp_and_jump_insns (dispatch, GEN_INT (lp_info[i].dispatch_index),
                               EQ, NULL_RTX, TYPE_MODE (integer_type_node), 0,
                               ((struct eh_region *)VEC_index (eh_region, cfun->eh->region_array, i))
                                ->post_landing_pad);
    }

  seq = get_insns ();
  end_sequence ();

  before = (((struct eh_region *)VEC_index (eh_region, cfun->eh->region_array, first_reachable))
            ->post_landing_pad);

  bb = emit_to_new_bb_before (seq, before);
  e = make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
  e->count = bb->count;
  e->probability = REG_BR_PROB_BASE;
}

static void
sjlj_build_landing_pads (void)
{
  struct sjlj_lp_info *lp_info;

  lp_info = xcalloc (cfun->eh->last_region_number + 1,
                     sizeof (struct sjlj_lp_info));

  if (sjlj_find_directly_reachable_regions (lp_info))
    {
      rtx dispatch_label = gen_label_rtx ();

      cfun->eh->sjlj_fc
        = assign_stack_local (TYPE_MODE (sjlj_fc_type_node),
                              int_size_in_bytes (sjlj_fc_type_node),
                              TYPE_ALIGN (sjlj_fc_type_node));

      sjlj_assign_call_site_values (dispatch_label, lp_info);
      sjlj_mark_call_sites (lp_info);

      sjlj_emit_function_enter (dispatch_label);
      sjlj_emit_dispatch_table (dispatch_label, lp_info);
      sjlj_emit_function_exit ();
    }

  free (lp_info);
}
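
/* Editor's note: a minimal sketch (not part of GCC; guarded out of
   compilation) of what the sjlj_* routines above conceptually arrange
   for each function.  The struct below is a hypothetical stand-in for
   sjlj_fc_type_node; its fields mirror the sjlj_fc_*_ofs offsets used
   above, and every symbol prefixed "example_" is an assumption.  */
#if 0
struct example_sjlj_fc
{
  void *personality;            /* sjlj_fc_personality_ofs */
  void *lsda;                   /* sjlj_fc_lsda_ofs */
  int call_site;                /* sjlj_fc_call_site_ofs */
  void *data[2];                /* sjlj_fc_data_ofs: exc_ptr, then filter */
  void *jbuf[8];                /* sjlj_fc_jbuf_ofs */
};

extern void example_personality (void);
extern const char example_lsda[];
extern void example_might_throw (void);
extern void _Unwind_SjLj_Register (void *);
extern void _Unwind_SjLj_Unregister (void *);

void
example_function (void)
{
  struct example_sjlj_fc fc;

  /* sjlj_emit_function_enter: record personality and LSDA, take the
     setjmp, and register the context with the runtime.  */
  fc.personality = (void *) example_personality;
  fc.lsda = (void *) example_lsda;
  if (__builtin_setjmp (fc.jbuf))
    goto dispatch;              /* second return: an exception arrived */
  _Unwind_SjLj_Register ((void *) &fc);

  /* sjlj_mark_call_sites: before each throwing call, store that call's
     call-site index so the unwinder knows where we were.  */
  fc.call_site = 1;
  example_might_throw ();

  /* sjlj_emit_function_exit: unregister on the normal exit path.  */
  _Unwind_SjLj_Unregister ((void *) &fc);
  return;

 dispatch:
  /* sjlj_emit_dispatch_table: the unwinder rewrites call_site with the
     dispatch index from the call-site table; compare it against each
     directly reachable region and branch to its post-landing pad.  */
  if (fc.call_site == 0)
    goto example_post_landing_pad;
 example_post_landing_pad:
  /* exc_ptr and filter were reloaded from fc.data[0] and fc.data[1].  */
  return;
}
#endif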

void
finish_eh_generation (void)
{
  basic_block bb;

  /* Nothing to do if no regions created.  */
  if (cfun->eh->region_tree == NULL)
    return;

  /* The object here is to provide find_basic_blocks with detailed
     information (via reachable_handlers) on how exception control
     flows within the function.  In this first pass, we can include
     type information garnered from ERT_THROW and ERT_ALLOWED_EXCEPTIONS
     regions, and hope that it will be useful in deleting unreachable
     handlers.  Subsequently, we will generate landing pads which will
     connect many of the handlers, and then type information will not
     be effective.  Still, this is a win over previous implementations.  */

  /* These registers are used by the landing pads.  Make sure they
     have been generated.  */
  get_exception_pointer (cfun);
  get_exception_filter (cfun);

  /* Construct the landing pads.  */

  assign_filter_values ();
  build_post_landing_pads ();
  connect_post_landing_pads ();
  if (USING_SJLJ_EXCEPTIONS)
    sjlj_build_landing_pads ();
  else
    dw2_build_landing_pads ();

  cfun->eh->built_landing_pads = 1;

  /* We've totally changed the CFG.  Start over.  */
  find_exception_handler_labels ();
  break_superblocks ();
  if (USING_SJLJ_EXCEPTIONS)
    commit_edge_insertions ();
  FOR_EACH_BB (bb)
    {
      edge e;
      edge_iterator ei;
      bool eh = false;
      for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
        {
          if (e->flags & EDGE_EH)
            {
              remove_edge (e);
              eh = true;
            }
          else
            ei_next (&ei);
        }
      if (eh)
        rtl_make_eh_edge (NULL, bb, BB_END (bb));
    }
}

static hashval_t
ehl_hash (const void *pentry)
{
  struct ehl_map_entry *entry = (struct ehl_map_entry *) pentry;

  /* 2^32 * ((sqrt(5) - 1) / 2) */
  const hashval_t scaled_golden_ratio = 0x9e3779b9;
  return CODE_LABEL_NUMBER (entry->label) * scaled_golden_ratio;
}

static int
ehl_eq (const void *pentry, const void *pdata)
{
  struct ehl_map_entry *entry = (struct ehl_map_entry *) pentry;
  struct ehl_map_entry *data = (struct ehl_map_entry *) pdata;

  return entry->label == data->label;
}
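
/* Editor's note: an illustrative aside, not part of GCC.  The constant
   in ehl_hash above is floor (2^32 / phi), where phi = (1 + sqrt(5)) / 2;
   multiplying by it is Fibonacci (multiplicative) hashing, which scatters
   the small, mostly consecutive CODE_LABEL_NUMBERs across the full
   32-bit range.  A self-contained sketch, assuming only stdio:  */
#if 0
#include <stdio.h>

int
main (void)
{
  unsigned int k;

  /* Consecutive keys land roughly 0x9e3779b9 apart (mod 2^32), so they
     fall into different buckets even for small hash-table sizes.  */
  for (k = 1; k <= 4; ++k)
    printf ("%u -> 0x%08x\n", k, k * 0x9e3779b9u);
  return 0;
}
#endif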

/* This section handles removing dead code for flow.  */

/* Remove LABEL from exception_handler_label_map.  */

static void
remove_exception_handler_label (rtx label)
{
  struct ehl_map_entry **slot, tmp;

  /* If exception_handler_label_map was not built yet,
     there is nothing to do.  */
  if (cfun->eh->exception_handler_label_map == NULL)
    return;

  tmp.label = label;
  slot = (struct ehl_map_entry **)
    htab_find_slot (cfun->eh->exception_handler_label_map, &tmp, NO_INSERT);
  gcc_assert (slot);

  htab_clear_slot (cfun->eh->exception_handler_label_map, (void **) slot);
}

/* Splice REGION from the region tree etc.  */

static void
remove_eh_handler (struct eh_region *region)
{
  struct eh_region **pp, **pp_start, *p, *outer, *inner;
  rtx lab;

  /* For the benefit of efficiently handling REG_EH_REGION notes,
     replace this region in the region array with its containing
     region.  Note that previous region deletions may result in
     multiple copies of this region in the array, so we have a
     list of alternate numbers by which we are known.  */

  outer = region->outer;
  VEC_replace (eh_region, cfun->eh->region_array, region->region_number, outer);
  if (region->aka)
    {
      unsigned i;
      bitmap_iterator bi;

      EXECUTE_IF_SET_IN_BITMAP (region->aka, 0, i, bi)
        {
          VEC_replace (eh_region, cfun->eh->region_array, i, outer);
        }
    }

  if (outer)
    {
      if (!outer->aka)
        outer->aka = BITMAP_GGC_ALLOC ();
      if (region->aka)
        bitmap_ior_into (outer->aka, region->aka);
      bitmap_set_bit (outer->aka, region->region_number);
    }

  if (cfun->eh->built_landing_pads)
    lab = region->landing_pad;
  else
    lab = region->label;
  if (lab)
    remove_exception_handler_label (lab);

  if (outer)
    pp_start = &outer->inner;
  else
    pp_start = &cfun->eh->region_tree;
  for (pp = pp_start, p = *pp; p != region; pp = &p->next_peer, p = *pp)
    continue;
  *pp = region->next_peer;

  inner = region->inner;
  if (inner)
    {
      for (p = inner; p->next_peer ; p = p->next_peer)
        p->outer = outer;
      p->outer = outer;

      p->next_peer = *pp_start;
      *pp_start = inner;
    }

  if (region->type == ERT_CATCH)
    {
      struct eh_region *try, *next, *prev;

      for (try = region->next_peer;
           try->type == ERT_CATCH;
           try = try->next_peer)
        continue;
      gcc_assert (try->type == ERT_TRY);

      next = region->u.catch.next_catch;
      prev = region->u.catch.prev_catch;

      if (next)
        next->u.catch.prev_catch = prev;
      else
        try->u.try.last_catch = prev;
      if (prev)
        prev->u.catch.next_catch = next;
      else
        {
          try->u.try.catch = next;
          if (! next)
            remove_eh_handler (try);
        }
    }
}

/* LABEL heads a basic block that is about to be deleted.  If this
   label corresponds to an exception region, we may be able to
   delete the region.  */

void
maybe_remove_eh_handler (rtx label)
{
  struct ehl_map_entry **slot, tmp;
  struct eh_region *region;

  /* ??? After generating landing pads, it's not so simple to determine
     if the region data is completely unused.  One must examine the
     landing pad and the post landing pad, and whether an inner try block
     is referencing the catch handlers directly.  */
  if (cfun->eh->built_landing_pads)
    return;

  tmp.label = label;
  slot = (struct ehl_map_entry **)
    htab_find_slot (cfun->eh->exception_handler_label_map, &tmp, NO_INSERT);
  if (! slot)
    return;
  region = (*slot)->region;
  if (! region)
    return;

  /* Flow will want to remove MUST_NOT_THROW regions as unreachable
     because there is no path to the fallback call to terminate.
     But the region continues to affect call-site data until there
     are no more contained calls, which we don't see here.  */
  if (region->type == ERT_MUST_NOT_THROW)
    {
      htab_clear_slot (cfun->eh->exception_handler_label_map, (void **) slot);
      region->label = NULL_RTX;
    }
  else
    remove_eh_handler (region);
}

/* Invokes CALLBACK for every exception handler label.  Only used by old
   loop hackery; should not be used by new code.  */

void
for_each_eh_label (void (*callback) (rtx))
{
  htab_traverse (cfun->eh->exception_handler_label_map, for_each_eh_label_1,
                 (void *) &callback);
}

static int
for_each_eh_label_1 (void **pentry, void *data)
{
  struct ehl_map_entry *entry = *(struct ehl_map_entry **)pentry;
  void (*callback) (rtx) = *(void (**) (rtx)) data;

  (*callback) (entry->label);
  return 1;
}

/* Invoke CALLBACK for every exception region in the current function.  */

void
for_each_eh_region (void (*callback) (struct eh_region *))
{
  int i, n = cfun->eh->last_region_number;
  for (i = 1; i <= n; ++i)
    {
      struct eh_region *region;

      region = VEC_index (eh_region, cfun->eh->region_array, i);
      if (region)
        (*callback) (region);
    }
}

/* This section describes CFG exception edges for flow.  */

/* For communicating between calls to reachable_next_level.  */
struct reachable_info
{
  tree types_caught;
  tree types_allowed;
  void (*callback) (struct eh_region *, void *);
  void *callback_data;
  bool saw_any_handlers;
};

/* A subroutine of reachable_next_level.  Return true if TYPE, or a
   base class of TYPE, is in HANDLED.  */

static int
check_handled (tree handled, tree type)
{
  tree t;

  /* We can check for exact matches without front-end help.  */
  if (! lang_eh_type_covers)
    {
      for (t = handled; t ; t = TREE_CHAIN (t))
        if (TREE_VALUE (t) == type)
          return 1;
    }
  else
    {
      for (t = handled; t ; t = TREE_CHAIN (t))
        if ((*lang_eh_type_covers) (TREE_VALUE (t), type))
          return 1;
    }

  return 0;
}

/* A subroutine of reachable_next_level.  If we are collecting a list
   of handlers, add one.  After landing pad generation, reference
   it instead of the handlers themselves.  Further, the handlers are
   all wired together, so by referencing one, we've got them all.
   Before landing pad generation we reference each handler individually.

   LP_REGION contains the landing pad; REGION is the handler.  */

static void
add_reachable_handler (struct reachable_info *info,
                       struct eh_region *lp_region, struct eh_region *region)
{
  if (! info)
    return;

  info->saw_any_handlers = true;

  if (cfun->eh->built_landing_pads)
    info->callback (lp_region, info->callback_data);
  else
    info->callback (region, info->callback_data);
}

/* Process one level of exception regions for reachability.
   If TYPE_THROWN is non-null, then it is the *exact* type being
   propagated.  If INFO is non-null, then collect handler labels
   and caught/allowed type information between invocations.  */

static enum reachable_code
reachable_next_level (struct eh_region *region, tree type_thrown,
                      struct reachable_info *info)
{
  switch (region->type)
    {
    case ERT_CLEANUP:
      /* Before landing-pad generation, we model control flow
         directly to the individual handlers.  In this way we can
         see that catch handler types may shadow one another.  */
      add_reachable_handler (info, region, region);
      return RNL_MAYBE_CAUGHT;

    case ERT_TRY:
      {
        struct eh_region *c;
        enum reachable_code ret = RNL_NOT_CAUGHT;

        for (c = region->u.try.catch; c ; c = c->u.catch.next_catch)
          {
            /* A catch-all handler ends the search.  */
            if (c->u.catch.type_list == NULL)
              {
                add_reachable_handler (info, region, c);
                return RNL_CAUGHT;
              }

            if (type_thrown)
              {
                /* If we have at least one type match, end the search.  */
                tree tp_node = c->u.catch.type_list;

                for (; tp_node; tp_node = TREE_CHAIN (tp_node))
                  {
                    tree type = TREE_VALUE (tp_node);

                    if (type == type_thrown
                        || (lang_eh_type_covers
                            && (*lang_eh_type_covers) (type, type_thrown)))
                      {
                        add_reachable_handler (info, region, c);
                        return RNL_CAUGHT;
                      }
                  }

                /* If we have definitive information of a match failure,
                   the catch won't trigger.  */
                if (lang_eh_type_covers)
                  return RNL_NOT_CAUGHT;
              }

            /* At this point, we either don't know what type is thrown or
               don't have front-end assistance to help deciding if it is
               covered by one of the types in the list for this region.

               We'd then like to add this region to the list of reachable
               handlers since it is indeed potentially reachable based on the
               information we have.

               Actually, this handler is for sure not reachable if all the
               types it matches have already been caught.  That is, it is only
               potentially reachable if at least one of the types it catches
               has not been previously caught.  */

            if (! info)
              ret = RNL_MAYBE_CAUGHT;
            else
              {
                tree tp_node = c->u.catch.type_list;
                bool maybe_reachable = false;

                /* Compute the potential reachability of this handler and
                   update the list of types caught at the same time.  */
                for (; tp_node; tp_node = TREE_CHAIN (tp_node))
                  {
                    tree type = TREE_VALUE (tp_node);

                    if (! check_handled (info->types_caught, type))
                      {
                        info->types_caught
                          = tree_cons (NULL, type, info->types_caught);

                        maybe_reachable = true;
                      }
                  }

                if (maybe_reachable)
                  {
                    add_reachable_handler (info, region, c);

                    /* ??? If the catch type is a base class of every allowed
                       type, then we know we can stop the search.  */
                    ret = RNL_MAYBE_CAUGHT;
                  }
              }
          }

        return ret;
      }

    case ERT_ALLOWED_EXCEPTIONS:
      /* An empty list of types definitely ends the search.  */
      if (region->u.allowed.type_list == NULL_TREE)
        {
          add_reachable_handler (info, region, region);
          return RNL_CAUGHT;
        }

      /* Collect a list of lists of allowed types for use in detecting
         when a catch may be transformed into a catch-all.  */
      if (info)
        info->types_allowed = tree_cons (NULL_TREE,
                                         region->u.allowed.type_list,
                                         info->types_allowed);

      /* If we have definitive information about the type hierarchy,
         then we can tell if the thrown type will pass through the
         filter.  */
      if (type_thrown && lang_eh_type_covers)
        {
          if (check_handled (region->u.allowed.type_list, type_thrown))
            return RNL_NOT_CAUGHT;
          else
            {
              add_reachable_handler (info, region, region);
              return RNL_CAUGHT;
            }
        }

      add_reachable_handler (info, region, region);
      return RNL_MAYBE_CAUGHT;

    case ERT_CATCH:
      /* Catch regions are handled by their controlling try region.  */
      return RNL_NOT_CAUGHT;

    case ERT_MUST_NOT_THROW:
      /* Here we end our search, since no exceptions may propagate.
         If we've touched down at some previous landing pad, then the
         explicit function call we generated may be used.  Otherwise
         the call is made by the runtime.

         Before inlining, do not perform this optimization.  We may
         inline a subroutine that contains handlers, and that will
         change the value of saw_any_handlers.  */

      if ((info && info->saw_any_handlers) || !cfun->after_inlining)
        {
          add_reachable_handler (info, region, region);
          return RNL_CAUGHT;
        }
      else
        return RNL_BLOCKED;

    case ERT_THROW:
    case ERT_UNKNOWN:
      /* Shouldn't see these here.  */
      gcc_unreachable ();
      break;
    default:
      gcc_unreachable ();
    }
}

/* Invoke CALLBACK on each region reachable from REGION_NUMBER.  */

void
foreach_reachable_handler (int region_number, bool is_resx,
                           void (*callback) (struct eh_region *, void *),
                           void *callback_data)
{
  struct reachable_info info;
  struct eh_region *region;
  tree type_thrown;

  memset (&info, 0, sizeof (info));
  info.callback = callback;
  info.callback_data = callback_data;

  region = VEC_index (eh_region, cfun->eh->region_array, region_number);

  type_thrown = NULL_TREE;
  if (is_resx)
    {
      /* A RESX leaves a region instead of entering it.  Thus the
         region itself may have been deleted out from under us.  */
      if (region == NULL)
        return;
      region = region->outer;
    }
  else if (region->type == ERT_THROW)
    {
      type_thrown = region->u.throw.type;
      region = region->outer;
    }

  while (region)
    {
      if (reachable_next_level (region, type_thrown, &info) >= RNL_CAUGHT)
        break;
      /* If we have processed one cleanup, there is no point in
         processing any more of them.  Each cleanup will have an edge
         to the next outer cleanup region, so the flow graph will be
         accurate.  */
      if (region->type == ERT_CLEANUP)
        region = region->u.cleanup.prev_try;
      else
        region = region->outer;
    }
}

/* Retrieve a list of labels of exception handlers which can be
   reached by a given insn.  */

static void
arh_to_landing_pad (struct eh_region *region, void *data)
{
  rtx *p_handlers = data;
  if (! *p_handlers)
    *p_handlers = alloc_INSN_LIST (region->landing_pad, NULL_RTX);
}

static void
arh_to_label (struct eh_region *region, void *data)
{
  rtx *p_handlers = data;
  *p_handlers = alloc_INSN_LIST (region->label, *p_handlers);
}

rtx
reachable_handlers (rtx insn)
{
  bool is_resx = false;
  rtx handlers = NULL;
  int region_number;

  if (JUMP_P (insn)
      && GET_CODE (PATTERN (insn)) == RESX)
    {
      region_number = XINT (PATTERN (insn), 0);
      is_resx = true;
    }
  else
    {
      rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
      if (!note || INTVAL (XEXP (note, 0)) <= 0)
        return NULL;
      region_number = INTVAL (XEXP (note, 0));
    }

  foreach_reachable_handler (region_number, is_resx,
                             (cfun->eh->built_landing_pads
                              ? arh_to_landing_pad
                              : arh_to_label),
                             &handlers);

  return handlers;
}

/* Determine if the given INSN can throw an exception that is caught
   within the function.  */

bool
can_throw_internal_1 (int region_number, bool is_resx)
{
  struct eh_region *region;
  tree type_thrown;

  region = VEC_index (eh_region, cfun->eh->region_array, region_number);

  type_thrown = NULL_TREE;
  if (is_resx)
    region = region->outer;
  else if (region->type == ERT_THROW)
    {
      type_thrown = region->u.throw.type;
      region = region->outer;
    }

  /* If this exception is ignored by each and every containing region,
     then control passes straight out.  The runtime may handle some
     regions, which also do not require processing internally.  */
  for (; region; region = region->outer)
    {
      enum reachable_code how = reachable_next_level (region, type_thrown, 0);
      if (how == RNL_BLOCKED)
        return false;
      if (how != RNL_NOT_CAUGHT)
        return true;
    }

  return false;
}

bool
can_throw_internal (rtx insn)
{
  rtx note;

  if (! INSN_P (insn))
    return false;

  if (JUMP_P (insn)
      && GET_CODE (PATTERN (insn)) == RESX
      && XINT (PATTERN (insn), 0) > 0)
    return can_throw_internal_1 (XINT (PATTERN (insn), 0), true);

  if (NONJUMP_INSN_P (insn)
      && GET_CODE (PATTERN (insn)) == SEQUENCE)
    insn = XVECEXP (PATTERN (insn), 0, 0);

  /* Every insn that might throw has an EH_REGION note.  */
  note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
  if (!note || INTVAL (XEXP (note, 0)) <= 0)
    return false;

  return can_throw_internal_1 (INTVAL (XEXP (note, 0)), false);
}

/* Determine if the given INSN can throw an exception that is
   visible outside the function.  */

bool
can_throw_external_1 (int region_number, bool is_resx)
{
  struct eh_region *region;
  tree type_thrown;

  region = VEC_index (eh_region, cfun->eh->region_array, region_number);

  type_thrown = NULL_TREE;
  if (is_resx)
    region = region->outer;
  else if (region->type == ERT_THROW)
    {
      type_thrown = region->u.throw.type;
      region = region->outer;
    }

  /* If the exception is caught or blocked by any containing region,
     then it is not seen by any calling function.  */
  for (; region ; region = region->outer)
    if (reachable_next_level (region, type_thrown, NULL) >= RNL_CAUGHT)
      return false;

  return true;
}

bool
can_throw_external (rtx insn)
{
  rtx note;

  if (! INSN_P (insn))
    return false;

  if (JUMP_P (insn)
      && GET_CODE (PATTERN (insn)) == RESX
      && XINT (PATTERN (insn), 0) > 0)
    return can_throw_external_1 (XINT (PATTERN (insn), 0), true);

  if (NONJUMP_INSN_P (insn)
      && GET_CODE (PATTERN (insn)) == SEQUENCE)
    insn = XVECEXP (PATTERN (insn), 0, 0);

  note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
  if (!note)
    {
      /* Calls (and trapping insns) without notes are outside any
         exception handling region in this function.  We have to
         assume they might throw.  Given that the front end and middle
         ends mark known NOTHROW functions, this isn't so wildly
         inaccurate.  */
      return (CALL_P (insn)
              || (flag_non_call_exceptions
                  && may_trap_p (PATTERN (insn))));
    }
  if (INTVAL (XEXP (note, 0)) <= 0)
    return false;

  return can_throw_external_1 (INTVAL (XEXP (note, 0)), false);
}

/* Set TREE_NOTHROW and cfun->all_throwers_are_sibcalls.  */

void
set_nothrow_function_flags (void)
{
  rtx insn;

  TREE_NOTHROW (current_function_decl) = 1;

  /* Assume cfun->all_throwers_are_sibcalls until we encounter
     something that can throw an exception.  We specifically exempt
     CALL_INSNs that are SIBLING_CALL_P, as these are really jumps,
     and can't throw.  Most CALL_INSNs are not SIBLING_CALL_P, so this
     is optimistic.  */

  cfun->all_throwers_are_sibcalls = 1;

  if (! flag_exceptions)
    return;

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (can_throw_external (insn))
      {
        TREE_NOTHROW (current_function_decl) = 0;

        if (!CALL_P (insn) || !SIBLING_CALL_P (insn))
          {
            cfun->all_throwers_are_sibcalls = 0;
            return;
          }
      }

  for (insn = current_function_epilogue_delay_list; insn;
       insn = XEXP (insn, 1))
    if (can_throw_external (insn))
      {
        TREE_NOTHROW (current_function_decl) = 0;

        if (!CALL_P (insn) || !SIBLING_CALL_P (insn))
          {
            cfun->all_throwers_are_sibcalls = 0;
            return;
          }
      }
}

struct tree_opt_pass pass_set_nothrow_function_flags =
{
  NULL,                                 /* name */
  NULL,                                 /* gate */
  set_nothrow_function_flags,           /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  0,                                    /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  0,                                    /* todo_flags_finish */

};


/* Various hooks for unwind library.  */

/* Do any necessary initialization to access arbitrary stack frames.
   On the SPARC, this means flushing the register windows.  */

void
expand_builtin_unwind_init (void)
{
  /* Set this so all the registers get saved in our frame; we need to be
     able to copy the saved values for any registers from frames we unwind.  */
  current_function_has_nonlocal_label = 1;

#ifdef SETUP_FRAME_ADDRESSES
  SETUP_FRAME_ADDRESSES ();
#endif
}

rtx
expand_builtin_eh_return_data_regno (tree arglist)
{
  tree which = TREE_VALUE (arglist);
  unsigned HOST_WIDE_INT iwhich;

  if (TREE_CODE (which) != INTEGER_CST)
    {
      error ("argument of %<__builtin_eh_return_regno%> must be constant");
      return constm1_rtx;
    }

  iwhich = tree_low_cst (which, 1);
  iwhich = EH_RETURN_DATA_REGNO (iwhich);
  if (iwhich == INVALID_REGNUM)
    return constm1_rtx;

#ifdef DWARF_FRAME_REGNUM
  iwhich = DWARF_FRAME_REGNUM (iwhich);
#else
  iwhich = DBX_REGISTER_NUMBER (iwhich);
#endif

  return GEN_INT (iwhich);
}

/* Given a value extracted from the return address register or stack slot,
   return the actual address encoded in that value.  */

rtx
expand_builtin_extract_return_addr (tree addr_tree)
{
  rtx addr = expand_expr (addr_tree, NULL_RTX, Pmode, 0);

  if (GET_MODE (addr) != Pmode
      && GET_MODE (addr) != VOIDmode)
    {
#ifdef POINTERS_EXTEND_UNSIGNED
      addr = convert_memory_address (Pmode, addr);
#else
      addr = convert_to_mode (Pmode, addr, 0);
#endif
    }

  /* First mask out any unwanted bits.  */
#ifdef MASK_RETURN_ADDR
  expand_and (Pmode, addr, MASK_RETURN_ADDR, addr);
#endif

  /* Then adjust to find the real return address.  */
#if defined (RETURN_ADDR_OFFSET)
  addr = plus_constant (addr, RETURN_ADDR_OFFSET);
#endif

  return addr;
}

/* Given an actual address in addr_tree, do any necessary encoding
   and return the value to be stored in the return address register or
   stack slot so the epilogue will return to that address.  */

rtx
expand_builtin_frob_return_addr (tree addr_tree)
{
  rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, 0);

  addr = convert_memory_address (Pmode, addr);

#ifdef RETURN_ADDR_OFFSET
  addr = force_reg (Pmode, addr);
  addr = plus_constant (addr, -RETURN_ADDR_OFFSET);
#endif

  return addr;
}

/* Set up the epilogue with the magic bits we'll need to return to the
   exception handler.  */

void
expand_builtin_eh_return (tree stackadj_tree ATTRIBUTE_UNUSED,
                          tree handler_tree)
{
  rtx tmp;

#ifdef EH_RETURN_STACKADJ_RTX
  tmp = expand_expr (stackadj_tree, cfun->eh->ehr_stackadj, VOIDmode, 0);
  tmp = convert_memory_address (Pmode, tmp);
  if (!cfun->eh->ehr_stackadj)
    cfun->eh->ehr_stackadj = copy_to_reg (tmp);
  else if (tmp != cfun->eh->ehr_stackadj)
    emit_move_insn (cfun->eh->ehr_stackadj, tmp);
#endif

  tmp = expand_expr (handler_tree, cfun->eh->ehr_handler, VOIDmode, 0);
  tmp = convert_memory_address (Pmode, tmp);
  if (!cfun->eh->ehr_handler)
    cfun->eh->ehr_handler = copy_to_reg (tmp);
  else if (tmp != cfun->eh->ehr_handler)
    emit_move_insn (cfun->eh->ehr_handler, tmp);

  if (!cfun->eh->ehr_label)
    cfun->eh->ehr_label = gen_label_rtx ();
  emit_jump (cfun->eh->ehr_label);
}

void
expand_eh_return (void)
{
  rtx around_label;

  if (! cfun->eh->ehr_label)
    return;

  current_function_calls_eh_return = 1;

#ifdef EH_RETURN_STACKADJ_RTX
  emit_move_insn (EH_RETURN_STACKADJ_RTX, const0_rtx);
#endif

  around_label = gen_label_rtx ();
  emit_jump (around_label);

  emit_label (cfun->eh->ehr_label);
  clobber_return_register ();

#ifdef EH_RETURN_STACKADJ_RTX
  emit_move_insn (EH_RETURN_STACKADJ_RTX, cfun->eh->ehr_stackadj);
#endif

#ifdef HAVE_eh_return
  if (HAVE_eh_return)
    emit_insn (gen_eh_return (cfun->eh->ehr_handler));
  else
#endif
    {
#ifdef EH_RETURN_HANDLER_RTX
      emit_move_insn (EH_RETURN_HANDLER_RTX, cfun->eh->ehr_handler);
#else
      error ("__builtin_eh_return not supported on this target");
#endif
    }

  emit_label (around_label);
}

/* Convert a ptr_mode address ADDR_TREE to a Pmode address controlled by
   POINTERS_EXTEND_UNSIGNED and return it.  */

rtx
expand_builtin_extend_pointer (tree addr_tree)
{
  rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, 0);
  int extend;

#ifdef POINTERS_EXTEND_UNSIGNED
  extend = POINTERS_EXTEND_UNSIGNED;
#else
  /* The previous EH code did an unsigned extend by default, so we do this
     also for consistency.  */
  extend = 1;
#endif

  return convert_modes (word_mode, ptr_mode, addr, extend);
}

/* In the following functions, we represent entries in the action table
   as 1-based indices.  Special cases are:

         0:     null action record, non-null landing pad; implies cleanups
        -1:     null action record, null landing pad; implies no action
        -2:     no call-site entry; implies must_not_throw
        -3:     we have yet to process outer regions

   Further, no special cases apply to the "next" field of the record.
   For next, 0 means end of list.  */

struct action_record
{
  int offset;
  int filter;
  int next;
};

static int
action_record_eq (const void *pentry, const void *pdata)
{
  const struct action_record *entry = (const struct action_record *) pentry;
  const struct action_record *data = (const struct action_record *) pdata;
  return entry->filter == data->filter && entry->next == data->next;
}

static hashval_t
action_record_hash (const void *pentry)
{
  const struct action_record *entry = (const struct action_record *) pentry;
  return entry->next * 1009 + entry->filter;
}

static int
add_action_record (htab_t ar_hash, int filter, int next)
{
  struct action_record **slot, *new, tmp;

  tmp.filter = filter;
  tmp.next = next;
  slot = (struct action_record **) htab_find_slot (ar_hash, &tmp, INSERT);

  if ((new = *slot) == NULL)
    {
      new = xmalloc (sizeof (*new));
      new->offset = VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data) + 1;
      new->filter = filter;
      new->next = next;
      *slot = new;

      /* The filter value goes in untouched.  The link to the next
         record is a "self-relative" byte offset, or zero to indicate
         that there is no next record.  So convert the absolute 1-based
         indices we've been carrying around into a displacement.  */

      push_sleb128 (&cfun->eh->action_record_data, filter);
      if (next)
        next -= VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data) + 1;
      push_sleb128 (&cfun->eh->action_record_data, next);
    }

  return new->offset;
}
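
/* Editor's note: a worked example, not part of GCC, of the offset
   arithmetic in add_action_record above.  Record A (filter 2, no next)
   is pushed first: its 1-based offset is 1 and it occupies bytes 0-1 of
   action_record_data.  Record B (filter 5, next = A) then gets offset 3;
   after its filter byte is pushed the array holds 3 bytes, so its link
   becomes 1 - (3 + 1) = -3.  That link byte lands at index 3, and
   3 + (-3) = 0 is exactly where record A begins: a self-relative
   displacement.  A small sketch of the same arithmetic:  */
#if 0
#include <stdio.h>

int
main (void)
{
  int size = 0, off_a, off_b, link;

  off_a = size + 1;             /* record A: 1-based offset 1 */
  size += 2;                    /* sleb128 bytes: filter 2, next 0 */

  off_b = size + 1;             /* record B: 1-based offset 3 */
  size += 1;                    /* filter byte (5) pushed first */
  link = off_a - (size + 1);    /* 1 - 4 = -3 */
  size += 1;                    /* the link byte itself */

  printf ("A@%d B@%d link=%d -> byte %d\n", off_a, off_b, link,
          (size - 1) + link);   /* prints byte 0, the start of A */
  return 0;
}
#endif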

static int
collect_one_action_chain (htab_t ar_hash, struct eh_region *region)
{
  struct eh_region *c;
  int next;

  /* If we've reached the top of the region chain, then we have
     no actions, and require no landing pad.  */
  if (region == NULL)
    return -1;

  switch (region->type)
    {
    case ERT_CLEANUP:
      /* A cleanup adds a zero filter to the beginning of the chain, but
         there are special cases to look out for.  If there are *only*
         cleanups along a path, then it compresses to a zero action.
         Further, if there are multiple cleanups along a path, we only
         need to represent one of them, as that is enough to trigger
         entry to the landing pad at runtime.  */
      next = collect_one_action_chain (ar_hash, region->outer);
      if (next <= 0)
        return 0;
      for (c = region->outer; c ; c = c->outer)
        if (c->type == ERT_CLEANUP)
          return next;
      return add_action_record (ar_hash, 0, next);

    case ERT_TRY:
      /* Process the associated catch regions in reverse order.
         If there's a catch-all handler, then we don't need to
         search outer regions.  Use a magic -3 value to record
         that we haven't done the outer search.  */
      next = -3;
      for (c = region->u.try.last_catch; c ; c = c->u.catch.prev_catch)
        {
          if (c->u.catch.type_list == NULL)
            {
              /* Retrieve the filter from the head of the filter list
                 where we have stored it (see assign_filter_values).  */
              int filter
                = TREE_INT_CST_LOW (TREE_VALUE (c->u.catch.filter_list));

              next = add_action_record (ar_hash, filter, 0);
            }
          else
            {
              /* Once the outer search is done, trigger an action record for
                 each filter we have.  */
              tree flt_node;

              if (next == -3)
                {
                  next = collect_one_action_chain (ar_hash, region->outer);

                  /* If there is no next action, terminate the chain.  */
                  if (next == -1)
                    next = 0;
                  /* If all outer actions are cleanups or must_not_throw,
                     we'll have no action record for it, since we had wanted
                     to encode these states in the call-site record directly.
                     Add a cleanup action to the chain to catch these.  */
                  else if (next <= 0)
                    next = add_action_record (ar_hash, 0, 0);
                }

              flt_node = c->u.catch.filter_list;
              for (; flt_node; flt_node = TREE_CHAIN (flt_node))
                {
                  int filter = TREE_INT_CST_LOW (TREE_VALUE (flt_node));
                  next = add_action_record (ar_hash, filter, next);
                }
            }
        }
      return next;

    case ERT_ALLOWED_EXCEPTIONS:
      /* An exception specification adds its filter to the
         beginning of the chain.  */
      next = collect_one_action_chain (ar_hash, region->outer);

      /* If there is no next action, terminate the chain.  */
      if (next == -1)
        next = 0;
      /* If all outer actions are cleanups or must_not_throw,
         we'll have no action record for it, since we had wanted
         to encode these states in the call-site record directly.
         Add a cleanup action to the chain to catch these.  */
      else if (next <= 0)
        next = add_action_record (ar_hash, 0, 0);

      return add_action_record (ar_hash, region->u.allowed.filter, next);

    case ERT_MUST_NOT_THROW:
      /* A must-not-throw region with no inner handlers or cleanups
         requires no call-site entry.  Note that this differs from
         the no handler or cleanup case in that we do require an lsda
         to be generated.  Return a magic -2 value to record this.  */
      return -2;

    case ERT_CATCH:
    case ERT_THROW:
      /* CATCH regions are handled in TRY above.  THROW regions are
         for optimization information only and produce no output.  */
      return collect_one_action_chain (ar_hash, region->outer);

    default:
      gcc_unreachable ();
    }
}
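
/* Editor's note: an illustrative walk-through, not part of GCC, of
   collect_one_action_chain above.  The nesting and the filter value 1
   are assumptions, as if assign_filter_values had handed them out:

       try {                          ERT_TRY, one catch (int) -> filter 1
         // destructor cleanup        ERT_CLEANUP, innermost
         might_throw ();              REG_EH_REGION points at the cleanup
       } catch (int) { ... }

   Starting at the cleanup, we recurse to the TRY.  Its catch has a type
   list, so we first search outward: there is no outer region, giving -1,
   which is normalized to 0 (end of chain).  The catch's filter then
   yields add_action_record (ar_hash, 1, 0).  Back in the cleanup case,
   next > 0 and no outer cleanup exists, so the result is
   add_action_record (ar_hash, 0, next): a zero-filter cleanup action
   chained in front of the filter-1 catch action.  */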

static int
add_call_site (rtx landing_pad, int action)
{
  struct call_site_record *data = cfun->eh->call_site_data;
  int used = cfun->eh->call_site_data_used;
  int size = cfun->eh->call_site_data_size;

  if (used >= size)
    {
      size = (size ? size * 2 : 64);
      data = ggc_realloc (data, sizeof (*data) * size);
      cfun->eh->call_site_data = data;
      cfun->eh->call_site_data_size = size;
    }

  data[used].landing_pad = landing_pad;
  data[used].action = action;

  cfun->eh->call_site_data_used = used + 1;

  return used + call_site_base;
}

/* Turn REG_EH_REGION notes back into NOTE_INSN_EH_REGION notes.
   The new note numbers will not refer to region numbers, but
   instead to call site entries.  */

void
convert_to_eh_region_ranges (void)
{
  rtx insn, iter, note;
  htab_t ar_hash;
  int last_action = -3;
  rtx last_action_insn = NULL_RTX;
  rtx last_landing_pad = NULL_RTX;
  rtx first_no_action_insn = NULL_RTX;
  int call_site = 0;

  if (USING_SJLJ_EXCEPTIONS || cfun->eh->region_tree == NULL)
    return;

  VARRAY_UCHAR_INIT (cfun->eh->action_record_data, 64, "action_record_data");

  ar_hash = htab_create (31, action_record_hash, action_record_eq, free);

  for (iter = get_insns (); iter ; iter = NEXT_INSN (iter))
    if (INSN_P (iter))
      {
        struct eh_region *region;
        int this_action;
        rtx this_landing_pad;

        insn = iter;
        if (NONJUMP_INSN_P (insn)
            && GET_CODE (PATTERN (insn)) == SEQUENCE)
          insn = XVECEXP (PATTERN (insn), 0, 0);

        note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
        if (!note)
          {
            if (! (CALL_P (insn)
                   || (flag_non_call_exceptions
                       && may_trap_p (PATTERN (insn)))))
              continue;
            this_action = -1;
            region = NULL;
          }
        else
          {
            if (INTVAL (XEXP (note, 0)) <= 0)
              continue;
            region = VEC_index (eh_region, cfun->eh->region_array, INTVAL (XEXP (note, 0)));
            this_action = collect_one_action_chain (ar_hash, region);
          }

        /* The existence of catch handlers or must-not-throw regions
           implies that an lsda is needed (even if empty).  */
        if (this_action != -1)
          cfun->uses_eh_lsda = 1;

        /* Delay creation of region notes for no-action regions
           until we're sure that an lsda will be required.  */
        else if (last_action == -3)
          {
            first_no_action_insn = iter;
            last_action = -1;
          }

        /* Cleanups and handlers may share action chains but not
           landing pads.  Collect the landing pad for this region.  */
        if (this_action >= 0)
          {
            struct eh_region *o;
            for (o = region; ! o->landing_pad ; o = o->outer)
              continue;
            this_landing_pad = o->landing_pad;
          }
        else
          this_landing_pad = NULL_RTX;

        /* Differing actions or landing pads implies a change in call-site
           info, which implies some EH_REGION note should be emitted.  */
        if (last_action != this_action
            || last_landing_pad != this_landing_pad)
          {
            /* If we'd not seen a previous action (-3) or the previous
               action was must-not-throw (-2), then we do not need an
               end note.  */
            if (last_action >= -1)
              {
                /* If we delayed the creation of the begin, do it now.  */
                if (first_no_action_insn)
                  {
                    call_site = add_call_site (NULL_RTX, 0);
                    note = emit_note_before (NOTE_INSN_EH_REGION_BEG,
                                             first_no_action_insn);
                    NOTE_EH_HANDLER (note) = call_site;
                    first_no_action_insn = NULL_RTX;
                  }

                note = emit_note_after (NOTE_INSN_EH_REGION_END,
                                        last_action_insn);
                NOTE_EH_HANDLER (note) = call_site;
              }

            /* If the new action is must-not-throw, then no region notes
               are created.  */
            if (this_action >= -1)
              {
                call_site = add_call_site (this_landing_pad,
                                           this_action < 0 ? 0 : this_action);
                note = emit_note_before (NOTE_INSN_EH_REGION_BEG, iter);
                NOTE_EH_HANDLER (note) = call_site;
              }

            last_action = this_action;
            last_landing_pad = this_landing_pad;
          }
        last_action_insn = iter;
      }

  if (last_action >= -1 && ! first_no_action_insn)
    {
      note = emit_note_after (NOTE_INSN_EH_REGION_END, last_action_insn);
      NOTE_EH_HANDLER (note) = call_site;
    }

  htab_delete (ar_hash);
}
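
/* Editor's note: an illustrative before/after, not part of GCC; the
   insn uids, region number and call-site indices are made up.  Before
   this pass, throwing insns carry REG_EH_REGION notes naming regions:

       (call_insn 10 ...)   REG_EH_REGION 2
       (call_insn 14 ...)   REG_EH_REGION 2
       (call_insn 20 ...)   REG_EH_REGION 5

   Afterwards, maximal runs with the same action chain and landing pad
   are bracketed by notes whose NOTE_EH_HANDLER is a call-site index:

       NOTE_INSN_EH_REGION_BEG  (handler 1)
       (call_insn 10 ...)
       (call_insn 14 ...)
       NOTE_INSN_EH_REGION_END  (handler 1)
       NOTE_INSN_EH_REGION_BEG  (handler 2)
       (call_insn 20 ...)
       NOTE_INSN_EH_REGION_END  (handler 2)  */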

struct tree_opt_pass pass_convert_to_eh_region_ranges =
{
  "eh-ranges",                          /* name */
  NULL,                                 /* gate */
  convert_to_eh_region_ranges,          /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  0,                                    /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func,                       /* todo_flags_finish */

};


static void
push_uleb128 (varray_type *data_area, unsigned int value)
{
  do
    {
      unsigned char byte = value & 0x7f;
      value >>= 7;
      if (value)
        byte |= 0x80;
      VARRAY_PUSH_UCHAR (*data_area, byte);
    }
  while (value);
}

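/* Append VALUE to *DATA_AREA as a signed LEB128 number.  The encoding
   ends once the remaining bits are all copies of the sign and the sign
   is already captured by bit 6 of the last byte emitted; for example,
   -624485 encodes as the three bytes 0x9b 0xf1 0x59.  */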
static void
push_sleb128 (varray_type *data_area, int value)
{
  unsigned char byte;
  int more;

  do
    {
      byte = value & 0x7f;
      value >>= 7;
      more = ! ((value == 0 && (byte & 0x40) == 0)
                || (value == -1 && (byte & 0x40) != 0));
      if (more)
        byte |= 0x80;
      VARRAY_PUSH_UCHAR (*data_area, byte);
    }
  while (more);
}

#ifndef HAVE_AS_LEB128
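/* Return the size in bytes of the DWARF2 call-site table as emitted by
   dw2_output_call_site_table: three 4-byte fields per record plus the
   uleb128 encoding of each action reference.  */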
static int
dw2_size_of_call_site_table (void)
{
  int n = cfun->eh->call_site_data_used;
  int size = n * (4 + 4 + 4);
  int i;

  for (i = 0; i < n; ++i)
    {
      struct call_site_record *cs = &cfun->eh->call_site_data[i];
      size += size_of_uleb128 (cs->action);
    }

  return size;
}

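/* Likewise for the setjmp/longjmp call-site table, where each record
   is a uleb128 landing pad dispatch index followed by a uleb128 action
   reference.  */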
static int
sjlj_size_of_call_site_table (void)
{
  int n = cfun->eh->call_site_data_used;
  int size = 0;
  int i;

  for (i = 0; i < n; ++i)
    {
      struct call_site_record *cs = &cfun->eh->call_site_data[i];
      size += size_of_uleb128 (INTVAL (cs->landing_pad));
      size += size_of_uleb128 (cs->action);
    }

  return size;
}
#endif

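/* Output the call-site table in the DWARF2 unwind format: for each
   call site, its start offset and length relative to the start of the
   function, its landing pad offset (zero if there is none), and its
   action record reference.  */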
static void
dw2_output_call_site_table (void)
{
  int n = cfun->eh->call_site_data_used;
  int i;

  for (i = 0; i < n; ++i)
    {
      struct call_site_record *cs = &cfun->eh->call_site_data[i];
      char reg_start_lab[32];
      char reg_end_lab[32];
      char landing_pad_lab[32];

      ASM_GENERATE_INTERNAL_LABEL (reg_start_lab, "LEHB", call_site_base + i);
      ASM_GENERATE_INTERNAL_LABEL (reg_end_lab, "LEHE", call_site_base + i);

      if (cs->landing_pad)
        ASM_GENERATE_INTERNAL_LABEL (landing_pad_lab, "L",
                                     CODE_LABEL_NUMBER (cs->landing_pad));

      /* ??? Perhaps use insn length scaling if the assembler supports
         generic arithmetic.  */
      /* ??? Perhaps use attr_length to choose data1 or data2 instead of
         data4 if the function is small enough.  */
#ifdef HAVE_AS_LEB128
      dw2_asm_output_delta_uleb128 (reg_start_lab,
                                    current_function_func_begin_label,
                                    "region %d start", i);
      dw2_asm_output_delta_uleb128 (reg_end_lab, reg_start_lab,
                                    "length");
      if (cs->landing_pad)
        dw2_asm_output_delta_uleb128 (landing_pad_lab,
                                      current_function_func_begin_label,
                                      "landing pad");
      else
        dw2_asm_output_data_uleb128 (0, "landing pad");
#else
      dw2_asm_output_delta (4, reg_start_lab,
                            current_function_func_begin_label,
                            "region %d start", i);
      dw2_asm_output_delta (4, reg_end_lab, reg_start_lab, "length");
      if (cs->landing_pad)
        dw2_asm_output_delta (4, landing_pad_lab,
                              current_function_func_begin_label,
                              "landing pad");
      else
        dw2_asm_output_data (4, 0, "landing pad");
#endif
      dw2_asm_output_data_uleb128 (cs->action, "action");
    }

  call_site_base += n;
}

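/* Output the call-site table for setjmp/longjmp exceptions.  Each
   record is a uleb128 dispatch index rather than an address range,
   followed by the uleb128 action reference.  */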
static void
sjlj_output_call_site_table (void)
{
  int n = cfun->eh->call_site_data_used;
  int i;

  for (i = 0; i < n; ++i)
    {
      struct call_site_record *cs = &cfun->eh->call_site_data[i];

      dw2_asm_output_data_uleb128 (INTVAL (cs->landing_pad),
                                   "region %d landing pad", i);
      dw2_asm_output_data_uleb128 (cs->action, "action");
    }

  call_site_base += n;
}

/* Tell assembler to switch to the section for the exception handling
   table.  */

void
default_exception_section (void)
{
  if (targetm.have_named_sections)
    {
      int flags;

      if (EH_TABLES_CAN_BE_READ_ONLY)
        {
          int tt_format = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);

          flags = (! flag_pic
                   || ((tt_format & 0x70) != DW_EH_PE_absptr
                       && (tt_format & 0x70) != DW_EH_PE_aligned))
            ? 0 : SECTION_WRITE;
        }
      else
        flags = SECTION_WRITE;
      named_section_flags (".gcc_except_table", flags);
    }
  else if (flag_pic)
    data_section ();
  else
    readonly_data_section ();
}

/* Output a reference from an exception table to the type_info object TYPE.
   TT_FORMAT and TT_FORMAT_SIZE describe the DWARF encoding method used for
   the value.  */

static void
output_ttype (tree type, int tt_format, int tt_format_size)
{
  rtx value;
  bool public = true;

  if (type == NULL_TREE)
    value = const0_rtx;
  else
    {
      struct cgraph_varpool_node *node;

      type = lookup_type_for_runtime (type);
      value = expand_expr (type, NULL_RTX, VOIDmode, EXPAND_INITIALIZER);

      /* Let cgraph know that the rtti decl is used.  Not all of the
         paths below go through assemble_integer, which would take
         care of this for us.  */
      STRIP_NOPS (type);
      if (TREE_CODE (type) == ADDR_EXPR)
        {
          type = TREE_OPERAND (type, 0);
          if (TREE_CODE (type) == VAR_DECL)
            {
              node = cgraph_varpool_node (type);
              if (node)
                cgraph_varpool_mark_needed_node (node);
              public = TREE_PUBLIC (type);
            }
        }
      else if (TREE_CODE (type) != INTEGER_CST)
        abort ();
    }

  /* Allow the target to override the type table entry format.  */
  if (targetm.asm_out.ttype (value))
    return;

  if (tt_format == DW_EH_PE_absptr || tt_format == DW_EH_PE_aligned)
    assemble_integer (value, tt_format_size,
                      tt_format_size * BITS_PER_UNIT, 1);
  else
    dw2_asm_output_encoded_addr_rtx (tt_format, value, public, NULL);
}

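/* Output the language-specific data area (LSDA) for the current
   function: a header giving the encodings in use, the call-site table,
   the action record table, and finally the @TType table of type_info
   references used by catch clauses and exception specifications.  */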
void
output_function_exception_table (void)
{
  int tt_format, cs_format, lp_format, i, n;
#ifdef HAVE_AS_LEB128
  char ttype_label[32];
  char cs_after_size_label[32];
  char cs_end_label[32];
#else
  int call_site_len;
#endif
  int have_tt_data;
  int tt_format_size = 0;

  if (eh_personality_libfunc)
    assemble_external_libcall (eh_personality_libfunc);

  /* Not all functions need anything.  */
  if (! cfun->uses_eh_lsda)
    return;

#ifdef TARGET_UNWIND_INFO
  /* TODO: Move this into target file.  */
  fputs ("\t.personality\t", asm_out_file);
  output_addr_const (asm_out_file, eh_personality_libfunc);
  fputs ("\n\t.handlerdata\n", asm_out_file);
  /* Note that varasm still thinks we're in the function's code section.
     The ".endp" directive that will immediately follow will take us back.  */
#else
  targetm.asm_out.exception_section ();
#endif

  have_tt_data = (VEC_length (tree, cfun->eh->ttype_data) > 0
                  || VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data) > 0);

  /* Indicate the format of the @TType entries.  */
  if (! have_tt_data)
    tt_format = DW_EH_PE_omit;
  else
    {
      tt_format = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
#ifdef HAVE_AS_LEB128
      ASM_GENERATE_INTERNAL_LABEL (ttype_label, "LLSDATT",
                                   current_function_funcdef_no);
#endif
      tt_format_size = size_of_encoded_value (tt_format);

      assemble_align (tt_format_size * BITS_PER_UNIT);
    }

  targetm.asm_out.internal_label (asm_out_file, "LLSDA",
                             current_function_funcdef_no);

  /* The LSDA header.  */

  /* Indicate the format of the landing pad start pointer.  An omitted
     field implies @LPStart == @Start.  */
  /* Currently we always put @LPStart == @Start.  This field would
     be most useful in moving the landing pads completely out of
     line to another section, but it could also be used to minimize
     the size of uleb128 landing pad offsets.  */
  lp_format = DW_EH_PE_omit;
  dw2_asm_output_data (1, lp_format, "@LPStart format (%s)",
                       eh_data_format_name (lp_format));

  /* @LPStart pointer would go here.  */

  dw2_asm_output_data (1, tt_format, "@TType format (%s)",
                       eh_data_format_name (tt_format));

#ifndef HAVE_AS_LEB128
  if (USING_SJLJ_EXCEPTIONS)
    call_site_len = sjlj_size_of_call_site_table ();
  else
    call_site_len = dw2_size_of_call_site_table ();
#endif

  /* A pc-relative 4-byte displacement to the @TType data.  */
  if (have_tt_data)
    {
#ifdef HAVE_AS_LEB128
      char ttype_after_disp_label[32];
      ASM_GENERATE_INTERNAL_LABEL (ttype_after_disp_label, "LLSDATTD",
                                   current_function_funcdef_no);
      dw2_asm_output_delta_uleb128 (ttype_label, ttype_after_disp_label,
                                    "@TType base offset");
      ASM_OUTPUT_LABEL (asm_out_file, ttype_after_disp_label);
#else
      /* Ug.  Alignment queers things.  */
      unsigned int before_disp, after_disp, last_disp, disp;

      before_disp = 1 + 1;
      after_disp = (1 + size_of_uleb128 (call_site_len)
                    + call_site_len
                    + VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data)
                    + (VEC_length (tree, cfun->eh->ttype_data)
                       * tt_format_size));

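      /* The size of the uleb128-encoded displacement depends on the
         displacement value itself, which in turn moves the alignment
         padding it must account for.  Iterate until the value becomes
         stable.  */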
      disp = after_disp;
      do
        {
          unsigned int disp_size, pad;

          last_disp = disp;
          disp_size = size_of_uleb128 (disp);
          pad = before_disp + disp_size + after_disp;
          if (pad % tt_format_size)
            pad = tt_format_size - (pad % tt_format_size);
          else
            pad = 0;
          disp = after_disp + pad;
        }
      while (disp != last_disp);

      dw2_asm_output_data_uleb128 (disp, "@TType base offset");
#endif
    }

  /* Indicate the format of the call-site offsets.  */
#ifdef HAVE_AS_LEB128
  cs_format = DW_EH_PE_uleb128;
#else
  cs_format = DW_EH_PE_udata4;
#endif
  dw2_asm_output_data (1, cs_format, "call-site format (%s)",
                       eh_data_format_name (cs_format));

#ifdef HAVE_AS_LEB128
  ASM_GENERATE_INTERNAL_LABEL (cs_after_size_label, "LLSDACSB",
                               current_function_funcdef_no);
  ASM_GENERATE_INTERNAL_LABEL (cs_end_label, "LLSDACSE",
                               current_function_funcdef_no);
  dw2_asm_output_delta_uleb128 (cs_end_label, cs_after_size_label,
                                "Call-site table length");
  ASM_OUTPUT_LABEL (asm_out_file, cs_after_size_label);
  if (USING_SJLJ_EXCEPTIONS)
    sjlj_output_call_site_table ();
  else
    dw2_output_call_site_table ();
  ASM_OUTPUT_LABEL (asm_out_file, cs_end_label);
#else
  dw2_asm_output_data_uleb128 (call_site_len, "Call-site table length");
  if (USING_SJLJ_EXCEPTIONS)
    sjlj_output_call_site_table ();
  else
    dw2_output_call_site_table ();
#endif

  /* ??? Decode and interpret the data for flag_debug_asm.  */
  n = VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data);
  for (i = 0; i < n; ++i)
    dw2_asm_output_data (1, VARRAY_UCHAR (cfun->eh->action_record_data, i),
                         (i ? NULL : "Action record table"));

  if (have_tt_data)
    assemble_align (tt_format_size * BITS_PER_UNIT);

  i = VEC_length (tree, cfun->eh->ttype_data);
  while (i-- > 0)
    {
      tree type = VEC_index (tree, cfun->eh->ttype_data, i);
      output_ttype (type, tt_format, tt_format_size);
    }

#ifdef HAVE_AS_LEB128
  if (have_tt_data)
      ASM_OUTPUT_LABEL (asm_out_file, ttype_label);
#endif

  /* ??? Decode and interpret the data for flag_debug_asm.  */
  n = VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data);
  for (i = 0; i < n; ++i)
    {
      if (targetm.arm_eabi_unwinder)
        {
          tree type = VARRAY_TREE (cfun->eh->ehspec_data, i);
          output_ttype (type, tt_format, tt_format_size);
        }
      else
        dw2_asm_output_data (1, VARRAY_UCHAR (cfun->eh->ehspec_data, i),
                             (i ? NULL : "Exception specification table"));
    }

  current_function_section (current_function_decl);
}

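/* Record TABLE as the map from throw statements to landing pads
   for function FUN.  */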
void
set_eh_throw_stmt_table (struct function *fun, struct htab *table)
{
  fun->eh->throw_stmt_table = table;
}

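/* Return FUN's map from throw statements to landing pads.  */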
htab_t
get_eh_throw_stmt_table (struct function *fun)
{
  return fun->eh->throw_stmt_table;
}

/* Dump EH information to OUT.  */
void
dump_eh_tree (FILE *out, struct function *fun)
{
  struct eh_region *i;
  int depth = 0;
  static const char * const type_name[] = {"unknown", "cleanup", "try", "catch",
                                           "allowed_exceptions", "must_not_throw",
                                           "throw"};

  i = fun->eh->region_tree;
  if (! i)
    return;

  fprintf (out, "Eh tree:\n");
  while (1)
    {
      fprintf (out, "  %*s %i %s", depth * 2, "",
               i->region_number, type_name [(int)i->type]);
      if (i->tree_label)
        {
          fprintf (out, " tree_label:");
          print_generic_expr (out, i->tree_label, 0);
        }
      fprintf (out, "\n");
      /* If there are sub-regions, process them.  */
      if (i->inner)
        i = i->inner, depth++;
      /* If there are peers, process them.  */
      else if (i->next_peer)
        i = i->next_peer;
      /* Otherwise, step back up the tree to the next peer.  */
      else
        {
          do {
            i = i->outer;
            depth--;
            if (i == NULL)
              return;
          } while (i->next_peer == NULL);
          i = i->next_peer;
        }
    }
}

/* Verify some basic invariants on EH data structures.  Could be extended to
   catch more.  */
void
verify_eh_tree (struct function *fun)
{
  struct eh_region *i, *outer = NULL;
  bool err = false;
  int nvisited = 0;
  int count = 0;
  int j;
  int depth = 0;

  i = fun->eh->region_tree;
  if (! i)
    return;
  for (j = fun->eh->last_region_number; j > 0; --j)
    if ((i = VEC_index (eh_region, fun->eh->region_array, j)))
      {
        count++;
        if (i->region_number != j)
          {
            error ("region_array is corrupted for region %i", i->region_number);
            err = true;
          }
      }

  while (1)
    {
      if (VEC_index (eh_region, fun->eh->region_array, i->region_number) != i)
        {
          error ("region_array is corrupted for region %i", i->region_number);
          err = true;
        }
      if (i->outer != outer)
        {
          error ("outer block of region %i is wrong", i->region_number);
          err = true;
        }
      if (i->may_contain_throw && outer && !outer->may_contain_throw)
        {
          error ("region %i may contain throw and is contained in region that may not",
                 i->region_number);
          err = true;
        }
      if (depth < 0)
        {
          error ("negative nesting depth of region %i", i->region_number);
          err = true;
        }
      nvisited++;
      /* If there are sub-regions, process them.  */
      if (i->inner)
        outer = i, i = i->inner, depth++;
      /* If there are peers, process them.  */
      else if (i->next_peer)
        i = i->next_peer;
      /* Otherwise, step back up the tree to the next peer.  */
      else
        {
          do {
            i = i->outer;
            depth--;
            if (i == NULL)
              {
                if (depth != -1)
                  {
                    error ("tree list ends on depth %i", depth + 1);
                    err = true;
                  }
                if (count != nvisited)
                  {
                    error ("array does not match the region tree");
                    err = true;
                  }
                if (err)
                  {
                    dump_eh_tree (stderr, fun);
                    internal_error ("verify_eh_tree failed");
                  }
                return;
              }
            outer = i->outer;
          } while (i->next_peer == NULL);
          i = i->next_peer;
        }
    }
}

/* Initialize unwind_resume_libfunc.  */

void
default_init_unwind_resume_libfunc (void)
{
  /* The default C++ routines aren't actually C++ specific, so use those.  */
  unwind_resume_libfunc =
    init_one_libfunc (USING_SJLJ_EXCEPTIONS ? "_Unwind_SjLj_Resume"
                                            : "_Unwind_Resume");
}

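/* Gate: run the RTL exception handling passes only when exception
   handling is actually being generated.  */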
static bool
gate_handle_eh (void)
{
  return doing_eh (0);
}

/* Complete generation of exception handling code.  */
static void
rest_of_handle_eh (void)
{
  cleanup_cfg (CLEANUP_PRE_LOOP | CLEANUP_NO_INSN_DEL);
  finish_eh_generation ();
  cleanup_cfg (CLEANUP_PRE_LOOP | CLEANUP_NO_INSN_DEL);
}

struct tree_opt_pass pass_rtl_eh =
{
  "eh",                                 /* name */
  gate_handle_eh,                       /* gate */
  rest_of_handle_eh,                    /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_JUMP,                              /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func,                       /* todo_flags_finish */
  'h'                                   /* letter */
};

#include "gt-except.h"
