/* Mudflap: narrow-pointer bounds-checking by tree rewriting.
   Copyright (C) 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2012
   Free Software Foundation, Inc.
   Contributed by Frank Ch. Eigler <fche@redhat.com>
   and Graydon Hoare <graydon@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */


#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "tm_p.h"
#include "basic-block.h"
#include "flags.h"
#include "function.h"
#include "tree-inline.h"
#include "gimple.h"
#include "tree-iterator.h"
#include "tree-flow.h"
#include "tree-mudflap.h"
#include "tree-dump.h"
#include "tree-pass.h"
#include "hashtab.h"
#include "diagnostic.h"
#include "demangle.h"
#include "langhooks.h"
#include "ggc.h"
#include "cgraph.h"
#include "gimple.h"

/* Internal function decls */


/* Options.  */
#define flag_mudflap_threads (flag_mudflap == 2)
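
/* A flag_mudflap value of 2 selects the thread-aware mode: the lookup-cache
   globals are then read directly, rather than through the per-function
   shadow copies declared further below.  */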

/* Helpers.  */
static tree mf_build_string (const char *string);
static tree mf_varname_tree (tree);
static tree mf_file_function_line_tree (location_t);

/* Indirection-related instrumentation.  */
static void mf_decl_cache_locals (void);
static void mf_decl_clear_locals (void);
static void mf_xform_statements (void);
static unsigned int execute_mudflap_function_ops (void);

/* Addressable variables instrumentation.  */
static void mf_xform_decls (gimple_seq, tree);
static tree mx_xfn_xform_decls (gimple_stmt_iterator *, bool *,
                                struct walk_stmt_info *);
static gimple_seq mx_register_decls (tree, gimple_seq, location_t);
static unsigned int execute_mudflap_function_decls (void);

/* Return true if DECL is an artificial stub that shouldn't be instrumented
   by mf.  We should instrument clones of non-artificial functions.  */
static inline bool
mf_artificial (const_tree decl)
{
  return DECL_ARTIFICIAL (DECL_ORIGIN (decl));
}

/* ------------------------------------------------------------------------ */
/* Some generally helpful functions for mudflap instrumentation.  */

/* Build a reference to a literal string.  */
static tree
mf_build_string (const char *string)
{
  size_t len = strlen (string);
  tree result = mf_mark (build_string (len + 1, string));

  TREE_TYPE (result) = build_array_type
    (char_type_node, build_index_type (size_int (len)));
  TREE_CONSTANT (result) = 1;
  TREE_READONLY (result) = 1;
  TREE_STATIC (result) = 1;

  result = build1 (ADDR_EXPR, build_pointer_type (char_type_node), result);

  return mf_mark (result);
}

/* Create a properly typed STRING_CST node that describes the given
   declaration.  It will be used as an argument for __mf_register().
   Try to construct a helpful string, including file/function/variable
   name.  */

static tree
mf_varname_tree (tree decl)
{
  static pretty_printer buf_rec;
  static int initialized = 0;
  pretty_printer *buf = & buf_rec;
  const char *buf_contents;
  tree result;

  gcc_assert (decl);

  if (!initialized)
    {
      pp_construct (buf, /* prefix */ NULL, /* line-width */ 0);
      initialized = 1;
    }
  pp_clear_output_area (buf);

  /* Add FILENAME[:LINENUMBER[:COLUMNNUMBER]].  */
  {
    expanded_location xloc = expand_location (DECL_SOURCE_LOCATION (decl));
    const char *sourcefile;
    unsigned sourceline = xloc.line;
    unsigned sourcecolumn = 0;
    sourcecolumn = xloc.column;
    sourcefile = xloc.file;
    if (sourcefile == NULL && current_function_decl != NULL_TREE)
      sourcefile = DECL_SOURCE_FILE (current_function_decl);
    if (sourcefile == NULL)
      sourcefile = "<unknown file>";

    pp_string (buf, sourcefile);

    if (sourceline != 0)
      {
        pp_string (buf, ":");
        pp_decimal_int (buf, sourceline);

        if (sourcecolumn != 0)
          {
            pp_string (buf, ":");
            pp_decimal_int (buf, sourcecolumn);
          }
      }
  }

  if (current_function_decl != NULL_TREE)
    {
      /* Add (FUNCTION) */
      pp_string (buf, " (");
      {
        const char *funcname = NULL;
        if (DECL_NAME (current_function_decl))
          funcname = lang_hooks.decl_printable_name (current_function_decl, 1);
        if (funcname == NULL)
          funcname = "anonymous fn";

        pp_string (buf, funcname);
      }
      pp_string (buf, ") ");
    }
  else
    pp_string (buf, " ");

  /* Add <variable-declaration>, possibly demangled.  */
  {
    const char *declname = NULL;

    if (DECL_NAME (decl) != NULL)
      {
        if (strcmp ("GNU C++", lang_hooks.name) == 0)
          {
            /* The gcc/cp decl_printable_name hook doesn't do as good a job as
               the libiberty demangler.  */
            declname = cplus_demangle (IDENTIFIER_POINTER (DECL_NAME (decl)),
                                       DMGL_AUTO | DMGL_VERBOSE);
          }
        if (declname == NULL)
          declname = lang_hooks.decl_printable_name (decl, 3);
      }
    if (declname == NULL)
      declname = "<unnamed variable>";

    pp_string (buf, declname);
  }

  /* Return the lot as a new STRING_CST.  */
  buf_contents = pp_base_formatted_text (buf);
  result = mf_build_string (buf_contents);
  pp_clear_output_area (buf);

  return result;
}


/* And another friend, for producing a simpler message.  */

static tree
mf_file_function_line_tree (location_t location)
{
  expanded_location xloc = expand_location (location);
  const char *file = NULL, *colon, *line, *op, *name, *cp;
  char linecolbuf[30]; /* Enough for two decimal numbers plus a colon.  */
  char *string;
  tree result;

  /* Add FILENAME[:LINENUMBER[:COLUMNNUMBER]].  */
  file = xloc.file;
  if (file == NULL && current_function_decl != NULL_TREE)
    file = DECL_SOURCE_FILE (current_function_decl);
  if (file == NULL)
    file = "<unknown file>";

  if (xloc.line > 0)
    {
      if (xloc.column > 0)
        sprintf (linecolbuf, "%d:%d", xloc.line, xloc.column);
      else
        sprintf (linecolbuf, "%d", xloc.line);
      colon = ":";
      line = linecolbuf;
    }
  else
    colon = line = "";

  /* Add (FUNCTION).  */
  name = lang_hooks.decl_printable_name (current_function_decl, 1);
  if (name)
    {
      op = " (";
      cp = ")";
    }
  else
    op = name = cp = "";

  string = concat (file, colon, line, op, name, cp, NULL);
  result = mf_build_string (string);
  free (string);

  return result;
}


/* global tree nodes */

/* Global tree objects for global variables and functions exported by
   the mudflap runtime library.  mudflap_init must be called
   before using these.  */

/* uintptr_t (usually "unsigned long") */
static GTY (()) tree mf_uintptr_type;

/* struct __mf_cache { uintptr_t low; uintptr_t high; }; */
static GTY (()) tree mf_cache_struct_type;

/* struct __mf_cache * const */
static GTY (()) tree mf_cache_structptr_type;

/* extern struct __mf_cache __mf_lookup_cache []; */
static GTY (()) tree mf_cache_array_decl;

/* extern unsigned char __mf_lc_shift; */
static GTY (()) tree mf_cache_shift_decl;

/* extern uintptr_t __mf_lc_mask; */
static GTY (()) tree mf_cache_mask_decl;

/* Their function-scope local shadows, used in single-threaded mode only.  */

/* auto const unsigned char __mf_lc_shift_l; */
static GTY (()) tree mf_cache_shift_decl_l;

/* auto const uintptr_t __mf_lc_mask_l; */
static GTY (()) tree mf_cache_mask_decl_l;

/* extern void __mf_check (void *ptr, size_t sz, int type, const char *); */
static GTY (()) tree mf_check_fndecl;

/* extern void __mf_register (void *ptr, size_t sz, int type, const char *); */
static GTY (()) tree mf_register_fndecl;

/* extern void __mf_unregister (void *ptr, size_t sz, int type); */
static GTY (()) tree mf_unregister_fndecl;

/* extern void __mf_init (); */
static GTY (()) tree mf_init_fndecl;

/* extern int __mf_set_options (const char*); */
static GTY (()) tree mf_set_options_fndecl;


/* Helper for mudflap_init: construct a decl with the given category,
   name, and type, mark it an external reference, and pushdecl it.  */
static inline tree
mf_make_builtin (enum tree_code category, const char *name, tree type)
{
  tree decl = mf_mark (build_decl (UNKNOWN_LOCATION,
                                   category, get_identifier (name), type));
  TREE_PUBLIC (decl) = 1;
  DECL_EXTERNAL (decl) = 1;
  lang_hooks.decls.pushdecl (decl);
  /* The decl was declared by the compiler.  */
  DECL_ARTIFICIAL (decl) = 1;
  /* And we don't want debug info for it.  */
  DECL_IGNORED_P (decl) = 1;
  return decl;
}

/* Helper for mudflap_init: construct a tree corresponding to the type
     struct __mf_cache { uintptr_t low; uintptr_t high; };
     where uintptr_t is the FIELD_TYPE argument.  */
static inline tree
mf_make_mf_cache_struct_type (tree field_type)
{
  /* There is, abominably, no language-independent way to construct a
     RECORD_TYPE.  So we have to call the basic type construction
     primitives by hand.  */
  tree fieldlo = build_decl (UNKNOWN_LOCATION,
                             FIELD_DECL, get_identifier ("low"), field_type);
  tree fieldhi = build_decl (UNKNOWN_LOCATION,
                             FIELD_DECL, get_identifier ("high"), field_type);

  tree struct_type = make_node (RECORD_TYPE);
  DECL_CONTEXT (fieldlo) = struct_type;
  DECL_CONTEXT (fieldhi) = struct_type;
  DECL_CHAIN (fieldlo) = fieldhi;
  TYPE_FIELDS (struct_type) = fieldlo;
  TYPE_NAME (struct_type) = get_identifier ("__mf_cache");
  layout_type (struct_type);

  return struct_type;
}

/* Initialize the global tree nodes that correspond to mf-runtime.h
   declarations.  */
void
mudflap_init (void)
{
  static bool done = false;
  tree mf_const_string_type;
  tree mf_cache_array_type;
  tree mf_check_register_fntype;
  tree mf_unregister_fntype;
  tree mf_init_fntype;
  tree mf_set_options_fntype;

  if (done)
    return;
  done = true;

  mf_uintptr_type = lang_hooks.types.type_for_mode (ptr_mode,
                                                    /*unsignedp=*/true);
  mf_const_string_type
    = build_pointer_type (build_qualified_type
                          (char_type_node, TYPE_QUAL_CONST));

  mf_cache_struct_type = mf_make_mf_cache_struct_type (mf_uintptr_type);
  mf_cache_structptr_type = build_pointer_type (mf_cache_struct_type);
  mf_cache_array_type = build_array_type (mf_cache_struct_type, 0);
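  /* Note: the array type is built without a domain so that it matches the
     runtime's unsized "extern struct __mf_cache __mf_lookup_cache [];"
     declaration.  */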
  mf_check_register_fntype =
    build_function_type_list (void_type_node, ptr_type_node, size_type_node,
                              integer_type_node, mf_const_string_type, NULL_TREE);
  mf_unregister_fntype =
    build_function_type_list (void_type_node, ptr_type_node, size_type_node,
                              integer_type_node, NULL_TREE);
  mf_init_fntype =
    build_function_type_list (void_type_node, NULL_TREE);
  mf_set_options_fntype =
    build_function_type_list (integer_type_node, mf_const_string_type, NULL_TREE);

  mf_cache_array_decl = mf_make_builtin (VAR_DECL, "__mf_lookup_cache",
                                         mf_cache_array_type);
  mf_cache_shift_decl = mf_make_builtin (VAR_DECL, "__mf_lc_shift",
                                         unsigned_char_type_node);
  mf_cache_mask_decl = mf_make_builtin (VAR_DECL, "__mf_lc_mask",
                                        mf_uintptr_type);
  /* Don't process these in mudflap_enqueue_decl, should they come by
     there for some reason.  */
  mf_mark (mf_cache_array_decl);
  mf_mark (mf_cache_shift_decl);
  mf_mark (mf_cache_mask_decl);
  mf_check_fndecl = mf_make_builtin (FUNCTION_DECL, "__mf_check",
                                     mf_check_register_fntype);
  mf_register_fndecl = mf_make_builtin (FUNCTION_DECL, "__mf_register",
                                        mf_check_register_fntype);
  mf_unregister_fndecl = mf_make_builtin (FUNCTION_DECL, "__mf_unregister",
                                          mf_unregister_fntype);
  mf_init_fndecl = mf_make_builtin (FUNCTION_DECL, "__mf_init",
                                    mf_init_fntype);
  mf_set_options_fndecl = mf_make_builtin (FUNCTION_DECL, "__mf_set_options",
                                           mf_set_options_fntype);
}


/* ------------------------------------------------------------------------ */
/* This is the second part of the mudflap instrumentation.  It works on
   low-level GIMPLE using the CFG, because we want to run this pass after
   tree optimizations have been performed, but we have to preserve the CFG
   for expansion from trees to RTL.
   Below is the list of transformations performed on statements in the
   current function.

 1) Memory reference transforms: Perform the mudflap indirection-related
    tree transforms on memory references.

 2) Mark BUILTIN_ALLOCA calls not inlineable.

 */

static unsigned int
execute_mudflap_function_ops (void)
{
  struct gimplify_ctx gctx;

  /* Don't instrument functions such as the synthetic constructor
     built during mudflap_finish_file.  */
  if (mf_marked_p (current_function_decl)
      || mf_artificial (current_function_decl))
    return 0;

  push_gimplify_context (&gctx);

  add_referenced_var (mf_cache_array_decl);
  add_referenced_var (mf_cache_shift_decl);
  add_referenced_var (mf_cache_mask_decl);

  /* In multithreaded mode, don't cache the lookup cache parameters.  */
  if (! flag_mudflap_threads)
    mf_decl_cache_locals ();

  mf_xform_statements ();

  if (! flag_mudflap_threads)
    mf_decl_clear_locals ();

  pop_gimplify_context (NULL);
  return 0;
}

/* Insert a gimple_seq SEQ on all the outgoing edges out of BB.  Note that
   if BB has more than one edge, SEQ will be replicated for each edge.
   Also, abnormal edges will be ignored.  */

static void
insert_edge_copies_seq (gimple_seq seq, basic_block bb)
{
  edge e;
  edge_iterator ei;
  unsigned n_copies = -1;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if (!(e->flags & EDGE_ABNORMAL))
      n_copies++;

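  /* n_copies was seeded with (unsigned) -1 and incremented once per
     non-abnormal edge above, so it now equals the number of eligible edges
     minus one: every such edge but the last receives a copy of SEQ, and the
     last edge receives SEQ itself.  */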
  FOR_EACH_EDGE (e, ei, bb->succs)
    if (!(e->flags & EDGE_ABNORMAL))
      gsi_insert_seq_on_edge (e, n_copies-- > 0 ? gimple_seq_copy (seq) : seq);
}

/* Create and initialize local shadow variables for the lookup cache
   globals.  Put their decls in the *_l globals for use by
   mf_build_check_statement_for.  */

static void
mf_decl_cache_locals (void)
{
  gimple g;
  gimple_seq seq = gimple_seq_alloc ();

  /* Build the cache vars.  */
  mf_cache_shift_decl_l
    = mf_mark (make_rename_temp (TREE_TYPE (mf_cache_shift_decl),
                               "__mf_lookup_shift_l"));

  mf_cache_mask_decl_l
    = mf_mark (make_rename_temp (TREE_TYPE (mf_cache_mask_decl),
                               "__mf_lookup_mask_l"));

  /* Build initialization nodes for the cache vars.  We just load the
     globals into the cache variables.  */
  g = gimple_build_assign (mf_cache_shift_decl_l, mf_cache_shift_decl);
  gimple_set_location (g, DECL_SOURCE_LOCATION (current_function_decl));
  gimple_seq_add_stmt (&seq, g);

  g = gimple_build_assign (mf_cache_mask_decl_l, mf_cache_mask_decl);
  gimple_set_location (g, DECL_SOURCE_LOCATION (current_function_decl));
  gimple_seq_add_stmt (&seq, g);

  insert_edge_copies_seq (seq, ENTRY_BLOCK_PTR);

  gsi_commit_edge_inserts ();
}


static void
mf_decl_clear_locals (void)
{
  /* Unset local shadows.  */
  mf_cache_shift_decl_l = NULL_TREE;
  mf_cache_mask_decl_l = NULL_TREE;
}

static void
mf_build_check_statement_for (tree base, tree limit,
                              gimple_stmt_iterator *instr_gsi,
                              location_t location, tree dirflag)
{
  gimple_stmt_iterator gsi;
  basic_block cond_bb, then_bb, join_bb;
  edge e;
  tree cond, t, u, v;
  tree mf_base;
  tree mf_elem;
  tree mf_limit;
  gimple g;
  gimple_seq seq, stmts;

  /* We first need to split the current basic block, and start altering
     the CFG.  This allows us to insert the statements we're about to
     construct into the right basic blocks.  */

  cond_bb = gimple_bb (gsi_stmt (*instr_gsi));
  gsi = *instr_gsi;
  gsi_prev (&gsi);
  if (! gsi_end_p (gsi))
    e = split_block (cond_bb, gsi_stmt (gsi));
  else
    e = split_block_after_labels (cond_bb);
  cond_bb = e->src;
  join_bb = e->dest;

  /* A recap at this point: join_bb is the basic block at whose head
     is the gimple statement for which this check expression is being
     built.  cond_bb is the (possibly new, synthetic) basic block the
     end of which will contain the cache-lookup code, and a
     conditional that jumps to the cache-miss code or, much more
     likely, over to join_bb.  */

  /* Create the bb that contains the cache-miss fallback block (mf_check).  */
  then_bb = create_empty_bb (cond_bb);
  make_edge (cond_bb, then_bb, EDGE_TRUE_VALUE);
  make_single_succ_edge (then_bb, join_bb, EDGE_FALLTHRU);

  /* Mark the pseudo-fallthrough edge from cond_bb to join_bb.  */
  e = find_edge (cond_bb, join_bb);
  e->flags = EDGE_FALSE_VALUE;
  e->count = cond_bb->count;
  e->probability = REG_BR_PROB_BASE;

  /* Update dominance info.  Note that bb_join's data was
     updated by split_block.  */
  if (dom_info_available_p (CDI_DOMINATORS))
    {
      set_immediate_dominator (CDI_DOMINATORS, then_bb, cond_bb);
      set_immediate_dominator (CDI_DOMINATORS, join_bb, cond_bb);
    }

  /* Build our local variables.  */
  mf_elem = make_rename_temp (mf_cache_structptr_type, "__mf_elem");
  mf_base = make_rename_temp (mf_uintptr_type, "__mf_base");
  mf_limit = make_rename_temp (mf_uintptr_type, "__mf_limit");

  /* Build: __mf_base = (uintptr_t) <base address expression>.  */
  seq = gimple_seq_alloc ();
  t = fold_convert_loc (location, mf_uintptr_type,
                        unshare_expr (base));
  t = force_gimple_operand (t, &stmts, false, NULL_TREE);
  gimple_seq_add_seq (&seq, stmts);
  g = gimple_build_assign (mf_base, t);
  gimple_set_location (g, location);
  gimple_seq_add_stmt (&seq, g);

  /* Build: __mf_limit = (uintptr_t) <limit address expression>.  */
  t = fold_convert_loc (location, mf_uintptr_type,
                        unshare_expr (limit));
  t = force_gimple_operand (t, &stmts, false, NULL_TREE);
  gimple_seq_add_seq (&seq, stmts);
  g = gimple_build_assign (mf_limit, t);
  gimple_set_location (g, location);
  gimple_seq_add_stmt (&seq, g);

  /* Build: __mf_elem = &__mf_lookup_cache [(__mf_base >> __mf_shift)
                                            & __mf_mask].  */
  t = build2 (RSHIFT_EXPR, mf_uintptr_type, mf_base,
              flag_mudflap_threads ? mf_cache_shift_decl
               : mf_cache_shift_decl_l);
  t = build2 (BIT_AND_EXPR, mf_uintptr_type, t,
              flag_mudflap_threads ? mf_cache_mask_decl
               : mf_cache_mask_decl_l);
  t = build4 (ARRAY_REF,
              TREE_TYPE (TREE_TYPE (mf_cache_array_decl)),
              mf_cache_array_decl, t, NULL_TREE, NULL_TREE);
  t = build1 (ADDR_EXPR, mf_cache_structptr_type, t);
  t = force_gimple_operand (t, &stmts, false, NULL_TREE);
  gimple_seq_add_seq (&seq, stmts);
  g = gimple_build_assign (mf_elem, t);
  gimple_set_location (g, location);
  gimple_seq_add_stmt (&seq, g);

  /* Quick validity check.

     if (__mf_elem->low > __mf_base
         || (__mf_elem->high < __mf_limit))
        {
          __mf_check ();
          ... and only if single-threaded:
          __mf_lookup_shift_l = ...;
          __mf_lookup_mask_l = ...;
        }

     It is expected that this body of code is rarely executed so we mark
     the edge to the THEN clause of the conditional jump as unlikely.  */

  /* Construct t <-- '__mf_elem->low  > __mf_base'.  */
  t = build3 (COMPONENT_REF, mf_uintptr_type,
              build1 (INDIRECT_REF, mf_cache_struct_type, mf_elem),
              TYPE_FIELDS (mf_cache_struct_type), NULL_TREE);
  t = build2 (GT_EXPR, boolean_type_node, t, mf_base);

  /* Construct '__mf_elem->high < __mf_limit'.

     First build:
        1) u <--  '__mf_elem->high'
        2) v <--  '__mf_limit'.

     Then build 'u <-- (u < v)'.  */

  u = build3 (COMPONENT_REF, mf_uintptr_type,
              build1 (INDIRECT_REF, mf_cache_struct_type, mf_elem),
              DECL_CHAIN (TYPE_FIELDS (mf_cache_struct_type)), NULL_TREE);

  v = mf_limit;

  u = build2 (LT_EXPR, boolean_type_node, u, v);

  /* Build the composed conditional: t <-- 't || u'.  Then store the
     result of the evaluation of 't' in a temporary variable which we
     can use as the condition for the conditional jump.  */
  t = build2 (TRUTH_OR_EXPR, boolean_type_node, t, u);
  t = force_gimple_operand (t, &stmts, false, NULL_TREE);
  gimple_seq_add_seq (&seq, stmts);
  cond = make_rename_temp (boolean_type_node, "__mf_unlikely_cond");
  g = gimple_build_assign (cond, t);
  gimple_set_location (g, location);
  gimple_seq_add_stmt (&seq, g);

  /* Build the conditional jump.  'cond' is just a temporary so we can
     simply build a void COND_EXPR.  We do need labels in both arms though.  */
  g = gimple_build_cond (NE_EXPR, cond, boolean_false_node, NULL_TREE,
                         NULL_TREE);
  gimple_set_location (g, location);
  gimple_seq_add_stmt (&seq, g);

  /* At this point, after so much hard work, we have only constructed
     the conditional jump,

     if (__mf_elem->low > __mf_base
         || (__mf_elem->high < __mf_limit))

     The lowered GIMPLE statements representing this code are in the
     sequence 'seq'.

     We can insert this now in the current basic block, i.e. the one that
     the statement we're instrumenting was originally in.  */
  gsi = gsi_last_bb (cond_bb);
  gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);

  /*  Now build up the body of the cache-miss handling:

     __mf_check();
     refresh *_l vars.

     This is the body of the conditional.  */

  seq = gimple_seq_alloc ();
  /* u is a string, so it is already a gimple value.  */
  u = mf_file_function_line_tree (location);
  /* NB: we pass the overall [base..limit] range to mf_check.  */
  v = fold_build2_loc (location, PLUS_EXPR, mf_uintptr_type,
                   fold_build2_loc (location,
                                MINUS_EXPR, mf_uintptr_type, mf_limit, mf_base),
                   build_int_cst (mf_uintptr_type, 1));
  v = force_gimple_operand (v, &stmts, true, NULL_TREE);
  gimple_seq_add_seq (&seq, stmts);
  g = gimple_build_call (mf_check_fndecl, 4, mf_base, v, dirflag, u);
  gimple_seq_add_stmt (&seq, g);

  if (! flag_mudflap_threads)
    {
      if (stmt_ends_bb_p (g))
        {
          gsi = gsi_start_bb (then_bb);
          gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);
          e = split_block (then_bb, g);
          then_bb = e->dest;
          seq = gimple_seq_alloc ();
        }

      g = gimple_build_assign (mf_cache_shift_decl_l, mf_cache_shift_decl);
      gimple_seq_add_stmt (&seq, g);

      g = gimple_build_assign (mf_cache_mask_decl_l, mf_cache_mask_decl);
      gimple_seq_add_stmt (&seq, g);
    }

  /* Insert the check code in the THEN block.  */
  gsi = gsi_start_bb (then_bb);
  gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);

  *instr_gsi = gsi_start_bb (join_bb);
}


/* Check whether the given decl, generally a VAR_DECL or PARM_DECL, is
   eligible for instrumentation.  For the mudflap1 pass, this implies
   that it should be registered with the libmudflap runtime.  For the
   mudflap2 pass this means instrumenting an indirection operation with
   respect to the object.
*/
static int
mf_decl_eligible_p (tree decl)
{
  return ((TREE_CODE (decl) == VAR_DECL || TREE_CODE (decl) == PARM_DECL)
          /* The decl must have its address taken.  In the case of
             arrays, this flag is also set if the indexes are not
             compile-time known valid constants.  */
          /* XXX: not sufficient: return-by-value structs! */
          && TREE_ADDRESSABLE (decl)
          /* The type of the variable must be complete.  */
          && COMPLETE_OR_VOID_TYPE_P (TREE_TYPE (decl))
          /* The decl hasn't been decomposed somehow.  */
          && !DECL_HAS_VALUE_EXPR_P (decl));
}


static void
mf_xform_derefs_1 (gimple_stmt_iterator *iter, tree *tp,
                   location_t location, tree dirflag)
{
  tree type, base, limit, addr, size, t;

  /* Don't instrument read operations.  */
  if (dirflag == integer_zero_node && flag_mudflap_ignore_reads)
    return;

  /* Don't instrument marked nodes.  */
  if (mf_marked_p (*tp))
    return;

  t = *tp;
  type = TREE_TYPE (t);

  if (type == error_mark_node)
    return;

  size = TYPE_SIZE_UNIT (type);

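  /* Each case below computes BASE and LIMIT so that [BASE, LIMIT] is the
     inclusive byte range touched by the access; hence the trailing "- 1"
     when the limit is formed from the base plus SIZE.  */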
  switch (TREE_CODE (t))
    {
    case ARRAY_REF:
    case COMPONENT_REF:
      {
        /* This is trickier than it may first appear.  The reason is
           that we are looking at expressions from the "inside out" at
           this point.  We may have a complex nested aggregate/array
           expression (e.g. "a.b[i].c"), maybe with an indirection as
           the leftmost operator ("p->a.b.d"), where instrumentation
           is necessary.  Or we may have an innocent "a.b.c"
           expression that must not be instrumented.  We need to
           recurse all the way down the nesting structure to figure it
           out: looking just at the outer node is not enough.  */
        tree var;
        int component_ref_only = (TREE_CODE (t) == COMPONENT_REF);
        /* If we have a bitfield component reference, we must note the
           innermost addressable object in ELT, from which we will
           construct the byte-addressable bounds of the bitfield.  */
        tree elt = NULL_TREE;
        int bitfield_ref_p = (TREE_CODE (t) == COMPONENT_REF
                              && DECL_BIT_FIELD_TYPE (TREE_OPERAND (t, 1)));

        /* Iterate to the top of the ARRAY_REF/COMPONENT_REF
           containment hierarchy to find the outermost VAR_DECL.  */
        var = TREE_OPERAND (t, 0);
        while (1)
          {
            if (bitfield_ref_p && elt == NULL_TREE
                && (TREE_CODE (var) == ARRAY_REF
                    || TREE_CODE (var) == COMPONENT_REF))
              elt = var;

            if (TREE_CODE (var) == ARRAY_REF)
              {
                component_ref_only = 0;
                var = TREE_OPERAND (var, 0);
              }
            else if (TREE_CODE (var) == COMPONENT_REF)
              var = TREE_OPERAND (var, 0);
            else if (INDIRECT_REF_P (var)
                     || TREE_CODE (var) == MEM_REF)
              {
                base = TREE_OPERAND (var, 0);
                break;
              }
            else if (TREE_CODE (var) == VIEW_CONVERT_EXPR)
              {
                var = TREE_OPERAND (var, 0);
                if (CONSTANT_CLASS_P (var)
                    && TREE_CODE (var) != STRING_CST)
                  return;
              }
            else
              {
                gcc_assert (TREE_CODE (var) == VAR_DECL
                            || TREE_CODE (var) == PARM_DECL
                            || TREE_CODE (var) == RESULT_DECL
                            || TREE_CODE (var) == STRING_CST);
                /* Don't instrument this access if the underlying
                   variable is not "eligible".  This test matches
                   those arrays that have only known-valid indexes,
                   and thus are not labeled TREE_ADDRESSABLE.  */
                if (! mf_decl_eligible_p (var) || component_ref_only)
                  return;
                else
                  {
                    base = build1 (ADDR_EXPR,
                                   build_pointer_type (TREE_TYPE (var)), var);
                    break;
                  }
              }
          }

        /* Handle the case of ordinary non-indirection structure
           accesses.  These have only nested COMPONENT_REF nodes (no
           INDIRECT_REF), but pass through the above filter loop.
           Note that it's possible for such a struct variable to match
           the eligible_p test because someone else might take its
           address sometime.  */

        /* We need special processing for bitfield components, because
           their addresses cannot be taken.  */
        if (bitfield_ref_p)
          {
            tree field = TREE_OPERAND (t, 1);

            if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST)
              size = DECL_SIZE_UNIT (field);

            if (elt)
              elt = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (elt)),
                            elt);
            addr = fold_convert_loc (location, ptr_type_node, elt ? elt : base);
            addr = fold_build_pointer_plus_loc (location,
                                                addr, byte_position (field));
          }
        else
          addr = build1 (ADDR_EXPR, build_pointer_type (type), t);

        limit = fold_build2_loc (location, MINUS_EXPR, mf_uintptr_type,
                             fold_build2_loc (location, PLUS_EXPR, mf_uintptr_type,
                                          fold_convert (mf_uintptr_type, addr),
                                          size),
                             integer_one_node);
      }
      break;

    case INDIRECT_REF:
      addr = TREE_OPERAND (t, 0);
      base = addr;
      limit = fold_build_pointer_plus_hwi_loc
        (location, fold_build_pointer_plus_loc (location, base, size), -1);
      break;

    case MEM_REF:
      addr = fold_build_pointer_plus_loc (location, TREE_OPERAND (t, 0),
                                          TREE_OPERAND (t, 1));
      base = addr;
      limit = fold_build_pointer_plus_hwi_loc (location,
                           fold_build_pointer_plus_loc (location,
                                                        base, size), -1);
      break;

    case TARGET_MEM_REF:
      addr = tree_mem_ref_addr (ptr_type_node, t);
      base = addr;
      limit = fold_build_pointer_plus_hwi_loc (location,
                           fold_build_pointer_plus_loc (location,
                                                        base, size), -1);
      break;

    case ARRAY_RANGE_REF:
      warning (OPT_Wmudflap,
               "mudflap checking not yet implemented for ARRAY_RANGE_REF");
      return;

    case BIT_FIELD_REF:
      /* ??? merge with COMPONENT_REF code above? */
      {
        tree ofs, rem, bpu;

        /* If we're not dereferencing something, then the access
           must be ok.  */
        if (TREE_CODE (TREE_OPERAND (t, 0)) != INDIRECT_REF)
          return;

        bpu = bitsize_int (BITS_PER_UNIT);
        ofs = fold_convert (bitsizetype, TREE_OPERAND (t, 2));
        rem = size_binop_loc (location, TRUNC_MOD_EXPR, ofs, bpu);
        ofs = size_binop_loc (location, TRUNC_DIV_EXPR, ofs, bpu);

        size = fold_convert (bitsizetype, TREE_OPERAND (t, 1));
        size = size_binop_loc (location, PLUS_EXPR, size, rem);
        size = size_binop_loc (location, CEIL_DIV_EXPR, size, bpu);
        size = fold_convert (sizetype, size);

        addr = TREE_OPERAND (TREE_OPERAND (t, 0), 0);
        addr = fold_convert (ptr_type_node, addr);
        addr = fold_build_pointer_plus_loc (location, addr, ofs);

        base = addr;
        limit = fold_build_pointer_plus_hwi_loc (location,
                             fold_build_pointer_plus_loc (location,
                                                          base, size), -1);
      }
      break;

    default:
      return;
    }

  mf_build_check_statement_for (base, limit, iter, location, dirflag);
}
/* Transform
   1) Memory references.
*/
static void
mf_xform_statements (void)
{
  basic_block bb, next;
  gimple_stmt_iterator i;
  int saved_last_basic_block = last_basic_block;
  enum gimple_rhs_class grhs_class;

  bb = ENTRY_BLOCK_PTR ->next_bb;
  do
    {
      next = bb->next_bb;
      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
        {
          gimple s = gsi_stmt (i);

          /* Only a few GIMPLE statements can reference memory.  */
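          /* The last argument passed below is the dirflag handed on to
             __mf_check: integer_one_node for a write (the assignment LHS),
             integer_zero_node for a read.  */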
          switch (gimple_code (s))
            {
            case GIMPLE_ASSIGN:
              mf_xform_derefs_1 (&i, gimple_assign_lhs_ptr (s),
                                 gimple_location (s), integer_one_node);
              mf_xform_derefs_1 (&i, gimple_assign_rhs1_ptr (s),
                                 gimple_location (s), integer_zero_node);
              grhs_class = get_gimple_rhs_class (gimple_assign_rhs_code (s));
              if (grhs_class == GIMPLE_BINARY_RHS)
                mf_xform_derefs_1 (&i, gimple_assign_rhs2_ptr (s),
                                   gimple_location (s), integer_zero_node);
              break;

            case GIMPLE_RETURN:
              if (gimple_return_retval (s) != NULL_TREE)
                {
                  mf_xform_derefs_1 (&i, gimple_return_retval_ptr (s),
                                     gimple_location (s),
                                     integer_zero_node);
                }
              break;

            default:
              ;
            }
        }
      bb = next;
    }
  while (bb && bb->index <= saved_last_basic_block);
}

/* ------------------------------------------------------------------------ */
/* ADDR_EXPR transforms.  Perform the declaration-related mudflap tree
   transforms on the current function.

   This is the first part of the mudflap instrumentation.  It works on
   high-level GIMPLE because after lowering, all variables are moved out
   of their BIND_EXPR binding context, and we lose liveness information
   for the declarations we wish to instrument.  */

static unsigned int
execute_mudflap_function_decls (void)
{
  struct gimplify_ctx gctx;

  /* Don't instrument functions such as the synthetic constructor
     built during mudflap_finish_file.  */
  if (mf_marked_p (current_function_decl)
      || mf_artificial (current_function_decl))
    return 0;

  push_gimplify_context (&gctx);

  mf_xform_decls (gimple_body (current_function_decl),
                  DECL_ARGUMENTS (current_function_decl));

  pop_gimplify_context (NULL);
  return 0;
}

/* This struct is passed from mf_xform_decls to the statement-walk
   callback to store state needed during the traversal searching for
   objects that have their addresses taken.  */
struct mf_xform_decls_data
{
  tree param_decls;
};


/* Synthesize a CALL_EXPR and a TRY_FINALLY_EXPR, for this chain of
   _DECLs if appropriate.  Arrange to call the __mf_register function
   now, and the __mf_unregister function later for each.  Return the
   gimple sequence after synthesis.  */
gimple_seq
mx_register_decls (tree decl, gimple_seq seq, location_t location)
{
  gimple_seq finally_stmts = NULL;
  gimple_stmt_iterator initially_stmts = gsi_start (seq);

  while (decl != NULL_TREE)
    {
      if (mf_decl_eligible_p (decl)
          /* Not already processed.  */
          && ! mf_marked_p (decl)
          /* Automatic variable.  */
          && ! DECL_EXTERNAL (decl)
          && ! TREE_STATIC (decl))
        {
          tree size = NULL_TREE, variable_name;
          gimple unregister_fncall, register_fncall;
          tree unregister_fncall_param, register_fncall_param;

          /* Variable-sized objects should have had their sizes gimplified
             by the time we get here.  */
          size = fold_convert (size_type_node,
                               TYPE_SIZE_UNIT (TREE_TYPE (decl)));
          gcc_assert (is_gimple_val (size));


          unregister_fncall_param =
            mf_mark (build1 (ADDR_EXPR,
                             build_pointer_type (TREE_TYPE (decl)),
                             decl));
          /* __mf_unregister (&VARIABLE, sizeof (VARIABLE), __MF_TYPE_STACK) */
          unregister_fncall = gimple_build_call (mf_unregister_fndecl, 3,
                                                 unregister_fncall_param,
                                                 size,
                                                 integer_three_node);


          variable_name = mf_varname_tree (decl);
          register_fncall_param =
            mf_mark (build1 (ADDR_EXPR,
                             build_pointer_type (TREE_TYPE (decl)),
                             decl));
          /* __mf_register (&VARIABLE, sizeof (VARIABLE), __MF_TYPE_STACK,
                            "name") */
          register_fncall = gimple_build_call (mf_register_fndecl, 4,
                                               register_fncall_param,
                                               size,
                                               integer_three_node,
                                               variable_name);


          /* Accumulate the two calls.  */
          gimple_set_location (register_fncall, location);
          gimple_set_location (unregister_fncall, location);

          /* Add the __mf_register call at the current appending point.  */
          if (gsi_end_p (initially_stmts))
            {
              if (!mf_artificial (decl))
                warning (OPT_Wmudflap,
                         "mudflap cannot track %qE in stub function",
                         DECL_NAME (decl));
            }
          else
            {
              gsi_insert_before (&initially_stmts, register_fncall,
                                 GSI_SAME_STMT);

              /* Accumulate the FINALLY piece.  */
              gimple_seq_add_stmt (&finally_stmts, unregister_fncall);
            }
          mf_mark (decl);
        }

      decl = DECL_CHAIN (decl);
    }

  /* Actually, (initially_stmts!=NULL) <=> (finally_stmts!=NULL) */
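  /* Wrapping SEQ in a GIMPLE_TRY_FINALLY guarantees that the accumulated
     __mf_unregister calls run on every exit from the binding scope,
     including exits taken via exceptions.  */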
  if (finally_stmts != NULL)
    {
      gimple stmt = gimple_build_try (seq, finally_stmts, GIMPLE_TRY_FINALLY);
      gimple_seq new_seq = gimple_seq_alloc ();

      gimple_seq_add_stmt (&new_seq, stmt);
      return new_seq;
    }
  else
    return seq;
}


/* Process every variable mentioned in BIND_EXPRs.  */
static tree
mx_xfn_xform_decls (gimple_stmt_iterator *gsi,
                    bool *handled_operands_p ATTRIBUTE_UNUSED,
                    struct walk_stmt_info *wi)
{
  struct mf_xform_decls_data *d = (struct mf_xform_decls_data *) wi->info;
  gimple stmt = gsi_stmt (*gsi);

  switch (gimple_code (stmt))
    {
    case GIMPLE_BIND:
      {
        /* Process function parameters now (but only once).  */
        if (d->param_decls)
          {
            gimple_bind_set_body (stmt,
                                  mx_register_decls (d->param_decls,
                                                     gimple_bind_body (stmt),
                                                     gimple_location (stmt)));
            d->param_decls = NULL_TREE;
          }

        gimple_bind_set_body (stmt,
                              mx_register_decls (gimple_bind_vars (stmt),
                                                 gimple_bind_body (stmt),
                                                 gimple_location (stmt)));
      }
      break;

    default:
      break;
    }

  return NULL_TREE;
}

/* Perform the object lifetime tracking mudflap transform on the given function
   tree.  The tree is mutated in place, with possibly copied subtree nodes.

   For every auto variable declared, if its address is ever taken
   within the function, then supply its lifetime to the mudflap
   runtime with the __mf_register and __mf_unregister calls.
*/

static void
mf_xform_decls (gimple_seq fnbody, tree fnparams)
{
  struct mf_xform_decls_data d;
  struct walk_stmt_info wi;
  struct pointer_set_t *pset = pointer_set_create ();

  d.param_decls = fnparams;
  memset (&wi, 0, sizeof (wi));
  wi.info = (void*) &d;
  wi.pset = pset;
  walk_gimple_seq (fnbody, mx_xfn_xform_decls, NULL, &wi);
  pointer_set_destroy (pset);
}


/* ------------------------------------------------------------------------ */
/* Externally visible mudflap functions.  */


/* Mark and return the given tree node to prevent further mudflap
   transforms.  */
static GTY ((param_is (union tree_node))) htab_t marked_trees = NULL;

tree
mf_mark (tree t)
{
  void **slot;

  if (marked_trees == NULL)
    marked_trees = htab_create_ggc (31, htab_hash_pointer, htab_eq_pointer,
                                    NULL);

  slot = htab_find_slot (marked_trees, t, INSERT);
  *slot = t;
  return t;
}

int
mf_marked_p (tree t)
{
  void *entry;

  if (marked_trees == NULL)
    return 0;

  entry = htab_find (marked_trees, t);
  return (entry != NULL);
}

/* Remember the given node as a static of some kind: global data,
   function-scope static, or an anonymous constant.  Its assembler
   label is given.  */

/* A list of globals whose incomplete declarations we encountered.
   Instead of emitting the __mf_register call for them here, it's
   delayed until program finish time.  If they're still incomplete by
   then, warnings are emitted.  */

static GTY (()) VEC(tree,gc) *deferred_static_decls;

/* A list of statements for calling __mf_register() at startup time.  */
static GTY (()) tree enqueued_call_stmt_chain;

static void
mudflap_register_call (tree obj, tree object_size, tree varname)
{
  tree arg, call_stmt;

  arg = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (obj)), obj);
  arg = fold_convert (ptr_type_node, arg);

  call_stmt = build_call_expr (mf_register_fndecl, 4,
                               arg,
                               fold_convert (size_type_node, object_size),
                               /* __MF_TYPE_STATIC */
                               build_int_cst (integer_type_node, 4),
                               varname);

  append_to_statement_list (call_stmt, &enqueued_call_stmt_chain);
}

void
mudflap_enqueue_decl (tree obj)
{
  if (mf_marked_p (obj))
    return;

  /* We don't need to process variable decls that are internally
     generated extern.  If we did, we'd end up with warnings for them
     during mudflap_finish_file ().  That would confuse the user,
     since the text would refer to variables that don't show up in the
     user's source code.  */
  if (DECL_P (obj) && DECL_EXTERNAL (obj) && mf_artificial (obj))
    return;

  VEC_safe_push (tree, gc, deferred_static_decls, obj);
}


void
mudflap_enqueue_constant (tree obj)
{
  tree object_size, varname;

  if (mf_marked_p (obj))
    return;

  if (TREE_CODE (obj) == STRING_CST)
    object_size = size_int (TREE_STRING_LENGTH (obj));
  else
    object_size = size_in_bytes (TREE_TYPE (obj));

  if (TREE_CODE (obj) == STRING_CST)
    varname = mf_build_string ("string literal");
  else
    varname = mf_build_string ("constant");

  mudflap_register_call (obj, object_size, varname);
}


/* Emit any file-wide instrumentation.  */
void
mudflap_finish_file (void)
{
  tree ctor_statements = NULL_TREE;

  /* No need to continue when there were errors.  */
  if (seen_error ())
    return;

  /* Insert a call to __mf_init.  */
  {
    tree call2_stmt = build_call_expr (mf_init_fndecl, 0);
    append_to_statement_list (call2_stmt, &ctor_statements);
  }

  /* If appropriate, call __mf_set_options to pass along read-ignore mode.  */
  if (flag_mudflap_ignore_reads)
    {
      tree arg = mf_build_string ("-ignore-reads");
      tree call_stmt = build_call_expr (mf_set_options_fndecl, 1, arg);
      append_to_statement_list (call_stmt, &ctor_statements);
    }

  /* Process all enqueued object decls.  */
  if (deferred_static_decls)
    {
      size_t i;
      tree obj;
      FOR_EACH_VEC_ELT (tree, deferred_static_decls, i, obj)
        {
          gcc_assert (DECL_P (obj));

          if (mf_marked_p (obj))
            continue;

          /* Omit registration for static unaddressed objects.  NB:
             Perform registration for non-static objects regardless of
             TREE_USED or TREE_ADDRESSABLE, because they may be used
             from other compilation units.  */
          if (! TREE_PUBLIC (obj) && ! TREE_ADDRESSABLE (obj))
            continue;

          if (! COMPLETE_TYPE_P (TREE_TYPE (obj)))
            {
              warning (OPT_Wmudflap,
                       "mudflap cannot track unknown size extern %qE",
                       DECL_NAME (obj));
              continue;
            }

          mudflap_register_call (obj,
                                 size_in_bytes (TREE_TYPE (obj)),
                                 mf_varname_tree (obj));
        }

      VEC_truncate (tree, deferred_static_decls, 0);
    }

  /* Append all the enqueued registration calls.  */
  if (enqueued_call_stmt_chain)
    {
      append_to_statement_list (enqueued_call_stmt_chain, &ctor_statements);
      enqueued_call_stmt_chain = NULL_TREE;
    }

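  /* Emit the accumulated calls as a static constructor with a reserved
     (very early) initialization priority, so objects are registered with
     the runtime before ordinary user constructors run.  */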
  cgraph_build_static_cdtor ('I', ctor_statements,
                             MAX_RESERVED_INIT_PRIORITY-1);
}


static bool
gate_mudflap (void)
{
  return flag_mudflap != 0;
}

struct gimple_opt_pass pass_mudflap_1 =
{
 {
  GIMPLE_PASS,
  "mudflap1",                           /* name */
  gate_mudflap,                         /* gate */
  execute_mudflap_function_decls,       /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_NONE,                              /* tv_id */
  PROP_gimple_any,                      /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */

 }
};

struct gimple_opt_pass pass_mudflap_2 =
{
 {
  GIMPLE_PASS,
  "mudflap2",                           /* name */
  gate_mudflap,                         /* gate */
  execute_mudflap_function_ops,         /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_NONE,                              /* tv_id */
  PROP_ssa | PROP_cfg | PROP_gimple_leh,/* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_verify_flow | TODO_verify_stmts
  | TODO_update_ssa                     /* todo_flags_finish */
 }
};

#include "gt-tree-mudflap.h"
