/* GIMPLE lowering pass.  Converts High GIMPLE into Low GIMPLE.

   Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "gimple.h"
#include "tree-iterator.h"
#include "tree-inline.h"
#include "tree-flow.h"
#include "flags.h"
#include "function.h"
#include "diagnostic-core.h"
#include "tree-pass.h"

/* The differences between High GIMPLE and Low GIMPLE are the
   following:

   1- Lexical scopes are removed (i.e., GIMPLE_BIND disappears).

   2- GIMPLE_TRY and GIMPLE_CATCH are converted to abnormal control
      flow and exception regions are built as an on-the-side region
      hierarchy (See tree-eh.c:lower_eh_constructs).

   3- Multiple identical return statements are grouped into a single
      return and gotos to the unique return site.  */
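
/* As an illustrative sketch only (simplified, not actual compiler
   output), a function such as

     int f (int a)
     {
       { int b = a + 1; if (b != 0) return b; }
       return 0;
     }

   leaves this pass with the inner lexical scope flattened ('b' is
   recorded as an ordinary function-local variable), and with both
   returns funnelled through a single return site emitted at the end
   of the function.  */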

/* Match a return statement with a label.  During lowering, we identify
   identical return statements and replace duplicates with a jump to
   the corresponding label.  */
struct return_statements_t
{
  tree label;
  gimple stmt;
};
typedef struct return_statements_t return_statements_t;

DEF_VEC_O(return_statements_t);
DEF_VEC_ALLOC_O(return_statements_t,heap);
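
/* Editorial note: the two macros above instantiate GCC's growable-vector
   (VEC) API for return_statements_t objects allocated on the heap; they
   are what make the VEC_alloc, VEC_safe_push, VEC_last and VEC_truncate
   calls below valid for VEC(return_statements_t,heap).  */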

struct lower_data
{
  /* Block the current statement belongs to.  */
  tree block;

  /* A vector of label and return statements to be moved to the end
     of the function.  */
  VEC(return_statements_t,heap) *return_statements;

  /* True if the current statement cannot fall through.  */
  bool cannot_fallthru;

  /* True if the function calls __builtin_setjmp.  */
  bool calls_builtin_setjmp;
};

static void lower_stmt (gimple_stmt_iterator *, struct lower_data *);
static void lower_gimple_bind (gimple_stmt_iterator *, struct lower_data *);
static void lower_gimple_return (gimple_stmt_iterator *, struct lower_data *);
static void lower_builtin_setjmp (gimple_stmt_iterator *);


/* Lower the body of current_function_decl from High GIMPLE into Low
   GIMPLE.  */

static unsigned int
lower_function_body (void)
{
  struct lower_data data;
  gimple_seq body = gimple_body (current_function_decl);
  gimple_seq lowered_body;
  gimple_stmt_iterator i;
  gimple bind;
  tree t;
  gimple x;

  /* The gimplifier should've left a body of exactly one statement,
     namely a GIMPLE_BIND.  */
  gcc_assert (gimple_seq_first (body) == gimple_seq_last (body)
              && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND);

  memset (&data, 0, sizeof (data));
  data.block = DECL_INITIAL (current_function_decl);
  BLOCK_SUBBLOCKS (data.block) = NULL_TREE;
  BLOCK_CHAIN (data.block) = NULL_TREE;
  TREE_ASM_WRITTEN (data.block) = 1;
  data.return_statements = VEC_alloc (return_statements_t, heap, 8);

  bind = gimple_seq_first_stmt (body);
  lowered_body = NULL;
  gimple_seq_add_stmt (&lowered_body, bind);
  i = gsi_start (lowered_body);
  lower_gimple_bind (&i, &data);

  /* Once the old body has been lowered, replace it with the new
     lowered sequence.  */
  gimple_set_body (current_function_decl, lowered_body);

  i = gsi_last (lowered_body);

  /* If the function falls off the end, we need a null return statement.
     If we've already got one in the return_statements vector, we don't
     need to do anything special.  Otherwise build one by hand.  */
  if (gimple_seq_may_fallthru (lowered_body)
      && (VEC_empty (return_statements_t, data.return_statements)
          || gimple_return_retval (VEC_last (return_statements_t,
                                   data.return_statements)->stmt) != NULL))
    {
      x = gimple_build_return (NULL);
      gimple_set_location (x, cfun->function_end_locus);
      gimple_set_block (x, DECL_INITIAL (current_function_decl));
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);
    }

  /* If we lowered any return statements, emit the representative
     at the end of the function.  */
  while (!VEC_empty (return_statements_t, data.return_statements))
    {
      return_statements_t t;

      /* Unfortunately, we can't use VEC_pop because it returns void for
         objects.  */
      t = *VEC_last (return_statements_t, data.return_statements);
      VEC_truncate (return_statements_t,
                    data.return_statements,
                    VEC_length (return_statements_t,
                                data.return_statements) - 1);

      x = gimple_build_label (t.label);
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);
      gsi_insert_after (&i, t.stmt, GSI_CONTINUE_LINKING);
    }

  /* If the function calls __builtin_setjmp, we need to emit the computed
     goto that will serve as the unique dispatcher for all the receivers.  */
  if (data.calls_builtin_setjmp)
    {
      tree disp_label, disp_var, arg;

      /* Build 'DISP_LABEL:' and insert.  */
      disp_label = create_artificial_label (cfun->function_end_locus);
      /* This mark will create forward edges from every call site.  */
      DECL_NONLOCAL (disp_label) = 1;
      cfun->has_nonlocal_label = 1;
      x = gimple_build_label (disp_label);
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);

      /* Build 'DISP_VAR = __builtin_setjmp_dispatcher (DISP_LABEL);'
         and insert.  */
      disp_var = create_tmp_var (ptr_type_node, "setjmpvar");
      arg = build_addr (disp_label, current_function_decl);
      t = builtin_decl_implicit (BUILT_IN_SETJMP_DISPATCHER);
      x = gimple_build_call (t, 1, arg);
      gimple_call_set_lhs (x, disp_var);

      /* Build 'goto DISP_VAR;' and insert.  */
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);
      x = gimple_build_goto (disp_var);
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);
    }

  gcc_assert (data.block == DECL_INITIAL (current_function_decl));
  BLOCK_SUBBLOCKS (data.block)
    = blocks_nreverse (BLOCK_SUBBLOCKS (data.block));

  clear_block_marks (data.block);
  VEC_free (return_statements_t, heap, data.return_statements);
  return 0;
}

struct gimple_opt_pass pass_lower_cf =
{
 {
  GIMPLE_PASS,
  "lower",                              /* name */
  NULL,                                 /* gate */
  lower_function_body,                  /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_NONE,                              /* tv_id */
  PROP_gimple_any,                      /* properties_required */
  PROP_gimple_lcf,                      /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  0                                     /* todo_flags_finish */
 }
};
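
/* Editorial note: PROP_gimple_lcf ("lowered control flow") in
   properties_provided is the property that later passes name in their
   properties_required when they can only run on Low GIMPLE; the pass
   manager verifies such properties are available before running a
   pass.  */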


/* Verify that the types of the call arguments match those of the
   function declaration.  If we cannot verify this or there is a
   mismatch, return false.  */

static bool
gimple_check_call_args (gimple stmt, tree fndecl)
{
  tree parms, p;
  unsigned int i, nargs;

  /* Calls to internal functions always match their signature.  */
  if (gimple_call_internal_p (stmt))
    return true;

  nargs = gimple_call_num_args (stmt);

  /* Get argument types for verification.  */
  if (fndecl)
    parms = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  else
    parms = TYPE_ARG_TYPES (gimple_call_fntype (stmt));

  /* Verify that the types of the arguments match those of the
     function declaration.  If we cannot verify this or there is a
     mismatch, return false.  */
  if (fndecl && DECL_ARGUMENTS (fndecl))
    {
      for (i = 0, p = DECL_ARGUMENTS (fndecl);
           i < nargs;
           i++, p = DECL_CHAIN (p))
        {
          /* We cannot distinguish a varargs function from the case
             of excess arguments; still, deferring the inlining
             decision to the callee is possible.  */
          if (!p)
            break;
          if (p == error_mark_node
              || gimple_call_arg (stmt, i) == error_mark_node
              || !fold_convertible_p (DECL_ARG_TYPE (p),
                                      gimple_call_arg (stmt, i)))
            return false;
        }
    }
  else if (parms)
    {
      for (i = 0, p = parms; i < nargs; i++, p = TREE_CHAIN (p))
        {
          /* If this is a varargs function, defer the inlining
             decision to the callee.  */
          if (!p)
            break;
          if (TREE_VALUE (p) == error_mark_node
              || gimple_call_arg (stmt, i) == error_mark_node
              || TREE_CODE (TREE_VALUE (p)) == VOID_TYPE
              || !fold_convertible_p (TREE_VALUE (p),
                                      gimple_call_arg (stmt, i)))
            return false;
        }
    }
  else
    {
      if (nargs != 0)
        return false;
    }
  return true;
}
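
/* Illustrative examples (hypothetical): given 'void f (int);', a call
   that passes a struct argument is not fold-convertible to int, so the
   checks above return false; and when neither DECL_ARGUMENTS nor
   TYPE_ARG_TYPES is available, only a zero-argument call is accepted.  */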

/* Verify that the argument types and the lhs of CALL_STMT match those
   of the function declaration CALLEE.
   If we cannot verify this or there is a mismatch, return false.  */

bool
gimple_check_call_matching_types (gimple call_stmt, tree callee)
{
  tree lhs;

  if ((DECL_RESULT (callee)
       && !DECL_BY_REFERENCE (DECL_RESULT (callee))
       && (lhs = gimple_call_lhs (call_stmt)) != NULL_TREE
       && !useless_type_conversion_p (TREE_TYPE (DECL_RESULT (callee)),
                                      TREE_TYPE (lhs))
       && !fold_convertible_p (TREE_TYPE (DECL_RESULT (callee)), lhs))
      || !gimple_check_call_args (call_stmt, callee))
    return false;
  return true;
}

/* Lower sequence SEQ.  Unlike gimplification, the statements are not
   relowered when they are changed -- if this has to be done, the lowering
   routine must do it explicitly.  DATA is passed through the recursion.  */

static void
lower_sequence (gimple_seq seq, struct lower_data *data)
{
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start (seq); !gsi_end_p (gsi); )
    lower_stmt (&gsi, data);
}


/* Lower the OpenMP directive statement pointed to by GSI.  DATA is
   passed through the recursion.  */

static void
lower_omp_directive (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  gimple stmt;

  stmt = gsi_stmt (*gsi);

  lower_sequence (gimple_omp_body (stmt), data);
  gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
  gsi_insert_seq_before (gsi, gimple_omp_body (stmt), GSI_SAME_STMT);
  gimple_omp_set_body (stmt, NULL);
  gsi_remove (gsi, false);
}


/* Lower the statement pointed to by GSI.  DATA is passed through the
   recursion.  We try to track the fallthruness of statements and get rid
   of unreachable return statements in order to prevent the EH lowering
   pass from adding useless edges that can cause bogus warnings to be
   issued later.  This guess need not be 100% accurate; simply be
   conservative and reset cannot_fallthru to false if we don't know.  */

static void
lower_stmt (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  gimple stmt = gsi_stmt (*gsi);

  gimple_set_block (stmt, data->block);

  switch (gimple_code (stmt))
    {
    case GIMPLE_BIND:
      lower_gimple_bind (gsi, data);
      /* Propagate fallthruness.  */
      return;

    case GIMPLE_COND:
    case GIMPLE_GOTO:
    case GIMPLE_SWITCH:
      data->cannot_fallthru = true;
      gsi_next (gsi);
      return;

    case GIMPLE_RETURN:
      if (data->cannot_fallthru)
        {
          gsi_remove (gsi, false);
          /* Propagate fallthruness.  */
        }
      else
        {
          lower_gimple_return (gsi, data);
          data->cannot_fallthru = true;
        }
      return;

    case GIMPLE_TRY:
      {
        bool try_cannot_fallthru;
        lower_sequence (gimple_try_eval (stmt), data);
        try_cannot_fallthru = data->cannot_fallthru;
        data->cannot_fallthru = false;
        lower_sequence (gimple_try_cleanup (stmt), data);
        /* See gimple_stmt_may_fallthru for the rationale.  */
        if (gimple_try_kind (stmt) == GIMPLE_TRY_FINALLY)
          {
            data->cannot_fallthru |= try_cannot_fallthru;
            gsi_next (gsi);
            return;
          }
      }
      break;

    case GIMPLE_CATCH:
      data->cannot_fallthru = false;
      lower_sequence (gimple_catch_handler (stmt), data);
      break;

    case GIMPLE_EH_FILTER:
      data->cannot_fallthru = false;
      lower_sequence (gimple_eh_filter_failure (stmt), data);
      break;

    case GIMPLE_EH_ELSE:
      lower_sequence (gimple_eh_else_n_body (stmt), data);
      lower_sequence (gimple_eh_else_e_body (stmt), data);
      break;

    case GIMPLE_NOP:
    case GIMPLE_ASM:
    case GIMPLE_ASSIGN:
    case GIMPLE_PREDICT:
    case GIMPLE_LABEL:
    case GIMPLE_EH_MUST_NOT_THROW:
    case GIMPLE_OMP_FOR:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SECTIONS_SWITCH:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_RETURN:
    case GIMPLE_OMP_ATOMIC_LOAD:
    case GIMPLE_OMP_ATOMIC_STORE:
    case GIMPLE_OMP_CONTINUE:
      break;

    case GIMPLE_CALL:
      {
        tree decl = gimple_call_fndecl (stmt);

        if (decl
            && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
            && DECL_FUNCTION_CODE (decl) == BUILT_IN_SETJMP)
          {
            lower_builtin_setjmp (gsi);
            data->cannot_fallthru = false;
            data->calls_builtin_setjmp = true;
            return;
          }

        if (decl && (flags_from_decl_or_type (decl) & ECF_NORETURN))
          {
            data->cannot_fallthru = true;
            gsi_next (gsi);
            return;
          }
      }
      break;

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
      data->cannot_fallthru = false;
      lower_omp_directive (gsi, data);
      data->cannot_fallthru = false;
      return;

    case GIMPLE_TRANSACTION:
      lower_sequence (gimple_transaction_body (stmt), data);
      break;

    default:
      gcc_unreachable ();
    }

  data->cannot_fallthru = false;
  gsi_next (gsi);
}
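
/* For example, in a sequence such as

     goto <D.123>;
     return x;

   the goto sets cannot_fallthru, so the unreachable return is simply
   deleted by the GIMPLE_RETURN case above instead of being rewritten
   into a goto to the common return site.  */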

/* Lower a GIMPLE_BIND statement at GSI.  DATA is passed through the
   recursion.  */

static void
lower_gimple_bind (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  tree old_block = data->block;
  gimple stmt = gsi_stmt (*gsi);
  tree new_block = gimple_bind_block (stmt);

  if (new_block)
    {
      if (new_block == old_block)
        {
          /* The outermost block of the original function may not be the
             outermost statement chain of the gimplified function.  So we
             may see the outermost block just inside the function.  */
          gcc_assert (new_block == DECL_INITIAL (current_function_decl));
          new_block = NULL;
        }
      else
        {
          /* We do not expect to handle duplicate blocks.  */
          gcc_assert (!TREE_ASM_WRITTEN (new_block));
          TREE_ASM_WRITTEN (new_block) = 1;

          /* Block tree may get clobbered by inlining.  Normally this would
             be fixed in rest_of_decl_compilation using block notes, but
             since we are not going to emit them, it is up to us.  */
          BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (old_block);
          BLOCK_SUBBLOCKS (old_block) = new_block;
          BLOCK_SUBBLOCKS (new_block) = NULL_TREE;
          BLOCK_SUPERCONTEXT (new_block) = old_block;

          data->block = new_block;
        }
    }

  record_vars (gimple_bind_vars (stmt));
  lower_sequence (gimple_bind_body (stmt), data);

  if (new_block)
    {
      gcc_assert (data->block == new_block);

      BLOCK_SUBBLOCKS (new_block)
        = blocks_nreverse (BLOCK_SUBBLOCKS (new_block));
      data->block = old_block;
    }

  /* The GIMPLE_BIND no longer carries any useful information -- kill it.  */
  gsi_insert_seq_before (gsi, gimple_bind_body (stmt), GSI_SAME_STMT);
  gsi_remove (gsi, false);
}

/* Try to determine whether a TRY_CATCH expression can fall through.
   This is a subroutine of block_may_fallthru.  */

static bool
try_catch_may_fallthru (const_tree stmt)
{
  tree_stmt_iterator i;

  /* If the TRY block can fall through, the whole TRY_CATCH can
     fall through.  */
  if (block_may_fallthru (TREE_OPERAND (stmt, 0)))
    return true;

  i = tsi_start (TREE_OPERAND (stmt, 1));
  switch (TREE_CODE (tsi_stmt (i)))
    {
    case CATCH_EXPR:
      /* We expect to see a sequence of CATCH_EXPR trees, each with a
         catch expression and a body.  The whole TRY_CATCH may fall
         through iff any of the catch bodies falls through.  */
      for (; !tsi_end_p (i); tsi_next (&i))
        {
          if (block_may_fallthru (CATCH_BODY (tsi_stmt (i))))
            return true;
        }
      return false;

    case EH_FILTER_EXPR:
      /* The exception filter expression only matters if there is an
         exception.  If the exception does not match EH_FILTER_TYPES,
         we will execute EH_FILTER_FAILURE, and we will fall through
         if that falls through.  If the exception does match
         EH_FILTER_TYPES, the stack unwinder will continue up the
         stack, so we will not fall through.  We don't know whether we
         will throw an exception which matches EH_FILTER_TYPES or not,
         so we just ignore EH_FILTER_TYPES and assume that we might
         throw an exception which doesn't match.  */
      return block_may_fallthru (EH_FILTER_FAILURE (tsi_stmt (i)));

    default:
      /* This case represents statements to be executed when an
         exception occurs.  Those statements are implicitly followed
         by a RESX statement to resume execution after the exception.
         So in this case the TRY_CATCH never falls through.  */
      return false;
    }
}


/* Same as above, but for a GIMPLE_TRY_CATCH.  */

static bool
gimple_try_catch_may_fallthru (gimple stmt)
{
  gimple_stmt_iterator i;

  /* We don't handle GIMPLE_TRY_FINALLY.  */
  gcc_assert (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH);

  /* If the TRY block can fall through, the whole TRY_CATCH can
     fall through.  */
  if (gimple_seq_may_fallthru (gimple_try_eval (stmt)))
    return true;

  i = gsi_start (gimple_try_cleanup (stmt));
  switch (gimple_code (gsi_stmt (i)))
    {
    case GIMPLE_CATCH:
      /* We expect to see a sequence of GIMPLE_CATCH stmts, each with a
         catch expression and a body.  The whole try/catch may fall
         through iff any of the catch bodies falls through.  */
      for (; !gsi_end_p (i); gsi_next (&i))
        {
          if (gimple_seq_may_fallthru (gimple_catch_handler (gsi_stmt (i))))
            return true;
        }
      return false;

    case GIMPLE_EH_FILTER:
      /* The exception filter expression only matters if there is an
         exception.  If the exception does not match EH_FILTER_TYPES,
         we will execute EH_FILTER_FAILURE, and we will fall through
         if that falls through.  If the exception does match
         EH_FILTER_TYPES, the stack unwinder will continue up the
         stack, so we will not fall through.  We don't know whether we
         will throw an exception which matches EH_FILTER_TYPES or not,
         so we just ignore EH_FILTER_TYPES and assume that we might
         throw an exception which doesn't match.  */
      return gimple_seq_may_fallthru (gimple_eh_filter_failure (gsi_stmt (i)));

    default:
      /* This case represents statements to be executed when an
         exception occurs.  Those statements are implicitly followed
         by a GIMPLE_RESX to resume execution after the exception.  So
         in this case the try/catch never falls through.  */
      return false;
    }
}


/* Try to determine if we can fall out of the bottom of BLOCK.  This guess
   need not be 100% accurate; simply be conservative and return true if we
   don't know.  This is used only to avoid stupidly generating extra code.
   If we're wrong, we'll just delete the extra code later.  */

bool
block_may_fallthru (const_tree block)
{
  /* This CONST_CAST is okay because expr_last returns its argument
     unmodified and we assign it to a const_tree.  */
  const_tree stmt = expr_last (CONST_CAST_TREE (block));

  switch (stmt ? TREE_CODE (stmt) : ERROR_MARK)
    {
    case GOTO_EXPR:
    case RETURN_EXPR:
      /* Easy cases.  If the last statement of the block implies
         control transfer, then we can't fall through.  */
      return false;

    case SWITCH_EXPR:
      /* If SWITCH_LABELS is set, this is lowered, and represents a
         branch to a selected label and hence cannot fall through.
         Otherwise SWITCH_BODY is set, and the switch can fall
         through.  */
      return SWITCH_LABELS (stmt) == NULL_TREE;

    case COND_EXPR:
      if (block_may_fallthru (COND_EXPR_THEN (stmt)))
        return true;
      return block_may_fallthru (COND_EXPR_ELSE (stmt));

    case BIND_EXPR:
      return block_may_fallthru (BIND_EXPR_BODY (stmt));

    case TRY_CATCH_EXPR:
      return try_catch_may_fallthru (stmt);

    case TRY_FINALLY_EXPR:
      /* The finally clause is always executed after the try clause,
         so if it does not fall through, then the try-finally will not
         fall through.  Otherwise, if the try clause does not fall
         through, then when the finally clause falls through it will
         resume execution wherever the try clause was going.  So the
         whole try-finally will only fall through if both the try
         clause and the finally clause fall through.  */
      return (block_may_fallthru (TREE_OPERAND (stmt, 0))
              && block_may_fallthru (TREE_OPERAND (stmt, 1)));

    case MODIFY_EXPR:
      if (TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR)
        stmt = TREE_OPERAND (stmt, 1);
      else
        return true;
      /* FALLTHRU */

    case CALL_EXPR:
      /* Functions that do not return do not fall through.  */
      return (call_expr_flags (stmt) & ECF_NORETURN) == 0;

    case CLEANUP_POINT_EXPR:
      return block_may_fallthru (TREE_OPERAND (stmt, 0));

    default:
      return true;
    }
}
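
/* A few illustrative cases: a block ending in 'return 0;' cannot fall
   through; 'if (p) return 1;' may fall through via the empty else arm;
   and a block ending in 'exit (1);' does not fall through because exit
   is marked ECF_NORETURN.  */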


/* Try to determine if we can continue executing the statement
   immediately following STMT.  This guess need not be 100% accurate;
   simply be conservative and return true if we don't know.  This is
   used only to avoid stupidly generating extra code.  If we're wrong,
   we'll just delete the extra code later.  */

bool
gimple_stmt_may_fallthru (gimple stmt)
{
  if (!stmt)
    return true;

  switch (gimple_code (stmt))
    {
    case GIMPLE_GOTO:
    case GIMPLE_RETURN:
    case GIMPLE_RESX:
      /* Easy cases.  If the last statement of the seq implies
         control transfer, then we can't fall through.  */
      return false;

    case GIMPLE_SWITCH:
      /* Switch has already been lowered and represents a branch
         to a selected label and hence can't fall through.  */
      return false;

    case GIMPLE_COND:
      /* GIMPLE_COND's are already lowered into a two-way branch.  They
         can't fall through.  */
      return false;

    case GIMPLE_BIND:
      return gimple_seq_may_fallthru (gimple_bind_body (stmt));

    case GIMPLE_TRY:
      if (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH)
        return gimple_try_catch_may_fallthru (stmt);

      /* It must be a GIMPLE_TRY_FINALLY.  */

      /* The finally clause is always executed after the try clause,
         so if it does not fall through, then the try-finally will not
         fall through.  Otherwise, if the try clause does not fall
         through, then when the finally clause falls through it will
         resume execution wherever the try clause was going.  So the
         whole try-finally will only fall through if both the try
         clause and the finally clause fall through.  */
      return (gimple_seq_may_fallthru (gimple_try_eval (stmt))
              && gimple_seq_may_fallthru (gimple_try_cleanup (stmt)));

    case GIMPLE_EH_ELSE:
      return (gimple_seq_may_fallthru (gimple_eh_else_n_body (stmt))
              || gimple_seq_may_fallthru (gimple_eh_else_e_body (stmt)));

    case GIMPLE_CALL:
      /* Functions that do not return do not fall through.  */
      return (gimple_call_flags (stmt) & ECF_NORETURN) == 0;

    default:
      return true;
    }
}


/* Same as gimple_stmt_may_fallthru, but for the gimple sequence SEQ.  */

bool
gimple_seq_may_fallthru (gimple_seq seq)
{
  return gimple_stmt_may_fallthru (gimple_seq_last_stmt (seq));
}


/* Lower a GIMPLE_RETURN statement at GSI.  DATA is passed through the
   recursion.  */

static void
lower_gimple_return (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  gimple stmt = gsi_stmt (*gsi);
  gimple t;
  int i;
  return_statements_t tmp_rs;

  /* Match this up with an existing return statement that's been created.  */
  for (i = VEC_length (return_statements_t, data->return_statements) - 1;
       i >= 0; i--)
    {
      tmp_rs = *VEC_index (return_statements_t, data->return_statements, i);

      if (gimple_return_retval (stmt) == gimple_return_retval (tmp_rs.stmt))
        {
          /* Remove the line number from the representative return statement.
             It now fills in for many such returns.  Failure to remove this
             will result in incorrect results for coverage analysis.  */
          gimple_set_location (tmp_rs.stmt, UNKNOWN_LOCATION);

          goto found;
        }
    }

  /* Not found.  Create a new label and record the return statement.  */
  tmp_rs.label = create_artificial_label (cfun->function_end_locus);
  tmp_rs.stmt = stmt;
  VEC_safe_push (return_statements_t, heap, data->return_statements, &tmp_rs);

  /* Generate a goto statement and remove the return statement.  */
 found:
  /* When not optimizing, make sure user returns are preserved.  */
  if (!optimize && gimple_has_location (stmt))
    DECL_ARTIFICIAL (tmp_rs.label) = 0;
  t = gimple_build_goto (tmp_rs.label);
  gimple_set_location (t, gimple_location (stmt));
  gimple_set_block (t, gimple_block (stmt));
  gsi_insert_before (gsi, t, GSI_SAME_STMT);
  gsi_remove (gsi, false);
}
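
/* For example, two identical 'return;' statements are both rewritten
   here into 'goto <retlbl>;' sharing one label; lower_function_body
   then emits the single '<retlbl>: return;' at the end of the
   function.  */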

/* Lower a call to __builtin_setjmp at GSI.

   __builtin_setjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.

   It is lowered into 3 other builtins, namely __builtin_setjmp_setup,
   __builtin_setjmp_dispatcher and __builtin_setjmp_receiver, but with
   __builtin_setjmp_dispatcher shared among all the instances; that's
   why it is only emitted at the end by lower_function_body.

   After full lowering, the body of the function should look like:

    {
      void * setjmpvar.0;
      int D.1844;
      int D.2844;

      [...]

      __builtin_setjmp_setup (&buf, &<D1847>);
      D.1844 = 0;
      goto <D1846>;
      <D1847>:;
      __builtin_setjmp_receiver (&<D1847>);
      D.1844 = 1;
      <D1846>:;
      if (D.1844 == 0) goto <D1848>; else goto <D1849>;

      [...]

      __builtin_setjmp_setup (&buf, &<D2847>);
      D.2844 = 0;
      goto <D2846>;
      <D2847>:;
      __builtin_setjmp_receiver (&<D2847>);
      D.2844 = 1;
      <D2846>:;
      if (D.2844 == 0) goto <D2848>; else goto <D2849>;

      [...]

      <D3850>:;
      return;
      <D3853>: [non-local];
      setjmpvar.0 = __builtin_setjmp_dispatcher (&<D3853>);
      goto setjmpvar.0;
    }

   The dispatcher block will be both the unique destination of all the
   abnormal call edges and the unique source of all the abnormal edges
   to the receivers, thus keeping the complexity explosion localized.  */

static void
lower_builtin_setjmp (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  tree cont_label = create_artificial_label (loc);
  tree next_label = create_artificial_label (loc);
  tree dest, t, arg;
  gimple g;

  /* NEXT_LABEL is the label __builtin_longjmp will jump to.  Its address is
     passed to both __builtin_setjmp_setup and __builtin_setjmp_receiver.  */
  FORCED_LABEL (next_label) = 1;

  dest = gimple_call_lhs (stmt);

  /* Build '__builtin_setjmp_setup (BUF, NEXT_LABEL)' and insert.  */
  arg = build_addr (next_label, current_function_decl);
  t = builtin_decl_implicit (BUILT_IN_SETJMP_SETUP);
  g = gimple_build_call (t, 2, gimple_call_arg (stmt, 0), arg);
  gimple_set_location (g, loc);
  gimple_set_block (g, gimple_block (stmt));
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build 'DEST = 0' and insert.  */
  if (dest)
    {
      g = gimple_build_assign (dest, build_zero_cst (TREE_TYPE (dest)));
      gimple_set_location (g, loc);
      gimple_set_block (g, gimple_block (stmt));
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
    }

  /* Build 'goto CONT_LABEL' and insert.  */
  g = gimple_build_goto (cont_label);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build 'NEXT_LABEL:' and insert.  */
  g = gimple_build_label (next_label);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build '__builtin_setjmp_receiver (NEXT_LABEL)' and insert.  */
  arg = build_addr (next_label, current_function_decl);
  t = builtin_decl_implicit (BUILT_IN_SETJMP_RECEIVER);
  g = gimple_build_call (t, 1, arg);
  gimple_set_location (g, loc);
  gimple_set_block (g, gimple_block (stmt));
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build 'DEST = 1' and insert.  */
  if (dest)
    {
      g = gimple_build_assign (dest, fold_convert_loc (loc, TREE_TYPE (dest),
                                                       integer_one_node));
      gimple_set_location (g, loc);
      gimple_set_block (g, gimple_block (stmt));
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
    }

  /* Build 'CONT_LABEL:' and insert.  */
  g = gimple_build_label (cont_label);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Remove the call to __builtin_setjmp.  */
  gsi_remove (gsi, false);
}


/* Record the variables in VARS into function FN.  */

void
record_vars_into (tree vars, tree fn)
{
  if (fn != current_function_decl)
    push_cfun (DECL_STRUCT_FUNCTION (fn));

  for (; vars; vars = DECL_CHAIN (vars))
    {
      tree var = vars;

      /* BIND_EXPRs also contain function/type/constant declarations
         that we don't need to care about.  */
      if (TREE_CODE (var) != VAR_DECL)
        continue;

      /* Nothing to do in this case.  */
      if (DECL_EXTERNAL (var))
        continue;

      /* Record the variable.  */
      add_local_decl (cfun, var);
      if (gimple_referenced_vars (cfun))
        add_referenced_var (var);
    }

  if (fn != current_function_decl)
    pop_cfun ();
}


/* Record the variables in VARS into current_function_decl.  */

void
record_vars (tree vars)
{
  record_vars_into (vars, current_function_decl);
}