/* Process expressions for the GNU compiler for the Java(TM) language.
   Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004,
   2005, 2006, 2007, 2008 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.

Java and all Java-based marks are trademarks or registered trademarks
of Sun Microsystems, Inc. in the United States and other countries.
The Free Software Foundation is independent of Sun Microsystems, Inc.  */

/* Hacked by Per Bothner <bothner@cygnus.com> February 1996. */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "real.h"
#include "rtl.h"
#include "flags.h"
#include "expr.h"
#include "java-tree.h"
#include "javaop.h"
#include "java-opcodes.h"
#include "jcf.h"
#include "java-except.h"
#include "parse.h"
#include "toplev.h"
#include "except.h"
#include "tm_p.h"
#include "ggc.h"
#include "tree-iterator.h"
#include "gimple.h"
#include "target.h"

static void flush_quick_stack (void);
static void push_value (tree);
static tree pop_value (tree);
static void java_stack_swap (void);
static void java_stack_dup (int, int);
static void build_java_athrow (tree);
static void build_java_jsr (int, int);
static void build_java_ret (tree);
static void expand_java_multianewarray (tree, int);
static void expand_java_arraystore (tree);
static void expand_java_arrayload (tree);
static void expand_java_array_length (void);
static tree build_java_monitor (tree, tree);
static void expand_java_pushc (int, tree);
static void expand_java_return (tree);
static void expand_load_internal (int, tree, int);
static void expand_java_NEW (tree);
static void expand_java_INSTANCEOF (tree);
static void expand_java_CHECKCAST (tree);
static void expand_iinc (unsigned int, int, int);
static void expand_java_binop (tree, enum tree_code);
static void note_label (int, int);
static void expand_compare (enum tree_code, tree, tree, int);
static void expand_test (enum tree_code, tree, int);
static void expand_cond (enum tree_code, tree, int);
static void expand_java_goto (int);
static tree expand_java_switch (tree, int);
static void expand_java_add_case (tree, int, int);
static tree pop_arguments (tree);
static void expand_invoke (int, int, int);
static void expand_java_field_op (int, int, int);
static void java_push_constant_from_pool (struct JCF *, int);
static void java_stack_pop (int);
static tree build_java_throw_out_of_bounds_exception (tree);
static tree build_java_check_indexed_type (tree, tree);
static unsigned char peek_opcode_at_pc (struct JCF *, int, int);
static void promote_arguments (void);
static void cache_cpool_data_ref (void);

static GTY(()) tree operand_type[59];
90
 
91
static GTY(()) tree methods_ident;
92
static GTY(()) tree ncode_ident;
93
tree dtable_ident = NULL_TREE;
94
 
95
/* Set to nonzero value in order to emit class initialization code
96
   before static field references.  */
97
int always_initialize_class_p = 0;
98
 
99
/* We store the stack state in two places:
100
   Within a basic block, we use the quick_stack, which is a
101
   pushdown list (TREE_LISTs) of expression nodes.
102
   This is the top part of the stack;  below that we use find_stack_slot.
103
   At the end of a basic block, the quick_stack must be flushed
104
   to the stack slot array (as handled by find_stack_slot).
105
   Using quick_stack generates better code (especially when
106
   compiled without optimization), because we do not have to
107
   explicitly store and load trees to temporary variables.
108
 
109
   If a variable is on the quick stack, it means the value of the variable
   when the quick stack was last flushed.  Conceptually, flush_quick_stack
   saves all the quick_stack elements in parallel.  However, that is
   complicated, so it actually saves them (i.e. copies each stack value
   to its home virtual register) starting from the low indexes.  This allows
   a quick_stack element at index i (counting from the bottom of the stack)
   to reference stack slot virtual registers with index >= i, but not those
   that are deeper.
116
   This convention makes most operations easier.  For example iadd works
117
   even when the stack contains (reg[0], reg[1]):  It results in the
118
   stack containing (reg[0]+reg[1]), which is OK.  However, some stack
119
   operations are more complicated.  For example dup given a stack
120
   containing (reg[0]) would yield (reg[0], reg[0]), which would violate
121
   the convention, since stack value 1 would refer to a register with
122
   lower index (reg[0]), which flush_quick_stack does not safely handle.
123
   So dup cannot just add an extra element to the quick_stack, but iadd can.
124
*/
125
 
126
static GTY(()) tree quick_stack;
127
 
128
/* A free-list of unused permanent TREE_LIST nodes.  */
129
static GTY((deletable)) tree tree_list_free_list;
130
 
131
/* The physical memory page size used in this computer.  See
132
   build_field_ref().  */
133
static GTY(()) tree page_size;
134
 
135
/* The stack pointer of the Java virtual machine.
136
   This does include the size of the quick_stack. */
137
 
138
int stack_pointer;
139
 
140
const unsigned char *linenumber_table;
141
int linenumber_count;
142
 
143
/* Largest pc so far in this method that has been passed to lookup_label. */
144
int highest_label_pc_this_method = -1;
145
 
146
/* Base value for this method to add to pc to get generated label. */
147
int start_label_pc_this_method = 0;
148
 
149
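/* Initialize the operand_type[] table, mapping the single-slot load and
   store opcodes (iload/lload/fload/dload/aload and their store
   counterparts) to the tree types of their operands.  */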
void
150
init_expr_processing (void)
151
{
152
  operand_type[21] = operand_type[54] = int_type_node;
153
  operand_type[22] = operand_type[55] = long_type_node;
154
  operand_type[23] = operand_type[56] = float_type_node;
155
  operand_type[24] = operand_type[57] = double_type_node;
156
  operand_type[25] = operand_type[58] = ptr_type_node;
157
}
158
 
159
tree
160
java_truthvalue_conversion (tree expr)
161
{
162
  /* It is simpler and generates better code to have only TRUTH_*_EXPR
163
     or comparison expressions as truth values at this level.
164
 
165
     This function should normally be identity for Java.  */
166
 
167
  switch (TREE_CODE (expr))
168
    {
169
    case EQ_EXPR:   case NE_EXPR:   case UNEQ_EXPR: case LTGT_EXPR:
170
    case LE_EXPR:   case GE_EXPR:   case LT_EXPR:   case GT_EXPR:
171
    case UNLE_EXPR: case UNGE_EXPR: case UNLT_EXPR: case UNGT_EXPR:
172
    case ORDERED_EXPR: case UNORDERED_EXPR:
173
    case TRUTH_ANDIF_EXPR:
174
    case TRUTH_ORIF_EXPR:
175
    case TRUTH_AND_EXPR:
176
    case TRUTH_OR_EXPR:
177
    case TRUTH_XOR_EXPR:
178
    case TRUTH_NOT_EXPR:
179
    case ERROR_MARK:
180
      return expr;
181
 
182
    case INTEGER_CST:
183
      return integer_zerop (expr) ? boolean_false_node : boolean_true_node;
184
 
185
    case REAL_CST:
186
      return real_zerop (expr) ? boolean_false_node : boolean_true_node;
187
 
188
    /* are these legal? XXX JH */
189
    case NEGATE_EXPR:
190
    case ABS_EXPR:
191
    case FLOAT_EXPR:
192
      /* These don't change whether an object is nonzero or zero.  */
193
      return java_truthvalue_conversion (TREE_OPERAND (expr, 0));
194
 
195
    case COND_EXPR:
196
      /* Distribute the conversion into the arms of a COND_EXPR.  */
197
      return fold_build3 (COND_EXPR, boolean_type_node, TREE_OPERAND (expr, 0),
198
                          java_truthvalue_conversion (TREE_OPERAND (expr, 1)),
199
                          java_truthvalue_conversion (TREE_OPERAND (expr, 2)));
200
 
201
    case NOP_EXPR:
202
      /* If this is widening the argument, we can ignore it.  */
203
      if (TYPE_PRECISION (TREE_TYPE (expr))
204
          >= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (expr, 0))))
205
        return java_truthvalue_conversion (TREE_OPERAND (expr, 0));
206
      /* fall through to default */
207
 
208
    default:
209
      return fold_build2 (NE_EXPR, boolean_type_node,
210
                          expr, boolean_false_node);
211
    }
212
}
213
 
214
/* Save any stack slots that happen to be in the quick_stack into their
215
   home virtual register slots.
216
 
217
   The copy order is from low stack index to high, to support the invariant
218
   that the expression for a slot may contain decls for stack slots with
219
   higher (or the same) index, but not lower. */
220
 
221
static void
222
flush_quick_stack (void)
223
{
224
  int stack_index = stack_pointer;
225
  tree prev, cur, next;
226
 
227
  /* First reverse the quick_stack, and count the number of slots it has. */
228
  for (cur = quick_stack, prev = NULL_TREE; cur != NULL_TREE; cur = next)
229
    {
230
      next = TREE_CHAIN (cur);
231
      TREE_CHAIN (cur) = prev;
232
      prev = cur;
233
      stack_index -= 1 + TYPE_IS_WIDE (TREE_TYPE (TREE_VALUE (cur)));
234
    }
235
  quick_stack = prev;
236
 
237
  while (quick_stack != NULL_TREE)
238
    {
239
      tree decl;
240
      tree node = quick_stack, type;
241
      quick_stack = TREE_CHAIN (node);
242
      TREE_CHAIN (node) = tree_list_free_list;
243
      tree_list_free_list = node;
244
      node = TREE_VALUE (node);
245
      type = TREE_TYPE (node);
246
 
247
      decl = find_stack_slot (stack_index, type);
248
      if (decl != node)
249
        java_add_stmt (build2 (MODIFY_EXPR, TREE_TYPE (node), decl, node));
250
      stack_index += 1 + TYPE_IS_WIDE (type);
251
    }
252
}
253
 
254
/* Push TYPE on the type stack.
   Return 1 on success, 0 on overflow. */
256
 
257
int
258
push_type_0 (tree type)
259
{
260
  int n_words;
261
  type = promote_type (type);
262
  n_words = 1 + TYPE_IS_WIDE (type);
263
  if (stack_pointer + n_words > DECL_MAX_STACK (current_function_decl))
264
    return 0;
265
  /* Allocate decl for this variable now, so we get a temporary that
266
     survives the whole method. */
267
  find_stack_slot (stack_pointer, type);
268
  stack_type_map[stack_pointer++] = type;
269
  n_words--;
270
  while (--n_words >= 0)
271
    stack_type_map[stack_pointer++] = TYPE_SECOND;
272
  return 1;
273
}
274
 
275
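/* Push TYPE on the type stack, aborting on overflow.  */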
void
276
push_type (tree type)
277
{
278
  int r = push_type_0 (type);
279
  gcc_assert (r);
280
}
281
 
282
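/* Push the expression VALUE onto the quick stack, promoting integral
   types narrower than int first.  */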
static void
283
push_value (tree value)
284
{
285
  tree type = TREE_TYPE (value);
286
  if (TYPE_PRECISION (type) < 32 && INTEGRAL_TYPE_P (type))
287
    {
288
      type = promote_type (type);
289
      value = convert (type, value);
290
    }
291
  push_type (type);
292
  if (tree_list_free_list == NULL_TREE)
293
    quick_stack = tree_cons (NULL_TREE, value, quick_stack);
294
  else
295
    {
296
      tree node = tree_list_free_list;
297
      tree_list_free_list = TREE_CHAIN (tree_list_free_list);
298
      TREE_VALUE (node) = value;
299
      TREE_CHAIN (node) = quick_stack;
300
      quick_stack = node;
301
    }
302
  /* If the value has a side effect, then we need to evaluate it
303
     whether or not the result is used.  If the value ends up on the
304
     quick stack and is then popped, this won't happen -- so we flush
305
     the quick stack.  It is safest to simply always flush, though,
306
     since TREE_SIDE_EFFECTS doesn't capture COMPONENT_REF, and for
307
     the latter we may need to strip conversions.  */
308
  flush_quick_stack ();
309
}
310
 
311
/* Pop a type from the type stack.
312
   TYPE is the expected type.   Return the actual type, which must be
313
   convertible to TYPE.
314
   On an error, *MESSAGEP is set to a freshly malloc'd error message. */
315
 
316
tree
317
pop_type_0 (tree type, char **messagep)
318
{
319
  int n_words;
320
  tree t;
321
  *messagep = NULL;
322
  if (TREE_CODE (type) == RECORD_TYPE)
323
    type = promote_type (type);
324
  n_words = 1 + TYPE_IS_WIDE (type);
325
  if (stack_pointer < n_words)
326
    {
327
      *messagep = xstrdup ("stack underflow");
328
      return type;
329
    }
330
  while (--n_words > 0)
331
    {
332
      if (stack_type_map[--stack_pointer] != void_type_node)
333
        {
334
          *messagep = xstrdup ("Invalid multi-word value on type stack");
335
          return type;
336
        }
337
    }
338
  t = stack_type_map[--stack_pointer];
339
  if (type == NULL_TREE || t == type)
340
    return t;
341
  if (TREE_CODE (t) == TREE_LIST)
342
    {
343
      do
344
        {
345
          tree tt = TREE_PURPOSE (t);
346
          if (! can_widen_reference_to (tt, type))
347
            {
348
              t = tt;
349
              goto fail;
350
            }
351
          t = TREE_CHAIN (t);
352
        }
353
      while (t);
354
      return t;
355
    }
356
  if (INTEGRAL_TYPE_P (type) && INTEGRAL_TYPE_P (t)
357
      && TYPE_PRECISION (type) <= 32 && TYPE_PRECISION (t) <= 32)
358
    return t;
359
  if (TREE_CODE (type) == POINTER_TYPE && TREE_CODE (t) == POINTER_TYPE)
360
    {
361
      /* If the expected type we've been passed is object or ptr
362
         (i.e. void*), the caller needs to know the real type.  */
363
      if (type == ptr_type_node || type == object_ptr_type_node)
364
        return t;
365
 
366
      /* Since the verifier has already run, we know that any
367
         types we see will be compatible.  In BC mode, this fact
368
         may be checked at runtime, but if that is so then we can
369
         assume its truth here as well.  So, we always succeed
370
         here, with the expected type.  */
371
      return type;
372
    }
373
 
374
  if (! flag_verify_invocations && flag_indirect_dispatch
375
      && t == object_ptr_type_node)
376
    {
377
      if (type != ptr_type_node)
378
        warning (0, "need to insert runtime check for %s",
379
                 xstrdup (lang_printable_name (type, 0)));
380
      return type;
381
    }
382
 
383
  /* lang_printable_name uses a static buffer, so we must save the result
384
     from calling it the first time.  */
385
 fail:
386
  {
387
    char *temp = xstrdup (lang_printable_name (type, 0));
388
    /* If the stack contains a multi-word type, keep popping the stack until
389
       the real type is found.  */
390
    while (t == void_type_node)
391
      t = stack_type_map[--stack_pointer];
392
    *messagep = concat ("expected type '", temp,
393
                        "' but stack contains '", lang_printable_name (t, 0),
394
                        "'", NULL);
395
    free (temp);
396
  }
397
  return type;
398
}
399
 
400
/* Pop a type from the type stack.
401
   TYPE is the expected type.  Return the actual type, which must be
402
   convertible to TYPE, otherwise call error. */
403
 
404
tree
405
pop_type (tree type)
406
{
407
  char *message = NULL;
408
  type = pop_type_0 (type, &message);
409
  if (message != NULL)
410
    {
411
      error ("%s", message);
412
      free (message);
413
    }
414
  return type;
415
}
416
 
417
 
418
/* Return true if two type assertions are equal.  */
419
 
420
static int
421
type_assertion_eq (const void * k1_p, const void * k2_p)
422
{
423
  const type_assertion k1 = *(const type_assertion *)k1_p;
424
  const type_assertion k2 = *(const type_assertion *)k2_p;
425
  return (k1.assertion_code == k2.assertion_code
426
          && k1.op1 == k2.op1
427
          && k1.op2 == k2.op2);
428
}
429
 
430
/* Hash a type assertion.  */
431
 
432
static hashval_t
433
type_assertion_hash (const void *p)
434
{
435
  const type_assertion *k_p = (const type_assertion *) p;
436
  hashval_t hash = iterative_hash (&k_p->assertion_code, sizeof
437
                                   k_p->assertion_code, 0);
438
 
439
  switch (k_p->assertion_code)
440
    {
441
    case JV_ASSERT_TYPES_COMPATIBLE:
442
      hash = iterative_hash (&TYPE_UID (k_p->op2), sizeof TYPE_UID (k_p->op2),
443
                             hash);
444
      /* Fall through.  */
445
 
446
    case JV_ASSERT_IS_INSTANTIABLE:
447
      hash = iterative_hash (&TYPE_UID (k_p->op1), sizeof TYPE_UID (k_p->op1),
448
                             hash);
449
      /* Fall through.  */
450
 
451
    case JV_ASSERT_END_OF_TABLE:
452
      break;
453
 
454
    default:
455
      gcc_unreachable ();
456
    }
457
 
458
  return hash;
459
}
460
 
461
/* Add an entry to the type assertion table for the given class.
462
   KLASS is the class for which this assertion will be evaluated by the
463
   runtime during loading/initialization.
464
   ASSERTION_CODE is the 'opcode' or type of this assertion: see java-tree.h.
465
   OP1 and OP2 are the operands. The tree type of these arguments may be
466
   specific to each assertion_code. */
467
 
468
void
469
add_type_assertion (tree klass, int assertion_code, tree op1, tree op2)
470
{
471
  htab_t assertions_htab;
472
  type_assertion as;
473
  void **as_pp;
474
 
475
  assertions_htab = TYPE_ASSERTIONS (klass);
476
  if (assertions_htab == NULL)
477
    {
478
      assertions_htab = htab_create_ggc (7, type_assertion_hash,
479
                                         type_assertion_eq, NULL);
480
      TYPE_ASSERTIONS (current_class) = assertions_htab;
481
    }
482
 
483
  as.assertion_code = assertion_code;
484
  as.op1 = op1;
485
  as.op2 = op2;
486
 
487
  as_pp = htab_find_slot (assertions_htab, &as, INSERT);
488
 
489
  /* Don't add the same assertion twice.  */
490
  if (*as_pp)
491
    return;
492
 
493
  *as_pp = ggc_alloc (sizeof (type_assertion));
494
  **(type_assertion **)as_pp = as;
495
}
496
 
497
 
498
/* Return 1 if SOURCE_TYPE can be safely widened to TARGET_TYPE.
499
   Handles array types and interfaces.  */
500
 
501
int
502
can_widen_reference_to (tree source_type, tree target_type)
503
{
504
  if (source_type == ptr_type_node || target_type == object_ptr_type_node)
505
    return 1;
506
 
507
  /* Get rid of pointers  */
508
  if (TREE_CODE (source_type) == POINTER_TYPE)
509
    source_type = TREE_TYPE (source_type);
510
  if (TREE_CODE (target_type) == POINTER_TYPE)
511
    target_type = TREE_TYPE (target_type);
512
 
513
  if (source_type == target_type)
514
    return 1;
515
 
516
  /* FIXME: This is very pessimistic, in that it checks everything,
517
     even if we already know that the types are compatible.  If we're
518
     to support full Java class loader semantics, we need this.
519
     However, we could do something more optimal.  */
520
  if (! flag_verify_invocations)
521
    {
522
      add_type_assertion (current_class, JV_ASSERT_TYPES_COMPATIBLE,
523
                          source_type, target_type);
524
 
525
      if (!quiet_flag)
526
       warning (0, "assert: %s is assign compatible with %s",
527
                xstrdup (lang_printable_name (target_type, 0)),
528
                xstrdup (lang_printable_name (source_type, 0)));
529
      /* Punt everything to runtime.  */
530
      return 1;
531
    }
532
 
533
  if (TYPE_DUMMY (source_type) || TYPE_DUMMY (target_type))
534
    {
535
      return 1;
536
    }
537
  else
538
    {
539
      if (TYPE_ARRAY_P (source_type) || TYPE_ARRAY_P (target_type))
540
        {
541
          HOST_WIDE_INT source_length, target_length;
542
          if (TYPE_ARRAY_P (source_type) != TYPE_ARRAY_P (target_type))
543
            {
544
              /* An array implements Cloneable and Serializable.  */
545
              tree name = DECL_NAME (TYPE_NAME (target_type));
546
              return (name == java_lang_cloneable_identifier_node
547
                      || name == java_io_serializable_identifier_node);
548
            }
549
          target_length = java_array_type_length (target_type);
550
          if (target_length >= 0)
551
            {
552
              source_length = java_array_type_length (source_type);
553
              if (source_length != target_length)
554
                return 0;
555
            }
556
          source_type = TYPE_ARRAY_ELEMENT (source_type);
557
          target_type = TYPE_ARRAY_ELEMENT (target_type);
558
          if (source_type == target_type)
559
            return 1;
560
          if (TREE_CODE (source_type) != POINTER_TYPE
561
              || TREE_CODE (target_type) != POINTER_TYPE)
562
            return 0;
563
          return can_widen_reference_to (source_type, target_type);
564
        }
565
      else
566
        {
567
          int source_depth = class_depth (source_type);
568
          int target_depth = class_depth (target_type);
569
 
570
          if (TYPE_DUMMY (source_type) || TYPE_DUMMY (target_type))
571
            {
572
              if (! quiet_flag)
573
                warning (0, "assert: %s is assign compatible with %s",
574
                         xstrdup (lang_printable_name (target_type, 0)),
575
                         xstrdup (lang_printable_name (source_type, 0)));
576
              return 1;
577
            }
578
 
579
          /* class_depth can return a negative depth if an error occurred */
580
          if (source_depth < 0 || target_depth < 0)
581
            return 0;
582
 
583
          if (CLASS_INTERFACE (TYPE_NAME (target_type)))
584
            {
585
              /* target_type is OK if source_type or source_type ancestors
586
                 implement target_type. We handle multiple sub-interfaces  */
587
              tree binfo, base_binfo;
588
              int i;
589
 
590
              for (binfo = TYPE_BINFO (source_type), i = 0;
591
                   BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
592
                if (can_widen_reference_to
593
                    (BINFO_TYPE (base_binfo), target_type))
594
                  return 1;
595
 
596
              if (!i)
597
                return 0;
598
            }
599
 
600
          for ( ; source_depth > target_depth;  source_depth--)
601
            {
602
              source_type
603
                = BINFO_TYPE (BINFO_BASE_BINFO (TYPE_BINFO (source_type), 0));
604
            }
605
          return source_type == target_type;
606
        }
607
    }
608
}
609
 
610
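/* Pop the top stack value, which must match the expected TYPE.  If it
   is still on the quick stack, return the saved expression; otherwise
   return the decl for its stack slot.  */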
static tree
611
pop_value (tree type)
612
{
613
  type = pop_type (type);
614
  if (quick_stack)
615
    {
616
      tree node = quick_stack;
617
      quick_stack = TREE_CHAIN (quick_stack);
618
      TREE_CHAIN (node) = tree_list_free_list;
619
      tree_list_free_list = node;
620
      node = TREE_VALUE (node);
621
      return node;
622
    }
623
  else
624
    return find_stack_slot (stack_pointer, promote_type (type));
625
}
626
 
627
 
628
/* Pop and discard the top COUNT stack slots. */
629
 
630
static void
631
java_stack_pop (int count)
632
{
633
  while (count > 0)
634
    {
635
      tree type;
636
 
637
      gcc_assert (stack_pointer != 0);
638
 
639
      type = stack_type_map[stack_pointer - 1];
640
      if (type == TYPE_SECOND)
641
        {
642
          count--;
643
          gcc_assert (stack_pointer != 1 && count > 0);
644
 
645
          type = stack_type_map[stack_pointer - 2];
646
        }
647
      pop_value (type);
648
      count--;
649
    }
650
}
651
 
652
/* Implement the 'swap' operator (to swap two top stack slots). */
653
 
654
static void
655
java_stack_swap (void)
656
{
657
  tree type1, type2;
658
  tree temp;
659
  tree decl1, decl2;
660
 
661
  if (stack_pointer < 2
662
      || (type1 = stack_type_map[stack_pointer - 1]) == TYPE_SECOND
663
      || (type2 = stack_type_map[stack_pointer - 2]) == TYPE_SECOND
664
      || TYPE_IS_WIDE (type1) || TYPE_IS_WIDE (type2))
665
    /* Bad stack swap.  */
    abort ();
668
 
669
  flush_quick_stack ();
670
  decl1 = find_stack_slot (stack_pointer - 1, type1);
671
  decl2 = find_stack_slot (stack_pointer - 2, type2);
672
  temp = build_decl (input_location, VAR_DECL, NULL_TREE, type1);
673
  java_add_local_var (temp);
674
  java_add_stmt (build2 (MODIFY_EXPR, type1, temp, decl1));
675
  java_add_stmt (build2 (MODIFY_EXPR, type2,
676
                         find_stack_slot (stack_pointer - 1, type2),
677
                         decl2));
678
  java_add_stmt (build2 (MODIFY_EXPR, type1,
679
                         find_stack_slot (stack_pointer - 2, type1),
680
                         temp));
681
  stack_type_map[stack_pointer - 1] = type2;
682
  stack_type_map[stack_pointer - 2] = type1;
683
}
684
 
685
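/* Implement the dup* family of operators: duplicate the top SIZE stack
   slots, inserting the copy OFFSET slots further down.  */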
static void
686
java_stack_dup (int size, int offset)
687
{
688
  int low_index = stack_pointer - size - offset;
689
  int dst_index;
690
  if (low_index < 0)
691
    error ("stack underflow - dup* operation");
692
 
693
  flush_quick_stack ();
694
 
695
  stack_pointer += size;
696
  dst_index = stack_pointer;
697
 
698
  for (dst_index = stack_pointer;  --dst_index >= low_index; )
699
    {
700
      tree type;
701
      int src_index = dst_index - size;
702
      if (src_index < low_index)
703
        src_index = dst_index + size + offset;
704
      type = stack_type_map [src_index];
705
      if (type == TYPE_SECOND)
706
        {
707
          /* Dup operation splits 64-bit number.  */
708
          gcc_assert (src_index > low_index);
709
 
710
          stack_type_map[dst_index] = type;
711
          src_index--;  dst_index--;
712
          type = stack_type_map[src_index];
713
          gcc_assert (TYPE_IS_WIDE (type));
714
        }
715
      else
716
        gcc_assert (! TYPE_IS_WIDE (type));
717
 
718
      if (src_index != dst_index)
719
        {
720
          tree src_decl = find_stack_slot (src_index, type);
721
          tree dst_decl = find_stack_slot (dst_index, type);
722
 
723
          java_add_stmt
724
            (build2 (MODIFY_EXPR, TREE_TYPE (dst_decl), dst_decl, src_decl));
725
          stack_type_map[dst_index] = type;
726
        }
727
    }
728
}
729
 
730
/* Calls _Jv_Throw or _Jv_Sjlj_Throw.  Discard the contents of the
731
   value stack. */
732
 
733
static void
734
build_java_athrow (tree node)
735
{
736
  tree call;
737
 
738
  call = build_call_nary (void_type_node,
739
                          build_address_of (throw_node),
740
                          1, node);
741
  TREE_SIDE_EFFECTS (call) = 1;
742
  java_add_stmt (call);
743
  java_stack_pop (stack_pointer);
744
}
745
 
746
/* Implementation for jsr/ret */
747
 
748
static void
749
build_java_jsr (int target_pc, int return_pc)
750
{
751
  tree where =  lookup_label (target_pc);
752
  tree ret = lookup_label (return_pc);
753
  tree ret_label = fold_build1 (ADDR_EXPR, return_address_type_node, ret);
754
  push_value (ret_label);
755
  flush_quick_stack ();
756
  java_add_stmt (build1 (GOTO_EXPR, void_type_node, where));
757
 
758
  /* Do not need to emit the label here.  We noted the existence of the
759
     label as a jump target in note_instructions; we'll emit the label
760
     for real at the beginning of the expand_byte_code loop.  */
761
}
762
 
763
static void
764
build_java_ret (tree location)
765
{
766
  java_add_stmt (build1 (GOTO_EXPR, void_type_node, location));
767
}
768
 
769
/* Implementation of operations on array: new, load, store, length */
770
 
771
tree
772
decode_newarray_type (int atype)
773
{
774
  switch (atype)
775
    {
776
    case 4:  return boolean_type_node;
777
    case 5:  return char_type_node;
778
    case 6:  return float_type_node;
779
    case 7:  return double_type_node;
780
    case 8:  return byte_type_node;
781
    case 9:  return short_type_node;
782
    case 10: return int_type_node;
783
    case 11: return long_type_node;
784
    default: return NULL_TREE;
785
    }
786
}
787
 
788
/* Map primitive type to the code used by OPCODE_newarray. */
789
 
790
int
791
encode_newarray_type (tree type)
792
{
793
  if (type == boolean_type_node)
794
    return 4;
795
  else if (type == char_type_node)
796
    return 5;
797
  else if (type == float_type_node)
798
    return 6;
799
  else if (type == double_type_node)
800
    return 7;
801
  else if (type == byte_type_node)
802
    return 8;
803
  else if (type == short_type_node)
804
    return 9;
805
  else if (type == int_type_node)
806
    return 10;
807
  else if (type == long_type_node)
808
    return 11;
809
  else
810
    gcc_unreachable ();
811
}
812
 
813
/* Build a call to _Jv_ThrowBadArrayIndex(), the
   ArrayIndexOutOfBoundsException exception handler.  */
815
 
816
static tree
817
build_java_throw_out_of_bounds_exception (tree index)
818
{
819
  tree node;
820
 
821
  /* We need to build a COMPOUND_EXPR because _Jv_ThrowBadArrayIndex()
822
     has void return type.  We cannot just set the type of the CALL_EXPR below
823
     to int_type_node because we would lose it during gimplification.  */
824
  gcc_assert (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (soft_badarrayindex_node))));
825
  node = build_call_nary (void_type_node,
826
                               build_address_of (soft_badarrayindex_node),
827
                               1, index);
828
  TREE_SIDE_EFFECTS (node) = 1;
829
 
830
  node = build2 (COMPOUND_EXPR, int_type_node, node, integer_zero_node);
831
  TREE_SIDE_EFFECTS (node) = 1; /* Allows expansion within ANDIF */
832
 
833
  return (node);
834
}
835
 
836
/* Return the length of an array. Doesn't perform any checking on the nature
837
   or value of the array NODE. May be used to implement some bytecodes.  */
838
 
839
tree
840
build_java_array_length_access (tree node)
841
{
842
  tree type = TREE_TYPE (node);
843
  tree array_type = TREE_TYPE (type);
844
  HOST_WIDE_INT length;
845
 
846
  if (!is_array_type_p (type))
847
    {
848
      /* With the new verifier, we will see an ordinary pointer type
849
         here.  In this case, we just use an arbitrary array type.  */
850
      array_type = build_java_array_type (object_ptr_type_node, -1);
851
      type = promote_type (array_type);
852
    }
853
 
854
  length = java_array_type_length (type);
855
  if (length >= 0)
856
    return build_int_cst (NULL_TREE, length);
857
 
858
  node = build3 (COMPONENT_REF, int_type_node,
859
                 build_java_indirect_ref (array_type, node,
860
                                          flag_check_references),
861
                 lookup_field (&array_type, get_identifier ("length")),
862
                 NULL_TREE);
863
  IS_ARRAY_LENGTH_ACCESS (node) = 1;
864
  return node;
865
}
866
 
867
/* Optionally checks a reference against the NULL pointer.  ARG1: the
   expr, ARG2: whether we should check the reference.  Don't generate
   extra checks if we're not generating code.  */
870
 
871
tree
872
java_check_reference (tree expr, int check)
873
{
874
  if (!flag_syntax_only && check)
875
    {
876
      expr = save_expr (expr);
877
      expr = build3 (COND_EXPR, TREE_TYPE (expr),
878
                     build2 (EQ_EXPR, boolean_type_node,
879
                             expr, null_pointer_node),
880
                     build_call_nary (void_type_node,
881
                                      build_address_of (soft_nullpointer_node),
882
                                      0),
883
                     expr);
884
    }
885
 
886
  return expr;
887
}
888
 
889
/* Reference an object: just like an INDIRECT_REF, but with checking.  */
890
 
891
tree
892
build_java_indirect_ref (tree type, tree expr, int check)
893
{
894
  tree t;
895
  t = java_check_reference (expr, check);
896
  t = convert (build_pointer_type (type), t);
897
  return build1 (INDIRECT_REF, type, t);
898
}
899
 
900
/* Implement array indexing (either as l-value or r-value).
   Returns a tree for ARRAY[INDEX], assuming TYPE is the element type.
   Optionally performs bounds checking and/or a NULL-pointer test.
   At this point, ARRAY should have been verified as an array.  */
904
 
905
tree
906
build_java_arrayaccess (tree array, tree type, tree index)
907
{
908
  tree node, throw_expr = NULL_TREE;
909
  tree data_field;
910
  tree ref;
911
  tree array_type = TREE_TYPE (TREE_TYPE (array));
912
  tree size_exp = fold_convert (sizetype, size_in_bytes (type));
913
 
914
  if (!is_array_type_p (TREE_TYPE (array)))
915
    {
916
      /* With the new verifier, we will see an ordinary pointer type
917
         here.  In this case, we just use the correct array type.  */
918
      array_type = build_java_array_type (type, -1);
919
    }
920
 
921
  if (flag_bounds_check)
922
    {
923
      /* Generate:
924
       * (unsigned jint) INDEX >= (unsigned jint) LEN
925
       *    && throw ArrayIndexOutOfBoundsException.
926
       * Note this is equivalent to and more efficient than:
927
       * INDEX < 0 || INDEX >= LEN && throw ... */
928
      tree test;
929
      tree len = convert (unsigned_int_type_node,
930
                          build_java_array_length_access (array));
931
      test = fold_build2 (GE_EXPR, boolean_type_node,
932
                          convert (unsigned_int_type_node, index),
933
                          len);
934
      if (! integer_zerop (test))
935
        {
936
          throw_expr
937
            = build2 (TRUTH_ANDIF_EXPR, int_type_node, test,
938
                      build_java_throw_out_of_bounds_exception (index));
939
          /* allows expansion within COMPOUND */
940
          TREE_SIDE_EFFECTS( throw_expr ) = 1;
941
        }
942
    }
943
 
944
  /* If checking bounds, wrap the index expr with a COMPOUND_EXPR in order
945
     to have the bounds check evaluated first. */
946
  if (throw_expr != NULL_TREE)
947
    index = build2 (COMPOUND_EXPR, int_type_node, throw_expr, index);
948
 
949
  data_field = lookup_field (&array_type, get_identifier ("data"));
950
 
951
  ref = build3 (COMPONENT_REF, TREE_TYPE (data_field),
952
                build_java_indirect_ref (array_type, array,
953
                                         flag_check_references),
954
                data_field, NULL_TREE);
955
 
956
  /* Take the address of the data field and convert it to a pointer to
957
     the element type.  */
958
  node = build1 (NOP_EXPR, build_pointer_type (type), build_address_of (ref));
959
 
960
  /* Multiply the index by the size of an element to obtain a byte
961
     offset.  Convert the result to a pointer to the element type.  */
962
  index = build2 (MULT_EXPR, sizetype,
963
                  fold_convert (sizetype, index),
964
                  size_exp);
965
 
966
  /* Sum the byte offset and the address of the data field.  */
967
  node = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (node), node, index);
968
 
969
  /* Finally, return
970
 
971
    *((&array->data) + index*size_exp)
972
 
973
  */
974
  return build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (node)), node);
975
}
976
 
977
/* Generate code to throw an ArrayStoreException if OBJECT is not assignable
978
   (at runtime) to an element of ARRAY.  A NOP_EXPR is returned if it can
   be determined that no check is required. */
980
 
981
tree
982
build_java_arraystore_check (tree array, tree object)
983
{
984
  tree check, element_type, source;
985
  tree array_type_p = TREE_TYPE (array);
986
  tree object_type = TYPE_NAME (TREE_TYPE (TREE_TYPE (object)));
987
 
988
  if (! flag_verify_invocations)
989
    {
990
      /* With the new verifier, we don't track precise types.  FIXME:
991
         performance regression here.  */
992
      element_type = TYPE_NAME (object_type_node);
993
    }
994
  else
995
    {
996
      gcc_assert (is_array_type_p (array_type_p));
997
 
998
      /* Get the TYPE_DECL for ARRAY's element type. */
999
      element_type
1000
        = TYPE_NAME (TREE_TYPE (TREE_TYPE (TREE_TYPE (array_type_p))));
1001
    }
1002
 
1003
  gcc_assert (TREE_CODE (element_type) == TYPE_DECL
1004
              && TREE_CODE (object_type) == TYPE_DECL);
1005
 
1006
  if (!flag_store_check)
1007
    return build1 (NOP_EXPR, array_type_p, array);
1008
 
1009
  /* No check is needed if the element type is final.  Also check that
1010
     element_type matches object_type, since in the bytecode
1011
     compilation case element_type may be the actual element type of
1012
     the array rather than its declared type.  However, if we're doing
1013
     indirect dispatch, we can't do the `final' optimization.  */
1014
  if (element_type == object_type
1015
      && ! flag_indirect_dispatch
1016
      && CLASS_FINAL (element_type))
1017
    return build1 (NOP_EXPR, array_type_p, array);
1018
 
1019
  /* OBJECT might be wrapped by a SAVE_EXPR. */
1020
  if (TREE_CODE (object) == SAVE_EXPR)
1021
    source = TREE_OPERAND (object, 0);
1022
  else
1023
    source = object;
1024
 
1025
  /* Avoid the check if OBJECT was just loaded from the same array. */
1026
  if (TREE_CODE (source) == ARRAY_REF)
1027
    {
1028
      tree target;
1029
      source = TREE_OPERAND (source, 0); /* COMPONENT_REF. */
1030
      source = TREE_OPERAND (source, 0); /* INDIRECT_REF. */
1031
      source = TREE_OPERAND (source, 0); /* Source array's DECL or SAVE_EXPR. */
1032
      if (TREE_CODE (source) == SAVE_EXPR)
1033
        source = TREE_OPERAND (source, 0);
1034
 
1035
      target = array;
1036
      if (TREE_CODE (target) == SAVE_EXPR)
1037
        target = TREE_OPERAND (target, 0);
1038
 
1039
      if (source == target)
1040
        return build1 (NOP_EXPR, array_type_p, array);
1041
    }
1042
 
1043
  /* Build an invocation of _Jv_CheckArrayStore */
1044
  check = build_call_nary (void_type_node,
1045
                           build_address_of (soft_checkarraystore_node),
1046
                           2, array, object);
1047
  TREE_SIDE_EFFECTS (check) = 1;
1048
 
1049
  return check;
1050
}
1051
 
1052
/* Makes sure that INDEXED_TYPE is appropriate. If not, make it from
1053
   ARRAY_NODE. This function is used to retrieve something less vague than
1054
   a pointer type when indexing the first dimension of something like [[<t>.
1055
   May return a corrected type, if necessary, otherwise INDEXED_TYPE is
   returned unchanged.  */
1057
 
1058
static tree
1059
build_java_check_indexed_type (tree array_node ATTRIBUTE_UNUSED,
1060
                               tree indexed_type)
1061
{
1062
  /* We used to check to see if ARRAY_NODE really had array type.
1063
     However, with the new verifier, this is not necessary, as we know
1064
     that the object will be an array of the appropriate type.  */
1065
 
1066
  return indexed_type;
1067
}
1068
 
1069
/* newarray triggers a call to _Jv_NewPrimArray. This function should be
1070
   called with an integer code (the type of array to create), and the length
1071
   of the array to create.  */
1072
 
1073
tree
1074
build_newarray (int atype_value, tree length)
1075
{
1076
  tree type_arg;
1077
 
1078
  tree prim_type = decode_newarray_type (atype_value);
1079
  tree type
1080
    = build_java_array_type (prim_type,
1081
                             host_integerp (length, 0)
                             ? tree_low_cst (length, 0) : -1);
1083
 
1084
  /* Pass a reference to the primitive type class and save the runtime
1085
     some work.  */
1086
  type_arg = build_class_ref (prim_type);
1087
 
1088
  return build_call_nary (promote_type (type),
1089
                          build_address_of (soft_newarray_node),
1090
                          2, type_arg, length);
1091
}
1092
 
1093
/* Generates an anewarray from a given CLASS_TYPE.  The size of the
   dimension is taken from the stack. */
1095
 
1096
tree
1097
build_anewarray (tree class_type, tree length)
1098
{
1099
  tree type
1100
    = build_java_array_type (class_type,
1101
                             host_integerp (length, 0)
1102
                             ? tree_low_cst (length, 0) : -1);
1103
 
1104
  return build_call_nary (promote_type (type),
1105
                          build_address_of (soft_anewarray_node),
1106
                          3,
1107
                          length,
1108
                          build_class_ref (class_type),
1109
                          null_pointer_node);
1110
}
1111
 
1112
/* Return a node that evaluates 'new TYPE[LENGTH]'. */
1113
 
1114
tree
1115
build_new_array (tree type, tree length)
1116
{
1117
  if (JPRIMITIVE_TYPE_P (type))
1118
    return build_newarray (encode_newarray_type (type), length);
1119
  else
1120
    return build_anewarray (TREE_TYPE (type), length);
1121
}
1122
 
1123
/* Generates a call to _Jv_NewMultiArray.  multianewarray expects a
   class pointer, the number of dimensions, and that many dimension
   sizes.  The argument list is NULL terminated.  */
1126
 
1127
static void
1128
expand_java_multianewarray (tree class_type, int ndim)
1129
{
1130
  int i;
1131
  tree args = build_tree_list( NULL_TREE, null_pointer_node );
1132
 
1133
  for( i = 0; i < ndim; i++ )
1134
    args = tree_cons (NULL_TREE, pop_value (int_type_node), args);
1135
 
1136
  args = tree_cons (NULL_TREE,
1137
                    build_class_ref (class_type),
1138
                    tree_cons (NULL_TREE,
1139
                               build_int_cst (NULL_TREE, ndim),
1140
                               args));
1141
 
1142
  push_value (build_call_list (promote_type (class_type),
1143
                               build_address_of (soft_multianewarray_node),
1144
                               args));
1145
}
1146
 
1147
/*  ARRAY[INDEX] <- RHS.  build_java_check_indexed_type makes sure that
    ARRAY is an array type.  May expand some bounds checking and NULL
    pointer checking.  RHS_TYPE_NODE is the type of the value we are going
    to store.  In the case of CHAR/BYTE/BOOLEAN/SHORT, the type popped off
    the stack is an INT, so in those cases we insert the conversion.

    If ARRAY is a reference type, the assignment is checked at run-time
    to make sure that the RHS can be assigned to the array element
    type.  It is not necessary to generate this code if ARRAY is final.  */
1156
 
1157
static void
1158
expand_java_arraystore (tree rhs_type_node)
1159
{
1160
  tree rhs_node    = pop_value ((INTEGRAL_TYPE_P (rhs_type_node)
1161
                                 && TYPE_PRECISION (rhs_type_node) <= 32) ?
1162
                                 int_type_node : rhs_type_node);
1163
  tree index = pop_value (int_type_node);
1164
  tree array_type, array, temp, access;
1165
 
1166
  /* If we're processing an `aaload' we might as well just pick
1167
     `Object'.  */
1168
  if (TREE_CODE (rhs_type_node) == POINTER_TYPE)
1169
    {
1170
      array_type = build_java_array_type (object_ptr_type_node, -1);
1171
      rhs_type_node = object_ptr_type_node;
1172
    }
1173
  else
1174
    array_type = build_java_array_type (rhs_type_node, -1);
1175
 
1176
  array = pop_value (array_type);
1177
  array = build1 (NOP_EXPR, promote_type (array_type), array);
1178
 
1179
  rhs_type_node    = build_java_check_indexed_type (array, rhs_type_node);
1180
 
1181
  flush_quick_stack ();
1182
 
1183
  index = save_expr (index);
1184
  array = save_expr (array);
1185
 
1186
  /* We want to perform the bounds check (done by
1187
     build_java_arrayaccess) before the type check (done by
1188
     build_java_arraystore_check).  So, we call build_java_arrayaccess
1189
     -- which returns an ARRAY_REF lvalue -- and we then generate code
1190
     to stash the address of that lvalue in a temp.  Then we call
1191
     build_java_arraystore_check, and finally we generate a
1192
     MODIFY_EXPR to set the array element.  */
1193
 
1194
  access = build_java_arrayaccess (array, rhs_type_node, index);
1195
  temp = build_decl (input_location, VAR_DECL, NULL_TREE,
1196
                     build_pointer_type (TREE_TYPE (access)));
1197
  java_add_local_var (temp);
1198
  java_add_stmt (build2 (MODIFY_EXPR, TREE_TYPE (temp),
1199
                         temp,
1200
                         build_fold_addr_expr (access)));
1201
 
1202
  if (TREE_CODE (rhs_type_node) == POINTER_TYPE)
1203
    {
1204
      tree check = build_java_arraystore_check (array, rhs_node);
1205
      java_add_stmt (check);
1206
    }
1207
 
1208
  java_add_stmt (build2 (MODIFY_EXPR, TREE_TYPE (access),
1209
                         build1 (INDIRECT_REF, TREE_TYPE (access), temp),
1210
                         rhs_node));
1211
}
1212
 
1213
/* Expand the evaluation of ARRAY[INDEX].  build_java_check_indexed_type makes
   sure that LHS is an array type.  May expand some bounds checking and NULL
   pointer checking.
   LHS_TYPE_NODE is the type of ARRAY[INDEX].  But in the case of CHAR/BYTE/
   BOOLEAN/SHORT, we push a promoted type back onto the stack.
*/
1219
 
1220
static void
1221
expand_java_arrayload (tree lhs_type_node)
1222
{
1223
  tree load_node;
1224
  tree index_node = pop_value (int_type_node);
1225
  tree array_type;
1226
  tree array_node;
1227
 
1228
  /* If we're processing an `aaload' we might as well just pick
1229
     `Object'.  */
1230
  if (TREE_CODE (lhs_type_node) == POINTER_TYPE)
1231
    {
1232
      array_type = build_java_array_type (object_ptr_type_node, -1);
1233
      lhs_type_node = object_ptr_type_node;
1234
    }
1235
  else
1236
    array_type = build_java_array_type (lhs_type_node, -1);
1237
  array_node = pop_value (array_type);
1238
  array_node = build1 (NOP_EXPR, promote_type (array_type), array_node);
1239
 
1240
  index_node = save_expr (index_node);
1241
  array_node = save_expr (array_node);
1242
 
1243
  lhs_type_node = build_java_check_indexed_type (array_node,
1244
                                                 lhs_type_node);
1245
  load_node = build_java_arrayaccess (array_node,
1246
                                      lhs_type_node,
1247
                                      index_node);
1248
  if (INTEGRAL_TYPE_P (lhs_type_node) && TYPE_PRECISION (lhs_type_node) <= 32)
1249
    load_node = fold_build1 (NOP_EXPR, int_type_node, load_node);
1250
  push_value (load_node);
1251
}
1252
 
1253
/* Expands .length.  Makes sure that we deal with an array and may expand
   a NULL check on the array object.  */
1255
 
1256
static void
1257
expand_java_array_length (void)
1258
{
1259
  tree array  = pop_value (ptr_type_node);
1260
  tree length = build_java_array_length_access (array);
1261
 
1262
  push_value (length);
1263
}
1264
 
1265
/* Emit code for the call to _Jv_Monitor{Enter,Exit}. CALL can be
1266
   either soft_monitorenter_node or soft_monitorexit_node.  */
1267
 
1268
static tree
1269
build_java_monitor (tree call, tree object)
1270
{
1271
  return build_call_nary (void_type_node,
1272
                          build_address_of (call),
1273
                          1, object);
1274
}
1275
 
1276
/* Emit code for one of the PUSHC instructions. */
1277
 
1278
static void
1279
expand_java_pushc (int ival, tree type)
1280
{
1281
  tree value;
1282
  if (type == ptr_type_node && ival == 0)
1283
    value = null_pointer_node;
1284
  else if (type == int_type_node || type == long_type_node)
1285
    value = build_int_cst (type, ival);
1286
  else if (type == float_type_node || type == double_type_node)
1287
    {
1288
      REAL_VALUE_TYPE x;
1289
      REAL_VALUE_FROM_INT (x, ival, 0, TYPE_MODE (type));
1290
      value = build_real (type, x);
1291
    }
1292
  else
1293
    gcc_unreachable ();
1294
 
1295
  push_value (value);
1296
}
1297
 
1298
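/* Expand a return of type TYPE from the current method; for a void
   return no value is popped from the stack.  */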
static void
1299
expand_java_return (tree type)
1300
{
1301
  if (type == void_type_node)
1302
    java_add_stmt (build1 (RETURN_EXPR, void_type_node, NULL));
1303
  else
1304
    {
1305
      tree retval = pop_value (type);
1306
      tree res = DECL_RESULT (current_function_decl);
1307
      retval = build2 (MODIFY_EXPR, TREE_TYPE (res), res, retval);
1308
 
1309
      /* Handle the situation where the native integer type is smaller
1310
         than the JVM integer. It can happen for many cross compilers.
1311
         The whole if expression just goes away if INT_TYPE_SIZE < 32
1312
         is false. */
1313
      if (INT_TYPE_SIZE < 32
1314
          && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (res)))
1315
              < GET_MODE_SIZE (TYPE_MODE (type))))
1316
        retval = build1(NOP_EXPR, TREE_TYPE(res), retval);
1317
 
1318
      TREE_SIDE_EFFECTS (retval) = 1;
1319
      java_add_stmt (build1 (RETURN_EXPR, void_type_node, retval));
1320
    }
1321
}
1322
 
1323
static void
1324
expand_load_internal (int index, tree type, int pc)
1325
{
1326
  tree copy;
1327
  tree var = find_local_variable (index, type, pc);
1328
 
1329
  /* Now VAR is the VAR_DECL (or PARM_DECL) that we are going to push
1330
     on the stack.  If there is an assignment to this VAR_DECL between
1331
     the stack push and the use, then the wrong code could be
1332
     generated.  To avoid this we create a new local and copy our
1333
     value into it.  Then we push this new local on the stack.
1334
     Hopefully this all gets optimized out.  */
1335
  copy = build_decl (input_location, VAR_DECL, NULL_TREE, type);
1336
  if ((INTEGRAL_TYPE_P (type) || POINTER_TYPE_P (type))
1337
      && TREE_TYPE (copy) != TREE_TYPE (var))
1338
    var = convert (type, var);
1339
  java_add_local_var (copy);
1340
  java_add_stmt (build2 (MODIFY_EXPR, TREE_TYPE (var), copy, var));
1341
 
1342
  push_value (copy);
1343
}
1344
 
1345
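/* Return an ADDR_EXPR that takes the address of VALUE, typed as a
   pointer to VALUE's type.  */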
tree
1346
build_address_of (tree value)
1347
{
1348
  return build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (value)), value);
1349
}
1350
 
1351
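/* Return true if TYPE or any superclass other than java.lang.Object
   declares a finalizer.  */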
bool
1352
class_has_finalize_method (tree type)
1353
{
1354
  tree super = CLASSTYPE_SUPER (type);
1355
 
1356
  if (super == NULL_TREE)
1357
    return false;       /* Every class with a real finalizer inherits   */
1358
                        /* from java.lang.Object.                       */
1359
  else
1360
    return HAS_FINALIZER_P (type) || class_has_finalize_method (super);
1361
}
1362
 
1363
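/* Build an expression allocating a new instance of class TYPE, using
   the non-finalizing allocator when no finalizer is present.  */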
tree
1364
java_create_object (tree type)
1365
{
1366
  tree alloc_node = (class_has_finalize_method (type)
1367
                     ? alloc_object_node
1368
                     : alloc_no_finalizer_node);
1369
 
1370
  return build_call_nary (promote_type (type),
1371
                          build_address_of (alloc_node),
1372
                          1, build_class_ref (type));
1373
}
1374
 
1375
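/* Expand the 'new' opcode: allocate an instance of TYPE and push the
   reference onto the stack.  */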
static void
1376
expand_java_NEW (tree type)
1377
{
1378
  tree alloc_node;
1379
 
1380
  alloc_node = (class_has_finalize_method (type) ? alloc_object_node
1381
                                                 : alloc_no_finalizer_node);
1382
  if (! CLASS_LOADED_P (type))
1383
    load_class (type, 1);
1384
  safe_layout_class (type);
1385
  push_value (build_call_nary (promote_type (type),
1386
                               build_address_of (alloc_node),
1387
                               1, build_class_ref (type)));
1388
}
1389
 
1390
/* This returns an expression which will extract the class of an
1391
   object.  */
1392
 
1393
tree
1394
build_get_class (tree value)
1395
{
1396
  tree class_field = lookup_field (&dtable_type, get_identifier ("class"));
1397
  tree vtable_field = lookup_field (&object_type_node,
1398
                                    get_identifier ("vtable"));
1399
  tree tmp = build3 (COMPONENT_REF, dtable_ptr_type,
1400
                     build_java_indirect_ref (object_type_node, value,
1401
                                              flag_check_references),
1402
                     vtable_field, NULL_TREE);
1403
  return build3 (COMPONENT_REF, class_ptr_type,
1404
                 build1 (INDIRECT_REF, dtable_type, tmp),
1405
                 class_field, NULL_TREE);
1406
}
1407
 
1408
/* This builds the tree representation of the `instanceof' operator.
1409
   It tries various tricks to optimize this in cases where types are
1410
   known.  */
1411
 
1412
tree
1413
build_instanceof (tree value, tree type)
1414
{
1415
  tree expr;
1416
  tree itype = TREE_TYPE (TREE_TYPE (soft_instanceof_node));
1417
  tree valtype = TREE_TYPE (TREE_TYPE (value));
1418
  tree valclass = TYPE_NAME (valtype);
1419
  tree klass;
1420
 
1421
  /* When compiling from bytecode, we need to ensure that TYPE has
1422
     been loaded.  */
1423
  if (CLASS_P (type) && ! CLASS_LOADED_P (type))
1424
    {
1425
      load_class (type, 1);
1426
      safe_layout_class (type);
1427
      if (! TYPE_SIZE (type) || TREE_CODE (TYPE_SIZE (type)) == ERROR_MARK)
1428
        return error_mark_node;
1429
    }
1430
  klass = TYPE_NAME (type);
1431
 
1432
  if (type == object_type_node || inherits_from_p (valtype, type))
1433
    {
1434
      /* Anything except `null' is an instance of Object.  Likewise,
1435
         if the object is known to be an instance of the class, then
1436
         we only need to check for `null'.  */
1437
      expr = build2 (NE_EXPR, itype, value, null_pointer_node);
1438
    }
1439
  else if (flag_verify_invocations
1440
           && ! TYPE_ARRAY_P (type)
1441
           && ! TYPE_ARRAY_P (valtype)
1442
           && DECL_P (klass) && DECL_P (valclass)
1443
           && ! CLASS_INTERFACE (valclass)
1444
           && ! CLASS_INTERFACE (klass)
1445
           && ! inherits_from_p (type, valtype)
1446
           && (CLASS_FINAL (klass)
1447
               || ! inherits_from_p (valtype, type)))
1448
    {
1449
      /* The classes are from different branches of the derivation
1450
         tree, so we immediately know the answer.  */
1451
      expr = boolean_false_node;
1452
    }
1453
  else if (DECL_P (klass) && CLASS_FINAL (klass))
1454
    {
1455
      tree save = save_expr (value);
1456
      expr = build3 (COND_EXPR, itype,
1457
                     build2 (NE_EXPR, boolean_type_node,
1458
                             save, null_pointer_node),
1459
                     build2 (EQ_EXPR, itype,
1460
                             build_get_class (save),
1461
                             build_class_ref (type)),
1462
                     boolean_false_node);
1463
    }
1464
  else
1465
    {
1466
      expr = build_call_nary (itype,
1467
                              build_address_of (soft_instanceof_node),
1468
                              2, value, build_class_ref (type));
1469
    }
1470
  TREE_SIDE_EFFECTS (expr) = TREE_SIDE_EFFECTS (value);
1471
  return expr;
1472
}
1473
 
1474
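/* Expand the 'instanceof' opcode against class TYPE for the object
   reference on top of the stack.  */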
static void
1475
expand_java_INSTANCEOF (tree type)
1476
{
1477
  tree value = pop_value (object_ptr_type_node);
1478
  value = build_instanceof (value, type);
1479
  push_value (value);
1480
}
1481
 
1482
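/* Expand the 'checkcast' opcode: call the runtime checkcast helper
   (soft_checkcast_node) on the value on top of the stack and push the
   checked reference.  */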
static void
1483
expand_java_CHECKCAST (tree type)
1484
{
1485
  tree value = pop_value (ptr_type_node);
1486
  value = build_call_nary (promote_type (type),
1487
                           build_address_of (soft_checkcast_node),
1488
                           2, build_class_ref (type), value);
1489
  push_value (value);
1490
}
1491
 
1492
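/* Expand the 'iinc' opcode: add the constant IVAL to the int local
   variable at LOCAL_VAR_INDEX.  PC is the current bytecode position,
   used to look up the local variable.  */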
static void
1493
expand_iinc (unsigned int local_var_index, int ival, int pc)
1494
{
1495
  tree local_var, res;
1496
  tree constant_value;
1497
 
1498
  flush_quick_stack ();
1499
  local_var = find_local_variable (local_var_index, int_type_node, pc);
1500
  constant_value = build_int_cst (NULL_TREE, ival);
1501
  res = fold_build2 (PLUS_EXPR, int_type_node, local_var, constant_value);
1502
  java_add_stmt (build2 (MODIFY_EXPR, TREE_TYPE (local_var), local_var, res));
1503
}
1504
 
1505
 
1506
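/* Build a call to the runtime software divide/remainder helper for int
   or long operands.  OP is TRUNC_DIV_EXPR or TRUNC_MOD_EXPR.  */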
tree
1507
build_java_soft_divmod (enum tree_code op, tree type, tree op1, tree op2)
1508
{
1509
  tree call = NULL;
1510
  tree arg1 = convert (type, op1);
1511
  tree arg2 = convert (type, op2);
1512
 
1513
  if (type == int_type_node)
1514
    {
1515
      switch (op)
1516
        {
1517
        case TRUNC_DIV_EXPR:
1518
          call = soft_idiv_node;
1519
          break;
1520
        case TRUNC_MOD_EXPR:
1521
          call = soft_irem_node;
1522
          break;
1523
        default:
1524
          break;
1525
        }
1526
    }
1527
  else if (type == long_type_node)
1528
    {
1529
      switch (op)
1530
        {
1531
        case TRUNC_DIV_EXPR:
1532
          call = soft_ldiv_node;
1533
          break;
1534
        case TRUNC_MOD_EXPR:
1535
          call = soft_lrem_node;
1536
          break;
1537
        default:
1538
          break;
1539
        }
1540
    }
1541
 
1542
  gcc_assert (call);
1543
  call = build_call_nary (type, build_address_of (call), 2, arg1, arg2);
1544
  return call;
1545
}
1546
 
1547
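/* Build a binary operation OP on ARG1 and ARG2 in type TYPE, handling
   the Java-specific cases (unsigned right shift, shift-count masking,
   the compare operators, floating-point remainder, and software integer
   division) that do not map directly onto a single tree code.  */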
tree
1548
build_java_binop (enum tree_code op, tree type, tree arg1, tree arg2)
1549
{
1550
  tree mask;
1551
  switch (op)
1552
    {
1553
    case URSHIFT_EXPR:
1554
      {
1555
        tree u_type = unsigned_type_for (type);
1556
        arg1 = convert (u_type, arg1);
1557
        arg1 = build_java_binop (RSHIFT_EXPR, u_type, arg1, arg2);
1558
        return convert (type, arg1);
1559
      }
1560
    case LSHIFT_EXPR:
1561
    case RSHIFT_EXPR:
1562
      mask = build_int_cst (NULL_TREE,
1563
                            TYPE_PRECISION (TREE_TYPE (arg1)) - 1);
1564
      arg2 = fold_build2 (BIT_AND_EXPR, int_type_node, arg2, mask);
1565
      break;
1566
 
1567
    case COMPARE_L_EXPR:  /* arg1 > arg2 ?  1 : arg1 == arg2 ? 0 : -1 */
1568
    case COMPARE_G_EXPR:  /* arg1 < arg2 ? -1 : arg1 == arg2 ? 0 :  1 */
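      /* Illustrative note, not part of the original source: these two
         codes correspond to the fcmpl/dcmpl and fcmpg/dcmpg bytecodes
         (the integer lcmp case is COMPARE_EXPR below).  They differ
         only when an operand is NaN, where every ordered comparison is
         false: COMPARE_L_EXPR then falls through to -1 (fcmpl) while
         COMPARE_G_EXPR falls through to 1 (fcmpg), which is exactly
         what the nested COND_EXPRs built below produce.  */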
1569
      arg1 = save_expr (arg1);  arg2 = save_expr (arg2);
1570
      {
1571
        tree ifexp1 = fold_build2 (op == COMPARE_L_EXPR ? GT_EXPR : LT_EXPR,
1572
                                   boolean_type_node, arg1, arg2);
1573
        tree ifexp2 = fold_build2 (EQ_EXPR, boolean_type_node, arg1, arg2);
1574
        tree second_compare = fold_build3 (COND_EXPR, int_type_node,
1575
                                           ifexp2, integer_zero_node,
1576
                                           op == COMPARE_L_EXPR
1577
                                           ? integer_minus_one_node
1578
                                           : integer_one_node);
1579
        return fold_build3 (COND_EXPR, int_type_node, ifexp1,
1580
                            op == COMPARE_L_EXPR ? integer_one_node
1581
                            : integer_minus_one_node,
1582
                            second_compare);
1583
      }
1584
    case COMPARE_EXPR:
1585
      arg1 = save_expr (arg1);  arg2 = save_expr (arg2);
1586
      {
1587
        tree ifexp1 = fold_build2 (LT_EXPR, boolean_type_node, arg1, arg2);
1588
        tree ifexp2 = fold_build2 (GT_EXPR, boolean_type_node, arg1, arg2);
1589
        tree second_compare = fold_build3 (COND_EXPR, int_type_node,
1590
                                           ifexp2, integer_one_node,
1591
                                           integer_zero_node);
1592
        return fold_build3 (COND_EXPR, int_type_node,
1593
                            ifexp1, integer_minus_one_node, second_compare);
1594
      }
1595
    case TRUNC_DIV_EXPR:
1596
    case TRUNC_MOD_EXPR:
1597
      if (TREE_CODE (type) == REAL_TYPE
1598
          && op == TRUNC_MOD_EXPR)
1599
        {
1600
          tree call;
1601
          if (type != double_type_node)
1602
            {
1603
              arg1 = convert (double_type_node, arg1);
1604
              arg2 = convert (double_type_node, arg2);
1605
            }
1606
          call = build_call_nary (double_type_node,
1607
                                  build_address_of (soft_fmod_node),
1608
                                  2, arg1, arg2);
1609
          if (type != double_type_node)
1610
            call = convert (type, call);
1611
          return call;
1612
        }
1613
 
1614
      if (TREE_CODE (type) == INTEGER_TYPE
1615
          && flag_use_divide_subroutine
1616
          && ! flag_syntax_only)
1617
        return build_java_soft_divmod (op, type, arg1, arg2);
1618
 
1619
      break;
1620
    default:  ;
1621
    }
1622
  return fold_build2 (op, type, arg1, arg2);
1623
}
1624
 
1625
static void
1626
expand_java_binop (tree type, enum tree_code op)
1627
{
1628
  tree larg, rarg;
1629
  tree ltype = type;
1630
  tree rtype = type;
1631
  switch (op)
1632
    {
1633
    case LSHIFT_EXPR:
1634
    case RSHIFT_EXPR:
1635
    case URSHIFT_EXPR:
1636
      rtype = int_type_node;
1637
      rarg = pop_value (rtype);
1638
      break;
1639
    default:
1640
      rarg = pop_value (rtype);
1641
    }
1642
  larg = pop_value (ltype);
1643
  push_value (build_java_binop (op, type, larg, rarg));
1644
}
1645
 
1646
/* Look up the field named NAME in *TYPEP or its superclasses.
1647
   If not found, return NULL_TREE.
1648
   (If the *TYPEP is not found, or if the field reference is
1649
   ambiguous, return error_mark_node.)
1650
   If found, return the FIELD_DECL, and set *TYPEP to the
1651
   class containing the field. */
1652
 
1653
tree
1654
lookup_field (tree *typep, tree name)
1655
{
1656
  if (CLASS_P (*typep) && !CLASS_LOADED_P (*typep))
1657
    {
1658
      load_class (*typep, 1);
1659
      safe_layout_class (*typep);
1660
      if (!TYPE_SIZE (*typep) || TREE_CODE (TYPE_SIZE (*typep)) == ERROR_MARK)
1661
        return error_mark_node;
1662
    }
1663
  do
1664
    {
1665
      tree field, binfo, base_binfo;
1666
      tree save_field;
1667
      int i;
1668
 
1669
      for (field = TYPE_FIELDS (*typep); field; field = TREE_CHAIN (field))
1670
        if (DECL_NAME (field) == name)
1671
          return field;
1672
 
1673
      /* Process implemented interfaces. */
1674
      save_field = NULL_TREE;
1675
      for (binfo = TYPE_BINFO (*typep), i = 0;
1676
           BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
1677
        {
1678
          tree t = BINFO_TYPE (base_binfo);
1679
          if ((field = lookup_field (&t, name)))
1680
            {
1681
              if (save_field == field)
1682
                continue;
1683
              if (save_field == NULL_TREE)
1684
                save_field = field;
1685
              else
1686
                {
1687
                  tree i1 = DECL_CONTEXT (save_field);
1688
                  tree i2 = DECL_CONTEXT (field);
1689
                  error ("reference %qs is ambiguous: appears in interface %qs and interface %qs",
1690
                         IDENTIFIER_POINTER (name),
1691
                         IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (i1))),
1692
                         IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (i2))));
1693
                  return error_mark_node;
1694
                }
1695
            }
1696
        }
1697
 
1698
      if (save_field != NULL_TREE)
1699
        return save_field;
1700
 
1701
      *typep = CLASSTYPE_SUPER (*typep);
1702
    } while (*typep);
1703
  return NULL_TREE;
1704
}
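
/* Illustrative example, not part of the original source: the ambiguity
   error above corresponds to the Java-level situation

       interface A { int x = 1; }
       interface B { int x = 2; }
       class C implements A, B { int f () { return x; } }

   where the simple name `x' is inherited from both A and B and so has
   no single meaning.  */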
1705
 
1706
/* Look up the field named NAME in object SELF_VALUE,
1707
   which has class SELF_CLASS (a non-handle RECORD_TYPE).
1708
   SELF_VALUE is NULL_TREE if looking for a static field. */
1709
 
1710
tree
1711
build_field_ref (tree self_value, tree self_class, tree name)
1712
{
1713
  tree base_class = self_class;
1714
  tree field_decl = lookup_field (&base_class, name);
1715
  if (field_decl == NULL_TREE)
1716
    {
1717
      error ("field %qs not found", IDENTIFIER_POINTER (name));
1718
      return error_mark_node;
1719
    }
1720
  if (self_value == NULL_TREE)
1721
    {
1722
      return build_static_field_ref (field_decl);
1723
    }
1724
  else
1725
    {
1726
      tree base_type = promote_type (base_class);
1727
 
1728
      /* CHECK is true if self_value is not the this pointer.  */
1729
      int check = (! (DECL_P (self_value)
1730
                      && DECL_NAME (self_value) == this_identifier_node));
1731
 
1732
      /* Determine whether a field offset from NULL will lie within
1733
         Page 0: this is necessary on those GNU/Linux/BSD systems that
1734
         trap SEGV to generate NullPointerExceptions.
1735
 
1736
         We assume that Page 0 will be mapped with NOPERM, and that
1737
         memory may be allocated from any other page, so only field
1738
         offsets < pagesize are guaranteed to trap.  We also assume
1739
         the smallest page size we'll encounter is 4k bytes.  */
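      /* Illustrative example, not part of the original source: with a
         4k page, a load at offset 8 from a null reference faults in
         page 0 and the runtime's SEGV handler raises the
         NullPointerException, so the explicit check can be dropped; a
         field at offset 8192 might land in mapped memory, so for such
         large offsets CHECK stays set.  */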
1740
      if (! flag_syntax_only && check && ! flag_check_references
1741
          && ! flag_indirect_dispatch)
1742
        {
1743
          tree field_offset = byte_position (field_decl);
1744
          if (! page_size)
1745
            page_size = size_int (4096);
1746
          check = ! INT_CST_LT_UNSIGNED (field_offset, page_size);
1747
        }
1748
 
1749
      if (base_type != TREE_TYPE (self_value))
1750
        self_value = fold_build1 (NOP_EXPR, base_type, self_value);
1751
      if (! flag_syntax_only && flag_indirect_dispatch)
1752
        {
1753
          tree otable_index
1754
            = build_int_cst (NULL_TREE, get_symbol_table_index
1755
                             (field_decl, NULL_TREE,
1756
                              &TYPE_OTABLE_METHODS (output_class)));
1757
          tree field_offset
1758
            = build4 (ARRAY_REF, integer_type_node,
1759
                      TYPE_OTABLE_DECL (output_class), otable_index,
1760
                      NULL_TREE, NULL_TREE);
1761
          tree address;
1762
 
1763
          if (DECL_CONTEXT (field_decl) != output_class)
1764
            field_offset
1765
              = build3 (COND_EXPR, TREE_TYPE (field_offset),
1766
                        build2 (EQ_EXPR, boolean_type_node,
1767
                                field_offset, integer_zero_node),
1768
                        build_call_nary (void_type_node,
1769
                                         build_address_of (soft_nosuchfield_node),
1770
                                         1, otable_index),
1771
                        field_offset);
1772
 
1773
          field_offset = fold (convert (sizetype, field_offset));
1774
          self_value = java_check_reference (self_value, check);
1775
          address
1776
            = fold_build2 (POINTER_PLUS_EXPR,
1777
                           TREE_TYPE (self_value),
1778
                           self_value, field_offset);
1779
          address = fold_convert (build_pointer_type (TREE_TYPE (field_decl)),
1780
                                  address);
1781
          return fold_build1 (INDIRECT_REF, TREE_TYPE (field_decl), address);
1782
        }
1783
 
1784
      self_value = build_java_indirect_ref (TREE_TYPE (TREE_TYPE (self_value)),
1785
                                            self_value, check);
1786
      return fold_build3 (COMPONENT_REF, TREE_TYPE (field_decl),
1787
                          self_value, field_decl, NULL_TREE);
1788
    }
1789
}
1790
 
1791
tree
1792
lookup_label (int pc)
1793
{
1794
  tree name;
1795
  char buf[32];
1796
  if (pc > highest_label_pc_this_method)
1797
    highest_label_pc_this_method = pc;
1798
  ASM_GENERATE_INTERNAL_LABEL(buf, "LJpc=", start_label_pc_this_method + pc);
1799
  name = get_identifier (buf);
1800
  if (IDENTIFIER_LOCAL_VALUE (name))
1801
    return IDENTIFIER_LOCAL_VALUE (name);
1802
  else
1803
    {
1804
      /* The type of the address of a label is return_address_type_node. */
1805
      tree decl = create_label_decl (name);
1806
      return pushdecl (decl);
1807
    }
1808
}
1809
 
1810
/* Generate a unique name for labels used by loops, switches and
1811
   try-catch-finally blocks, and for temporary variables.  */
1812
 
1813
tree
1814
generate_name (void)
1815
{
1816
  static int l_number = 0;
1817
  char buff [32];
1818
  ASM_GENERATE_INTERNAL_LABEL(buff, "LJv", l_number);
1819
  l_number++;
1820
  return get_identifier (buff);
1821
}
1822
 
1823
tree
1824
create_label_decl (tree name)
1825
{
1826
  tree decl;
1827
  decl = build_decl (input_location, LABEL_DECL, name,
1828
                     TREE_TYPE (return_address_type_node));
1829
  DECL_CONTEXT (decl) = current_function_decl;
1830
  DECL_IGNORED_P (decl) = 1;
1831
  return decl;
1832
}
1833
 
1834
/* This maps a bytecode offset (PC) to various flags.  */
1835
char *instruction_bits;
1836
 
1837
/* This is a vector of type states for the current method.  It is
1838
   indexed by PC.  Each element is a tree vector holding the type
1839
   state at that PC.  We only note type states at basic block
1840
   boundaries.  */
1841
VEC(tree, gc) *type_states;
1842
 
1843
static void
1844
note_label (int current_pc ATTRIBUTE_UNUSED, int target_pc)
1845
{
1846
  lookup_label (target_pc);
1847
  instruction_bits [target_pc] |= BCODE_JUMP_TARGET;
1848
}
1849
 
1850
/* Emit code to jump to TARGET_PC if VALUE1 CONDITION VALUE2,
1851
   where CONDITION is one of the compare operators. */
1852
 
1853
static void
1854
expand_compare (enum tree_code condition, tree value1, tree value2,
1855
                int target_pc)
1856
{
1857
  tree target = lookup_label (target_pc);
1858
  tree cond = fold_build2 (condition, boolean_type_node, value1, value2);
1859
  java_add_stmt
1860
    (build3 (COND_EXPR, void_type_node, java_truthvalue_conversion (cond),
1861
             build1 (GOTO_EXPR, void_type_node, target),
1862
             build_java_empty_stmt ()));
1863
}
1864
 
1865
/* Emit code for a TEST-type opcode. */
1866
 
1867
static void
1868
expand_test (enum tree_code condition, tree type, int target_pc)
1869
{
1870
  tree value1, value2;
1871
  flush_quick_stack ();
1872
  value1 = pop_value (type);
1873
  value2 = (type == ptr_type_node) ? null_pointer_node : integer_zero_node;
1874
  expand_compare (condition, value1, value2, target_pc);
1875
}
1876
 
1877
/* Emit code for a COND-type opcode. */
1878
 
1879
static void
1880
expand_cond (enum tree_code condition, tree type, int target_pc)
1881
{
1882
  tree value1, value2;
1883
  flush_quick_stack ();
1884
  /* note: pop values in opposite order */
1885
  value2 = pop_value (type);
1886
  value1 = pop_value (type);
1887
  /* Maybe should check value1 and value2 for type compatibility ??? */
1888
  expand_compare (condition, value1, value2, target_pc);
1889
}
1890
 
1891
static void
1892
expand_java_goto (int target_pc)
1893
{
1894
  tree target_label = lookup_label (target_pc);
1895
  flush_quick_stack ();
1896
  java_add_stmt (build1 (GOTO_EXPR, void_type_node, target_label));
1897
}
1898
 
1899
static tree
1900
expand_java_switch (tree selector, int default_pc)
1901
{
1902
  tree switch_expr, x;
1903
 
1904
  flush_quick_stack ();
1905
  switch_expr = build3 (SWITCH_EXPR, TREE_TYPE (selector), selector,
1906
                        NULL_TREE, NULL_TREE);
1907
  java_add_stmt (switch_expr);
1908
 
1909
  x = build3 (CASE_LABEL_EXPR, void_type_node, NULL_TREE, NULL_TREE,
1910
              create_artificial_label (input_location));
1911
  append_to_statement_list (x, &SWITCH_BODY (switch_expr));
1912
 
1913
  x = build1 (GOTO_EXPR, void_type_node, lookup_label (default_pc));
1914
  append_to_statement_list (x, &SWITCH_BODY (switch_expr));
1915
 
1916
  return switch_expr;
1917
}
1918
 
1919
static void
1920
expand_java_add_case (tree switch_expr, int match, int target_pc)
1921
{
1922
  tree value, x;
1923
 
1924
  value = build_int_cst (TREE_TYPE (switch_expr), match);
1925
 
1926
  x = build3 (CASE_LABEL_EXPR, void_type_node, value, NULL_TREE,
1927
              create_artificial_label (input_location));
1928
  append_to_statement_list (x, &SWITCH_BODY (switch_expr));
1929
 
1930
  x = build1 (GOTO_EXPR, void_type_node, lookup_label (target_pc));
1931
  append_to_statement_list (x, &SWITCH_BODY (switch_expr));
1932
}
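
/* Illustrative sketch, not part of the original source: a bytecode
   tableswitch or lookupswitch is thus lowered to a GENERIC SWITCH_EXPR
   of the shape

       switch (selector)
         {
         default: goto LJpc=<default_pc>;
         case M1: goto LJpc=<target_pc_1>;
         case M2: goto LJpc=<target_pc_2>;
         ...
         }

   with expand_java_switch emitting the default arm and one
   CASE_LABEL_EXPR/GOTO_EXPR pair appended per expand_java_add_case
   call.  */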
1933
 
1934
static tree
1935
pop_arguments (tree arg_types)
1936
{
1937
  if (arg_types == end_params_node)
1938
    return NULL_TREE;
1939
  if (TREE_CODE (arg_types) == TREE_LIST)
1940
    {
1941
      tree tail = pop_arguments (TREE_CHAIN (arg_types));
1942
      tree type = TREE_VALUE (arg_types);
1943
      tree arg = pop_value (type);
1944
 
1945
      /* We simply cast each argument to its proper type.  This is
1946
         needed since we lose type information coming out of the
1947
         verifier.  We also have to do this when we pop an integer
1948
         type that must be promoted for the function call.  */
1949
      if (TREE_CODE (type) == POINTER_TYPE)
1950
        arg = build1 (NOP_EXPR, type, arg);
1951
      else if (targetm.calls.promote_prototypes (type)
1952
               && TYPE_PRECISION (type) < TYPE_PRECISION (integer_type_node)
1953
               && INTEGRAL_TYPE_P (type))
1954
        arg = convert (integer_type_node, arg);
1955
      return tree_cons (NULL_TREE, arg, tail);
1956
    }
1957
  gcc_unreachable ();
1958
}
1959
 
1960
/* Attach to PTR (a block) the declaration found in ENTRY. */
1961
 
1962
int
1963
attach_init_test_initialization_flags (void **entry, void *ptr)
1964
{
1965
  tree block = (tree)ptr;
1966
  struct treetreehash_entry *ite = (struct treetreehash_entry *) *entry;
1967
 
1968
  if (block != error_mark_node)
1969
    {
1970
      if (TREE_CODE (block) == BIND_EXPR)
1971
        {
1972
          tree body = BIND_EXPR_BODY (block);
1973
          TREE_CHAIN (ite->value) = BIND_EXPR_VARS (block);
1974
          BIND_EXPR_VARS (block) = ite->value;
1975
          body = build2 (COMPOUND_EXPR, void_type_node,
1976
                         build1 (DECL_EXPR, void_type_node, ite->value), body);
1977
          BIND_EXPR_BODY (block) = body;
1978
        }
1979
      else
1980
        {
1981
          tree body = BLOCK_SUBBLOCKS (block);
1982
          TREE_CHAIN (ite->value) = BLOCK_EXPR_DECLS (block);
1983
          BLOCK_EXPR_DECLS (block) = ite->value;
1984
          body = build2 (COMPOUND_EXPR, void_type_node,
1985
                         build1 (DECL_EXPR, void_type_node, ite->value), body);
1986
          BLOCK_SUBBLOCKS (block) = body;
1987
        }
1988
 
1989
    }
1990
  return true;
1991
}
1992
 
1993
/* Build an expression to initialize the class CLAS.
1994
   If EXPR is non-NULL, return an expression that first calls the initializer
1995
   (if it is needed) and then evaluates EXPR.  */
1996
 
1997
tree
1998
build_class_init (tree clas, tree expr)
1999
{
2000
  tree init;
2001
 
2002
  /* An optimization: if CLAS is a superclass of the class we're
2003
     compiling, we don't need to initialize it.  However, if CLAS is
2004
     an interface, it won't necessarily be initialized, even if we
2005
     implement it.  */
2006
  if ((! CLASS_INTERFACE (TYPE_NAME (clas))
2007
       && inherits_from_p (current_class, clas))
2008
      || current_class == clas)
2009
    return expr;
2010
 
2011
  if (always_initialize_class_p)
2012
    {
2013
      init = build_call_nary (void_type_node,
2014
                              build_address_of (soft_initclass_node),
2015
                              1, build_class_ref (clas));
2016
      TREE_SIDE_EFFECTS (init) = 1;
2017
    }
2018
  else
2019
    {
2020
      tree *init_test_decl;
2021
      tree decl;
2022
      init_test_decl = java_treetreehash_new
2023
        (DECL_FUNCTION_INIT_TEST_TABLE (current_function_decl), clas);
2024
 
2025
      if (*init_test_decl == NULL)
2026
        {
2027
          /* Build a declaration and mark it as a flag used to track
2028
             static class initializations. */
2029
          decl = build_decl (input_location, VAR_DECL, NULL_TREE,
2030
                             boolean_type_node);
2031
          MAYBE_CREATE_VAR_LANG_DECL_SPECIFIC (decl);
2032
          DECL_CONTEXT (decl) = current_function_decl;
2033
          DECL_INITIAL (decl) = boolean_false_node;
2034
          /* Don't emit any symbolic debugging info for this decl.  */
2035
          DECL_IGNORED_P (decl) = 1;
2036
          *init_test_decl = decl;
2037
        }
2038
 
2039
      init = build_call_nary (void_type_node,
2040
                              build_address_of (soft_initclass_node),
2041
                              1, build_class_ref (clas));
2042
      TREE_SIDE_EFFECTS (init) = 1;
2043
      init = build3 (COND_EXPR, void_type_node,
2044
                     build2 (EQ_EXPR, boolean_type_node,
2045
                             *init_test_decl, boolean_false_node),
2046
                     init, integer_zero_node);
2047
      TREE_SIDE_EFFECTS (init) = 1;
2048
      init = build2 (COMPOUND_EXPR, TREE_TYPE (expr), init,
2049
                     build2 (MODIFY_EXPR, boolean_type_node,
2050
                             *init_test_decl, boolean_true_node));
2051
      TREE_SIDE_EFFECTS (init) = 1;
2052
    }
2053
 
2054
  if (expr != NULL_TREE)
2055
    {
2056
      expr = build2 (COMPOUND_EXPR, TREE_TYPE (expr), init, expr);
2057
      TREE_SIDE_EFFECTS (expr) = 1;
2058
      return expr;
2059
    }
2060
  return init;
2061
}
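
/* Illustrative sketch, not part of the original source: in the lazy
   (non-always_initialize_class_p) case above, each function gets its
   own per-class boolean guard and the emitted code is roughly

       if (init$guard == false)
         soft_initclass (&CLAS.class);
       init$guard = true;
       EXPR;

   so the runtime initializer is reached at most once per guard even if
   the expression is evaluated repeatedly.  The names `init$guard' and
   `soft_initclass' are placeholders for the anonymous VAR_DECL and the
   routine behind soft_initclass_node used above.  */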
2062
 
2063
 
2064
 
2065
/* Rewrite expensive calls that require stack unwinding at runtime to
2066
   cheaper alternatives.  The logic here performs these
2067
   transformations:
2068
 
2069
   java.lang.Class.forName("foo") -> java.lang.Class.forName("foo", class$)
2070
   java.lang.Class.getClassLoader() -> java.lang.Class.getClassLoader(class$)
2071
 
2072
*/
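
/* Illustrative example, not part of the original source: when the class
   being compiled is Foo, a call

       Class.forName ("some.Class")

   is rewritten into a call to the private forName (String, Class)
   overload, with Foo's class object appended by
   rewrite_arglist_getclass, effectively

       Class.forName ("some.Class", Foo.class)

   so that the runtime learns the caller's class loader without having
   to unwind the stack.  */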
2073
 
2074
typedef struct
2075
{
2076
  const char *classname;
2077
  const char *method;
2078
  const char *signature;
2079
  const char *new_classname;
2080
  const char *new_signature;
2081
  int flags;
2082
  tree (*rewrite_arglist) (tree arglist);
2083
} rewrite_rule;
2084
 
2085
/* Add __builtin_return_address(0) to the end of an arglist.  */
2086
 
2087
 
2088
static tree
2089
rewrite_arglist_getcaller (tree arglist)
2090
{
2091
  tree retaddr
2092
    = build_call_expr (built_in_decls[BUILT_IN_RETURN_ADDRESS],
2093
                       1, integer_zero_node);
2094
 
2095
  DECL_UNINLINABLE (current_function_decl) = 1;
2096
 
2097
  return chainon (arglist,
2098
                  tree_cons (NULL_TREE, retaddr,
2099
                             NULL_TREE));
2100
}
2101
 
2102
/* Add the calling class, i.e. output_class, to the end of an arglist.  */
2103
 
2104
static tree
2105
rewrite_arglist_getclass (tree arglist)
2106
{
2107
  return chainon (arglist,
2108
                  tree_cons (NULL_TREE, build_class_ref (output_class),
2109
                             NULL_TREE));
2110
}
2111
 
2112
static rewrite_rule rules[] =
2113
  {{"java.lang.Class", "getClassLoader", "()Ljava/lang/ClassLoader;",
2114
    "java.lang.Class", "(Ljava/lang/Class;)Ljava/lang/ClassLoader;",
2115
    ACC_FINAL|ACC_PRIVATE, rewrite_arglist_getclass},
2116
 
2117
   {"java.lang.Class", "forName", "(Ljava/lang/String;)Ljava/lang/Class;",
2118
    "java.lang.Class", "(Ljava/lang/String;Ljava/lang/Class;)Ljava/lang/Class;",
2119
    ACC_FINAL|ACC_PRIVATE|ACC_STATIC, rewrite_arglist_getclass},
2120
 
2121
   {"gnu.classpath.VMStackWalker", "getCallingClass", "()Ljava/lang/Class;",
2122
    "gnu.classpath.VMStackWalker", "(Lgnu/gcj/RawData;)Ljava/lang/Class;",
2123
    ACC_FINAL|ACC_PRIVATE|ACC_STATIC, rewrite_arglist_getcaller},
2124
 
2125
   {"gnu.classpath.VMStackWalker", "getCallingClassLoader",
2126
    "()Ljava/lang/ClassLoader;",
2127
    "gnu.classpath.VMStackWalker", "(Lgnu/gcj/RawData;)Ljava/lang/ClassLoader;",
2128
    ACC_FINAL|ACC_PRIVATE|ACC_STATIC, rewrite_arglist_getcaller},
2129
 
2130
   {"gnu.java.lang.VMCPStringBuilder", "toString", "([CII)Ljava/lang/String;",
2131
    "java.lang.String", "([CII)Ljava/lang/String;",
2132
    ACC_FINAL|ACC_PRIVATE|ACC_STATIC, NULL},
2133
 
2134
   {NULL, NULL, NULL, NULL, NULL, 0, NULL}};
2135
 
2136
/* True if this method is special, i.e. it's a private method that
2137
   should be exported from a DSO.  */
2138
 
2139
bool
2140
special_method_p (tree candidate_method)
2141
{
2142
  tree context = DECL_NAME (TYPE_NAME (DECL_CONTEXT (candidate_method)));
2143
  tree method = DECL_NAME (candidate_method);
2144
  rewrite_rule *p;
2145
 
2146
  for (p = rules; p->classname; p++)
2147
    {
2148
      if (get_identifier (p->classname) == context
2149
          && get_identifier (p->method) == method)
2150
        return true;
2151
    }
2152
  return false;
2153
}
2154
 
2155
/* Scan the rules list for replacements for *METHOD_P and replace the
2156
   args accordingly.  If the rewrite results in an access to a private
2157
   method, update SPECIAL.  */
2158
 
2159
void
2160
maybe_rewrite_invocation (tree *method_p, tree *arg_list_p,
2161
                          tree *method_signature_p, tree *special)
2162
{
2163
  tree context = DECL_NAME (TYPE_NAME (DECL_CONTEXT (*method_p)));
2164
  rewrite_rule *p;
2165
  *special = NULL_TREE;
2166
 
2167
  for (p = rules; p->classname; p++)
2168
    {
2169
      if (get_identifier (p->classname) == context)
2170
        {
2171
          tree method = DECL_NAME (*method_p);
2172
          if (get_identifier (p->method) == method
2173
              && get_identifier (p->signature) == *method_signature_p)
2174
            {
2175
              tree maybe_method;
2176
              tree destination_class
2177
                = lookup_class (get_identifier (p->new_classname));
2178
              gcc_assert (destination_class);
2179
              maybe_method
2180
                = lookup_java_method (destination_class,
2181
                                      method,
2182
                                      get_identifier (p->new_signature));
2183
              if (! maybe_method && ! flag_verify_invocations)
2184
                {
2185
                  maybe_method
2186
                    = add_method (destination_class, p->flags,
2187
                                  method, get_identifier (p->new_signature));
2188
                  DECL_EXTERNAL (maybe_method) = 1;
2189
                }
2190
              *method_p = maybe_method;
2191
              gcc_assert (*method_p);
2192
              if (p->rewrite_arglist)
2193
                *arg_list_p = p->rewrite_arglist (*arg_list_p);
2194
              *method_signature_p = get_identifier (p->new_signature);
2195
              *special = integer_one_node;
2196
 
2197
              break;
2198
            }
2199
        }
2200
    }
2201
}
2202
 
2203
 
2204
 
2205
tree
2206
build_known_method_ref (tree method, tree method_type ATTRIBUTE_UNUSED,
2207
                        tree self_type, tree method_signature ATTRIBUTE_UNUSED,
2208
                        tree arg_list ATTRIBUTE_UNUSED, tree special)
2209
{
2210
  tree func;
2211
  if (is_compiled_class (self_type))
2212
    {
2213
      /* With indirect dispatch we have to use indirect calls for all
2214
         publicly visible methods or gcc will use PLT indirections
2215
         to reach them.  We also have to use indirect dispatch for all
2216
         external methods.  */
2217
      if (! flag_indirect_dispatch
2218
          || (! DECL_EXTERNAL (method) && ! TREE_PUBLIC (method)))
2219
        {
2220
          func = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (method)),
2221
                         method);
2222
        }
2223
      else
2224
        {
2225
          tree table_index
2226
            = build_int_cst (NULL_TREE,
2227
                             (get_symbol_table_index
2228
                              (method, special,
2229
                               &TYPE_ATABLE_METHODS (output_class))));
2230
          func
2231
            = build4 (ARRAY_REF,
2232
                      TREE_TYPE (TREE_TYPE (TYPE_ATABLE_DECL (output_class))),
2233
                      TYPE_ATABLE_DECL (output_class), table_index,
2234
                      NULL_TREE, NULL_TREE);
2235
        }
2236
      func = convert (method_ptr_type_node, func);
2237
    }
2238
  else
2239
    {
2240
      /* We don't know whether the method has been (statically) compiled.
2241
         Compile this code to get a reference to the method's code:
2242
 
2243
         SELF_TYPE->methods[METHOD_INDEX].ncode
2244
 
2245
      */
2246
 
2247
      int method_index = 0;
2248
      tree meth, ref;
2249
 
2250
      /* The method might actually be declared in some superclass, so
2251
         we have to use its class context, not the caller's notion of
2252
         where the method is.  */
2253
      self_type = DECL_CONTEXT (method);
2254
      ref = build_class_ref (self_type);
2255
      ref = build1 (INDIRECT_REF, class_type_node, ref);
2256
      if (ncode_ident == NULL_TREE)
2257
        ncode_ident = get_identifier ("ncode");
2258
      if (methods_ident == NULL_TREE)
2259
        methods_ident = get_identifier ("methods");
2260
      ref = build3 (COMPONENT_REF, method_ptr_type_node, ref,
2261
                    lookup_field (&class_type_node, methods_ident),
2262
                    NULL_TREE);
2263
      for (meth = TYPE_METHODS (self_type);
2264
           ; meth = TREE_CHAIN (meth))
2265
        {
2266
          if (method == meth)
2267
            break;
2268
          if (meth == NULL_TREE)
2269
            fatal_error ("method '%s' not found in class",
2270
                         IDENTIFIER_POINTER (DECL_NAME (method)));
2271
          method_index++;
2272
        }
2273
      method_index *= int_size_in_bytes (method_type_node);
2274
      ref = fold_build2 (POINTER_PLUS_EXPR, method_ptr_type_node,
2275
                         ref, size_int (method_index));
2276
      ref = build1 (INDIRECT_REF, method_type_node, ref);
2277
      func = build3 (COMPONENT_REF, nativecode_ptr_type_node,
2278
                     ref, lookup_field (&method_type_node, ncode_ident),
2279
                     NULL_TREE);
2280
    }
2281
  return func;
2282
}
2283
 
2284
tree
2285
invoke_build_dtable (int is_invoke_interface, tree arg_list)
2286
{
2287
  tree dtable, objectref;
2288
 
2289
  TREE_VALUE (arg_list) = save_expr (TREE_VALUE (arg_list));
2290
 
2291
  /* If we're dealing with interfaces and if the objectref
2292
     argument is an array then get the dispatch table of the class
2293
     Object rather than the one from the objectref.  */
2294
  objectref = (is_invoke_interface
2295
               && is_array_type_p (TREE_TYPE (TREE_VALUE (arg_list)))
2296
               ? build_class_ref (object_type_node) : TREE_VALUE (arg_list));
2297
 
2298
  if (dtable_ident == NULL_TREE)
2299
    dtable_ident = get_identifier ("vtable");
2300
  dtable = build_java_indirect_ref (object_type_node, objectref,
2301
                                    flag_check_references);
2302
  dtable = build3 (COMPONENT_REF, dtable_ptr_type, dtable,
2303
                   lookup_field (&object_type_node, dtable_ident), NULL_TREE);
2304
 
2305
  return dtable;
2306
}
2307
 
2308
/* Determine the index in SYMBOL_TABLE for a reference to the decl
2309
   T. If this decl has not been seen before, it will be added to the
2310
   [oa]table_methods. If it has, the existing table slot will be
2311
   reused.  */
2312
 
2313
int
2314
get_symbol_table_index (tree t, tree special, tree *symbol_table)
2315
{
2316
  int i = 1;
2317
  tree method_list;
2318
 
2319
  if (*symbol_table == NULL_TREE)
2320
    {
2321
      *symbol_table = build_tree_list (special, t);
2322
      return 1;
2323
    }
2324
 
2325
  method_list = *symbol_table;
2326
 
2327
  while (1)
2328
    {
2329
      tree value = TREE_VALUE (method_list);
2330
      tree purpose = TREE_PURPOSE (method_list);
2331
      if (value == t && purpose == special)
2332
        return i;
2333
      i++;
2334
      if (TREE_CHAIN (method_list) == NULL_TREE)
2335
        break;
2336
      else
2337
        method_list = TREE_CHAIN (method_list);
2338
    }
2339
 
2340
  TREE_CHAIN (method_list) = build_tree_list (special, t);
2341
  return i;
2342
}
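
/* Illustrative note, not part of the original source: slots are handed
   out starting at 1 and are stable, so repeated references to the same
   (T, SPECIAL) pair share a single table entry, e.g.

       get_symbol_table_index (m1, NULL_TREE, &tab)   => 1
       get_symbol_table_index (m2, NULL_TREE, &tab)   => 2
       get_symbol_table_index (m1, NULL_TREE, &tab)   => 1  (reused)

   where m1, m2 and tab stand for two method decls and an initially
   empty table.  */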
2343
 
2344
tree
2345
build_invokevirtual (tree dtable, tree method, tree special)
2346
{
2347
  tree func;
2348
  tree nativecode_ptr_ptr_type_node
2349
    = build_pointer_type (nativecode_ptr_type_node);
2350
  tree method_index;
2351
  tree otable_index;
2352
 
2353
  if (flag_indirect_dispatch)
2354
    {
2355
      gcc_assert (! CLASS_INTERFACE (TYPE_NAME (DECL_CONTEXT (method))));
2356
 
2357
      otable_index
2358
        = build_int_cst (NULL_TREE, get_symbol_table_index
2359
                         (method, special,
2360
                          &TYPE_OTABLE_METHODS (output_class)));
2361
      method_index = build4 (ARRAY_REF, integer_type_node,
2362
                             TYPE_OTABLE_DECL (output_class),
2363
                             otable_index, NULL_TREE, NULL_TREE);
2364
    }
2365
  else
2366
    {
2367
      /* We fetch the DECL_VINDEX field directly here, rather than
2368
         using get_method_index().  DECL_VINDEX is the true offset
2369
         from the vtable base to a method, regardless of any extra
2370
         words inserted at the start of the vtable.  */
2371
      method_index = DECL_VINDEX (method);
2372
      method_index = size_binop (MULT_EXPR, method_index,
2373
                                 TYPE_SIZE_UNIT (nativecode_ptr_ptr_type_node));
2374
      if (TARGET_VTABLE_USES_DESCRIPTORS)
2375
        method_index = size_binop (MULT_EXPR, method_index,
2376
                                   size_int (TARGET_VTABLE_USES_DESCRIPTORS));
2377
    }
2378
 
2379
  func = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dtable), dtable,
2380
                      convert (sizetype, method_index));
2381
 
2382
  if (TARGET_VTABLE_USES_DESCRIPTORS)
2383
    func = build1 (NOP_EXPR, nativecode_ptr_type_node, func);
2384
  else
2385
    {
2386
      func = fold_convert (nativecode_ptr_ptr_type_node, func);
2387
      func = build1 (INDIRECT_REF, nativecode_ptr_type_node, func);
2388
    }
2389
 
2390
  return func;
2391
}
2392
 
2393
static GTY(()) tree class_ident;
2394
tree
2395
build_invokeinterface (tree dtable, tree method)
2396
{
2397
  tree interface;
2398
  tree idx;
2399
 
2400
  /* We expand invokeinterface here.  */
2401
 
2402
  if (class_ident == NULL_TREE)
2403
    class_ident = get_identifier ("class");
2404
 
2405
  dtable = build_java_indirect_ref (dtable_type, dtable,
2406
                                    flag_check_references);
2407
  dtable = build3 (COMPONENT_REF, class_ptr_type, dtable,
2408
                   lookup_field (&dtable_type, class_ident), NULL_TREE);
2409
 
2410
  interface = DECL_CONTEXT (method);
2411
  gcc_assert (CLASS_INTERFACE (TYPE_NAME (interface)));
2412
  layout_class_methods (interface);
2413
 
2414
  if (flag_indirect_dispatch)
2415
    {
2416
      int itable_index
2417
        = 2 * (get_symbol_table_index
2418
               (method, NULL_TREE, &TYPE_ITABLE_METHODS (output_class)));
2419
      interface
2420
        = build4 (ARRAY_REF,
2421
                 TREE_TYPE (TREE_TYPE (TYPE_ITABLE_DECL (output_class))),
2422
                 TYPE_ITABLE_DECL (output_class),
2423
                  build_int_cst (NULL_TREE, itable_index-1),
2424
                  NULL_TREE, NULL_TREE);
2425
      idx
2426
        = build4 (ARRAY_REF,
2427
                 TREE_TYPE (TREE_TYPE (TYPE_ITABLE_DECL (output_class))),
2428
                 TYPE_ITABLE_DECL (output_class),
2429
                  build_int_cst (NULL_TREE, itable_index),
2430
                  NULL_TREE, NULL_TREE);
2431
      interface = convert (class_ptr_type, interface);
2432
      idx = convert (integer_type_node, idx);
2433
    }
2434
  else
2435
    {
2436
      idx = build_int_cst (NULL_TREE,
2437
                           get_interface_method_index (method, interface));
2438
      interface = build_class_ref (interface);
2439
    }
2440
 
2441
  return build_call_nary (ptr_type_node,
2442
                          build_address_of (soft_lookupinterfacemethod_node),
2443
                          3, dtable, interface, idx);
2444
}
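
/* Illustrative note, not part of the original source: in the
   non-indirect-dispatch case the call built above resolves the target
   method at run time, roughly

       soft_lookupinterfacemethod (obj->vtable->class,
                                   &Interface.class, method_index)

   where `soft_lookupinterfacemethod' stands for the runtime routine
   behind soft_lookupinterfacemethod_node.  With -findirect-dispatch the
   interface class and the index are instead read from output_class's
   itable.  */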
2445
 
2446
/* Expand one of the invoke_* opcodes.
2447
   OPCODE is the specific opcode.
2448
   METHOD_REF_INDEX is an index into the constant pool.
2449
   NARGS is the number of arguments, or -1 if not specified. */
2450
 
2451
static void
2452
expand_invoke (int opcode, int method_ref_index, int nargs ATTRIBUTE_UNUSED)
2453
{
2454
  tree method_signature
2455
    = COMPONENT_REF_SIGNATURE(&current_jcf->cpool, method_ref_index);
2456
  tree method_name = COMPONENT_REF_NAME (&current_jcf->cpool,
2457
                                         method_ref_index);
2458
  tree self_type
2459
    = get_class_constant (current_jcf,
2460
                          COMPONENT_REF_CLASS_INDEX(&current_jcf->cpool,
2461
                          method_ref_index));
2462
  const char *const self_name
2463
    = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (self_type)));
2464
  tree call, func, method, arg_list, method_type;
2465
  tree check = NULL_TREE;
2466
 
2467
  tree special = NULL_TREE;
2468
 
2469
  if (! CLASS_LOADED_P (self_type))
2470
    {
2471
      load_class (self_type, 1);
2472
      safe_layout_class (self_type);
2473
      if (TREE_CODE (TYPE_SIZE (self_type)) == ERROR_MARK)
2474
        fatal_error ("failed to find class '%s'", self_name);
2475
    }
2476
  layout_class_methods (self_type);
2477
 
2478
  if (ID_INIT_P (method_name))
2479
    method = lookup_java_constructor (self_type, method_signature);
2480
  else
2481
    method = lookup_java_method (self_type, method_name, method_signature);
2482
 
2483
  /* We've found a method in a class other than the one in which it
2484
     was wanted.  This can happen if, for instance, we're trying to
2485
     compile invokespecial super.equals().
2486
     FIXME: This is a kludge.  Rather than nullifying the result, we
2487
     should change lookup_java_method() so that it doesn't search the
2488
     superclass chain when we're BC-compiling.  */
2489
  if (! flag_verify_invocations
2490
      && method
2491
      && ! TYPE_ARRAY_P (self_type)
2492
      && self_type != DECL_CONTEXT (method))
2493
    method = NULL_TREE;
2494
 
2495
  /* We've found a method in an interface, but this isn't an interface
2496
     call.  */
2497
  if (opcode != OPCODE_invokeinterface
2498
      && method
2499
      && (CLASS_INTERFACE (TYPE_NAME (DECL_CONTEXT (method)))))
2500
    method = NULL_TREE;
2501
 
2502
  /* We've found a non-interface method but we are making an
2503
     interface call.  This can happen if the interface overrides a
2504
     method in Object.  */
2505
  if (! flag_verify_invocations
2506
      && opcode == OPCODE_invokeinterface
2507
      && method
2508
      && ! CLASS_INTERFACE (TYPE_NAME (DECL_CONTEXT (method))))
2509
    method = NULL_TREE;
2510
 
2511
  if (method == NULL_TREE)
2512
    {
2513
      if (flag_verify_invocations || ! flag_indirect_dispatch)
2514
        {
2515
          error ("class '%s' has no method named '%s' matching signature '%s'",
2516
                 self_name,
2517
                 IDENTIFIER_POINTER (method_name),
2518
                 IDENTIFIER_POINTER (method_signature));
2519
        }
2520
      else
2521
        {
2522
          int flags = ACC_PUBLIC;
2523
          if (opcode == OPCODE_invokestatic)
2524
            flags |= ACC_STATIC;
2525
          if (opcode == OPCODE_invokeinterface)
2526
            {
2527
              flags |= ACC_INTERFACE | ACC_ABSTRACT;
2528
              CLASS_INTERFACE (TYPE_NAME (self_type)) = 1;
2529
            }
2530
          method = add_method (self_type, flags, method_name,
2531
                               method_signature);
2532
          DECL_ARTIFICIAL (method) = 1;
2533
          METHOD_DUMMY (method) = 1;
2534
          layout_class_method (self_type, NULL,
2535
                               method, NULL);
2536
        }
2537
    }
2538
 
2539
  /* invokestatic requires a static, non-abstract method; the other
     invoke opcodes require a non-static method.  */
2540
  if (method != NULL_TREE)
2541
    {
2542
      if (opcode == OPCODE_invokestatic)
2543
        {
2544
          if (!METHOD_STATIC (method))
2545
            {
2546
              error ("invokestatic on non static method");
2547
              method = NULL_TREE;
2548
            }
2549
          else if (METHOD_ABSTRACT (method))
2550
            {
2551
              error ("invokestatic on abstract method");
2552
              method = NULL_TREE;
2553
            }
2554
        }
2555
      else
2556
        {
2557
          if (METHOD_STATIC (method))
2558
            {
2559
              error ("invoke[non-static] on static method");
2560
              method = NULL_TREE;
2561
            }
2562
        }
2563
    }
2564
 
2565
  if (method == NULL_TREE)
2566
    {
2567
      /* If we got here, we emitted an error message above.  So we
2568
         just pop the arguments, push a properly-typed zero, and
2569
         continue.  */
2570
      method_type = get_type_from_signature (method_signature);
2571
      pop_arguments (TYPE_ARG_TYPES (method_type));
2572
      if (opcode != OPCODE_invokestatic)
2573
        pop_type (self_type);
2574
      method_type = promote_type (TREE_TYPE (method_type));
2575
      push_value (convert (method_type, integer_zero_node));
2576
      return;
2577
    }
2578
 
2579
  method_type = TREE_TYPE (method);
2580
  arg_list = pop_arguments (TYPE_ARG_TYPES (method_type));
2581
  flush_quick_stack ();
2582
 
2583
  maybe_rewrite_invocation (&method, &arg_list, &method_signature,
2584
                            &special);
2585
 
2586
  func = NULL_TREE;
2587
  if (opcode == OPCODE_invokestatic)
2588
    func = build_known_method_ref (method, method_type, self_type,
2589
                                   method_signature, arg_list, special);
2590
  else if (opcode == OPCODE_invokespecial
2591
           || (opcode == OPCODE_invokevirtual
2592
               && (METHOD_PRIVATE (method)
2593
                   || METHOD_FINAL (method)
2594
                   || CLASS_FINAL (TYPE_NAME (self_type)))))
2595
    {
2596
      /* If the object for the method call is null, we throw an
2597
         exception.  We don't do this if the object is the current
2598
         method's `this'.  In other cases we just rely on an
2599
         optimization pass to eliminate redundant checks.  FIXME:
2600
         Unfortunately there doesn't seem to be a way to determine
2601
         what the current method is right now.
2602
         We do omit the check if we're calling <init>.  */
2603
      /* We use a SAVE_EXPR here to make sure we only evaluate
2604
         the new `self' expression once.  */
2605
      tree save_arg = save_expr (TREE_VALUE (arg_list));
2606
      TREE_VALUE (arg_list) = save_arg;
2607
      check = java_check_reference (save_arg, ! DECL_INIT_P (method));
2608
      func = build_known_method_ref (method, method_type, self_type,
2609
                                     method_signature, arg_list, special);
2610
    }
2611
  else
2612
    {
2613
      tree dtable = invoke_build_dtable (opcode == OPCODE_invokeinterface,
2614
                                         arg_list);
2615
      if (opcode == OPCODE_invokevirtual)
2616
        func = build_invokevirtual (dtable, method, special);
2617
      else
2618
        func = build_invokeinterface (dtable, method);
2619
    }
2620
 
2621
  if (TREE_CODE (func) == ADDR_EXPR)
2622
    TREE_TYPE (func) = build_pointer_type (method_type);
2623
  else
2624
    func = build1 (NOP_EXPR, build_pointer_type (method_type), func);
2625
 
2626
  call = build_call_list (TREE_TYPE (method_type), func, arg_list);
2627
  TREE_SIDE_EFFECTS (call) = 1;
2628
  call = check_for_builtin (method, call);
2629
 
2630
  if (check != NULL_TREE)
2631
    {
2632
      call = build2 (COMPOUND_EXPR, TREE_TYPE (call), check, call);
2633
      TREE_SIDE_EFFECTS (call) = 1;
2634
    }
2635
 
2636
  if (TREE_CODE (TREE_TYPE (method_type)) == VOID_TYPE)
2637
    java_add_stmt (call);
2638
  else
2639
    {
2640
      push_value (call);
2641
      flush_quick_stack ();
2642
    }
2643
}
2644
 
2645
/* Create a stub which will be put into the vtable but which will call
2646
   a JNI function.  */
2647
 
2648
tree
2649
build_jni_stub (tree method)
2650
{
2651
  tree jnifunc, call, args, body, method_sig, arg_types;
2652
  tree jniarg0, jniarg1, jniarg2, jniarg3;
2653
  tree jni_func_type, tem;
2654
  tree env_var, res_var = NULL_TREE, block;
2655
  tree method_args;
2656
  tree meth_var;
2657
  tree bind;
2658
 
2659
  int args_size = 0;
2660
 
2661
  tree klass = DECL_CONTEXT (method);
2662
  klass = build_class_ref (klass);
2663
 
2664
  gcc_assert (METHOD_NATIVE (method) && flag_jni);
2665
 
2666
  DECL_ARTIFICIAL (method) = 1;
2667
  DECL_EXTERNAL (method) = 0;
2668
 
2669
  env_var = build_decl (input_location,
2670
                        VAR_DECL, get_identifier ("env"), ptr_type_node);
2671
  DECL_CONTEXT (env_var) = method;
2672
 
2673
  if (TREE_TYPE (TREE_TYPE (method)) != void_type_node)
2674
    {
2675
      res_var = build_decl (input_location, VAR_DECL, get_identifier ("res"),
2676
                            TREE_TYPE (TREE_TYPE (method)));
2677
      DECL_CONTEXT (res_var) = method;
2678
      TREE_CHAIN (env_var) = res_var;
2679
    }
2680
 
2681
  method_args = DECL_ARGUMENTS (method);
2682
  block = build_block (env_var, NULL_TREE, method_args, NULL_TREE);
2683
  TREE_SIDE_EFFECTS (block) = 1;
2684
  TREE_TYPE (block) = TREE_TYPE (TREE_TYPE (method));
2685
 
2686
  /* Compute the local `env' by calling _Jv_GetJNIEnvNewFrame.  */
2687
  body = build2 (MODIFY_EXPR, ptr_type_node, env_var,
2688
                 build_call_nary (ptr_type_node,
2689
                                  build_address_of (soft_getjnienvnewframe_node),
2690
                                  1, klass));
2691
 
2692
  /* All the arguments to this method become arguments to the
2693
     underlying JNI function.  If we had to wrap object arguments in a
2694
     special way, we would do that here.  */
2695
  args = NULL_TREE;
2696
  for (tem = method_args; tem != NULL_TREE; tem = TREE_CHAIN (tem))
2697
    {
2698
      int arg_bits = TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (tem)));
2699
#ifdef PARM_BOUNDARY
2700
      arg_bits = (((arg_bits + PARM_BOUNDARY - 1) / PARM_BOUNDARY)
2701
                  * PARM_BOUNDARY);
2702
#endif
2703
      args_size += (arg_bits / BITS_PER_UNIT);
2704
 
2705
      args = tree_cons (NULL_TREE, tem, args);
2706
    }
2707
  args = nreverse (args);
2708
  arg_types = TYPE_ARG_TYPES (TREE_TYPE (method));
2709
 
2710
  /* For a static method the second argument is the class.  For a
2711
     non-static method the second argument is `this'; that is already
2712
     available in the argument list.  */
2713
  if (METHOD_STATIC (method))
2714
    {
2715
      args_size += int_size_in_bytes (TREE_TYPE (klass));
2716
      args = tree_cons (NULL_TREE, klass, args);
2717
      arg_types = tree_cons (NULL_TREE, object_ptr_type_node, arg_types);
2718
    }
2719
 
2720
  /* The JNIEnv structure is the first argument to the JNI function.  */
2721
  args_size += int_size_in_bytes (TREE_TYPE (env_var));
2722
  args = tree_cons (NULL_TREE, env_var, args);
2723
  arg_types = tree_cons (NULL_TREE, ptr_type_node, arg_types);
2724
 
2725
  /* We call _Jv_LookupJNIMethod to find the actual underlying
2726
     function pointer.  _Jv_LookupJNIMethod will throw the appropriate
2727
     exception if this function is not found at runtime.  */
2728
  method_sig = build_java_signature (TREE_TYPE (method));
2729
  jniarg0 = klass;
2730
  jniarg1 = build_utf8_ref (DECL_NAME (method));
2731
  jniarg2 = build_utf8_ref (unmangle_classname
2732
                            (IDENTIFIER_POINTER (method_sig),
2733
                             IDENTIFIER_LENGTH (method_sig)));
2734
  jniarg3 = build_int_cst (NULL_TREE, args_size);
2735
 
2736
  tem = build_function_type (TREE_TYPE (TREE_TYPE (method)), arg_types);
2737
 
2738
#ifdef MODIFY_JNI_METHOD_CALL
2739
  tem = MODIFY_JNI_METHOD_CALL (tem);
2740
#endif
2741
 
2742
  jni_func_type = build_pointer_type (tem);
2743
 
2744
  /* Use the actual function type, rather than a generic pointer type,
2745
     such that this decl keeps the actual pointer type from being
2746
     garbage-collected.  If it is, we end up using canonical types
2747
     with different uids for equivalent function types, and this in
2748
     turn causes utf8 identifiers and output order to vary.  */
2749
  meth_var = build_decl (input_location,
2750
                         VAR_DECL, get_identifier ("meth"), jni_func_type);
2751
  TREE_STATIC (meth_var) = 1;
2752
  TREE_PUBLIC (meth_var) = 0;
2753
  DECL_EXTERNAL (meth_var) = 0;
2754
  DECL_CONTEXT (meth_var) = method;
2755
  DECL_ARTIFICIAL (meth_var) = 1;
2756
  DECL_INITIAL (meth_var) = null_pointer_node;
2757
  TREE_USED (meth_var) = 1;
2758
  chainon (env_var, meth_var);
2759
  build_result_decl (method);
2760
 
2761
  jnifunc = build3 (COND_EXPR, jni_func_type,
2762
                    build2 (NE_EXPR, boolean_type_node,
2763
                            meth_var, build_int_cst (TREE_TYPE (meth_var), 0)),
2764
                    meth_var,
2765
                    build2 (MODIFY_EXPR, jni_func_type, meth_var,
2766
                            build1
2767
                            (NOP_EXPR, jni_func_type,
2768
                             build_call_nary (ptr_type_node,
2769
                                              build_address_of
2770
                                              (soft_lookupjnimethod_node),
2771
                                              4,
2772
                                              jniarg0, jniarg1,
2773
                                              jniarg2, jniarg3))));
2774
 
2775
  /* Now we make the actual JNI call via the resulting function
2776
     pointer.    */
2777
  call = build_call_list (TREE_TYPE (TREE_TYPE (method)),
2778
                          jnifunc, args);
2779
 
2780
  /* If the JNI call returned a result, capture it here.  If we had to
2781
     unwrap JNI object results, we would do that here.  */
2782
  if (res_var != NULL_TREE)
2783
    {
2784
      /* If the call returns an object, it may return a JNI weak
2785
         reference, in which case we must unwrap it.  */
2786
      if (! JPRIMITIVE_TYPE_P (TREE_TYPE (TREE_TYPE (method))))
2787
        call = build_call_nary (TREE_TYPE (TREE_TYPE (method)),
2788
                                build_address_of (soft_unwrapjni_node),
2789
                                1, call);
2790
      call = build2 (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (method)),
2791
                     res_var, call);
2792
    }
2793
 
2794
  TREE_SIDE_EFFECTS (call) = 1;
2795
 
2796
  body = build2 (COMPOUND_EXPR, void_type_node, body, call);
2797
  TREE_SIDE_EFFECTS (body) = 1;
2798
 
2799
  /* Now free the environment we allocated.  */
2800
  call = build_call_nary (ptr_type_node,
2801
                          build_address_of (soft_jnipopsystemframe_node),
2802
                          1, env_var);
2803
  TREE_SIDE_EFFECTS (call) = 1;
2804
  body = build2 (COMPOUND_EXPR, void_type_node, body, call);
2805
  TREE_SIDE_EFFECTS (body) = 1;
2806
 
2807
  /* Finally, do the return.  */
2808
  if (res_var != NULL_TREE)
2809
    {
2810
      tree drt;
2811
      gcc_assert (DECL_RESULT (method));
2812
      /* Make sure we copy the result variable to the actual
2813
         result.  We use the type of the DECL_RESULT because it
2814
         might be different from the return type of the function:
2815
         it might be promoted.  */
2816
      drt = TREE_TYPE (DECL_RESULT (method));
2817
      if (drt != TREE_TYPE (res_var))
2818
        res_var = build1 (CONVERT_EXPR, drt, res_var);
2819
      res_var = build2 (MODIFY_EXPR, drt, DECL_RESULT (method), res_var);
2820
      TREE_SIDE_EFFECTS (res_var) = 1;
2821
    }
2822
 
2823
  body = build2 (COMPOUND_EXPR, void_type_node, body,
2824
                 build1 (RETURN_EXPR, void_type_node, res_var));
2825
  TREE_SIDE_EFFECTS (body) = 1;
2826
 
2827
  /* Prepend class initialization for static methods reachable from
2828
     other classes.  */
2829
  if (METHOD_STATIC (method)
2830
      && (! METHOD_PRIVATE (method)
2831
          || INNER_CLASS_P (DECL_CONTEXT (method))))
2832
    {
2833
      tree init = build_call_expr (soft_initclass_node, 1,
2834
                                   klass);
2835
      body = build2 (COMPOUND_EXPR, void_type_node, init, body);
2836
      TREE_SIDE_EFFECTS (body) = 1;
2837
    }
2838
 
2839
  bind = build3 (BIND_EXPR, void_type_node, BLOCK_VARS (block),
2840
                 body, block);
2841
  return bind;
2842
}
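
/* Illustrative sketch, not part of the original source: for a method
   `native int f (int x)' in class Foo the stub assembled above behaves
   roughly like

       int Foo_f (Foo *this, int x)
       {
         void *env = get_jni_env_new_frame (&Foo.class);
         if (meth == NULL)
           meth = lookup_jni_method (&Foo.class, "f", "(I)I", args_size);
         int res = (*meth) (env, this, x);
         pop_system_frame (env);
         return res;
       }

   where the lower-case helper names are placeholders for the runtime
   entry points behind soft_getjnienvnewframe_node,
   soft_lookupjnimethod_node and soft_jnipopsystemframe_node, and `meth'
   is the cached function pointer declared above.  Static methods pass
   the class object instead of `this', and non-primitive results are
   additionally passed through soft_unwrapjni_node to strip JNI weak
   references.  */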
2843
 
2844
 
2845
/* Given lvalue EXP, return a volatile expression that references the
2846
   same object.  */
2847
 
2848
tree
2849
java_modify_addr_for_volatile (tree exp)
2850
{
2851
  tree exp_type = TREE_TYPE (exp);
2852
  tree v_type
2853
    = build_qualified_type (exp_type,
2854
                            TYPE_QUALS (exp_type) | TYPE_QUAL_VOLATILE);
2855
  tree addr = build_fold_addr_expr (exp);
2856
  v_type = build_pointer_type (v_type);
2857
  addr = fold_convert (v_type, addr);
2858
  exp = build_fold_indirect_ref (addr);
2859
  return exp;
2860
}
2861
 
2862
 
2863
/* Expand an operation to extract from or store into a field.
2864
   IS_STATIC is 1 iff the field is static.
2865
   IS_PUTTING is 1 for putting into a field;  0 for getting from the field.
2866
   FIELD_REF_INDEX is an index into the constant pool.  */
2867
 
2868
static void
2869
expand_java_field_op (int is_static, int is_putting, int field_ref_index)
2870
{
2871
  tree self_type
2872
    = get_class_constant (current_jcf,
2873
                          COMPONENT_REF_CLASS_INDEX (&current_jcf->cpool,
2874
                          field_ref_index));
2875
  const char *self_name
2876
    = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (self_type)));
2877
  tree field_name = COMPONENT_REF_NAME (&current_jcf->cpool, field_ref_index);
2878
  tree field_signature = COMPONENT_REF_SIGNATURE (&current_jcf->cpool,
2879
                                                  field_ref_index);
2880
  tree field_type = get_type_from_signature (field_signature);
2881
  tree new_value = is_putting ? pop_value (field_type) : NULL_TREE;
2882
  tree field_ref;
2883
  int is_error = 0;
2884
  tree original_self_type = self_type;
2885
  tree field_decl;
2886
  tree modify_expr;
2887
 
2888
  if (! CLASS_LOADED_P (self_type))
2889
    load_class (self_type, 1);
2890
  field_decl = lookup_field (&self_type, field_name);
2891
  if (field_decl == error_mark_node)
2892
    {
2893
      is_error = 1;
2894
    }
2895
  else if (field_decl == NULL_TREE)
2896
    {
2897
      if (! flag_verify_invocations)
2898
        {
2899
          int flags = ACC_PUBLIC;
2900
          if (is_static)
2901
            flags |= ACC_STATIC;
2902
          self_type = original_self_type;
2903
          field_decl = add_field (original_self_type, field_name,
2904
                                  field_type, flags);
2905
          DECL_ARTIFICIAL (field_decl) = 1;
2906
          DECL_IGNORED_P (field_decl) = 1;
2907
#if 0
2908
          /* FIXME: We should be pessimistic about volatility.  We
2909
             don't know one way or another, but this is safe.
2910
             However, doing this has bad effects on code quality.  We
2911
             need to look at better ways to do this.  */
2912
          TREE_THIS_VOLATILE (field_decl) = 1;
2913
#endif
2914
        }
2915
      else
2916
        {
2917
          error ("missing field '%s' in '%s'",
2918
                 IDENTIFIER_POINTER (field_name), self_name);
2919
          is_error = 1;
2920
      }
2921
    }
2922
  else if (build_java_signature (TREE_TYPE (field_decl)) != field_signature)
2923
    {
2924
      error ("mismatching signature for field '%s' in '%s'",
2925
             IDENTIFIER_POINTER (field_name), self_name);
2926
      is_error = 1;
2927
    }
2928
  field_ref = is_static ? NULL_TREE : pop_value (self_type);
2929
  if (is_error)
2930
    {
2931
      if (! is_putting)
2932
        push_value (convert (field_type, integer_zero_node));
2933
      flush_quick_stack ();
2934
      return;
2935
    }
2936
 
2937
  field_ref = build_field_ref (field_ref, self_type, field_name);
2938
  if (is_static
2939
      && ! flag_indirect_dispatch)
2940
    {
2941
      tree context = DECL_CONTEXT (field_ref);
2942
      if (context != self_type && CLASS_INTERFACE (TYPE_NAME (context)))
2943
        field_ref = build_class_init (context, field_ref);
2944
      else
2945
        field_ref = build_class_init (self_type, field_ref);
2946
    }
2947
  if (is_putting)
2948
    {
2949
      flush_quick_stack ();
2950
      if (FIELD_FINAL (field_decl))
2951
        {
2952
          if (DECL_CONTEXT (field_decl) != current_class)
2953
            error ("assignment to final field %q+D not in field's class",
2954
                   field_decl);
2955
          /* We used to check for assignments to final fields not
2956
             occurring in the class initializer or in a constructor
2957
             here.  However, this constraint doesn't seem to be
2958
             enforced by the JVM.  */
2959
        }
2960
 
2961
      if (TREE_THIS_VOLATILE (field_decl))
2962
        field_ref = java_modify_addr_for_volatile (field_ref);
2963
 
2964
      modify_expr = build2 (MODIFY_EXPR, TREE_TYPE (field_ref),
2965
                            field_ref, new_value);
2966
 
2967
      if (TREE_THIS_VOLATILE (field_decl))
2968
        java_add_stmt
2969
          (build_call_expr (built_in_decls[BUILT_IN_SYNCHRONIZE], 0));
2970
 
2971
      java_add_stmt (modify_expr);
2972
    }
2973
  else
2974
    {
2975
      tree temp = build_decl (input_location,
2976
                              VAR_DECL, NULL_TREE, TREE_TYPE (field_ref));
2977
      java_add_local_var (temp);
2978
 
2979
      if (TREE_THIS_VOLATILE (field_decl))
2980
        field_ref = java_modify_addr_for_volatile (field_ref);
2981
 
2982
      modify_expr
2983
        = build2 (MODIFY_EXPR, TREE_TYPE (field_ref), temp, field_ref);
2984
      java_add_stmt (modify_expr);
2985
 
2986
      if (TREE_THIS_VOLATILE (field_decl))
2987
        java_add_stmt
2988
          (build_call_expr (built_in_decls[BUILT_IN_SYNCHRONIZE], 0));
2989
 
2990
      push_value (temp);
2991
    }
2992
  TREE_THIS_VOLATILE (field_ref) = TREE_THIS_VOLATILE (field_decl);
2993
}
2994
 
2995
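/* Restore stack_pointer and the type_map from the state saved in
   type_states for bytecode position PC.  */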
static void
2996
load_type_state (int pc)
2997
{
2998
  int i;
2999
  tree vec = VEC_index (tree, type_states, pc);
3000
  int cur_length = TREE_VEC_LENGTH (vec);
3001
  stack_pointer = cur_length - DECL_MAX_LOCALS(current_function_decl);
3002
  for (i = 0; i < cur_length; i++)
3003
    type_map [i] = TREE_VEC_ELT (vec, i);
3004
}
3005
 
3006
/* Go over METHOD's bytecode and note instruction starts in
3007
   instruction_bits[].  */
3008
 
3009
void
3010
note_instructions (JCF *jcf, tree method)
3011
{
3012
  int PC;
3013
  unsigned char* byte_ops;
3014
  long length = DECL_CODE_LENGTH (method);
3015
 
3016
  int saw_index;
3017
  jint INT_temp;
3018
 
3019
#undef RET /* Defined by config/i386/i386.h */
3020
#undef PTR
3021
#define BCODE byte_ops
3022
#define BYTE_type_node byte_type_node
3023
#define SHORT_type_node short_type_node
3024
#define INT_type_node int_type_node
3025
#define LONG_type_node long_type_node
3026
#define CHAR_type_node char_type_node
3027
#define PTR_type_node ptr_type_node
3028
#define FLOAT_type_node float_type_node
3029
#define DOUBLE_type_node double_type_node
3030
#define VOID_type_node void_type_node
3031
#define CONST_INDEX_1 (saw_index = 1, IMMEDIATE_u1)
3032
#define CONST_INDEX_2 (saw_index = 1, IMMEDIATE_u2)
3033
#define VAR_INDEX_1 (saw_index = 1, IMMEDIATE_u1)
3034
#define VAR_INDEX_2 (saw_index = 1, IMMEDIATE_u2)
3035
 
3036
#define CHECK_PC_IN_RANGE(PC) ((void)1) /* Already handled by verifier. */
3037
 
3038
  JCF_SEEK (jcf, DECL_CODE_OFFSET (method));
3039
  byte_ops = jcf->read_ptr;
3040
  instruction_bits = XRESIZEVAR (char, instruction_bits, length + 1);
3041
  memset (instruction_bits, 0, length + 1);
3042
  type_states = VEC_alloc (tree, gc, length + 1);
3043
  VEC_safe_grow_cleared (tree, gc, type_states, length + 1);
3044
 
3045
  /* This pass figures out which PCs can be the targets of jumps. */
3046
  for (PC = 0; PC < length;)
3047
    {
3048
      int oldpc = PC; /* PC at instruction start. */
3049
      instruction_bits [PC] |=  BCODE_INSTRUCTION_START;
3050
      switch (byte_ops[PC++])
3051
        {
3052
#define JAVAOP(OPNAME, OPCODE, OPKIND, OPERAND_TYPE, OPERAND_VALUE) \
3053
        case OPCODE: \
3054
          PRE_##OPKIND(OPERAND_TYPE, OPERAND_VALUE); \
3055
          break;
3056
 
3057
#define NOTE_LABEL(PC) note_label(oldpc, PC)
3058
 
3059
#define PRE_PUSHC(OPERAND_TYPE, OPERAND_VALUE) (void)(OPERAND_VALUE);
3060
#define PRE_LOAD(OPERAND_TYPE, OPERAND_VALUE) (void)(OPERAND_VALUE);
3061
#define PRE_STORE(OPERAND_TYPE, OPERAND_VALUE) (void)(OPERAND_VALUE);
3062
#define PRE_STACK(OPERAND_TYPE, OPERAND_VALUE) /* nothing */
3063
#define PRE_UNOP(OPERAND_TYPE, OPERAND_VALUE) /* nothing */
3064
#define PRE_BINOP(OPERAND_TYPE, OPERAND_VALUE) /* nothing */
3065
#define PRE_CONVERT(OPERAND_TYPE, OPERAND_VALUE) /* nothing */
3066
#define PRE_CONVERT2(OPERAND_TYPE, OPERAND_VALUE) /* nothing */
3067
 
3068
#define PRE_SPECIAL(OPERAND_TYPE, INSTRUCTION) \
3069
  PRE_SPECIAL_##INSTRUCTION(OPERAND_TYPE)
3070
#define PRE_SPECIAL_IINC(OPERAND_TYPE) \
3071
  ((void) IMMEDIATE_u1, (void) IMMEDIATE_s1)
3072
#define PRE_SPECIAL_ENTER(IGNORE) /* nothing */
3073
#define PRE_SPECIAL_EXIT(IGNORE) /* nothing */
3074
#define PRE_SPECIAL_THROW(IGNORE) /* nothing */
3075
#define PRE_SPECIAL_BREAK(IGNORE) /* nothing */
3076
 
3077
/* two forms of wide instructions */
3078
#define PRE_SPECIAL_WIDE(IGNORE) \
3079
  { \
3080
    int modified_opcode = IMMEDIATE_u1; \
3081
    if (modified_opcode == OPCODE_iinc) \
3082
      { \
3083
        (void) IMMEDIATE_u2;    /* indexbyte1 and indexbyte2 */ \
3084
        (void) IMMEDIATE_s2;    /* constbyte1 and constbyte2 */ \
3085
      } \
3086
    else \
3087
      { \
3088
        (void) IMMEDIATE_u2;    /* indexbyte1 and indexbyte2 */ \
3089
      } \
3090
  }
3091
 
3092
#define PRE_IMPL(IGNORE1, IGNORE2) /* nothing */
3093
 
3094
#define PRE_MONITOR(OPERAND_TYPE, OPERAND_VALUE) /* nothing */
3095
 
3096
#define PRE_RETURN(OPERAND_TYPE, OPERAND_VALUE) /* nothing */
3097
#define PRE_ARRAY(OPERAND_TYPE, SUBOP) \
3098
          PRE_ARRAY_##SUBOP(OPERAND_TYPE)
3099
#define PRE_ARRAY_LOAD(TYPE) /* nothing */
3100
#define PRE_ARRAY_STORE(TYPE) /* nothing */
3101
#define PRE_ARRAY_LENGTH(TYPE) /* nothing */
3102
#define PRE_ARRAY_NEW(TYPE) PRE_ARRAY_NEW_##TYPE
3103
#define PRE_ARRAY_NEW_NUM ((void) IMMEDIATE_u1)
3104
#define PRE_ARRAY_NEW_PTR ((void) IMMEDIATE_u2)
3105
#define PRE_ARRAY_NEW_MULTI ((void) IMMEDIATE_u2, (void) IMMEDIATE_u1)
3106
 
3107
#define PRE_TEST(OPERAND_TYPE, OPERAND_VALUE) NOTE_LABEL (oldpc+IMMEDIATE_s2)
3108
#define PRE_COND(OPERAND_TYPE, OPERAND_VALUE) NOTE_LABEL (oldpc+IMMEDIATE_s2)
3109
#define PRE_BRANCH(OPERAND_TYPE, OPERAND_VALUE) \
3110
  saw_index = 0;  INT_temp = (OPERAND_VALUE); \
3111
  if (!saw_index)  NOTE_LABEL(oldpc + INT_temp);
3112
#define PRE_JSR(OPERAND_TYPE, OPERAND_VALUE) \
3113
  saw_index = 0;  INT_temp = (OPERAND_VALUE); \
3114
  NOTE_LABEL (PC); \
3115
  if (!saw_index)  NOTE_LABEL(oldpc + INT_temp);
3116
 
3117
#define PRE_RET(OPERAND_TYPE, OPERAND_VALUE)  (void)(OPERAND_VALUE)
3118
 
3119
#define PRE_SWITCH(OPERAND_TYPE, TABLE_OR_LOOKUP) \
3120
  PC = (PC + 3) / 4 * 4; PRE_##TABLE_OR_LOOKUP##_SWITCH
3121
 
3122
#define PRE_LOOKUP_SWITCH                                               \
3123
  { jint default_offset = IMMEDIATE_s4;  jint npairs = IMMEDIATE_s4;    \
3124
    NOTE_LABEL (default_offset+oldpc);                                  \
3125
    if (npairs >= 0)                                                     \
3126
      while (--npairs >= 0) {                                            \
3127
       jint match ATTRIBUTE_UNUSED = IMMEDIATE_s4;                      \
3128
       jint offset = IMMEDIATE_s4;                                      \
3129
       NOTE_LABEL (offset+oldpc); }                                     \
3130
  }
3131
 
3132
#define PRE_TABLE_SWITCH                                \
3133
  { jint default_offset = IMMEDIATE_s4;                 \
3134
    jint low = IMMEDIATE_s4; jint high = IMMEDIATE_s4;  \
3135
    NOTE_LABEL (default_offset+oldpc);                  \
3136
    if (low <= high)                                    \
3137
     while (low++ <= high) {                            \
3138
       jint offset = IMMEDIATE_s4;                      \
3139
       NOTE_LABEL (offset+oldpc); }                     \
3140
  }
3141
 
3142
#define PRE_FIELD(MAYBE_STATIC, PUT_OR_GET) (void)(IMMEDIATE_u2);
3143
#define PRE_OBJECT(MAYBE_STATIC, PUT_OR_GET) (void)(IMMEDIATE_u2);
3144
#define PRE_INVOKE(MAYBE_STATIC, IS_INTERFACE) \
3145
  (void)(IMMEDIATE_u2); \
3146
  PC += 2 * IS_INTERFACE /* for invokeinterface */;
3147
 
3148
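/* Expanding javaop.def with the JAVAOP macro above generates one case
   per opcode, each running its PRE_* handler to skip immediate operands
   and note branch targets.  */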
#include "javaop.def"
3149
#undef JAVAOP
3150
        }
3151
    } /* for */
3152
}
3153
 
3154
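/* Translate the bytecode of METHOD (read via JCF) into trees: emit a
   label at every branch target, turn unverified (unreachable) bytecodes
   into nops, and track line number information along the way.  */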
void
3155
expand_byte_code (JCF *jcf, tree method)
3156
{
3157
  int PC;
3158
  int i;
3159
  const unsigned char *linenumber_pointer;
3160
  int dead_code_index = -1;
3161
  unsigned char* byte_ops;
3162
  long length = DECL_CODE_LENGTH (method);
3163
  location_t max_location = input_location;
3164
 
3165
  stack_pointer = 0;
3166
  JCF_SEEK (jcf, DECL_CODE_OFFSET (method));
3167
  byte_ops = jcf->read_ptr;
3168
 
3169
  /* We make an initial pass of the line number table, to note
3170
     which instructions have associated line number entries. */
3171
  linenumber_pointer = linenumber_table;
3172
  for (i = 0; i < linenumber_count; i++)
3173
    {
3174
      int pc = GET_u2 (linenumber_pointer);
3175
      linenumber_pointer += 4;
3176
      if (pc >= length)
3177
        warning (0, "invalid PC in line number table");
3178
      else
3179
        {
3180
          if ((instruction_bits[pc] & BCODE_HAS_LINENUMBER) != 0)
3181
            instruction_bits[pc] |= BCODE_HAS_MULTI_LINENUMBERS;
3182
          instruction_bits[pc] |= BCODE_HAS_LINENUMBER;
3183
        }
3184
    }
3185
 
3186
  if (! verify_jvm_instructions_new (jcf, byte_ops, length))
3187
    return;
3188
 
3189
  promote_arguments ();
3190
  cache_this_class_ref (method);
3191
  cache_cpool_data_ref ();
3192
 
3193
  /* Translate bytecodes.  */
3194
  linenumber_pointer = linenumber_table;
3195
  for (PC = 0; PC < length;)
3196
    {
3197
      if ((instruction_bits [PC] & BCODE_TARGET) != 0 || PC == 0)
3198
        {
3199
          tree label = lookup_label (PC);
3200
          flush_quick_stack ();
3201
          if ((instruction_bits [PC] & BCODE_TARGET) != 0)
3202
            java_add_stmt (build1 (LABEL_EXPR, void_type_node, label));
3203
          if ((instruction_bits[PC] & BCODE_VERIFIED) != 0)
3204
            load_type_state (PC);
3205
        }
3206
 
3207
      if (! (instruction_bits [PC] & BCODE_VERIFIED))
3208
        {
3209
          if (dead_code_index == -1)
3210
            {
3211
              /* This is the start of a region of unreachable bytecodes.
3212
                 They still need to be processed in order for EH ranges
3213
                 to get handled correctly.  However, we can simply
3214
                 replace these bytecodes with nops.  */
3215
              dead_code_index = PC;
3216
            }
3217
 
3218
          /* Turn this bytecode into a nop.  */
3219
          byte_ops[PC] = 0x0;
3220
        }
3221
       else
3222
        {
3223
          if (dead_code_index != -1)
3224
            {
3225
              /* We've just reached the end of a region of dead code.  */
3226
              if (extra_warnings)
3227
                warning (0, "unreachable bytecode from %d to before %d",
3228
                         dead_code_index, PC);
3229
              dead_code_index = -1;
3230
            }
3231
        }
3232
 
3233
      /* Handle possible line number entry for this PC.
3234
 
3235
         This code handles out-of-order and multiple linenumbers per PC,
3236
         but is optimized for the case of line numbers increasing
3237
         monotonically with PC. */
3238
      if ((instruction_bits[PC] & BCODE_HAS_LINENUMBER) != 0)
3239
        {
3240
          if ((instruction_bits[PC] & BCODE_HAS_MULTI_LINENUMBERS) != 0
3241
              || GET_u2 (linenumber_pointer) != PC)
3242
            linenumber_pointer = linenumber_table;
3243
          while (linenumber_pointer < linenumber_table + linenumber_count * 4)
3244
            {
3245
              int pc = GET_u2 (linenumber_pointer);
3246
              linenumber_pointer += 4;
3247
              if (pc == PC)
3248
                {
3249
                  int line = GET_u2 (linenumber_pointer - 2);
3250
                  input_location = linemap_line_start (line_table, line, 1);
3251
                  if (input_location > max_location)
3252
                    max_location = input_location;
3253
                  if (!(instruction_bits[PC] & BCODE_HAS_MULTI_LINENUMBERS))
3254
                    break;
3255
                }
3256
            }
3257
        }
3258
      maybe_pushlevels (PC);
3259
      PC = process_jvm_instruction (PC, byte_ops, length);
3260
      maybe_poplevels (PC);
3261
    } /* for */
3262
 
3263
  uncache_this_class_ref (method);
3264
 
3265
  if (dead_code_index != -1)
3266
    {
3267
      /* We've just reached the end of a region of dead code.  */
3268
      if (extra_warnings)
3269
        warning (0, "unreachable bytecode from %d to the end of the method",
3270
                 dead_code_index);
3271
    }
3272
 
3273
  DECL_FUNCTION_LAST_LINE (method) = max_location;
3274
}
3275
 
3276
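/* Push the value of constant pool entry INDEX of JCF onto the quick
   stack, building references for String and Class constants.  */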
static void
3277
java_push_constant_from_pool (JCF *jcf, int index)
3278
{
3279
  tree c;
3280
  if (JPOOL_TAG (jcf, index) == CONSTANT_String)
3281
    {
3282
      tree name;
3283
      name = get_name_constant (jcf, JPOOL_USHORT1 (jcf, index));
3284
      index = alloc_name_constant (CONSTANT_String, name);
3285
      c = build_ref_from_constant_pool (index);
3286
      c = convert (promote_type (string_type_node), c);
3287
    }
3288
  else if (JPOOL_TAG (jcf, index) == CONSTANT_Class
3289
           || JPOOL_TAG (jcf, index) == CONSTANT_ResolvedClass)
3290
    {
3291
      tree record = get_class_constant (jcf, index);
3292
      c = build_class_ref (record);
3293
    }
3294
  else
3295
    c = get_constant (jcf, index);
3296
  push_value (c);
3297
}
3298
 
3299
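/* Translate the single bytecode instruction at PC in BYTE_OPS and
   return the PC of the instruction that follows it.  */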
int
3300
process_jvm_instruction (int PC, const unsigned char* byte_ops,
3301
                         long length ATTRIBUTE_UNUSED)
3302
{
3303
  const char *opname; /* Temporary ??? */
3304
  int oldpc = PC; /* PC at instruction start. */
3305
 
3306
  /* If the instruction is at the beginning of an exception handler,
3307
     replace the top of the stack with the thrown object reference.  */
3308
  if (instruction_bits [PC] & BCODE_EXCEPTION_TARGET)
3309
    {
3310
      /* Note that the verifier will not emit a type map at all for
3311
         dead exception handlers.  In this case we just ignore the
3312
         situation.  */
3313
      if ((instruction_bits[PC] & BCODE_VERIFIED) != 0)
3314
        {
3315
          tree type = pop_type (promote_type (throwable_type_node));
3316
          push_value (build_exception_object_ref (type));
3317
        }
3318
    }
3319
 
3320
  switch (byte_ops[PC++])
3321
    {
3322
#define JAVAOP(OPNAME, OPCODE, OPKIND, OPERAND_TYPE, OPERAND_VALUE) \
3323
    case OPCODE: \
3324
      opname = #OPNAME; \
3325
      OPKIND(OPERAND_TYPE, OPERAND_VALUE); \
3326
      break;
3327
 
3328
#define RET(OPERAND_TYPE, OPERAND_VALUE)                                \
3329
  {                                                                     \
3330
    int saw_index = 0;                                                   \
3331
    int index     = OPERAND_VALUE;                                      \
3332
    build_java_ret                                                      \
3333
      (find_local_variable (index, return_address_type_node, oldpc));   \
3334
  }
3335
 
3336
#define JSR(OPERAND_TYPE, OPERAND_VALUE) \
3337
  {                                                 \
3338
    /* OPERAND_VALUE may have side-effects on PC */ \
3339
    int opvalue = OPERAND_VALUE;                    \
3340
    build_java_jsr (oldpc + opvalue, PC);           \
3341
  }
3342
 
3343
/* Push a constant onto the stack. */
3344
#define PUSHC(OPERAND_TYPE, OPERAND_VALUE) \
3345
  { int saw_index = 0;  int ival = (OPERAND_VALUE); \
3346
    if (saw_index) java_push_constant_from_pool (current_jcf, ival); \
3347
    else expand_java_pushc (ival, OPERAND_TYPE##_type_node); }
3348
 
3349
/* internal macro added for use by the WIDE case */
3350
#define LOAD_INTERNAL(OPTYPE, OPVALUE) \
3351
  expand_load_internal (OPVALUE, type_map[OPVALUE], oldpc);
3352
 
3353
/* Push local variable onto the opcode stack. */
3354
#define LOAD(OPERAND_TYPE, OPERAND_VALUE) \
3355
  { \
3356
    /* have to do this since OPERAND_VALUE may have side-effects */ \
3357
    int opvalue = OPERAND_VALUE; \
3358
    LOAD_INTERNAL(OPERAND_TYPE##_type_node, opvalue); \
3359
  }
3360
 
3361
#define RETURN(OPERAND_TYPE, OPERAND_VALUE) \
3362
  expand_java_return (OPERAND_TYPE##_type_node)
3363
 
3364
#define REM_EXPR TRUNC_MOD_EXPR
3365
#define BINOP(OPERAND_TYPE, OPERAND_VALUE) \
3366
  expand_java_binop (OPERAND_TYPE##_type_node, OPERAND_VALUE##_EXPR)
3367
 
3368
#define FIELD(IS_STATIC, IS_PUT) \
3369
  expand_java_field_op (IS_STATIC, IS_PUT, IMMEDIATE_u2)
3370
 
3371
#define TEST(OPERAND_TYPE, CONDITION) \
3372
  expand_test (CONDITION##_EXPR, OPERAND_TYPE##_type_node, oldpc+IMMEDIATE_s2)
3373
 
3374
#define COND(OPERAND_TYPE, CONDITION) \
3375
  expand_cond (CONDITION##_EXPR, OPERAND_TYPE##_type_node, oldpc+IMMEDIATE_s2)
3376
 
3377
#define BRANCH(OPERAND_TYPE, OPERAND_VALUE) \
3378
  BRANCH_##OPERAND_TYPE (OPERAND_VALUE)
3379
 
3380
#define BRANCH_GOTO(OPERAND_VALUE) \
3381
  expand_java_goto (oldpc + OPERAND_VALUE)
3382
 
3383
#define BRANCH_CALL(OPERAND_VALUE) \
3384
  expand_java_call (oldpc + OPERAND_VALUE, oldpc)
3385
 
3386
#if 0
3387
#define BRANCH_RETURN(OPERAND_VALUE) \
3388
  { \
3389
    tree type = OPERAND_TYPE##_type_node; \
3390
    tree value = find_local_variable (OPERAND_VALUE, type, oldpc); \
3391
    expand_java_ret (value); \
3392
  }
3393
#endif
3394
 
3395
#define NOT_IMPL(OPERAND_TYPE, OPERAND_VALUE) \
3396
          fprintf (stderr, "%3d: %s ", oldpc, opname); \
3397
          fprintf (stderr, "(not implemented)\n")
3398
#define NOT_IMPL1(OPERAND_VALUE) \
3399
          fprintf (stderr, "%3d: %s ", oldpc, opname); \
3400
          fprintf (stderr, "(not implemented)\n")
3401
 
3402
#define BRANCH_RETURN(OPERAND_VALUE) NOT_IMPL1(OPERAND_VALUE)
3403
 
3404
#define STACK(SUBOP, COUNT) STACK_##SUBOP (COUNT)
3405
 
3406
#define STACK_POP(COUNT) java_stack_pop (COUNT)
3407
 
3408
#define STACK_SWAP(COUNT) java_stack_swap()
3409
 
3410
#define STACK_DUP(COUNT) java_stack_dup (COUNT, 0)
3411
#define STACK_DUPx1(COUNT) java_stack_dup (COUNT, 1)
3412
#define STACK_DUPx2(COUNT) java_stack_dup (COUNT, 2)
3413
 
3414
#define SWITCH(OPERAND_TYPE, TABLE_OR_LOOKUP) \
3415
  PC = (PC + 3) / 4 * 4; TABLE_OR_LOOKUP##_SWITCH
3416
 
3417
#define LOOKUP_SWITCH \
3418
  { jint default_offset = IMMEDIATE_s4;  jint npairs = IMMEDIATE_s4; \
3419
    tree selector = pop_value (INT_type_node); \
3420
    tree switch_expr = expand_java_switch (selector, oldpc + default_offset); \
3421
    while (--npairs >= 0) \
3422
      { \
3423
        jint match = IMMEDIATE_s4; jint offset = IMMEDIATE_s4; \
3424
        expand_java_add_case (switch_expr, match, oldpc + offset); \
3425
      } \
3426
  }
3427
 
3428
#define TABLE_SWITCH \
3429
  { jint default_offset = IMMEDIATE_s4; \
3430
    jint low = IMMEDIATE_s4; jint high = IMMEDIATE_s4; \
3431
    tree selector = pop_value (INT_type_node); \
3432
    tree switch_expr = expand_java_switch (selector, oldpc + default_offset); \
3433
    for (; low <= high; low++) \
3434
      { \
3435
        jint offset = IMMEDIATE_s4; \
3436
        expand_java_add_case (switch_expr, low, oldpc + offset); \
3437
      } \
3438
  }
3439
 
3440
#define INVOKE(MAYBE_STATIC, IS_INTERFACE) \
3441
  { int opcode = byte_ops[PC-1]; \
3442
    int method_ref_index = IMMEDIATE_u2; \
3443
    int nargs; \
3444
    if (IS_INTERFACE) { nargs = IMMEDIATE_u1;  (void) IMMEDIATE_u1; } \
3445
    else nargs = -1; \
3446
    expand_invoke (opcode, method_ref_index, nargs); \
3447
  }
3448
 
3449
/* Handle new, checkcast, instanceof */
3450
#define OBJECT(TYPE, OP) \
3451
  expand_java_##OP (get_class_constant (current_jcf, IMMEDIATE_u2))
3452
 
3453
#define ARRAY(OPERAND_TYPE, SUBOP) ARRAY_##SUBOP(OPERAND_TYPE)
3454
 
3455
#define ARRAY_LOAD(OPERAND_TYPE)                        \
3456
  {                                                     \
3457
    expand_java_arrayload( OPERAND_TYPE##_type_node );  \
3458
  }
3459
 
3460
#define ARRAY_STORE(OPERAND_TYPE)                       \
3461
  {                                                     \
3462
    expand_java_arraystore( OPERAND_TYPE##_type_node ); \
3463
  }
3464
 
3465
#define ARRAY_LENGTH(OPERAND_TYPE) expand_java_array_length();
3466
#define ARRAY_NEW(OPERAND_TYPE) ARRAY_NEW_##OPERAND_TYPE()
3467
#define ARRAY_NEW_PTR()                                                 \
3468
    push_value (build_anewarray (get_class_constant (current_jcf,       \
3469
                                                     IMMEDIATE_u2),     \
3470
                                 pop_value (int_type_node)));
3471
#define ARRAY_NEW_NUM()                         \
3472
  {                                             \
3473
    int atype = IMMEDIATE_u1;                   \
3474
    push_value (build_newarray (atype, pop_value (int_type_node)));\
3475
  }
3476
#define ARRAY_NEW_MULTI()                                       \
3477
  {                                                             \
3478
    tree klass = get_class_constant (current_jcf, IMMEDIATE_u2 );       \
3479
    int  ndims = IMMEDIATE_u1;                                  \
3480
    expand_java_multianewarray( klass, ndims );                 \
3481
  }
3482
 
3483
#define UNOP(OPERAND_TYPE, OPERAND_VALUE) \
3484
  push_value (fold_build1 (NEGATE_EXPR, OPERAND_TYPE##_type_node, \
3485
                           pop_value (OPERAND_TYPE##_type_node)));
3486
 
3487
#define CONVERT2(FROM_TYPE, TO_TYPE)                                     \
3488
  {                                                                      \
3489
    push_value (build1 (NOP_EXPR, int_type_node,                         \
3490
                        (convert (TO_TYPE##_type_node,                   \
3491
                                  pop_value (FROM_TYPE##_type_node))))); \
3492
  }
3493
 
3494
#define CONVERT(FROM_TYPE, TO_TYPE)                             \
3495
  {                                                             \
3496
    push_value (convert (TO_TYPE##_type_node,                   \
3497
                         pop_value (FROM_TYPE##_type_node)));   \
3498
  }
3499
 
3500
/* internal macro added for use by the WIDE case
3501
   Added TREE_TYPE (decl) assignment, apbianco  */
3502
#define STORE_INTERNAL(OPTYPE, OPVALUE)                         \
3503
  {                                                             \
3504
    tree decl, value;                                           \
3505
    int index = OPVALUE;                                        \
3506
    tree type = OPTYPE;                                         \
3507
    value = pop_value (type);                                   \
3508
    type = TREE_TYPE (value);                                   \
3509
    decl = find_local_variable (index, type, oldpc);            \
3510
    set_local_type (index, type);                               \
3511
    java_add_stmt (build2 (MODIFY_EXPR, type, decl, value));    \
3512
  }
3513
 
3514
#define STORE(OPERAND_TYPE, OPERAND_VALUE) \
3515
  { \
3516
    /* have to do this since OPERAND_VALUE may have side-effects */ \
3517
    int opvalue = OPERAND_VALUE; \
3518
    STORE_INTERNAL(OPERAND_TYPE##_type_node, opvalue); \
3519
  }
3520
 
3521
#define SPECIAL(OPERAND_TYPE, INSTRUCTION) \
3522
  SPECIAL_##INSTRUCTION(OPERAND_TYPE)
3523
 
3524
#define SPECIAL_ENTER(IGNORED) MONITOR_OPERATION (soft_monitorenter_node)
3525
#define SPECIAL_EXIT(IGNORED)  MONITOR_OPERATION (soft_monitorexit_node)
3526
 
3527
#define MONITOR_OPERATION(call)                 \
3528
  {                                             \
3529
    tree o = pop_value (ptr_type_node);         \
3530
    tree c;                                     \
3531
    flush_quick_stack ();                       \
3532
    c = build_java_monitor (call, o);           \
3533
    TREE_SIDE_EFFECTS (c) = 1;                  \
3534
    java_add_stmt (c);                          \
3535
  }
3536
 
3537
#define SPECIAL_IINC(IGNORED) \
3538
  { \
3539
    unsigned int local_var_index = IMMEDIATE_u1; \
3540
    int ival = IMMEDIATE_s1; \
3541
    expand_iinc(local_var_index, ival, oldpc); \
3542
  }
3543
 
3544
#define SPECIAL_WIDE(IGNORED) \
3545
  { \
3546
    int modified_opcode = IMMEDIATE_u1; \
3547
    unsigned int local_var_index = IMMEDIATE_u2; \
3548
    switch (modified_opcode) \
3549
      { \
3550
      case OPCODE_iinc: \
3551
        { \
3552
          int ival = IMMEDIATE_s2; \
3553
          expand_iinc (local_var_index, ival, oldpc); \
3554
          break; \
3555
        } \
3556
      case OPCODE_iload: \
3557
      case OPCODE_lload: \
3558
      case OPCODE_fload: \
3559
      case OPCODE_dload: \
3560
      case OPCODE_aload: \
3561
        { \
3562
          /* duplicate code from LOAD macro */ \
3563
          LOAD_INTERNAL(operand_type[modified_opcode], local_var_index); \
3564
          break; \
3565
        } \
3566
      case OPCODE_istore: \
3567
      case OPCODE_lstore: \
3568
      case OPCODE_fstore: \
3569
      case OPCODE_dstore: \
3570
      case OPCODE_astore: \
3571
        { \
3572
          STORE_INTERNAL(operand_type[modified_opcode], local_var_index); \
3573
          break; \
3574
        } \
3575
      default: \
3576
        error ("unrecogized wide sub-instruction"); \
3577
      } \
3578
  }
3579
 
3580
#define SPECIAL_THROW(IGNORED) \
3581
  build_java_athrow (pop_value (throwable_type_node))
3582
 
3583
#define SPECIAL_BREAK NOT_IMPL1
3584
#define IMPL          NOT_IMPL
3585
 
3586
#include "javaop.def"
3587
#undef JAVAOP
3588
   default:
3589
    fprintf (stderr, "%3d: unknown(%3d)\n", oldpc, byte_ops[PC-1]);
3590
  }
3591
  return PC;
3592
}
3593
 
3594
/* Return the opcode at PC in the code section pointed to by
3595
   CODE_OFFSET.  */
3596
 
3597
static unsigned char
3598
peek_opcode_at_pc (JCF *jcf, int code_offset, int pc)
3599
{
3600
  unsigned char opcode;
3601
  long absolute_offset = (long)JCF_TELL (jcf);
3602
 
3603
  JCF_SEEK (jcf, code_offset);
3604
  opcode = jcf->read_ptr [pc];
3605
  JCF_SEEK (jcf, absolute_offset);
3606
  return opcode;
3607
}
3608
 
3609
/* Some bytecode compilers are emitting accurate LocalVariableTable
3610
   attributes. Here's an example:
3611
 
3612
     PC   <t>store_<n>
3613
     PC+1 ...
3614
 
3615
     Attribute "LocalVariableTable"
3616
     slot #<n>: ... (PC: PC+1 length: L)
3617
 
3618
   This is accurate because the local in slot <n> really exists after
3619
   the opcode at PC is executed, hence from PC+1 to PC+1+L.
3620
 
3621
   This procedure recognizes this situation and extends the live range
3622
   of the local in SLOT to START_PC-1 or START_PC-2 (depending on the
3623
   length of the store instruction).
3624
 
3625
   This function is used by `give_name_to_locals' so that a local's
3626
   DECL features a DECL_LOCAL_START_PC such that the first related
3627
   store operation will use DECL as a destination, not an unrelated
3628
   temporary created for the occasion.
3629
 
3630
   This function uses a global (instruction_bits) that `note_instructions'
   should have allocated and filled properly.  */
3632
 
3633
int
3634
maybe_adjust_start_pc (struct JCF *jcf, int code_offset,
3635
                       int start_pc, int slot)
3636
{
3637
  int first, index, opcode;
3638
  int pc, insn_pc;
3639
  int wide_found = 0;
3640
 
3641
  if (!start_pc)
3642
    return start_pc;
3643
 
3644
  first = index = -1;
3645
 
3646
  /* Find last previous instruction and remember it */
3647
  for (pc = start_pc-1; pc; pc--)
3648
    if (instruction_bits [pc] & BCODE_INSTRUCTION_START)
3649
      break;
3650
  insn_pc = pc;
3651
 
3652
  /* Retrieve the instruction, handle `wide'. */
3653
  opcode = (int) peek_opcode_at_pc (jcf, code_offset, pc++);
3654
  if (opcode == OPCODE_wide)
3655
    {
3656
      wide_found = 1;
3657
      opcode = (int) peek_opcode_at_pc (jcf, code_offset, pc++);
3658
    }
3659
 
3660
  switch (opcode)
3661
    {
3662
    case OPCODE_astore_0:
3663
    case OPCODE_astore_1:
3664
    case OPCODE_astore_2:
3665
    case OPCODE_astore_3:
3666
      first = OPCODE_astore_0;
3667
      break;
3668
 
3669
    case OPCODE_istore_0:
3670
    case OPCODE_istore_1:
3671
    case OPCODE_istore_2:
3672
    case OPCODE_istore_3:
3673
      first = OPCODE_istore_0;
3674
      break;
3675
 
3676
    case OPCODE_lstore_0:
3677
    case OPCODE_lstore_1:
3678
    case OPCODE_lstore_2:
3679
    case OPCODE_lstore_3:
3680
      first = OPCODE_lstore_0;
3681
      break;
3682
 
3683
    case OPCODE_fstore_0:
3684
    case OPCODE_fstore_1:
3685
    case OPCODE_fstore_2:
3686
    case OPCODE_fstore_3:
3687
      first = OPCODE_fstore_0;
3688
      break;
3689
 
3690
    case OPCODE_dstore_0:
3691
    case OPCODE_dstore_1:
3692
    case OPCODE_dstore_2:
3693
    case OPCODE_dstore_3:
3694
      first = OPCODE_dstore_0;
3695
      break;
3696
 
3697
    case OPCODE_astore:
3698
    case OPCODE_istore:
3699
    case OPCODE_lstore:
3700
    case OPCODE_fstore:
3701
    case OPCODE_dstore:
3702
      index = peek_opcode_at_pc (jcf, code_offset, pc);
3703
      if (wide_found)
3704
        {
3705
          int other = peek_opcode_at_pc (jcf, code_offset, ++pc);
3706
          index = (other << 8) + index;
3707
        }
3708
      break;
3709
    }
3710
 
3711
  /* Now we decide: first >0 means we have a <t>store_<n>, index >0
3712
     means we have a <t>store. */
3713
  if ((first > 0 && opcode - first == slot) || (index > 0 && index == slot))
3714
    start_pc = insn_pc;
3715
 
3716
  return start_pc;
3717
}
3718
 
3719
/* Force the (direct) sub-operands of NODE to be evaluated in left-to-right
3720
   order, as specified by Java Language Specification.
3721
 
3722
   The problem is that while expand_expr will evaluate its sub-operands in
3723
   left-to-right order, for variables it will just return an rtx (i.e.
3724
   an lvalue) for the variable (rather than an rvalue).  So it is possible
3725
   that a later sub-operand will change the register, and when the
3726
   actual operation is done, it will use the new value, when it should
3727
   have used the original value.
3728
 
3729
   We fix this by using save_expr.  This forces the sub-operand to be
   copied into a fresh virtual register.
3731
 
3732
   For method invocation, we modify the arguments so that a
3733
   left-to-right order evaluation is performed. Saved expressions
3734
   will, in CALL_EXPR order, be reused when the call will be expanded.
3735
 
3736
   We also promote outgoing args if needed.  */
3737
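/* For example, in a hypothetical call such as  f (i, i++)  the first
   argument must be evaluated and saved before the second argument
   changes i; wrapping each argument in a SAVE_EXPR below guarantees
   that.  */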
 
3738
tree
3739
force_evaluation_order (tree node)
3740
{
3741
  if (flag_syntax_only)
3742
    return node;
3743
  if (TREE_CODE (node) == CALL_EXPR
3744
      || (TREE_CODE (node) == COMPOUND_EXPR
3745
          && TREE_CODE (TREE_OPERAND (node, 0)) == CALL_EXPR
3746
          && TREE_CODE (TREE_OPERAND (node, 1)) == SAVE_EXPR))
3747
    {
3748
      tree call, cmp;
3749
      int i, nargs;
3750
 
3751
      /* Account for wrapped around ctors.  */
3752
      if (TREE_CODE (node) == COMPOUND_EXPR)
3753
        call = TREE_OPERAND (node, 0);
3754
      else
3755
        call = node;
3756
 
3757
      nargs = call_expr_nargs (call);
3758
 
3759
      /* This reverses the evaluation order. This is a desired effect. */
3760
      for (i = 0, cmp = NULL_TREE; i < nargs; i++)
3761
        {
3762
          tree arg = CALL_EXPR_ARG (call, i);
3763
          /* Promote types smaller than integer.  This is required by
3764
             some ABIs.  */
3765
          tree type = TREE_TYPE (arg);
3766
          tree saved;
3767
          if (targetm.calls.promote_prototypes (type)
3768
              && INTEGRAL_TYPE_P (type)
3769
              && INT_CST_LT_UNSIGNED (TYPE_SIZE (type),
3770
                                      TYPE_SIZE (integer_type_node)))
3771
            arg = fold_convert (integer_type_node, arg);
3772
 
3773
          saved = save_expr (force_evaluation_order (arg));
3774
          cmp = (cmp == NULL_TREE ? saved :
3775
                 build2 (COMPOUND_EXPR, void_type_node, cmp, saved));
3776
 
3777
          CALL_EXPR_ARG (call, i) = saved;
3778
        }
3779
 
3780
      if (cmp && TREE_CODE (cmp) == COMPOUND_EXPR)
3781
        TREE_SIDE_EFFECTS (cmp) = 1;
3782
 
3783
      if (cmp)
3784
        {
3785
          cmp = build2 (COMPOUND_EXPR, TREE_TYPE (node), cmp, node);
3786
          if (TREE_TYPE (cmp) != void_type_node)
3787
            cmp = save_expr (cmp);
3788
          TREE_SIDE_EFFECTS (cmp) = 1;
3789
          node = cmp;
3790
        }
3791
    }
3792
  return node;
3793
}
3794
 
3795
/* Build a node to represent empty statements and blocks. */
3796
 
3797
tree
3798
build_java_empty_stmt (void)
3799
{
3800
  tree t = build_empty_stmt (input_location);
3801
  return t;
3802
}
3803
 
3804
/* Promote all args of integral type before generating any code.  */
3805
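/* For example, a parameter declared with a type narrower than int
   (byte, short, char or boolean) is copied into its local slot as a
   full int, so later loads of that slot see a properly widened value.  */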
 
3806
static void
3807
promote_arguments (void)
3808
{
3809
  int i;
3810
  tree arg;
3811
  for (arg = DECL_ARGUMENTS (current_function_decl), i = 0;
3812
       arg != NULL_TREE;  arg = TREE_CHAIN (arg), i++)
3813
    {
3814
      tree arg_type = TREE_TYPE (arg);
3815
      if (INTEGRAL_TYPE_P (arg_type)
3816
          && TYPE_PRECISION (arg_type) < 32)
3817
        {
3818
          tree copy = find_local_variable (i, integer_type_node, -1);
3819
          java_add_stmt (build2 (MODIFY_EXPR, integer_type_node,
3820
                                 copy,
3821
                                 fold_convert (integer_type_node, arg)));
3822
        }
3823
      if (TYPE_IS_WIDE (arg_type))
3824
        i++;
3825
    }
3826
}
3827
 
3828
/* Create a local variable that points to the constant pool.  */
3829
 
3830
static void
3831
cache_cpool_data_ref (void)
3832
{
3833
  if (optimize)
3834
    {
3835
      tree cpool;
3836
      tree d = build_constant_data_ref (flag_indirect_classes);
3837
      tree cpool_ptr = build_decl (input_location, VAR_DECL, NULL_TREE,
3838
                                   build_pointer_type (TREE_TYPE (d)));
3839
      java_add_local_var (cpool_ptr);
3840
      TREE_CONSTANT (cpool_ptr) = 1;
3841
 
3842
      java_add_stmt (build2 (MODIFY_EXPR, TREE_TYPE (cpool_ptr),
3843
                             cpool_ptr, build_address_of (d)));
3844
      cpool = build1 (INDIRECT_REF, TREE_TYPE (d), cpool_ptr);
3845
      TREE_THIS_NOTRAP (cpool) = 1;
3846
      TYPE_CPOOL_DATA_REF (output_class) = cpool;
3847
    }
3848
}
3849
 
3850
#include "gt-java-expr.h"
