/* Process expressions for the GNU compiler for the Java(TM) language.
   Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004,
   2005, 2006, 2007, 2008, 2010, 2011 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.

Java and all Java-based marks are trademarks or registered trademarks
of Sun Microsystems, Inc. in the United States and other countries.
The Free Software Foundation is independent of Sun Microsystems, Inc.  */

/* Hacked by Per Bothner <bothner@cygnus.com> February 1996. */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"                 /* For INT_TYPE_SIZE,
                                   TARGET_VTABLE_USES_DESCRIPTORS,
                                   BITS_PER_UNIT,
                                   MODIFY_JNI_METHOD_CALL and
                                   PARM_BOUNDARY.  */

#include "tree.h"
#include "flags.h"
#include "java-tree.h"
#include "javaop.h"
#include "java-opcodes.h"
#include "jcf.h"
#include "java-except.h"
#include "parse.h"
#include "diagnostic-core.h"
#include "ggc.h"
#include "tree-iterator.h"
#include "target.h"

static void flush_quick_stack (void);
static void push_value (tree);
static tree pop_value (tree);
static void java_stack_swap (void);
static void java_stack_dup (int, int);
static void build_java_athrow (tree);
static void build_java_jsr (int, int);
static void build_java_ret (tree);
static void expand_java_multianewarray (tree, int);
static void expand_java_arraystore (tree);
static void expand_java_arrayload (tree);
static void expand_java_array_length (void);
static tree build_java_monitor (tree, tree);
static void expand_java_pushc (int, tree);
static void expand_java_return (tree);
static void expand_load_internal (int, tree, int);
static void expand_java_NEW (tree);
static void expand_java_INSTANCEOF (tree);
static void expand_java_CHECKCAST (tree);
static void expand_iinc (unsigned int, int, int);
static void expand_java_binop (tree, enum tree_code);
static void note_label (int, int);
static void expand_compare (enum tree_code, tree, tree, int);
static void expand_test (enum tree_code, tree, int);
static void expand_cond (enum tree_code, tree, int);
static void expand_java_goto (int);
static tree expand_java_switch (tree, int);
static void expand_java_add_case (tree, int, int);
static VEC(tree,gc) *pop_arguments (tree);
static void expand_invoke (int, int, int);
static void expand_java_field_op (int, int, int);
static void java_push_constant_from_pool (struct JCF *, int);
static void java_stack_pop (int);
static tree build_java_throw_out_of_bounds_exception (tree);
static tree build_java_check_indexed_type (tree, tree);
static unsigned char peek_opcode_at_pc (struct JCF *, int, int);
static void promote_arguments (void);
static void cache_cpool_data_ref (void);

static GTY(()) tree operand_type[59];

static GTY(()) tree methods_ident;
static GTY(()) tree ncode_ident;
tree dtable_ident = NULL_TREE;

/* Set to nonzero value in order to emit class initialization code
   before static field references.  */
int always_initialize_class_p = 0;

/* We store the stack state in two places:
   Within a basic block, we use the quick_stack, which is a VEC of expression
   nodes.
   This is the top part of the stack;  below that we use find_stack_slot.
   At the end of a basic block, the quick_stack must be flushed
   to the stack slot array (as handled by find_stack_slot).
   Using quick_stack generates better code (especially when
   compiled without optimization), because we do not have to
   explicitly store and load trees to temporary variables.

   If a variable is on the quick stack, it means the value of the variable
   when the quick stack was last flushed.  Conceptually, flush_quick_stack
   saves all the quick_stack elements in parallel.  However, that is
   complicated, so it actually saves them (i.e. copies each stack value
   to its home virtual register) from low indexes.  This allows a quick_stack
   element at index i (counting from the bottom of the stack) to reference
   slot virtual registers that are >= i, but not those that are deeper.
   This convention makes most operations easier.  For example iadd works
   even when the stack contains (reg[0], reg[1]):  It results in the
   stack containing (reg[0]+reg[1]), which is OK.  However, some stack
   operations are more complicated.  For example dup given a stack
   containing (reg[0]) would yield (reg[0], reg[0]), which would violate
   the convention, since stack value 1 would refer to a register with
   lower index (reg[0]), which flush_quick_stack does not safely handle.
   So dup cannot just add an extra element to the quick_stack, but iadd can.
*/
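
/* Example (illustrative only, added for clarity; not from the original
   comment): suppose stack_pointer is 2 and the quick_stack holds
   (reg[0] + 1, reg[1]) from bottom to top, where reg[i] stands for the
   home virtual register returned by find_stack_slot for slot i.  Then
   flush_quick_stack emits, in this order,

       reg[0] = reg[0] + 1;
       reg[1] = reg[1];   (omitted, since decl == t)

   Flushing from low indexes first is safe precisely because the element
   at index i may only mention registers >= i.  */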

static GTY(()) VEC(tree,gc) *quick_stack;

/* The physical memory page size used in this computer.  See
   build_field_ref().  */
static GTY(()) tree page_size;

/* The stack pointer of the Java virtual machine.
   This does include the size of the quick_stack. */

int stack_pointer;

const unsigned char *linenumber_table;
int linenumber_count;

/* Largest pc so far in this method that has been passed to lookup_label. */
int highest_label_pc_this_method = -1;

/* Base value for this method to add to pc to get generated label. */
int start_label_pc_this_method = 0;

void
init_expr_processing (void)
{
  operand_type[21] = operand_type[54] = int_type_node;
  operand_type[22] = operand_type[55] = long_type_node;
  operand_type[23] = operand_type[56] = float_type_node;
  operand_type[24] = operand_type[57] = double_type_node;
  operand_type[25] = operand_type[58] = ptr_type_node;
}

tree
java_truthvalue_conversion (tree expr)
{
  /* It is simpler and generates better code to have only TRUTH_*_EXPR
     or comparison expressions as truth values at this level.

     This function should normally be identity for Java.  */

  switch (TREE_CODE (expr))
    {
    case EQ_EXPR:   case NE_EXPR:   case UNEQ_EXPR: case LTGT_EXPR:
    case LE_EXPR:   case GE_EXPR:   case LT_EXPR:   case GT_EXPR:
    case UNLE_EXPR: case UNGE_EXPR: case UNLT_EXPR: case UNGT_EXPR:
    case ORDERED_EXPR: case UNORDERED_EXPR:
    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
    case TRUTH_NOT_EXPR:
    case ERROR_MARK:
      return expr;

    case INTEGER_CST:
      return integer_zerop (expr) ? boolean_false_node : boolean_true_node;

    case REAL_CST:
      return real_zerop (expr) ? boolean_false_node : boolean_true_node;

    /* are these legal? XXX JH */
    case NEGATE_EXPR:
    case ABS_EXPR:
    case FLOAT_EXPR:
      /* These don't change whether an object is nonzero or zero.  */
      return java_truthvalue_conversion (TREE_OPERAND (expr, 0));

    case COND_EXPR:
      /* Distribute the conversion into the arms of a COND_EXPR.  */
      return fold_build3 (COND_EXPR, boolean_type_node, TREE_OPERAND (expr, 0),
                          java_truthvalue_conversion (TREE_OPERAND (expr, 1)),
                          java_truthvalue_conversion (TREE_OPERAND (expr, 2)));

    case NOP_EXPR:
      /* If this is widening the argument, we can ignore it.  */
      if (TYPE_PRECISION (TREE_TYPE (expr))
          >= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (expr, 0))))
        return java_truthvalue_conversion (TREE_OPERAND (expr, 0));
      /* fall through to default */

    default:
      return fold_build2 (NE_EXPR, boolean_type_node,
                          expr, boolean_false_node);
    }
}

/* Save any stack slots that happen to be in the quick_stack into their
   home virtual register slots.

   The copy order is from low stack index to high, to support the invariant
   that the expression for a slot may contain decls for stack slots with
   higher (or the same) index, but not lower. */

static void
flush_quick_stack (void)
{
  int stack_index = stack_pointer;
  unsigned ix;
  tree t;

  /* Count the number of slots the quick stack is holding.  */
  for (ix = 0; VEC_iterate(tree, quick_stack, ix, t); ix++)
    stack_index -= 1 + TYPE_IS_WIDE (TREE_TYPE (t));

  for (ix = 0; VEC_iterate(tree, quick_stack, ix, t); ix++)
    {
      tree decl, type = TREE_TYPE (t);

      decl = find_stack_slot (stack_index, type);
      if (decl != t)
        java_add_stmt (build2 (MODIFY_EXPR, TREE_TYPE (t), decl, t));
      stack_index += 1 + TYPE_IS_WIDE (type);
    }

  VEC_truncate (tree, quick_stack, 0);
}

/* Push TYPE on the type stack.
   Return 1 on success, 0 on overflow. */

int
push_type_0 (tree type)
{
  int n_words;
  type = promote_type (type);
  n_words = 1 + TYPE_IS_WIDE (type);
  if (stack_pointer + n_words > DECL_MAX_STACK (current_function_decl))
    return 0;
  /* Allocate decl for this variable now, so we get a temporary that
     survives the whole method. */
  find_stack_slot (stack_pointer, type);
  stack_type_map[stack_pointer++] = type;
  n_words--;
  while (--n_words >= 0)
    stack_type_map[stack_pointer++] = TYPE_SECOND;
  return 1;
}

void
push_type (tree type)
{
  int r = push_type_0 (type);
  gcc_assert (r);
}

static void
push_value (tree value)
{
  tree type = TREE_TYPE (value);
  if (TYPE_PRECISION (type) < 32 && INTEGRAL_TYPE_P (type))
    {
      type = promote_type (type);
      value = convert (type, value);
    }
  push_type (type);
  VEC_safe_push (tree, gc, quick_stack, value);

  /* If the value has a side effect, then we need to evaluate it
     whether or not the result is used.  If the value ends up on the
     quick stack and is then popped, this won't happen -- so we flush
     the quick stack.  It is safest to simply always flush, though,
     since TREE_SIDE_EFFECTS doesn't capture COMPONENT_REF, and for
     the latter we may need to strip conversions.  */
  flush_quick_stack ();
}

/* Pop a type from the type stack.
   TYPE is the expected type.   Return the actual type, which must be
   convertible to TYPE.
   On an error, *MESSAGEP is set to a freshly malloc'd error message. */

tree
pop_type_0 (tree type, char **messagep)
{
  int n_words;
  tree t;
  *messagep = NULL;
  if (TREE_CODE (type) == RECORD_TYPE)
    type = promote_type (type);
  n_words = 1 + TYPE_IS_WIDE (type);
  if (stack_pointer < n_words)
    {
      *messagep = xstrdup ("stack underflow");
      return type;
    }
  while (--n_words > 0)
    {
      if (stack_type_map[--stack_pointer] != void_type_node)
        {
          *messagep = xstrdup ("Invalid multi-word value on type stack");
          return type;
        }
    }
  t = stack_type_map[--stack_pointer];
  if (type == NULL_TREE || t == type)
    return t;
  if (TREE_CODE (t) == TREE_LIST)
    {
      do
        {
          tree tt = TREE_PURPOSE (t);
          if (! can_widen_reference_to (tt, type))
            {
              t = tt;
              goto fail;
            }
          t = TREE_CHAIN (t);
        }
      while (t);
      return t;
    }
  if (INTEGRAL_TYPE_P (type) && INTEGRAL_TYPE_P (t)
      && TYPE_PRECISION (type) <= 32 && TYPE_PRECISION (t) <= 32)
    return t;
  if (TREE_CODE (type) == POINTER_TYPE && TREE_CODE (t) == POINTER_TYPE)
    {
      /* If the expected type we've been passed is object or ptr
         (i.e. void*), the caller needs to know the real type.  */
      if (type == ptr_type_node || type == object_ptr_type_node)
        return t;

      /* Since the verifier has already run, we know that any
         types we see will be compatible.  In BC mode, this fact
         may be checked at runtime, but if that is so then we can
         assume its truth here as well.  So, we always succeed
         here, with the expected type.  */
      return type;
    }

  if (! flag_verify_invocations && flag_indirect_dispatch
      && t == object_ptr_type_node)
    {
      if (type != ptr_type_node)
        warning (0, "need to insert runtime check for %s",
                 xstrdup (lang_printable_name (type, 0)));
      return type;
    }

  /* lang_printable_name uses a static buffer, so we must save the result
     from calling it the first time.  */
 fail:
  {
    char *temp = xstrdup (lang_printable_name (type, 0));
    /* If the stack contains a multi-word type, keep popping the stack until
       the real type is found.  */
    while (t == void_type_node)
      t = stack_type_map[--stack_pointer];
    *messagep = concat ("expected type '", temp,
                        "' but stack contains '", lang_printable_name (t, 0),
                        "'", NULL);
    free (temp);
  }
  return type;
}

/* Pop a type from the type stack.
   TYPE is the expected type.  Return the actual type, which must be
   convertible to TYPE, otherwise call error. */

tree
pop_type (tree type)
{
  char *message = NULL;
  type = pop_type_0 (type, &message);
  if (message != NULL)
    {
      error ("%s", message);
      free (message);
    }
  return type;
}


/* Return true if two type assertions are equal.  */

static int
type_assertion_eq (const void * k1_p, const void * k2_p)
{
  const type_assertion k1 = *(const type_assertion *)k1_p;
  const type_assertion k2 = *(const type_assertion *)k2_p;
  return (k1.assertion_code == k2.assertion_code
          && k1.op1 == k2.op1
          && k1.op2 == k2.op2);
}

/* Hash a type assertion.  */

static hashval_t
type_assertion_hash (const void *p)
{
  const type_assertion *k_p = (const type_assertion *) p;
  hashval_t hash = iterative_hash (&k_p->assertion_code, sizeof
                                   k_p->assertion_code, 0);

  switch (k_p->assertion_code)
    {
    case JV_ASSERT_TYPES_COMPATIBLE:
      hash = iterative_hash (&TYPE_UID (k_p->op2), sizeof TYPE_UID (k_p->op2),
                             hash);
      /* Fall through.  */

    case JV_ASSERT_IS_INSTANTIABLE:
      hash = iterative_hash (&TYPE_UID (k_p->op1), sizeof TYPE_UID (k_p->op1),
                             hash);
      /* Fall through.  */

    case JV_ASSERT_END_OF_TABLE:
      break;

    default:
      gcc_unreachable ();
    }

  return hash;
}

/* Add an entry to the type assertion table for the given class.
   KLASS is the class for which this assertion will be evaluated by the
   runtime during loading/initialization.
   ASSERTION_CODE is the 'opcode' or type of this assertion: see java-tree.h.
   OP1 and OP2 are the operands. The tree type of these arguments may be
   specific to each assertion_code. */

void
add_type_assertion (tree klass, int assertion_code, tree op1, tree op2)
{
  htab_t assertions_htab;
  type_assertion as;
  void **as_pp;

  assertions_htab = TYPE_ASSERTIONS (klass);
  if (assertions_htab == NULL)
    {
      assertions_htab = htab_create_ggc (7, type_assertion_hash,
                                         type_assertion_eq, NULL);
      TYPE_ASSERTIONS (current_class) = assertions_htab;
    }

  as.assertion_code = assertion_code;
  as.op1 = op1;
  as.op2 = op2;

  as_pp = htab_find_slot (assertions_htab, &as, INSERT);

  /* Don't add the same assertion twice.  */
  if (*as_pp)
    return;

  *as_pp = ggc_alloc_type_assertion ();
  **(type_assertion **)as_pp = as;
}


/* Return 1 if SOURCE_TYPE can be safely widened to TARGET_TYPE.
   Handles array types and interfaces.  */

int
can_widen_reference_to (tree source_type, tree target_type)
{
  if (source_type == ptr_type_node || target_type == object_ptr_type_node)
    return 1;

  /* Get rid of pointers  */
  if (TREE_CODE (source_type) == POINTER_TYPE)
    source_type = TREE_TYPE (source_type);
  if (TREE_CODE (target_type) == POINTER_TYPE)
    target_type = TREE_TYPE (target_type);

  if (source_type == target_type)
    return 1;

  /* FIXME: This is very pessimistic, in that it checks everything,
     even if we already know that the types are compatible.  If we're
     to support full Java class loader semantics, we need this.
     However, we could do something more optimal.  */
  if (! flag_verify_invocations)
    {
      add_type_assertion (current_class, JV_ASSERT_TYPES_COMPATIBLE,
                          source_type, target_type);

      if (!quiet_flag)
       warning (0, "assert: %s is assign compatible with %s",
                xstrdup (lang_printable_name (target_type, 0)),
                xstrdup (lang_printable_name (source_type, 0)));
      /* Punt everything to runtime.  */
      return 1;
    }

  if (TYPE_DUMMY (source_type) || TYPE_DUMMY (target_type))
    {
      return 1;
    }
  else
    {
      if (TYPE_ARRAY_P (source_type) || TYPE_ARRAY_P (target_type))
        {
          HOST_WIDE_INT source_length, target_length;
          if (TYPE_ARRAY_P (source_type) != TYPE_ARRAY_P (target_type))
            {
              /* An array implements Cloneable and Serializable.  */
              tree name = DECL_NAME (TYPE_NAME (target_type));
              return (name == java_lang_cloneable_identifier_node
                      || name == java_io_serializable_identifier_node);
            }
          target_length = java_array_type_length (target_type);
          if (target_length >= 0)
            {
              source_length = java_array_type_length (source_type);
              if (source_length != target_length)
                return 0;
            }
          source_type = TYPE_ARRAY_ELEMENT (source_type);
          target_type = TYPE_ARRAY_ELEMENT (target_type);
          if (source_type == target_type)
            return 1;
          if (TREE_CODE (source_type) != POINTER_TYPE
              || TREE_CODE (target_type) != POINTER_TYPE)
            return 0;
          return can_widen_reference_to (source_type, target_type);
        }
      else
        {
          int source_depth = class_depth (source_type);
          int target_depth = class_depth (target_type);

          if (TYPE_DUMMY (source_type) || TYPE_DUMMY (target_type))
            {
              if (! quiet_flag)
                warning (0, "assert: %s is assign compatible with %s",
                         xstrdup (lang_printable_name (target_type, 0)),
                         xstrdup (lang_printable_name (source_type, 0)));
              return 1;
            }

          /* class_depth can return a negative depth if an error occurred */
          if (source_depth < 0 || target_depth < 0)
            return 0;

          if (CLASS_INTERFACE (TYPE_NAME (target_type)))
            {
              /* target_type is OK if source_type or source_type ancestors
                 implement target_type. We handle multiple sub-interfaces  */
              tree binfo, base_binfo;
              int i;

              for (binfo = TYPE_BINFO (source_type), i = 0;
                   BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
                if (can_widen_reference_to
                    (BINFO_TYPE (base_binfo), target_type))
                  return 1;

              if (!i)
                return 0;
            }

          for ( ; source_depth > target_depth;  source_depth--)
            {
              source_type
                = BINFO_TYPE (BINFO_BASE_BINFO (TYPE_BINFO (source_type), 0));
            }
          return source_type == target_type;
        }
    }
}

static tree
pop_value (tree type)
{
  type = pop_type (type);
  if (VEC_length (tree, quick_stack) != 0)
    return VEC_pop (tree, quick_stack);
  else
    return find_stack_slot (stack_pointer, promote_type (type));
}


/* Pop and discard the top COUNT stack slots. */

static void
java_stack_pop (int count)
{
  while (count > 0)
    {
      tree type;

      gcc_assert (stack_pointer != 0);

      type = stack_type_map[stack_pointer - 1];
      if (type == TYPE_SECOND)
        {
          count--;
          gcc_assert (stack_pointer != 1 && count > 0);

          type = stack_type_map[stack_pointer - 2];
        }
      pop_value (type);
      count--;
    }
}

/* Implement the 'swap' operator (to swap two top stack slots). */

static void
java_stack_swap (void)
{
  tree type1, type2;
  tree temp;
  tree decl1, decl2;

  if (stack_pointer < 2
      || (type1 = stack_type_map[stack_pointer - 1]) == TYPE_SECOND
      || (type2 = stack_type_map[stack_pointer - 2]) == TYPE_SECOND
      || TYPE_IS_WIDE (type1) || TYPE_IS_WIDE (type2))
    /* Bad stack swap.  */
    abort ();

  flush_quick_stack ();
  decl1 = find_stack_slot (stack_pointer - 1, type1);
  decl2 = find_stack_slot (stack_pointer - 2, type2);
  temp = build_decl (input_location, VAR_DECL, NULL_TREE, type1);
  java_add_local_var (temp);
  java_add_stmt (build2 (MODIFY_EXPR, type1, temp, decl1));
  java_add_stmt (build2 (MODIFY_EXPR, type2,
                         find_stack_slot (stack_pointer - 1, type2),
                         decl2));
  java_add_stmt (build2 (MODIFY_EXPR, type1,
                         find_stack_slot (stack_pointer - 2, type1),
                         temp));
  stack_type_map[stack_pointer - 1] = type2;
  stack_type_map[stack_pointer - 2] = type1;
}

static void
java_stack_dup (int size, int offset)
{
  int low_index = stack_pointer - size - offset;
  int dst_index;
  if (low_index < 0)
    error ("stack underflow - dup* operation");

  flush_quick_stack ();

  stack_pointer += size;
  dst_index = stack_pointer;

  for (dst_index = stack_pointer;  --dst_index >= low_index; )
    {
      tree type;
      int src_index = dst_index - size;
      if (src_index < low_index)
        src_index = dst_index + size + offset;
      type = stack_type_map [src_index];
      if (type == TYPE_SECOND)
        {
          /* Dup operation splits 64-bit number.  */
          gcc_assert (src_index > low_index);

          stack_type_map[dst_index] = type;
          src_index--;  dst_index--;
          type = stack_type_map[src_index];
          gcc_assert (TYPE_IS_WIDE (type));
        }
      else
        gcc_assert (! TYPE_IS_WIDE (type));

      if (src_index != dst_index)
        {
          tree src_decl = find_stack_slot (src_index, type);
          tree dst_decl = find_stack_slot (dst_index, type);

          java_add_stmt
            (build2 (MODIFY_EXPR, TREE_TYPE (dst_decl), dst_decl, src_decl));
          stack_type_map[dst_index] = type;
        }
    }
}
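
/* Worked example (illustrative; the opcode-to-argument mapping lives in the
   bytecode expander, not in this file): the dup family is expected to map
   onto (size, offset) roughly as dup -> (1, 0), dup_x1 -> (1, 1),
   dup_x2 -> (1, 2), dup2 -> (2, 0), dup2_x1 -> (2, 1), dup2_x2 -> (2, 2).
   For dup_x1, a stack holding (..., v2, v1) becomes (..., v1, v2, v1): the
   loop above first copies v1 and v2 one slot upward, then the wrapped
   src_index case re-copies the freshly written top-of-stack v1 into the
   freed low slot.  */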

/* Calls _Jv_Throw or _Jv_Sjlj_Throw.  Discard the contents of the
   value stack. */

static void
build_java_athrow (tree node)
{
  tree call;

  call = build_call_nary (void_type_node,
                          build_address_of (throw_node),
                          1, node);
  TREE_SIDE_EFFECTS (call) = 1;
  java_add_stmt (call);
  java_stack_pop (stack_pointer);
}

/* Implementation for jsr/ret */

static void
build_java_jsr (int target_pc, int return_pc)
{
  tree where =  lookup_label (target_pc);
  tree ret = lookup_label (return_pc);
  tree ret_label = fold_build1 (ADDR_EXPR, return_address_type_node, ret);
  push_value (ret_label);
  flush_quick_stack ();
  java_add_stmt (build1 (GOTO_EXPR, void_type_node, where));

  /* Do not need to emit the label here.  We noted the existence of the
     label as a jump target in note_instructions; we'll emit the label
     for real at the beginning of the expand_byte_code loop.  */
}

static void
build_java_ret (tree location)
{
  java_add_stmt (build1 (GOTO_EXPR, void_type_node, location));
}

/* Implementation of operations on array: new, load, store, length */

tree
decode_newarray_type (int atype)
{
  switch (atype)
    {
    case 4:  return boolean_type_node;
    case 5:  return char_type_node;
    case 6:  return float_type_node;
    case 7:  return double_type_node;
    case 8:  return byte_type_node;
    case 9:  return short_type_node;
    case 10: return int_type_node;
    case 11: return long_type_node;
    default: return NULL_TREE;
    }
}

/* Map primitive type to the code used by OPCODE_newarray. */

int
encode_newarray_type (tree type)
{
  if (type == boolean_type_node)
    return 4;
  else if (type == char_type_node)
    return 5;
  else if (type == float_type_node)
    return 6;
  else if (type == double_type_node)
    return 7;
  else if (type == byte_type_node)
    return 8;
  else if (type == short_type_node)
    return 9;
  else if (type == int_type_node)
    return 10;
  else if (type == long_type_node)
    return 11;
  else
    gcc_unreachable ();
}

/* Build a call to _Jv_ThrowBadArrayIndex(), the
   ArrayIndexOutOfBoundsException exception handler.  */

static tree
build_java_throw_out_of_bounds_exception (tree index)
{
  tree node;

  /* We need to build a COMPOUND_EXPR because _Jv_ThrowBadArrayIndex()
     has void return type.  We cannot just set the type of the CALL_EXPR below
     to int_type_node because we would lose it during gimplification.  */
  gcc_assert (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (soft_badarrayindex_node))));
  node = build_call_nary (void_type_node,
                               build_address_of (soft_badarrayindex_node),
                               1, index);
  TREE_SIDE_EFFECTS (node) = 1;

  node = build2 (COMPOUND_EXPR, int_type_node, node, integer_zero_node);
  TREE_SIDE_EFFECTS (node) = 1; /* Allows expansion within ANDIF */

  return (node);
}

/* Return the length of an array. Doesn't perform any checking on the nature
   or value of the array NODE. May be used to implement some bytecodes.  */

tree
build_java_array_length_access (tree node)
{
  tree type = TREE_TYPE (node);
  tree array_type = TREE_TYPE (type);
  HOST_WIDE_INT length;

  if (!is_array_type_p (type))
    {
      /* With the new verifier, we will see an ordinary pointer type
         here.  In this case, we just use an arbitrary array type.  */
      array_type = build_java_array_type (object_ptr_type_node, -1);
      type = promote_type (array_type);
    }

  length = java_array_type_length (type);
  if (length >= 0)
    return build_int_cst (NULL_TREE, length);

  node = build3 (COMPONENT_REF, int_type_node,
                 build_java_indirect_ref (array_type, node,
                                          flag_check_references),
                 lookup_field (&array_type, get_identifier ("length")),
                 NULL_TREE);
  IS_ARRAY_LENGTH_ACCESS (node) = 1;
  return node;
}

/* Optionally checks a reference against the NULL pointer.  EXPR is the
   reference to check; CHECK is nonzero if the check should be emitted.
   Don't generate extra checks if we're not generating code.  */

tree
java_check_reference (tree expr, int check)
{
  if (!flag_syntax_only && check)
    {
      expr = save_expr (expr);
      expr = build3 (COND_EXPR, TREE_TYPE (expr),
                     build2 (EQ_EXPR, boolean_type_node,
                             expr, null_pointer_node),
                     build_call_nary (void_type_node,
                                      build_address_of (soft_nullpointer_node),
                                      0),
                     expr);
    }

  return expr;
}

/* Reference an object: just like an INDIRECT_REF, but with checking.  */

tree
build_java_indirect_ref (tree type, tree expr, int check)
{
  tree t;
  t = java_check_reference (expr, check);
  t = convert (build_pointer_type (type), t);
  return build1 (INDIRECT_REF, type, t);
}

/* Implement array indexing (either as l-value or r-value).
   Returns a tree for ARRAY[INDEX], assuming TYPE is the element type.
   Optionally performs bounds checking and/or a NULL-pointer test.
   At this point, ARRAY should have been verified as an array.  */

tree
build_java_arrayaccess (tree array, tree type, tree index)
{
  tree node, throw_expr = NULL_TREE;
  tree data_field;
  tree ref;
  tree array_type = TREE_TYPE (TREE_TYPE (array));
  tree size_exp = fold_convert (sizetype, size_in_bytes (type));

  if (!is_array_type_p (TREE_TYPE (array)))
    {
      /* With the new verifier, we will see an ordinary pointer type
         here.  In this case, we just use the correct array type.  */
      array_type = build_java_array_type (type, -1);
    }

  if (flag_bounds_check)
    {
      /* Generate:
       * (unsigned jint) INDEX >= (unsigned jint) LEN
       *    && throw ArrayIndexOutOfBoundsException.
       * Note this is equivalent to and more efficient than:
       * INDEX < 0 || INDEX >= LEN && throw ... */
      tree test;
      tree len = convert (unsigned_int_type_node,
                          build_java_array_length_access (array));
      test = fold_build2 (GE_EXPR, boolean_type_node,
                          convert (unsigned_int_type_node, index),
                          len);
      if (! integer_zerop (test))
        {
          throw_expr
            = build2 (TRUTH_ANDIF_EXPR, int_type_node, test,
                      build_java_throw_out_of_bounds_exception (index));
          /* allows expansion within COMPOUND */
          TREE_SIDE_EFFECTS( throw_expr ) = 1;
        }
    }

  /* If checking bounds, wrap the index expr with a COMPOUND_EXPR in order
     to have the bounds check evaluated first. */
  if (throw_expr != NULL_TREE)
    index = build2 (COMPOUND_EXPR, int_type_node, throw_expr, index);

  data_field = lookup_field (&array_type, get_identifier ("data"));

  ref = build3 (COMPONENT_REF, TREE_TYPE (data_field),
                build_java_indirect_ref (array_type, array,
                                         flag_check_references),
                data_field, NULL_TREE);

  /* Take the address of the data field and convert it to a pointer to
     the element type.  */
  node = build1 (NOP_EXPR, build_pointer_type (type), build_address_of (ref));

  /* Multiply the index by the size of an element to obtain a byte
     offset.  Convert the result to a pointer to the element type.  */
  index = build2 (MULT_EXPR, sizetype,
                  fold_convert (sizetype, index),
                  size_exp);

  /* Sum the byte offset and the address of the data field.  */
  node = fold_build_pointer_plus (node, index);

  /* Finally, return

    *((&array->data) + index*size_exp)

  */
  return build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (node)), node);
}

/* Generate code to throw an ArrayStoreException if OBJECT is not assignable
   (at runtime) to an element of ARRAY.  A NOP_EXPR is returned if it can
   determine that no check is required. */

tree
build_java_arraystore_check (tree array, tree object)
{
  tree check, element_type, source;
  tree array_type_p = TREE_TYPE (array);
  tree object_type = TYPE_NAME (TREE_TYPE (TREE_TYPE (object)));

  if (! flag_verify_invocations)
    {
      /* With the new verifier, we don't track precise types.  FIXME:
         performance regression here.  */
      element_type = TYPE_NAME (object_type_node);
    }
  else
    {
      gcc_assert (is_array_type_p (array_type_p));

      /* Get the TYPE_DECL for ARRAY's element type. */
      element_type
        = TYPE_NAME (TREE_TYPE (TREE_TYPE (TREE_TYPE (array_type_p))));
    }

  gcc_assert (TREE_CODE (element_type) == TYPE_DECL
              && TREE_CODE (object_type) == TYPE_DECL);

  if (!flag_store_check)
    return build1 (NOP_EXPR, array_type_p, array);

  /* No check is needed if the element type is final.  Also check that
     element_type matches object_type, since in the bytecode
     compilation case element_type may be the actual element type of
     the array rather than its declared type.  However, if we're doing
     indirect dispatch, we can't do the `final' optimization.  */
  if (element_type == object_type
      && ! flag_indirect_dispatch
      && CLASS_FINAL (element_type))
    return build1 (NOP_EXPR, array_type_p, array);

  /* OBJECT might be wrapped by a SAVE_EXPR. */
  if (TREE_CODE (object) == SAVE_EXPR)
    source = TREE_OPERAND (object, 0);
  else
    source = object;

  /* Avoid the check if OBJECT was just loaded from the same array. */
  if (TREE_CODE (source) == ARRAY_REF)
    {
      tree target;
      source = TREE_OPERAND (source, 0); /* COMPONENT_REF. */
      source = TREE_OPERAND (source, 0); /* INDIRECT_REF. */
      source = TREE_OPERAND (source, 0); /* Source array's DECL or SAVE_EXPR. */
      if (TREE_CODE (source) == SAVE_EXPR)
        source = TREE_OPERAND (source, 0);

      target = array;
      if (TREE_CODE (target) == SAVE_EXPR)
        target = TREE_OPERAND (target, 0);

      if (source == target)
        return build1 (NOP_EXPR, array_type_p, array);
    }

  /* Build an invocation of _Jv_CheckArrayStore */
  check = build_call_nary (void_type_node,
                           build_address_of (soft_checkarraystore_node),
                           2, array, object);
  TREE_SIDE_EFFECTS (check) = 1;

  return check;
}

/* Makes sure that INDEXED_TYPE is appropriate. If not, make it from
   ARRAY_NODE. This function is used to retrieve something less vague than
   a pointer type when indexing the first dimension of something like [[<t>.
   May return a corrected type, if necessary, otherwise INDEXED_TYPE is
   returned unchanged.  */

static tree
build_java_check_indexed_type (tree array_node ATTRIBUTE_UNUSED,
                               tree indexed_type)
{
  /* We used to check to see if ARRAY_NODE really had array type.
     However, with the new verifier, this is not necessary, as we know
     that the object will be an array of the appropriate type.  */

  return indexed_type;
}

/* newarray triggers a call to _Jv_NewPrimArray. This function should be
   called with an integer code (the type of array to create), and the length
   of the array to create.  */

tree
build_newarray (int atype_value, tree length)
{
  tree type_arg;

  tree prim_type = decode_newarray_type (atype_value);
  tree type
    = build_java_array_type (prim_type,
                             host_integerp (length, 0) == INTEGER_CST
                             ? tree_low_cst (length, 0) : -1);

  /* Pass a reference to the primitive type class and save the runtime
     some work.  */
  type_arg = build_class_ref (prim_type);

  return build_call_nary (promote_type (type),
                          build_address_of (soft_newarray_node),
                          2, type_arg, length);
}

/* Generates anewarray from a given CLASS_TYPE. Gets from the stack the size
   of the dimension. */

tree
build_anewarray (tree class_type, tree length)
{
  tree type
    = build_java_array_type (class_type,
                             host_integerp (length, 0)
                             ? tree_low_cst (length, 0) : -1);

  return build_call_nary (promote_type (type),
                          build_address_of (soft_anewarray_node),
                          3,
                          length,
                          build_class_ref (class_type),
                          null_pointer_node);
}

/* Return a node that evaluates 'new TYPE[LENGTH]'. */

tree
build_new_array (tree type, tree length)
{
  if (JPRIMITIVE_TYPE_P (type))
    return build_newarray (encode_newarray_type (type), length);
  else
    return build_anewarray (TREE_TYPE (type), length);
}
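
/* Examples (illustrative only): for Java source `new int[10]' this path
   emits a call through soft_newarray_node (the _Jv_NewPrimArray entry point
   mentioned above) passing the primitive class for `int' and the length;
   for `new String[n]' it emits a call through soft_anewarray_node passing
   the length, a reference to the String class, and a final null argument,
   exactly as build_anewarray constructs it.  */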
1094
 
1095
/* Generates a call to _Jv_NewMultiArray. multianewarray expects a
1096
   class pointer, a number of dimensions and the matching number of
1097
   dimensions. The argument list is NULL terminated.  */
1098
 
1099
static void
1100
expand_java_multianewarray (tree class_type, int ndim)
1101
{
1102
  int i;
1103
  VEC(tree,gc) *args = NULL;
1104
 
1105
  VEC_safe_grow (tree, gc, args, 3 + ndim);
1106
 
1107
  VEC_replace (tree, args, 0, build_class_ref (class_type));
1108
  VEC_replace (tree, args, 1, build_int_cst (NULL_TREE, ndim));
1109
 
1110
  for(i = ndim - 1; i >= 0; i-- )
1111
    VEC_replace (tree, args, (unsigned)(2 + i), pop_value (int_type_node));
1112
 
1113
  VEC_replace (tree, args, 2 + ndim, null_pointer_node);
1114
 
1115
  push_value (build_call_vec (promote_type (class_type),
1116
                              build_address_of (soft_multianewarray_node),
1117
                              args));
1118
}
1119
 
1120
/*  ARRAY[INDEX] <- RHS. build_java_check_indexed_type makes sure that
    ARRAY is an array type. May expand some bound checking and NULL
    pointer checking. RHS_TYPE_NODE is the type we are going to store. In
    the case of CHAR/BYTE/BOOLEAN/SHORT, the type popped off the stack is
    an INT. In those cases, we make the conversion.

    If ARRAY is a reference type, the assignment is checked at run-time
    to make sure that the RHS can be assigned to the array element
    type. It is not necessary to generate this code if ARRAY is final.  */
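
/* Sketch of the expansion (illustrative, using Java-level names): for a
   reference-array store `a[i] = v' the statements emitted below amount to

       T *tmp = &a->data[i];         bounds/null checks happen here
       _Jv_CheckArrayStore (a, v);   only for reference element types
       *tmp = v;

   so the bounds check always precedes the store check, as described in the
   comment inside the function.  */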

static void
expand_java_arraystore (tree rhs_type_node)
{
  tree rhs_node    = pop_value ((INTEGRAL_TYPE_P (rhs_type_node)
                                 && TYPE_PRECISION (rhs_type_node) <= 32) ?
                                 int_type_node : rhs_type_node);
  tree index = pop_value (int_type_node);
  tree array_type, array, temp, access;

  /* If we're processing an `aaload' we might as well just pick
     `Object'.  */
  if (TREE_CODE (rhs_type_node) == POINTER_TYPE)
    {
      array_type = build_java_array_type (object_ptr_type_node, -1);
      rhs_type_node = object_ptr_type_node;
    }
  else
    array_type = build_java_array_type (rhs_type_node, -1);

  array = pop_value (array_type);
  array = build1 (NOP_EXPR, promote_type (array_type), array);

  rhs_type_node    = build_java_check_indexed_type (array, rhs_type_node);

  flush_quick_stack ();

  index = save_expr (index);
  array = save_expr (array);

  /* We want to perform the bounds check (done by
     build_java_arrayaccess) before the type check (done by
     build_java_arraystore_check).  So, we call build_java_arrayaccess
     -- which returns an ARRAY_REF lvalue -- and we then generate code
     to stash the address of that lvalue in a temp.  Then we call
     build_java_arraystore_check, and finally we generate a
     MODIFY_EXPR to set the array element.  */

  access = build_java_arrayaccess (array, rhs_type_node, index);
  temp = build_decl (input_location, VAR_DECL, NULL_TREE,
                     build_pointer_type (TREE_TYPE (access)));
  java_add_local_var (temp);
  java_add_stmt (build2 (MODIFY_EXPR, TREE_TYPE (temp),
                         temp,
                         build_fold_addr_expr (access)));

  if (TREE_CODE (rhs_type_node) == POINTER_TYPE)
    {
      tree check = build_java_arraystore_check (array, rhs_node);
      java_add_stmt (check);
    }

  java_add_stmt (build2 (MODIFY_EXPR, TREE_TYPE (access),
                         build1 (INDIRECT_REF, TREE_TYPE (access), temp),
                         rhs_node));
}

/* Expand the evaluation of ARRAY[INDEX]. build_java_check_indexed_type makes
   sure that LHS is an array type. May expand some bound checking and NULL
   pointer checking.
   LHS_TYPE_NODE is the type of ARRAY[INDEX]. But in the case of CHAR/BYTE/
   BOOLEAN/SHORT, we push a promoted type back to the stack.
*/

static void
expand_java_arrayload (tree lhs_type_node)
{
  tree load_node;
  tree index_node = pop_value (int_type_node);
  tree array_type;
  tree array_node;

  /* If we're processing an `aaload' we might as well just pick
     `Object'.  */
  if (TREE_CODE (lhs_type_node) == POINTER_TYPE)
    {
      array_type = build_java_array_type (object_ptr_type_node, -1);
      lhs_type_node = object_ptr_type_node;
    }
  else
    array_type = build_java_array_type (lhs_type_node, -1);
  array_node = pop_value (array_type);
  array_node = build1 (NOP_EXPR, promote_type (array_type), array_node);

  index_node = save_expr (index_node);
  array_node = save_expr (array_node);

  lhs_type_node = build_java_check_indexed_type (array_node,
                                                 lhs_type_node);
  load_node = build_java_arrayaccess (array_node,
                                      lhs_type_node,
                                      index_node);
  if (INTEGRAL_TYPE_P (lhs_type_node) && TYPE_PRECISION (lhs_type_node) <= 32)
    load_node = fold_build1 (NOP_EXPR, int_type_node, load_node);
  push_value (load_node);
}

/* Expands .length. Makes sure that we deal with an array and may expand
   a NULL check on the array object.  */

static void
expand_java_array_length (void)
{
  tree array  = pop_value (ptr_type_node);
  tree length = build_java_array_length_access (array);

  push_value (length);
}

/* Emit code for the call to _Jv_Monitor{Enter,Exit}. CALL can be
   either soft_monitorenter_node or soft_monitorexit_node.  */

static tree
build_java_monitor (tree call, tree object)
{
  return build_call_nary (void_type_node,
                          build_address_of (call),
                          1, object);
}

/* Emit code for one of the PUSHC instructions. */

static void
expand_java_pushc (int ival, tree type)
{
  tree value;
  if (type == ptr_type_node && ival == 0)
    value = null_pointer_node;
  else if (type == int_type_node || type == long_type_node)
    value = build_int_cst (type, ival);
  else if (type == float_type_node || type == double_type_node)
    {
      REAL_VALUE_TYPE x;
      REAL_VALUE_FROM_INT (x, ival, 0, TYPE_MODE (type));
      value = build_real (type, x);
    }
  else
    gcc_unreachable ();

  push_value (value);
}

static void
expand_java_return (tree type)
{
  if (type == void_type_node)
    java_add_stmt (build1 (RETURN_EXPR, void_type_node, NULL));
  else
    {
      tree retval = pop_value (type);
      tree res = DECL_RESULT (current_function_decl);
      retval = build2 (MODIFY_EXPR, TREE_TYPE (res), res, retval);

      /* Handle the situation where the native integer type is smaller
         than the JVM integer. It can happen for many cross compilers.
         The whole if expression just goes away if INT_TYPE_SIZE < 32
         is false. */
      if (INT_TYPE_SIZE < 32
          && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (res)))
              < GET_MODE_SIZE (TYPE_MODE (type))))
        retval = build1(NOP_EXPR, TREE_TYPE(res), retval);

      TREE_SIDE_EFFECTS (retval) = 1;
      java_add_stmt (build1 (RETURN_EXPR, void_type_node, retval));
    }
}

static void
expand_load_internal (int index, tree type, int pc)
{
  tree copy;
  tree var = find_local_variable (index, type, pc);

  /* Now VAR is the VAR_DECL (or PARM_DECL) that we are going to push
     on the stack.  If there is an assignment to this VAR_DECL between
     the stack push and the use, then the wrong code could be
     generated.  To avoid this we create a new local and copy our
     value into it.  Then we push this new local on the stack.
     Hopefully this all gets optimized out.  */
  copy = build_decl (input_location, VAR_DECL, NULL_TREE, type);
  if ((INTEGRAL_TYPE_P (type) || POINTER_TYPE_P (type))
      && TREE_TYPE (copy) != TREE_TYPE (var))
    var = convert (type, var);
  java_add_local_var (copy);
  java_add_stmt (build2 (MODIFY_EXPR, TREE_TYPE (var), copy, var));

  push_value (copy);
}

tree
build_address_of (tree value)
{
  return build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (value)), value);
}

bool
class_has_finalize_method (tree type)
{
  tree super = CLASSTYPE_SUPER (type);

  if (super == NULL_TREE)
    return false;       /* Every class with a real finalizer inherits   */
                        /* from java.lang.Object.                       */
  else
    return HAS_FINALIZER_P (type) || class_has_finalize_method (super);
}

tree
java_create_object (tree type)
{
  tree alloc_node = (class_has_finalize_method (type)
                     ? alloc_object_node
                     : alloc_no_finalizer_node);

  return build_call_nary (promote_type (type),
                          build_address_of (alloc_node),
                          1, build_class_ref (type));
}

static void
expand_java_NEW (tree type)
{
  tree alloc_node;

  alloc_node = (class_has_finalize_method (type) ? alloc_object_node
                                                 : alloc_no_finalizer_node);
  if (! CLASS_LOADED_P (type))
    load_class (type, 1);
  safe_layout_class (type);
  push_value (build_call_nary (promote_type (type),
                               build_address_of (alloc_node),
                               1, build_class_ref (type)));
}

/* This returns an expression which will extract the class of an
   object.  */

tree
build_get_class (tree value)
{
  tree class_field = lookup_field (&dtable_type, get_identifier ("class"));
  tree vtable_field = lookup_field (&object_type_node,
                                    get_identifier ("vtable"));
  tree tmp = build3 (COMPONENT_REF, dtable_ptr_type,
                     build_java_indirect_ref (object_type_node, value,
                                              flag_check_references),
                     vtable_field, NULL_TREE);
  return build3 (COMPONENT_REF, class_ptr_type,
                 build1 (INDIRECT_REF, dtable_type, tmp),
                 class_field, NULL_TREE);
}

/* This builds the tree representation of the `instanceof' operator.
   It tries various tricks to optimize this in cases where types are
   known.  */

tree
build_instanceof (tree value, tree type)
{
  tree expr;
  tree itype = TREE_TYPE (TREE_TYPE (soft_instanceof_node));
  tree valtype = TREE_TYPE (TREE_TYPE (value));
  tree valclass = TYPE_NAME (valtype);
  tree klass;

  /* When compiling from bytecode, we need to ensure that TYPE has
     been loaded.  */
  if (CLASS_P (type) && ! CLASS_LOADED_P (type))
    {
      load_class (type, 1);
      safe_layout_class (type);
      if (! TYPE_SIZE (type) || TREE_CODE (TYPE_SIZE (type)) == ERROR_MARK)
        return error_mark_node;
    }
  klass = TYPE_NAME (type);

  if (type == object_type_node || inherits_from_p (valtype, type))
    {
      /* Anything except `null' is an instance of Object.  Likewise,
         if the object is known to be an instance of the class, then
         we only need to check for `null'.  */
      expr = build2 (NE_EXPR, itype, value, null_pointer_node);
    }
  else if (flag_verify_invocations
           && ! TYPE_ARRAY_P (type)
           && ! TYPE_ARRAY_P (valtype)
           && DECL_P (klass) && DECL_P (valclass)
           && ! CLASS_INTERFACE (valclass)
           && ! CLASS_INTERFACE (klass)
           && ! inherits_from_p (type, valtype)
           && (CLASS_FINAL (klass)
               || ! inherits_from_p (valtype, type)))
    {
      /* The classes are from different branches of the derivation
         tree, so we immediately know the answer.  */
      expr = boolean_false_node;
    }
  else if (DECL_P (klass) && CLASS_FINAL (klass))
    {
      tree save = save_expr (value);
      expr = build3 (COND_EXPR, itype,
                     build2 (NE_EXPR, boolean_type_node,
                             save, null_pointer_node),
                     build2 (EQ_EXPR, itype,
                             build_get_class (save),
                             build_class_ref (type)),
                     boolean_false_node);
    }
  else
    {
      expr = build_call_nary (itype,
                              build_address_of (soft_instanceof_node),
                              2, value, build_class_ref (type));
    }
  TREE_SIDE_EFFECTS (expr) = TREE_SIDE_EFFECTS (value);
  return expr;
}
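
/* Worked example (illustrative): for `x instanceof String', where String is
   a final class, the CLASS_FINAL branch above produces approximately

       x != null ? (x->vtable->class == <reference to String's class>) : false

   i.e. a single class-pointer comparison via build_get_class instead of a
   call through soft_instanceof_node.  Non-final or interface targets that
   cannot be decided statically still fall back to the runtime call.  */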

static void
expand_java_INSTANCEOF (tree type)
{
  tree value = pop_value (object_ptr_type_node);
  value = build_instanceof (value, type);
  push_value (value);
}

static void
expand_java_CHECKCAST (tree type)
{
  tree value = pop_value (ptr_type_node);
  value = build_call_nary (promote_type (type),
                           build_address_of (soft_checkcast_node),
                           2, build_class_ref (type), value);
  push_value (value);
}

static void
expand_iinc (unsigned int local_var_index, int ival, int pc)
{
  tree local_var, res;
  tree constant_value;

  flush_quick_stack ();
  local_var = find_local_variable (local_var_index, int_type_node, pc);
  constant_value = build_int_cst (NULL_TREE, ival);
  res = fold_build2 (PLUS_EXPR, int_type_node, local_var, constant_value);
  java_add_stmt (build2 (MODIFY_EXPR, TREE_TYPE (local_var), local_var, res));
}


tree
build_java_soft_divmod (enum tree_code op, tree type, tree op1, tree op2)
{
  tree call = NULL;
  tree arg1 = convert (type, op1);
  tree arg2 = convert (type, op2);

  if (type == int_type_node)
    {
      switch (op)
        {
        case TRUNC_DIV_EXPR:
          call = soft_idiv_node;
          break;
        case TRUNC_MOD_EXPR:
          call = soft_irem_node;
          break;
        default:
          break;
        }
    }
  else if (type == long_type_node)
    {
      switch (op)
        {
        case TRUNC_DIV_EXPR:
          call = soft_ldiv_node;
          break;
        case TRUNC_MOD_EXPR:
          call = soft_lrem_node;
          break;
        default:
          break;
        }
    }

  gcc_assert (call);
  call = build_call_nary (type, build_address_of (call), 2, arg1, arg2);
  return call;
}

tree
build_java_binop (enum tree_code op, tree type, tree arg1, tree arg2)
{
  tree mask;
  switch (op)
    {
    case URSHIFT_EXPR:
      {
        tree u_type = unsigned_type_for (type);
        arg1 = convert (u_type, arg1);
        arg1 = build_java_binop (RSHIFT_EXPR, u_type, arg1, arg2);
        return convert (type, arg1);
      }
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
      mask = build_int_cst (NULL_TREE,
                            TYPE_PRECISION (TREE_TYPE (arg1)) - 1);
      arg2 = fold_build2 (BIT_AND_EXPR, int_type_node, arg2, mask);
      break;

    case COMPARE_L_EXPR:  /* arg1 > arg2 ?  1 : arg1 == arg2 ? 0 : -1 */
    case COMPARE_G_EXPR:  /* arg1 < arg2 ? -1 : arg1 == arg2 ? 0 :  1 */
      arg1 = save_expr (arg1);  arg2 = save_expr (arg2);
      {
        tree ifexp1 = fold_build2 (op == COMPARE_L_EXPR ? GT_EXPR : LT_EXPR,
                                   boolean_type_node, arg1, arg2);
        tree ifexp2 = fold_build2 (EQ_EXPR, boolean_type_node, arg1, arg2);
        tree second_compare = fold_build3 (COND_EXPR, int_type_node,
                                           ifexp2, integer_zero_node,
                                           op == COMPARE_L_EXPR
                                           ? integer_minus_one_node
                                           : integer_one_node);
        return fold_build3 (COND_EXPR, int_type_node, ifexp1,
                            op == COMPARE_L_EXPR ? integer_one_node
                            : integer_minus_one_node,
                            second_compare);
      }
    case COMPARE_EXPR:
      arg1 = save_expr (arg1);  arg2 = save_expr (arg2);
      {
        tree ifexp1 = fold_build2 (LT_EXPR, boolean_type_node, arg1, arg2);
        tree ifexp2 = fold_build2 (GT_EXPR, boolean_type_node, arg1, arg2);
        tree second_compare = fold_build3 (COND_EXPR, int_type_node,
                                           ifexp2, integer_one_node,
                                           integer_zero_node);
        return fold_build3 (COND_EXPR, int_type_node,
                            ifexp1, integer_minus_one_node, second_compare);
      }
    case TRUNC_DIV_EXPR:
    case TRUNC_MOD_EXPR:
      if (TREE_CODE (type) == REAL_TYPE
          && op == TRUNC_MOD_EXPR)
        {
          tree call;
          if (type != double_type_node)
            {
              arg1 = convert (double_type_node, arg1);
              arg2 = convert (double_type_node, arg2);
            }
          call = build_call_nary (double_type_node,
                                  build_address_of (soft_fmod_node),
                                  2, arg1, arg2);
          if (type != double_type_node)
            call = convert (type, call);
          return call;
        }

      if (TREE_CODE (type) == INTEGER_TYPE
          && flag_use_divide_subroutine
          && ! flag_syntax_only)
        return build_java_soft_divmod (op, type, arg1, arg2);

      break;
    default:  ;
    }
  return fold_build2 (op, type, arg1, arg2);
}
1597
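/* Scalar view of the COND_EXPR nests built for COMPARE_L_EXPR and
   COMPARE_G_EXPR above: the two differ only in the value produced when the
   operands are unordered (NaN), mirroring the *cmpl / *cmpg bytecodes.
   These are illustrative sketches with made-up names, not runtime code.  */
static jint
compare_g_sketch (double a, double b)
{
  /* arg1 < arg2 ? -1 : arg1 == arg2 ? 0 : 1; NaN falls through to 1.  */
  return a < b ? -1 : (a == b ? 0 : 1);
}

static jint
compare_l_sketch (double a, double b)
{
  /* arg1 > arg2 ?  1 : arg1 == arg2 ? 0 : -1; NaN falls through to -1.  */
  return a > b ? 1 : (a == b ? 0 : -1);
}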
 
1598
static void
1599
expand_java_binop (tree type, enum tree_code op)
1600
{
1601
  tree larg, rarg;
1602
  tree ltype = type;
1603
  tree rtype = type;
1604
  switch (op)
1605
    {
1606
    case LSHIFT_EXPR:
1607
    case RSHIFT_EXPR:
1608
    case URSHIFT_EXPR:
1609
      rtype = int_type_node;
1610
      rarg = pop_value (rtype);
1611
      break;
1612
    default:
1613
      rarg = pop_value (rtype);
1614
    }
1615
  larg = pop_value (ltype);
1616
  push_value (build_java_binop (op, type, larg, rarg));
1617
}
1618
 
1619
/* Lookup the field named NAME in *TYPEP or its super classes.
1620
   If not found, return NULL_TREE.
1621
   (If the *TYPEP is not found, or if the field reference is
1622
   ambiguous, return error_mark_node.)
1623
   If found, return the FIELD_DECL, and set *TYPEP to the
1624
   class containing the field. */
1625
 
1626
tree
1627
lookup_field (tree *typep, tree name)
1628
{
1629
  if (CLASS_P (*typep) && !CLASS_LOADED_P (*typep))
1630
    {
1631
      load_class (*typep, 1);
1632
      safe_layout_class (*typep);
1633
      if (!TYPE_SIZE (*typep) || TREE_CODE (TYPE_SIZE (*typep)) == ERROR_MARK)
1634
        return error_mark_node;
1635
    }
1636
  do
1637
    {
1638
      tree field, binfo, base_binfo;
1639
      tree save_field;
1640
      int i;
1641
 
1642
      for (field = TYPE_FIELDS (*typep); field; field = DECL_CHAIN (field))
1643
        if (DECL_NAME (field) == name)
1644
          return field;
1645
 
1646
      /* Process implemented interfaces. */
1647
      save_field = NULL_TREE;
1648
      for (binfo = TYPE_BINFO (*typep), i = 0;
1649
           BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
1650
        {
1651
          tree t = BINFO_TYPE (base_binfo);
1652
          if ((field = lookup_field (&t, name)))
1653
            {
1654
              if (save_field == field)
1655
                continue;
1656
              if (save_field == NULL_TREE)
1657
                save_field = field;
1658
              else
1659
                {
1660
                  tree i1 = DECL_CONTEXT (save_field);
1661
                  tree i2 = DECL_CONTEXT (field);
1662
                  error ("reference %qs is ambiguous: appears in interface %qs and interface %qs",
1663
                         IDENTIFIER_POINTER (name),
1664
                         IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (i1))),
1665
                         IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (i2))));
1666
                  return error_mark_node;
1667
                }
1668
            }
1669
        }
1670
 
1671
      if (save_field != NULL_TREE)
1672
        return save_field;
1673
 
1674
      *typep = CLASSTYPE_SUPER (*typep);
1675
    } while (*typep);
1676
  return NULL_TREE;
1677
}
1678
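/* Example of the ambiguity diagnosed above, in Java terms: if interfaces A
   and B each declare a field named `x' and class C implements both, an
   unqualified reference to `x' through C is found in both interfaces, so
   lookup_field emits the "reference is ambiguous" error and returns
   error_mark_node.  */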
 
1679
/* Look up the field named NAME in object SELF_VALUE,
1680
   which has class SELF_CLASS (a non-handle RECORD_TYPE).
1681
   SELF_VALUE is NULL_TREE if looking for a static field. */
1682
 
1683
tree
1684
build_field_ref (tree self_value, tree self_class, tree name)
1685
{
1686
  tree base_class = self_class;
1687
  tree field_decl = lookup_field (&base_class, name);
1688
  if (field_decl == NULL_TREE)
1689
    {
1690
      error ("field %qs not found", IDENTIFIER_POINTER (name));
1691
      return error_mark_node;
1692
    }
1693
  if (self_value == NULL_TREE)
1694
    {
1695
      return build_static_field_ref (field_decl);
1696
    }
1697
  else
1698
    {
1699
      tree base_type = promote_type (base_class);
1700
 
1701
      /* CHECK is true if self_value is not the this pointer.  */
1702
      int check = (! (DECL_P (self_value)
1703
                      && DECL_NAME (self_value) == this_identifier_node));
1704
 
1705
      /* Determine whether a field offset from NULL will lie within
1706
         Page 0: this is necessary on those GNU/Linux/BSD systems that
1707
         trap SEGV to generate NullPointerExceptions.
1708
 
1709
         We assume that Page 0 will be mapped with NOPERM, and that
1710
         memory may be allocated from any other page, so only field
1711
         offsets < pagesize are guaranteed to trap.  We also assume
1712
         the smallest page size we'll encounter is 4k bytes.  */
1713
      if (! flag_syntax_only && check && ! flag_check_references
1714
          && ! flag_indirect_dispatch)
1715
        {
1716
          tree field_offset = byte_position (field_decl);
1717
          if (! page_size)
1718
            page_size = size_int (4096);
1719
          check = ! INT_CST_LT_UNSIGNED (field_offset, page_size);
1720
        }
1721
 
1722
      if (base_type != TREE_TYPE (self_value))
1723
        self_value = fold_build1 (NOP_EXPR, base_type, self_value);
1724
      if (! flag_syntax_only && flag_indirect_dispatch)
1725
        {
1726
          tree otable_index
1727
            = build_int_cst (NULL_TREE, get_symbol_table_index
1728
                             (field_decl, NULL_TREE,
1729
                              &TYPE_OTABLE_METHODS (output_class)));
1730
          tree field_offset
1731
            = build4 (ARRAY_REF, integer_type_node,
1732
                      TYPE_OTABLE_DECL (output_class), otable_index,
1733
                      NULL_TREE, NULL_TREE);
1734
          tree address;
1735
 
1736
          if (DECL_CONTEXT (field_decl) != output_class)
1737
            field_offset
1738
              = build3 (COND_EXPR, TREE_TYPE (field_offset),
1739
                        build2 (EQ_EXPR, boolean_type_node,
1740
                                field_offset, integer_zero_node),
1741
                        build_call_nary (void_type_node,
1742
                                         build_address_of (soft_nosuchfield_node),
1743
                                         1, otable_index),
1744
                        field_offset);
1745
 
1746
          self_value = java_check_reference (self_value, check);
1747
          address = fold_build_pointer_plus (self_value, field_offset);
1748
          address = fold_convert (build_pointer_type (TREE_TYPE (field_decl)),
1749
                                  address);
1750
          return fold_build1 (INDIRECT_REF, TREE_TYPE (field_decl), address);
1751
        }
1752
 
1753
      self_value = build_java_indirect_ref (TREE_TYPE (TREE_TYPE (self_value)),
1754
                                            self_value, check);
1755
      return fold_build3 (COMPONENT_REF, TREE_TYPE (field_decl),
1756
                          self_value, field_decl, NULL_TREE);
1757
    }
1758
}
1759
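/* Worked example of the page-0 reasoning in build_field_ref, using
   hypothetical offsets: a field at offset 8 accessed through a null
   reference touches address 8, inside the unmapped first page, so the SEGV
   handler can raise the NullPointerException and CHECK is cleared; a field
   at offset 8192 carries no such guarantee, so the explicit null check is
   kept.  */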
 
1760
tree
1761
lookup_label (int pc)
1762
{
1763
  tree name;
1764
  char buf[32];
1765
  if (pc > highest_label_pc_this_method)
1766
    highest_label_pc_this_method = pc;
1767
  targetm.asm_out.generate_internal_label (buf, "LJpc=",
1768
                                           start_label_pc_this_method + pc);
1769
  name = get_identifier (buf);
1770
  if (IDENTIFIER_LOCAL_VALUE (name))
1771
    return IDENTIFIER_LOCAL_VALUE (name);
1772
  else
1773
    {
1774
      /* The type of the address of a label is return_address_type_node. */
1775
      tree decl = create_label_decl (name);
1776
      return pushdecl (decl);
1777
    }
1778
}
1779
 
1780
/* Generate a unique name for labels used by loops, switches, and
1781
   try-catch-finally blocks, or for temporary variables.  */
1782
 
1783
tree
1784
generate_name (void)
1785
{
1786
  static int l_number = 0;
1787
  char buff [32];
1788
  targetm.asm_out.generate_internal_label (buff, "LJv", l_number);
1789
  l_number++;
1790
  return get_identifier (buff);
1791
}
1792
 
1793
tree
1794
create_label_decl (tree name)
1795
{
1796
  tree decl;
1797
  decl = build_decl (input_location, LABEL_DECL, name,
1798
                     TREE_TYPE (return_address_type_node));
1799
  DECL_CONTEXT (decl) = current_function_decl;
1800
  DECL_IGNORED_P (decl) = 1;
1801
  return decl;
1802
}
1803
 
1804
/* This maps a bytecode offset (PC) to various flags.  */
1805
char *instruction_bits;
1806
 
1807
/* This is a vector of type states for the current method.  It is
1808
   indexed by PC.  Each element is a tree vector holding the type
1809
   state at that PC.  We only note type states at basic block
1810
   boundaries.  */
1811
VEC(tree, gc) *type_states;
1812
 
1813
static void
1814
note_label (int current_pc ATTRIBUTE_UNUSED, int target_pc)
1815
{
1816
  lookup_label (target_pc);
1817
  instruction_bits [target_pc] |= BCODE_JUMP_TARGET;
1818
}
1819
 
1820
/* Emit code to jump to TARGET_PC if VALUE1 CONDITION VALUE2,
1821
   where CONDITION is one of the compare operators. */
1822
 
1823
static void
1824
expand_compare (enum tree_code condition, tree value1, tree value2,
1825
                int target_pc)
1826
{
1827
  tree target = lookup_label (target_pc);
1828
  tree cond = fold_build2 (condition, boolean_type_node, value1, value2);
1829
  java_add_stmt
1830
    (build3 (COND_EXPR, void_type_node, java_truthvalue_conversion (cond),
1831
             build1 (GOTO_EXPR, void_type_node, target),
1832
             build_java_empty_stmt ()));
1833
}
1834
 
1835
/* Emit code for a TEST-type opcode. */
1836
 
1837
static void
1838
expand_test (enum tree_code condition, tree type, int target_pc)
1839
{
1840
  tree value1, value2;
1841
  flush_quick_stack ();
1842
  value1 = pop_value (type);
1843
  value2 = (type == ptr_type_node) ? null_pointer_node : integer_zero_node;
1844
  expand_compare (condition, value1, value2, target_pc);
1845
}
1846
 
1847
/* Emit code for a COND-type opcode. */
1848
 
1849
static void
1850
expand_cond (enum tree_code condition, tree type, int target_pc)
1851
{
1852
  tree value1, value2;
1853
  flush_quick_stack ();
1854
  /* note: pop values in opposite order */
1855
  value2 = pop_value (type);
1856
  value1 = pop_value (type);
1857
  /* Maybe should check value1 and value2 for type compatibility ??? */
1858
  expand_compare (condition, value1, value2, target_pc);
1859
}
1860
 
1861
static void
1862
expand_java_goto (int target_pc)
1863
{
1864
  tree target_label = lookup_label (target_pc);
1865
  flush_quick_stack ();
1866
  java_add_stmt (build1 (GOTO_EXPR, void_type_node, target_label));
1867
}
1868
 
1869
static tree
1870
expand_java_switch (tree selector, int default_pc)
1871
{
1872
  tree switch_expr, x;
1873
 
1874
  flush_quick_stack ();
1875
  switch_expr = build3 (SWITCH_EXPR, TREE_TYPE (selector), selector,
1876
                        NULL_TREE, NULL_TREE);
1877
  java_add_stmt (switch_expr);
1878
 
1879
  x = build_case_label (NULL_TREE, NULL_TREE,
1880
                        create_artificial_label (input_location));
1881
  append_to_statement_list (x, &SWITCH_BODY (switch_expr));
1882
 
1883
  x = build1 (GOTO_EXPR, void_type_node, lookup_label (default_pc));
1884
  append_to_statement_list (x, &SWITCH_BODY (switch_expr));
1885
 
1886
  return switch_expr;
1887
}
1888
 
1889
static void
1890
expand_java_add_case (tree switch_expr, int match, int target_pc)
1891
{
1892
  tree value, x;
1893
 
1894
  value = build_int_cst (TREE_TYPE (switch_expr), match);
1895
 
1896
  x = build_case_label (value, NULL_TREE,
1897
                        create_artificial_label (input_location));
1898
  append_to_statement_list (x, &SWITCH_BODY (switch_expr));
1899
 
1900
  x = build1 (GOTO_EXPR, void_type_node, lookup_label (target_pc));
1901
  append_to_statement_list (x, &SWITCH_BODY (switch_expr));
1902
}
1903
 
1904
static VEC(tree,gc) *
1905
pop_arguments (tree method_type)
1906
{
1907
  function_args_iterator fnai;
1908
  tree type;
1909
  VEC(tree,gc) *args = NULL;
1910
  int arity;
1911
 
1912
  FOREACH_FUNCTION_ARGS (method_type, type, fnai)
1913
    {
1914
      /* XXX: leaky abstraction.  */
1915
      if (type == void_type_node)
1916
        break;
1917
 
1918
      VEC_safe_push (tree, gc, args, type);
1919
    }
1920
 
1921
  arity = VEC_length (tree, args);
1922
 
1923
  while (arity--)
1924
    {
1925
      tree arg = pop_value (VEC_index (tree, args, arity));
1926
 
1927
      /* We simply cast each argument to its proper type.  This is
1928
         needed since we lose type information coming out of the
1929
         verifier.  We also have to do this when we pop an integer
1930
         type that must be promoted for the function call.  */
1931
      if (TREE_CODE (type) == POINTER_TYPE)
1932
        arg = build1 (NOP_EXPR, type, arg);
1933
      else if (targetm.calls.promote_prototypes (type)
1934
               && TYPE_PRECISION (type) < TYPE_PRECISION (integer_type_node)
1935
               && INTEGRAL_TYPE_P (type))
1936
        arg = convert (integer_type_node, arg);
1937
 
1938
      VEC_replace (tree, args, arity, arg);
1939
    }
1940
 
1941
  return args;
1942
}
1943
 
1944
/* Attach to PTR (a block) the declaration found in ENTRY. */
1945
 
1946
int
1947
attach_init_test_initialization_flags (void **entry, void *ptr)
1948
{
1949
  tree block = (tree)ptr;
1950
  struct treetreehash_entry *ite = (struct treetreehash_entry *) *entry;
1951
 
1952
  if (block != error_mark_node)
1953
    {
1954
      if (TREE_CODE (block) == BIND_EXPR)
1955
        {
1956
          tree body = BIND_EXPR_BODY (block);
1957
          DECL_CHAIN (ite->value) = BIND_EXPR_VARS (block);
1958
          BIND_EXPR_VARS (block) = ite->value;
1959
          body = build2 (COMPOUND_EXPR, void_type_node,
1960
                         build1 (DECL_EXPR, void_type_node, ite->value), body);
1961
          BIND_EXPR_BODY (block) = body;
1962
        }
1963
      else
1964
        {
1965
          tree body = BLOCK_SUBBLOCKS (block);
1966
          TREE_CHAIN (ite->value) = BLOCK_EXPR_DECLS (block);
1967
          BLOCK_EXPR_DECLS (block) = ite->value;
1968
          body = build2 (COMPOUND_EXPR, void_type_node,
1969
                         build1 (DECL_EXPR, void_type_node, ite->value), body);
1970
          BLOCK_SUBBLOCKS (block) = body;
1971
        }
1972
 
1973
    }
1974
  return true;
1975
}
1976
 
1977
/* Build an expression to initialize the class CLAS.
1978
   if EXPR is non-NULL, returns an expression to first call the initializer
1979
   (if it is needed) and then calls EXPR. */
1980
 
1981
tree
1982
build_class_init (tree clas, tree expr)
1983
{
1984
  tree init;
1985
 
1986
  /* An optimization: if CLAS is a superclass of the class we're
1987
     compiling, we don't need to initialize it.  However, if CLAS is
1988
     an interface, it won't necessarily be initialized, even if we
1989
     implement it.  */
1990
  if ((! CLASS_INTERFACE (TYPE_NAME (clas))
1991
       && inherits_from_p (current_class, clas))
1992
      || current_class == clas)
1993
    return expr;
1994
 
1995
  if (always_initialize_class_p)
1996
    {
1997
      init = build_call_nary (void_type_node,
1998
                              build_address_of (soft_initclass_node),
1999
                              1, build_class_ref (clas));
2000
      TREE_SIDE_EFFECTS (init) = 1;
2001
    }
2002
  else
2003
    {
2004
      tree *init_test_decl;
2005
      tree decl;
2006
      init_test_decl = java_treetreehash_new
2007
        (DECL_FUNCTION_INIT_TEST_TABLE (current_function_decl), clas);
2008
 
2009
      if (*init_test_decl == NULL)
2010
        {
2011
          /* Build a declaration and mark it as a flag used to track
2012
             static class initializations. */
2013
          decl = build_decl (input_location, VAR_DECL, NULL_TREE,
2014
                             boolean_type_node);
2015
          MAYBE_CREATE_VAR_LANG_DECL_SPECIFIC (decl);
2016
          DECL_CONTEXT (decl) = current_function_decl;
2017
          DECL_INITIAL (decl) = boolean_false_node;
2018
          /* Don't emit any symbolic debugging info for this decl.  */
2019
          DECL_IGNORED_P (decl) = 1;
2020
          *init_test_decl = decl;
2021
        }
2022
 
2023
      init = build_call_nary (void_type_node,
2024
                              build_address_of (soft_initclass_node),
2025
                              1, build_class_ref (clas));
2026
      TREE_SIDE_EFFECTS (init) = 1;
2027
      init = build3 (COND_EXPR, void_type_node,
2028
                     build2 (EQ_EXPR, boolean_type_node,
2029
                             *init_test_decl, boolean_false_node),
2030
                     init, integer_zero_node);
2031
      TREE_SIDE_EFFECTS (init) = 1;
2032
      init = build2 (COMPOUND_EXPR, TREE_TYPE (expr), init,
2033
                     build2 (MODIFY_EXPR, boolean_type_node,
2034
                             *init_test_decl, boolean_true_node));
2035
      TREE_SIDE_EFFECTS (init) = 1;
2036
    }
2037
 
2038
  if (expr != NULL_TREE)
2039
    {
2040
      expr = build2 (COMPOUND_EXPR, TREE_TYPE (expr), init, expr);
2041
      TREE_SIDE_EFFECTS (expr) = 1;
2042
      return expr;
2043
    }
2044
  return init;
2045
}
2046
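/* For the common (not always_initialize_class_p) case, the expression
   assembled above corresponds roughly to this pseudo-C, where `initialized$'
   stands for the per-function boolean flag created here and `initclass' for
   the routine behind soft_initclass_node:

     if (initialized$ == 0)
       initclass (&CLAS);
     initialized$ = 1;
     EXPR;  */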
 
2047
 
2048
 
2049
/* Rewrite expensive calls that require stack unwinding at runtime to
2050
   cheaper alternatives.  The logic here performs these
2051
   transformations:
2052
 
2053
   java.lang.Class.forName("foo") -> java.lang.Class.forName("foo", class$)
2054
   java.lang.Class.getClassLoader() -> java.lang.Class.getClassLoader(class$)
2055
 
2056
*/
2057
 
2058
typedef struct
2059
{
2060
  const char *classname;
2061
  const char *method;
2062
  const char *signature;
2063
  const char *new_classname;
2064
  const char *new_signature;
2065
  int flags;
2066
  void (*rewrite_arglist) (VEC(tree,gc) **);
2067
} rewrite_rule;
2068
 
2069
/* Add __builtin_return_address(0) to the end of an arglist.  */
2070
 
2071
 
2072
static void
2073
rewrite_arglist_getcaller (VEC(tree,gc) **arglist)
2074
{
2075
  tree retaddr
2076
    = build_call_expr (builtin_decl_explicit (BUILT_IN_RETURN_ADDRESS),
2077
                       1, integer_zero_node);
2078
 
2079
  DECL_UNINLINABLE (current_function_decl) = 1;
2080
 
2081
  VEC_safe_push (tree, gc, *arglist, retaddr);
2082
}
2083
 
2084
/* Add this.class to the end of an arglist.  */
2085
 
2086
static void
2087
rewrite_arglist_getclass (VEC(tree,gc) **arglist)
2088
{
2089
  VEC_safe_push (tree, gc, *arglist, build_class_ref (output_class));
2090
}
2091
 
2092
static rewrite_rule rules[] =
2093
  {{"java.lang.Class", "getClassLoader", "()Ljava/lang/ClassLoader;",
2094
    "java.lang.Class", "(Ljava/lang/Class;)Ljava/lang/ClassLoader;",
2095
    ACC_FINAL|ACC_PRIVATE, rewrite_arglist_getclass},
2096
 
2097
   {"java.lang.Class", "forName", "(Ljava/lang/String;)Ljava/lang/Class;",
2098
    "java.lang.Class", "(Ljava/lang/String;Ljava/lang/Class;)Ljava/lang/Class;",
2099
    ACC_FINAL|ACC_PRIVATE|ACC_STATIC, rewrite_arglist_getclass},
2100
 
2101
   {"gnu.classpath.VMStackWalker", "getCallingClass", "()Ljava/lang/Class;",
2102
    "gnu.classpath.VMStackWalker", "(Lgnu/gcj/RawData;)Ljava/lang/Class;",
2103
    ACC_FINAL|ACC_PRIVATE|ACC_STATIC, rewrite_arglist_getcaller},
2104
 
2105
   {"gnu.classpath.VMStackWalker", "getCallingClassLoader",
2106
    "()Ljava/lang/ClassLoader;",
2107
    "gnu.classpath.VMStackWalker", "(Lgnu/gcj/RawData;)Ljava/lang/ClassLoader;",
2108
    ACC_FINAL|ACC_PRIVATE|ACC_STATIC, rewrite_arglist_getcaller},
2109
 
2110
   {"gnu.java.lang.VMCPStringBuilder", "toString", "([CII)Ljava/lang/String;",
2111
    "java.lang.String", "([CII)Ljava/lang/String;",
2112
    ACC_FINAL|ACC_PRIVATE|ACC_STATIC, NULL},
2113
 
2114
   {NULL, NULL, NULL, NULL, NULL, 0, NULL}};
2115
 
2116
/* True if this method is special, i.e. it's a private method that
2117
   should be exported from a DSO.  */
2118
 
2119
bool
2120
special_method_p (tree candidate_method)
2121
{
2122
  tree context = DECL_NAME (TYPE_NAME (DECL_CONTEXT (candidate_method)));
2123
  tree method = DECL_NAME (candidate_method);
2124
  rewrite_rule *p;
2125
 
2126
  for (p = rules; p->classname; p++)
2127
    {
2128
      if (get_identifier (p->classname) == context
2129
          && get_identifier (p->method) == method)
2130
        return true;
2131
    }
2132
  return false;
2133
}
2134
 
2135
/* Scan the rules list for replacements for *METHOD_P and replace the
2136
   args accordingly.  If the rewrite results in an access to a private
2137
   method, update SPECIAL.  */
2138
 
2139
void
2140
maybe_rewrite_invocation (tree *method_p, VEC(tree,gc) **arg_list_p,
2141
                          tree *method_signature_p, tree *special)
2142
{
2143
  tree context = DECL_NAME (TYPE_NAME (DECL_CONTEXT (*method_p)));
2144
  rewrite_rule *p;
2145
  *special = NULL_TREE;
2146
 
2147
  for (p = rules; p->classname; p++)
2148
    {
2149
      if (get_identifier (p->classname) == context)
2150
        {
2151
          tree method = DECL_NAME (*method_p);
2152
          if (get_identifier (p->method) == method
2153
              && get_identifier (p->signature) == *method_signature_p)
2154
            {
2155
              tree maybe_method;
2156
              tree destination_class
2157
                = lookup_class (get_identifier (p->new_classname));
2158
              gcc_assert (destination_class);
2159
              maybe_method
2160
                = lookup_java_method (destination_class,
2161
                                      method,
2162
                                      get_identifier (p->new_signature));
2163
              if (! maybe_method && ! flag_verify_invocations)
2164
                {
2165
                  maybe_method
2166
                    = add_method (destination_class, p->flags,
2167
                                  method, get_identifier (p->new_signature));
2168
                  DECL_EXTERNAL (maybe_method) = 1;
2169
                }
2170
              *method_p = maybe_method;
2171
              gcc_assert (*method_p);
2172
              if (p->rewrite_arglist)
2173
                p->rewrite_arglist (arg_list_p);
2174
              *method_signature_p = get_identifier (p->new_signature);
2175
              *special = integer_one_node;
2176
 
2177
              break;
2178
            }
2179
        }
2180
    }
2181
}
2182
 
2183
 
2184
 
2185
tree
2186
build_known_method_ref (tree method, tree method_type ATTRIBUTE_UNUSED,
2187
                        tree self_type, tree method_signature ATTRIBUTE_UNUSED,
2188
                        VEC(tree,gc) *arg_list ATTRIBUTE_UNUSED, tree special)
2189
{
2190
  tree func;
2191
  if (is_compiled_class (self_type))
2192
    {
2193
      /* With indirect dispatch we have to use indirect calls for all
2194
         publicly visible methods or gcc will use PLT indirections
2195
         to reach them.  We also have to use indirect dispatch for all
2196
         external methods.  */
2197
      if (! flag_indirect_dispatch
2198
          || (! DECL_EXTERNAL (method) && ! TREE_PUBLIC (method)))
2199
        {
2200
          func = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (method)),
2201
                         method);
2202
        }
2203
      else
2204
        {
2205
          tree table_index
2206
            = build_int_cst (NULL_TREE,
2207
                             (get_symbol_table_index
2208
                              (method, special,
2209
                               &TYPE_ATABLE_METHODS (output_class))));
2210
          func
2211
            = build4 (ARRAY_REF,
2212
                      TREE_TYPE (TREE_TYPE (TYPE_ATABLE_DECL (output_class))),
2213
                      TYPE_ATABLE_DECL (output_class), table_index,
2214
                      NULL_TREE, NULL_TREE);
2215
        }
2216
      func = convert (method_ptr_type_node, func);
2217
    }
2218
  else
2219
    {
2220
      /* We don't know whether the method has been (statically) compiled.
2221
         Compile this code to get a reference to the method's code:
2222
 
2223
         SELF_TYPE->methods[METHOD_INDEX].ncode
2224
 
2225
      */
2226
 
2227
      int method_index = 0;
2228
      tree meth, ref;
2229
 
2230
      /* The method might actually be declared in some superclass, so
2231
         we have to use its class context, not the caller's notion of
2232
         where the method is.  */
2233
      self_type = DECL_CONTEXT (method);
2234
      ref = build_class_ref (self_type);
2235
      ref = build1 (INDIRECT_REF, class_type_node, ref);
2236
      if (ncode_ident == NULL_TREE)
2237
        ncode_ident = get_identifier ("ncode");
2238
      if (methods_ident == NULL_TREE)
2239
        methods_ident = get_identifier ("methods");
2240
      ref = build3 (COMPONENT_REF, method_ptr_type_node, ref,
2241
                    lookup_field (&class_type_node, methods_ident),
2242
                    NULL_TREE);
2243
      for (meth = TYPE_METHODS (self_type);
2244
           ; meth = DECL_CHAIN (meth))
2245
        {
2246
          if (method == meth)
2247
            break;
2248
          if (meth == NULL_TREE)
2249
            fatal_error ("method '%s' not found in class",
2250
                         IDENTIFIER_POINTER (DECL_NAME (method)));
2251
          method_index++;
2252
        }
2253
      method_index *= int_size_in_bytes (method_type_node);
2254
      ref = fold_build_pointer_plus_hwi (ref, method_index);
2255
      ref = build1 (INDIRECT_REF, method_type_node, ref);
2256
      func = build3 (COMPONENT_REF, nativecode_ptr_type_node,
2257
                     ref, lookup_field (&method_type_node, ncode_ident),
2258
                     NULL_TREE);
2259
    }
2260
  return func;
2261
}
2262
 
2263
tree
2264
invoke_build_dtable (int is_invoke_interface, VEC(tree,gc) *arg_list)
2265
{
2266
  tree dtable, objectref;
2267
  tree saved = save_expr (VEC_index (tree, arg_list, 0));
2268
 
2269
  VEC_replace (tree, arg_list, 0, saved);
2270
 
2271
  /* If we're dealing with interfaces and if the objectref
2272
     argument is an array then get the dispatch table of the class
2273
     Object rather than the one from the objectref.  */
2274
  objectref = (is_invoke_interface
2275
               && is_array_type_p (TREE_TYPE (saved))
2276
               ? build_class_ref (object_type_node) : saved);
2277
 
2278
  if (dtable_ident == NULL_TREE)
2279
    dtable_ident = get_identifier ("vtable");
2280
  dtable = build_java_indirect_ref (object_type_node, objectref,
2281
                                    flag_check_references);
2282
  dtable = build3 (COMPONENT_REF, dtable_ptr_type, dtable,
2283
                   lookup_field (&object_type_node, dtable_ident), NULL_TREE);
2284
 
2285
  return dtable;
2286
}
2287
 
2288
/* Determine the index in SYMBOL_TABLE for a reference to the decl
2289
   T. If this decl has not been seen before, it will be added to the
2290
   [oa]table_methods. If it has, the existing table slot will be
2291
   reused.  */
2292
 
2293
int
2294
get_symbol_table_index (tree t, tree special,
2295
                        VEC(method_entry,gc) **symbol_table)
2296
{
2297
  method_entry *e;
2298
  unsigned i;
2299
 
2300
  FOR_EACH_VEC_ELT (method_entry, *symbol_table, i, e)
2301
    if (t == e->method && special == e->special)
2302
      goto done;
2303
 
2304
  e = VEC_safe_push (method_entry, gc, *symbol_table, NULL);
2305
  e->method = t;
2306
  e->special = special;
2307
 
2308
 done:
2309
  return i + 1;
2310
}
2311
 
2312
tree
2313
build_invokevirtual (tree dtable, tree method, tree special)
2314
{
2315
  tree func;
2316
  tree nativecode_ptr_ptr_type_node
2317
    = build_pointer_type (nativecode_ptr_type_node);
2318
  tree method_index;
2319
  tree otable_index;
2320
 
2321
  if (flag_indirect_dispatch)
2322
    {
2323
      gcc_assert (! CLASS_INTERFACE (TYPE_NAME (DECL_CONTEXT (method))));
2324
 
2325
      otable_index
2326
        = build_int_cst (NULL_TREE, get_symbol_table_index
2327
                         (method, special,
2328
                          &TYPE_OTABLE_METHODS (output_class)));
2329
      method_index = build4 (ARRAY_REF, integer_type_node,
2330
                             TYPE_OTABLE_DECL (output_class),
2331
                             otable_index, NULL_TREE, NULL_TREE);
2332
    }
2333
  else
2334
    {
2335
      /* We fetch the DECL_VINDEX field directly here, rather than
2336
         using get_method_index().  DECL_VINDEX is the true offset
2337
         from the vtable base to a method, regardless of any extra
2338
         words inserted at the start of the vtable.  */
2339
      method_index = DECL_VINDEX (method);
2340
      method_index = size_binop (MULT_EXPR, method_index,
2341
                                 TYPE_SIZE_UNIT (nativecode_ptr_ptr_type_node));
2342
      if (TARGET_VTABLE_USES_DESCRIPTORS)
2343
        method_index = size_binop (MULT_EXPR, method_index,
2344
                                   size_int (TARGET_VTABLE_USES_DESCRIPTORS));
2345
    }
2346
 
2347
  func = fold_build_pointer_plus (dtable, method_index);
2348
 
2349
  if (TARGET_VTABLE_USES_DESCRIPTORS)
2350
    func = build1 (NOP_EXPR, nativecode_ptr_type_node, func);
2351
  else
2352
    {
2353
      func = fold_convert (nativecode_ptr_ptr_type_node, func);
2354
      func = build1 (INDIRECT_REF, nativecode_ptr_type_node, func);
2355
    }
2356
 
2357
  return func;
2358
}
2359
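/* In the non-indirect-dispatch case the value built above amounts to
   fetching the code pointer straight out of the vtable, roughly:

     func = *(void **) ((char *) dtable
                        + DECL_VINDEX (method) * sizeof (void *));

   With flag_indirect_dispatch the byte offset comes instead from the
   otable slot assigned by get_symbol_table_index, and with vtable
   descriptors the resulting address is used directly rather than
   dereferenced.  */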
 
2360
static GTY(()) tree class_ident;
2361
tree
2362
build_invokeinterface (tree dtable, tree method)
2363
{
2364
  tree interface;
2365
  tree idx;
2366
 
2367
  /* We expand invokeinterface here.  */
2368
 
2369
  if (class_ident == NULL_TREE)
2370
    class_ident = get_identifier ("class");
2371
 
2372
  dtable = build_java_indirect_ref (dtable_type, dtable,
2373
                                    flag_check_references);
2374
  dtable = build3 (COMPONENT_REF, class_ptr_type, dtable,
2375
                   lookup_field (&dtable_type, class_ident), NULL_TREE);
2376
 
2377
  interface = DECL_CONTEXT (method);
2378
  gcc_assert (CLASS_INTERFACE (TYPE_NAME (interface)));
2379
  layout_class_methods (interface);
2380
 
2381
  if (flag_indirect_dispatch)
2382
    {
2383
      int itable_index
2384
        = 2 * (get_symbol_table_index
2385
               (method, NULL_TREE, &TYPE_ITABLE_METHODS (output_class)));
2386
      interface
2387
        = build4 (ARRAY_REF,
2388
                 TREE_TYPE (TREE_TYPE (TYPE_ITABLE_DECL (output_class))),
2389
                 TYPE_ITABLE_DECL (output_class),
2390
                  build_int_cst (NULL_TREE, itable_index-1),
2391
                  NULL_TREE, NULL_TREE);
2392
      idx
2393
        = build4 (ARRAY_REF,
2394
                 TREE_TYPE (TREE_TYPE (TYPE_ITABLE_DECL (output_class))),
2395
                 TYPE_ITABLE_DECL (output_class),
2396
                  build_int_cst (NULL_TREE, itable_index),
2397
                  NULL_TREE, NULL_TREE);
2398
      interface = convert (class_ptr_type, interface);
2399
      idx = convert (integer_type_node, idx);
2400
    }
2401
  else
2402
    {
2403
      idx = build_int_cst (NULL_TREE,
2404
                           get_interface_method_index (method, interface));
2405
      interface = build_class_ref (interface);
2406
    }
2407
 
2408
  return build_call_nary (ptr_type_node,
2409
                          build_address_of (soft_lookupinterfacemethod_node),
2410
                          3, dtable, interface, idx);
2411
}
2412
 
2413
/* Expand one of the invoke_* opcodes.
2414
   OPCODE is the specific opcode.
2415
   METHOD_REF_INDEX is an index into the constant pool.
2416
   NARGS is the number of arguments, or -1 if not specified. */
2417
 
2418
static void
2419
expand_invoke (int opcode, int method_ref_index, int nargs ATTRIBUTE_UNUSED)
2420
{
2421
  tree method_signature
2422
    = COMPONENT_REF_SIGNATURE(&current_jcf->cpool, method_ref_index);
2423
  tree method_name = COMPONENT_REF_NAME (&current_jcf->cpool,
2424
                                         method_ref_index);
2425
  tree self_type
2426
    = get_class_constant (current_jcf,
2427
                          COMPONENT_REF_CLASS_INDEX(&current_jcf->cpool,
2428
                          method_ref_index));
2429
  const char *const self_name
2430
    = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (self_type)));
2431
  tree call, func, method, method_type;
2432
  VEC(tree,gc) *arg_list;
2433
  tree check = NULL_TREE;
2434
 
2435
  tree special = NULL_TREE;
2436
 
2437
  if (! CLASS_LOADED_P (self_type))
2438
    {
2439
      load_class (self_type, 1);
2440
      safe_layout_class (self_type);
2441
      if (TREE_CODE (TYPE_SIZE (self_type)) == ERROR_MARK)
2442
        fatal_error ("failed to find class '%s'", self_name);
2443
    }
2444
  layout_class_methods (self_type);
2445
 
2446
  if (ID_INIT_P (method_name))
2447
    method = lookup_java_constructor (self_type, method_signature);
2448
  else
2449
    method = lookup_java_method (self_type, method_name, method_signature);
2450
 
2451
  /* We've found a method in a class other than the one in which it
2452
     was wanted.  This can happen if, for instance, we're trying to
2453
     compile invokespecial super.equals().
2454
     FIXME: This is a kludge.  Rather than nullifying the result, we
2455
     should change lookup_java_method() so that it doesn't search the
2456
     superclass chain when we're BC-compiling.  */
2457
  if (! flag_verify_invocations
2458
      && method
2459
      && ! TYPE_ARRAY_P (self_type)
2460
      && self_type != DECL_CONTEXT (method))
2461
    method = NULL_TREE;
2462
 
2463
  /* We've found a method in an interface, but this isn't an interface
2464
     call.  */
2465
  if (opcode != OPCODE_invokeinterface
2466
      && method
2467
      && (CLASS_INTERFACE (TYPE_NAME (DECL_CONTEXT (method)))))
2468
    method = NULL_TREE;
2469
 
2470
  /* We've found a non-interface method but we are making an
2471
     interface call.  This can happen if the interface overrides a
2472
     method in Object.  */
2473
  if (! flag_verify_invocations
2474
      && opcode == OPCODE_invokeinterface
2475
      && method
2476
      && ! CLASS_INTERFACE (TYPE_NAME (DECL_CONTEXT (method))))
2477
    method = NULL_TREE;
2478
 
2479
  if (method == NULL_TREE)
2480
    {
2481
      if (flag_verify_invocations || ! flag_indirect_dispatch)
2482
        {
2483
          error ("class '%s' has no method named '%s' matching signature '%s'",
2484
                 self_name,
2485
                 IDENTIFIER_POINTER (method_name),
2486
                 IDENTIFIER_POINTER (method_signature));
2487
        }
2488
      else
2489
        {
2490
          int flags = ACC_PUBLIC;
2491
          if (opcode == OPCODE_invokestatic)
2492
            flags |= ACC_STATIC;
2493
          if (opcode == OPCODE_invokeinterface)
2494
            {
2495
              flags |= ACC_INTERFACE | ACC_ABSTRACT;
2496
              CLASS_INTERFACE (TYPE_NAME (self_type)) = 1;
2497
            }
2498
          method = add_method (self_type, flags, method_name,
2499
                               method_signature);
2500
          DECL_ARTIFICIAL (method) = 1;
2501
          METHOD_DUMMY (method) = 1;
2502
          layout_class_method (self_type, NULL,
2503
                               method, NULL);
2504
        }
2505
    }
2506
 
2507
  /* invokestatic may only name a static, non-abstract method; the other
     invoke opcodes may not name a static method.  */
2508
  if (method != NULL_TREE)
2509
    {
2510
      if (opcode == OPCODE_invokestatic)
2511
        {
2512
          if (!METHOD_STATIC (method))
2513
            {
2514
              error ("invokestatic on non static method");
2515
              method = NULL_TREE;
2516
            }
2517
          else if (METHOD_ABSTRACT (method))
2518
            {
2519
              error ("invokestatic on abstract method");
2520
              method = NULL_TREE;
2521
            }
2522
        }
2523
      else
2524
        {
2525
          if (METHOD_STATIC (method))
2526
            {
2527
              error ("invoke[non-static] on static method");
2528
              method = NULL_TREE;
2529
            }
2530
        }
2531
    }
2532
 
2533
  if (method == NULL_TREE)
2534
    {
2535
      /* If we got here, we emitted an error message above.  So we
2536
         just pop the arguments, push a properly-typed zero, and
2537
         continue.  */
2538
      method_type = get_type_from_signature (method_signature);
2539
      pop_arguments (method_type);
2540
      if (opcode != OPCODE_invokestatic)
2541
        pop_type (self_type);
2542
      method_type = promote_type (TREE_TYPE (method_type));
2543
      push_value (convert (method_type, integer_zero_node));
2544
      return;
2545
    }
2546
 
2547
  arg_list = pop_arguments (TREE_TYPE (method));
2548
  flush_quick_stack ();
2549
 
2550
  maybe_rewrite_invocation (&method, &arg_list, &method_signature,
2551
                            &special);
2552
  method_type = TREE_TYPE (method);
2553
 
2554
  func = NULL_TREE;
2555
  if (opcode == OPCODE_invokestatic)
2556
    func = build_known_method_ref (method, method_type, self_type,
2557
                                   method_signature, arg_list, special);
2558
  else if (opcode == OPCODE_invokespecial
2559
           || (opcode == OPCODE_invokevirtual
2560
               && (METHOD_PRIVATE (method)
2561
                   || METHOD_FINAL (method)
2562
                   || CLASS_FINAL (TYPE_NAME (self_type)))))
2563
    {
2564
      /* If the object for the method call is null, we throw an
2565
         exception.  We don't do this if the object is the current
2566
         method's `this'.  In other cases we just rely on an
2567
         optimization pass to eliminate redundant checks.  FIXME:
2568
         Unfortunately there doesn't seem to be a way to determine
2569
         what the current method is right now.
2570
         We do omit the check if we're calling <init>.  */
2571
      /* We use a SAVE_EXPR here to make sure we only evaluate
2572
         the new `self' expression once.  */
2573
      tree save_arg = save_expr (VEC_index (tree, arg_list, 0));
2574
      VEC_replace (tree, arg_list, 0, save_arg);
2575
      check = java_check_reference (save_arg, ! DECL_INIT_P (method));
2576
      func = build_known_method_ref (method, method_type, self_type,
2577
                                     method_signature, arg_list, special);
2578
    }
2579
  else
2580
    {
2581
      tree dtable = invoke_build_dtable (opcode == OPCODE_invokeinterface,
2582
                                         arg_list);
2583
      if (opcode == OPCODE_invokevirtual)
2584
        func = build_invokevirtual (dtable, method, special);
2585
      else
2586
        func = build_invokeinterface (dtable, method);
2587
    }
2588
 
2589
  if (TREE_CODE (func) == ADDR_EXPR)
2590
    TREE_TYPE (func) = build_pointer_type (method_type);
2591
  else
2592
    func = build1 (NOP_EXPR, build_pointer_type (method_type), func);
2593
 
2594
  call = build_call_vec (TREE_TYPE (method_type), func, arg_list);
2595
  TREE_SIDE_EFFECTS (call) = 1;
2596
  call = check_for_builtin (method, call);
2597
 
2598
  if (check != NULL_TREE)
2599
    {
2600
      call = build2 (COMPOUND_EXPR, TREE_TYPE (call), check, call);
2601
      TREE_SIDE_EFFECTS (call) = 1;
2602
    }
2603
 
2604
  if (TREE_CODE (TREE_TYPE (method_type)) == VOID_TYPE)
2605
    java_add_stmt (call);
2606
  else
2607
    {
2608
      push_value (call);
2609
      flush_quick_stack ();
2610
    }
2611
}
2612
 
2613
/* Create a stub which will be put into the vtable but which will call
2614
   a JNI function.  */
2615
 
2616
tree
2617
build_jni_stub (tree method)
2618
{
2619
  tree jnifunc, call, body, method_sig, arg_types;
2620
  tree jniarg0, jniarg1, jniarg2, jniarg3;
2621
  tree jni_func_type, tem;
2622
  tree env_var, res_var = NULL_TREE, block;
2623
  tree method_args;
2624
  tree meth_var;
2625
  tree bind;
2626
  VEC(tree,gc) *args = NULL;
2627
  int args_size = 0;
2628
 
2629
  tree klass = DECL_CONTEXT (method);
2630
  klass = build_class_ref (klass);
2631
 
2632
  gcc_assert (METHOD_NATIVE (method) && flag_jni);
2633
 
2634
  DECL_ARTIFICIAL (method) = 1;
2635
  DECL_EXTERNAL (method) = 0;
2636
 
2637
  env_var = build_decl (input_location,
2638
                        VAR_DECL, get_identifier ("env"), ptr_type_node);
2639
  DECL_CONTEXT (env_var) = method;
2640
 
2641
  if (TREE_TYPE (TREE_TYPE (method)) != void_type_node)
2642
    {
2643
      res_var = build_decl (input_location, VAR_DECL, get_identifier ("res"),
2644
                            TREE_TYPE (TREE_TYPE (method)));
2645
      DECL_CONTEXT (res_var) = method;
2646
      DECL_CHAIN (env_var) = res_var;
2647
    }
2648
 
2649
  method_args = DECL_ARGUMENTS (method);
2650
  block = build_block (env_var, NULL_TREE, method_args, NULL_TREE);
2651
  TREE_SIDE_EFFECTS (block) = 1;
2652
 
2653
  /* Compute the local `env' by calling _Jv_GetJNIEnvNewFrame.  */
2654
  body = build2 (MODIFY_EXPR, ptr_type_node, env_var,
2655
                 build_call_nary (ptr_type_node,
2656
                                  build_address_of (soft_getjnienvnewframe_node),
2657
                                  1, klass));
2658
 
2659
  /* The JNIEnv structure is the first argument to the JNI function.  */
2660
  args_size += int_size_in_bytes (TREE_TYPE (env_var));
2661
  VEC_safe_push (tree, gc, args, env_var);
2662
 
2663
  /* For a static method the second argument is the class.  For a
2664
     non-static method the second argument is `this'; that is already
2665
     available in the argument list.  */
2666
  if (METHOD_STATIC (method))
2667
    {
2668
      args_size += int_size_in_bytes (TREE_TYPE (klass));
2669
      VEC_safe_push (tree, gc, args, klass);
2670
    }
2671
 
2672
  /* All the arguments to this method become arguments to the
2673
     underlying JNI function.  If we had to wrap object arguments in a
2674
     special way, we would do that here.  */
2675
  for (tem = method_args; tem != NULL_TREE; tem = DECL_CHAIN (tem))
2676
    {
2677
      int arg_bits = TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (tem)));
2678
#ifdef PARM_BOUNDARY
2679
      arg_bits = (((arg_bits + PARM_BOUNDARY - 1) / PARM_BOUNDARY)
2680
                  * PARM_BOUNDARY);
2681
#endif
2682
      args_size += (arg_bits / BITS_PER_UNIT);
2683
 
2684
      VEC_safe_push (tree, gc, args, tem);
2685
    }
2686
  arg_types = TYPE_ARG_TYPES (TREE_TYPE (method));
2687
 
2688
  /* Argument types for static methods and the JNIEnv structure.
2689
     FIXME: Write and use build_function_type_vec to avoid this.  */
2690
  if (METHOD_STATIC (method))
2691
    arg_types = tree_cons (NULL_TREE, object_ptr_type_node, arg_types);
2692
  arg_types = tree_cons (NULL_TREE, ptr_type_node, arg_types);
2693
 
2694
  /* We call _Jv_LookupJNIMethod to find the actual underlying
2695
     function pointer.  _Jv_LookupJNIMethod will throw the appropriate
2696
     exception if this function is not found at runtime.  */
2697
  method_sig = build_java_signature (TREE_TYPE (method));
2698
  jniarg0 = klass;
2699
  jniarg1 = build_utf8_ref (DECL_NAME (method));
2700
  jniarg2 = build_utf8_ref (unmangle_classname
2701
                            (IDENTIFIER_POINTER (method_sig),
2702
                             IDENTIFIER_LENGTH (method_sig)));
2703
  jniarg3 = build_int_cst (NULL_TREE, args_size);
2704
 
2705
  tem = build_function_type (TREE_TYPE (TREE_TYPE (method)), arg_types);
2706
 
2707
#ifdef MODIFY_JNI_METHOD_CALL
2708
  tem = MODIFY_JNI_METHOD_CALL (tem);
2709
#endif
2710
 
2711
  jni_func_type = build_pointer_type (tem);
2712
 
2713
  /* Use the actual function type, rather than a generic pointer type,
2714
     such that this decl keeps the actual pointer type from being
2715
     garbage-collected.  If it is, we end up using canonical types
2716
     with different uids for equivalent function types, and this in
2717
     turn causes utf8 identifiers and output order to vary.  */
2718
  meth_var = build_decl (input_location,
2719
                         VAR_DECL, get_identifier ("meth"), jni_func_type);
2720
  TREE_STATIC (meth_var) = 1;
2721
  TREE_PUBLIC (meth_var) = 0;
2722
  DECL_EXTERNAL (meth_var) = 0;
2723
  DECL_CONTEXT (meth_var) = method;
2724
  DECL_ARTIFICIAL (meth_var) = 1;
2725
  DECL_INITIAL (meth_var) = null_pointer_node;
2726
  TREE_USED (meth_var) = 1;
2727
  chainon (env_var, meth_var);
2728
  build_result_decl (method);
2729
 
2730
  jnifunc = build3 (COND_EXPR, jni_func_type,
2731
                    build2 (NE_EXPR, boolean_type_node,
2732
                            meth_var, build_int_cst (TREE_TYPE (meth_var), 0)),
2733
                    meth_var,
2734
                    build2 (MODIFY_EXPR, jni_func_type, meth_var,
2735
                            build1
2736
                            (NOP_EXPR, jni_func_type,
2737
                             build_call_nary (ptr_type_node,
2738
                                              build_address_of
2739
                                              (soft_lookupjnimethod_node),
2740
                                              4,
2741
                                              jniarg0, jniarg1,
2742
                                              jniarg2, jniarg3))));
2743
 
2744
  /* Now we make the actual JNI call via the resulting function
2745
     pointer.    */
2746
  call = build_call_vec (TREE_TYPE (TREE_TYPE (method)), jnifunc, args);
2747
 
2748
  /* If the JNI call returned a result, capture it here.  If we had to
2749
     unwrap JNI object results, we would do that here.  */
2750
  if (res_var != NULL_TREE)
2751
    {
2752
      /* If the call returns an object, it may return a JNI weak
2753
         reference, in which case we must unwrap it.  */
2754
      if (! JPRIMITIVE_TYPE_P (TREE_TYPE (TREE_TYPE (method))))
2755
        call = build_call_nary (TREE_TYPE (TREE_TYPE (method)),
2756
                                build_address_of (soft_unwrapjni_node),
2757
                                1, call);
2758
      call = build2 (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (method)),
2759
                     res_var, call);
2760
    }
2761
 
2762
  TREE_SIDE_EFFECTS (call) = 1;
2763
 
2764
  body = build2 (COMPOUND_EXPR, void_type_node, body, call);
2765
  TREE_SIDE_EFFECTS (body) = 1;
2766
 
2767
  /* Now free the environment we allocated.  */
2768
  call = build_call_nary (ptr_type_node,
2769
                          build_address_of (soft_jnipopsystemframe_node),
2770
                          1, env_var);
2771
  TREE_SIDE_EFFECTS (call) = 1;
2772
  body = build2 (COMPOUND_EXPR, void_type_node, body, call);
2773
  TREE_SIDE_EFFECTS (body) = 1;
2774
 
2775
  /* Finally, do the return.  */
2776
  if (res_var != NULL_TREE)
2777
    {
2778
      tree drt;
2779
      gcc_assert (DECL_RESULT (method));
2780
      /* Make sure we copy the result variable to the actual
2781
         result.  We use the type of the DECL_RESULT because it
2782
         might be different from the return type of the function:
2783
         it might be promoted.  */
2784
      drt = TREE_TYPE (DECL_RESULT (method));
2785
      if (drt != TREE_TYPE (res_var))
2786
        res_var = build1 (CONVERT_EXPR, drt, res_var);
2787
      res_var = build2 (MODIFY_EXPR, drt, DECL_RESULT (method), res_var);
2788
      TREE_SIDE_EFFECTS (res_var) = 1;
2789
    }
2790
 
2791
  body = build2 (COMPOUND_EXPR, void_type_node, body,
2792
                 build1 (RETURN_EXPR, void_type_node, res_var));
2793
  TREE_SIDE_EFFECTS (body) = 1;
2794
 
2795
  /* Prepend class initialization for static methods reachable from
2796
     other classes.  */
2797
  if (METHOD_STATIC (method)
2798
      && (! METHOD_PRIVATE (method)
2799
          || INNER_CLASS_P (DECL_CONTEXT (method))))
2800
    {
2801
      tree init = build_call_expr (soft_initclass_node, 1,
2802
                                   klass);
2803
      body = build2 (COMPOUND_EXPR, void_type_node, init, body);
2804
      TREE_SIDE_EFFECTS (body) = 1;
2805
    }
2806
 
2807
  bind = build3 (BIND_EXPR, void_type_node, BLOCK_VARS (block),
2808
                 body, block);
2809
  return bind;
2810
}
2811
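/* Taken together, the body assembled above behaves like this pseudo-C,
   where the lower-case names stand for the runtime entry points reached
   through the corresponding soft_*_node declarations:

     env = getjnienvnewframe (klass);
     if (meth == NULL)
       meth = lookupjnimethod (klass, name, signature, args_size);
     res = (*meth) (env, klass-if-static, args...);
     if (the result is an object)
       res = unwrapjni (res);          // may be a JNI weak reference
     jnipopsystemframe (env);
     return res;  */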
 
2812
 
2813
/* Given lvalue EXP, return a volatile expression that references the
2814
   same object.  */
2815
 
2816
tree
2817
java_modify_addr_for_volatile (tree exp)
2818
{
2819
  tree exp_type = TREE_TYPE (exp);
2820
  tree v_type
2821
    = build_qualified_type (exp_type,
2822
                            TYPE_QUALS (exp_type) | TYPE_QUAL_VOLATILE);
2823
  tree addr = build_fold_addr_expr (exp);
2824
  v_type = build_pointer_type (v_type);
2825
  addr = fold_convert (v_type, addr);
2826
  exp = build_fold_indirect_ref (addr);
2827
  return exp;
2828
}
2829
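/* In C terms the result is simply *(volatile T *) &EXP, where T is
   TREE_TYPE (EXP), so loads and stores made through it get volatile
   semantics.  */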
 
2830
 
2831
/* Expand an operation to extract from or store into a field.
2832
   IS_STATIC is 1 iff the field is static.
2833
   IS_PUTTING is 1 for putting into a field;  0 for getting from the field.
2834
   FIELD_REF_INDEX is an index into the constant pool.  */
2835
 
2836
static void
2837
expand_java_field_op (int is_static, int is_putting, int field_ref_index)
2838
{
2839
  tree self_type
2840
    = get_class_constant (current_jcf,
2841
                          COMPONENT_REF_CLASS_INDEX (&current_jcf->cpool,
2842
                          field_ref_index));
2843
  const char *self_name
2844
    = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (self_type)));
2845
  tree field_name = COMPONENT_REF_NAME (&current_jcf->cpool, field_ref_index);
2846
  tree field_signature = COMPONENT_REF_SIGNATURE (&current_jcf->cpool,
2847
                                                  field_ref_index);
2848
  tree field_type = get_type_from_signature (field_signature);
2849
  tree new_value = is_putting ? pop_value (field_type) : NULL_TREE;
2850
  tree field_ref;
2851
  int is_error = 0;
2852
  tree original_self_type = self_type;
2853
  tree field_decl;
2854
  tree modify_expr;
2855
 
2856
  if (! CLASS_LOADED_P (self_type))
2857
    load_class (self_type, 1);
2858
  field_decl = lookup_field (&self_type, field_name);
2859
  if (field_decl == error_mark_node)
2860
    {
2861
      is_error = 1;
2862
    }
2863
  else if (field_decl == NULL_TREE)
2864
    {
2865
      if (! flag_verify_invocations)
2866
        {
2867
          int flags = ACC_PUBLIC;
2868
          if (is_static)
2869
            flags |= ACC_STATIC;
2870
          self_type = original_self_type;
2871
          field_decl = add_field (original_self_type, field_name,
2872
                                  field_type, flags);
2873
          DECL_ARTIFICIAL (field_decl) = 1;
2874
          DECL_IGNORED_P (field_decl) = 1;
2875
#if 0
2876
          /* FIXME: We should be pessimistic about volatility.  We
2877
             don't know one way or another, but this is safe.
2878
             However, doing this has bad effects on code quality.  We
2879
             need to look at better ways to do this.  */
2880
          TREE_THIS_VOLATILE (field_decl) = 1;
2881
#endif
2882
        }
2883
      else
2884
        {
2885
          error ("missing field '%s' in '%s'",
2886
                 IDENTIFIER_POINTER (field_name), self_name);
2887
          is_error = 1;
2888
        }
2889
    }
2890
  else if (build_java_signature (TREE_TYPE (field_decl)) != field_signature)
2891
    {
2892
      error ("mismatching signature for field '%s' in '%s'",
2893
             IDENTIFIER_POINTER (field_name), self_name);
2894
      is_error = 1;
2895
    }
2896
  field_ref = is_static ? NULL_TREE : pop_value (self_type);
2897
  if (is_error)
2898
    {
2899
      if (! is_putting)
2900
        push_value (convert (field_type, integer_zero_node));
2901
      flush_quick_stack ();
2902
      return;
2903
    }
2904
 
2905
  field_ref = build_field_ref (field_ref, self_type, field_name);
2906
  if (is_static
2907
      && ! flag_indirect_dispatch)
2908
    {
2909
      tree context = DECL_CONTEXT (field_ref);
2910
      if (context != self_type && CLASS_INTERFACE (TYPE_NAME (context)))
2911
        field_ref = build_class_init (context, field_ref);
2912
      else
2913
        field_ref = build_class_init (self_type, field_ref);
2914
    }
2915
  if (is_putting)
2916
    {
2917
      flush_quick_stack ();
2918
      if (FIELD_FINAL (field_decl))
2919
        {
2920
          if (DECL_CONTEXT (field_decl) != current_class)
2921
            error ("assignment to final field %q+D not in field%'s class",
2922
                   field_decl);
2923
          /* We used to check for assignments to final fields not
2924
             occurring in the class initializer or in a constructor
2925
             here.  However, this constraint doesn't seem to be
2926
             enforced by the JVM.  */
2927
        }
2928
 
2929
      if (TREE_THIS_VOLATILE (field_decl))
2930
        field_ref = java_modify_addr_for_volatile (field_ref);
2931
 
2932
      modify_expr = build2 (MODIFY_EXPR, TREE_TYPE (field_ref),
2933
                            field_ref, new_value);
2934
 
2935
      if (TREE_THIS_VOLATILE (field_decl))
2936
        {
2937
          tree sync = builtin_decl_explicit (BUILT_IN_SYNC_SYNCHRONIZE);
2938
          java_add_stmt (build_call_expr (sync, 0));
2939
        }
2940
 
2941
      java_add_stmt (modify_expr);
2942
    }
2943
  else
2944
    {
2945
      tree temp = build_decl (input_location,
2946
                              VAR_DECL, NULL_TREE, TREE_TYPE (field_ref));
2947
      java_add_local_var (temp);
2948
 
2949
      if (TREE_THIS_VOLATILE (field_decl))
2950
        field_ref = java_modify_addr_for_volatile (field_ref);
2951
 
2952
      modify_expr
2953
        = build2 (MODIFY_EXPR, TREE_TYPE (field_ref), temp, field_ref);
2954
      java_add_stmt (modify_expr);
2955
 
2956
      if (TREE_THIS_VOLATILE (field_decl))
2957
        {
2958
          tree sync = builtin_decl_explicit (BUILT_IN_SYNC_SYNCHRONIZE);
2959
          java_add_stmt (build_call_expr (sync, 0));
2960
        }
2961
 
2962
      push_value (temp);
2963
    }
2964
  TREE_THIS_VOLATILE (field_ref) = TREE_THIS_VOLATILE (field_decl);
2965
}
2966
 
2967
static void
2968
load_type_state (int pc)
2969
{
2970
  int i;
2971
  tree vec = VEC_index (tree, type_states, pc);
2972
  int cur_length = TREE_VEC_LENGTH (vec);
2973
  stack_pointer = cur_length - DECL_MAX_LOCALS(current_function_decl);
2974
  for (i = 0; i < cur_length; i++)
2975
    type_map [i] = TREE_VEC_ELT (vec, i);
2976
}
2977
 
2978
/* Go over METHOD's bytecode and note instruction starts in
2979
   instruction_bits[].  */
2980
 
2981
void
2982
note_instructions (JCF *jcf, tree method)
2983
{
2984
  int PC;
2985
  unsigned char* byte_ops;
2986
  long length = DECL_CODE_LENGTH (method);
2987
 
2988
  int saw_index;
2989
  jint INT_temp;
2990
 
2991
#undef RET /* Defined by config/i386/i386.h */
2992
#undef PTR
2993
#define BCODE byte_ops
2994
#define BYTE_type_node byte_type_node
2995
#define SHORT_type_node short_type_node
2996
#define INT_type_node int_type_node
2997
#define LONG_type_node long_type_node
2998
#define CHAR_type_node char_type_node
2999
#define PTR_type_node ptr_type_node
3000
#define FLOAT_type_node float_type_node
3001
#define DOUBLE_type_node double_type_node
3002
#define VOID_type_node void_type_node
3003
#define CONST_INDEX_1 (saw_index = 1, IMMEDIATE_u1)
3004
#define CONST_INDEX_2 (saw_index = 1, IMMEDIATE_u2)
3005
#define VAR_INDEX_1 (saw_index = 1, IMMEDIATE_u1)
3006
#define VAR_INDEX_2 (saw_index = 1, IMMEDIATE_u2)
3007
 
3008
#define CHECK_PC_IN_RANGE(PC) ((void)1) /* Already handled by verifier. */
3009
 
3010
  JCF_SEEK (jcf, DECL_CODE_OFFSET (method));
3011
  byte_ops = jcf->read_ptr;
3012
  instruction_bits = XRESIZEVAR (char, instruction_bits, length + 1);
3013
  memset (instruction_bits, 0, length + 1);
3014
  type_states = VEC_alloc (tree, gc, length + 1);
3015
  VEC_safe_grow_cleared (tree, gc, type_states, length + 1);
3016
 
3017
  /* This pass figures out which PCs can be the targets of jumps. */
3018
  for (PC = 0; PC < length;)
3019
    {
3020
      int oldpc = PC; /* PC at instruction start. */
3021
      instruction_bits [PC] |=  BCODE_INSTRUCTION_START;
3022
      switch (byte_ops[PC++])
3023
        {
3024
#define JAVAOP(OPNAME, OPCODE, OPKIND, OPERAND_TYPE, OPERAND_VALUE) \
3025
        case OPCODE: \
3026
          PRE_##OPKIND(OPERAND_TYPE, OPERAND_VALUE); \
3027
          break;
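/* Editorial illustration (a sketch, not part of the original source):
   assuming javaop.def contains an entry along the lines of

       JAVAOP (goto, 167, BRANCH, GOTO, IMMEDIATE_s2)

   the JAVAOP macro above turns it, for this pre-pass, into

       case 167:
         PRE_BRANCH (GOTO, IMMEDIATE_s2);
         break;

   so the jump target oldpc + IMMEDIATE_s2 is recorded through
   NOTE_LABEL by the PRE_BRANCH definition below.  */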
3028
 
3029
#define NOTE_LABEL(PC) note_label(oldpc, PC)
3030
 
3031
#define PRE_PUSHC(OPERAND_TYPE, OPERAND_VALUE) (void)(OPERAND_VALUE);
3032
#define PRE_LOAD(OPERAND_TYPE, OPERAND_VALUE) (void)(OPERAND_VALUE);
3033
#define PRE_STORE(OPERAND_TYPE, OPERAND_VALUE) (void)(OPERAND_VALUE);
3034
#define PRE_STACK(OPERAND_TYPE, OPERAND_VALUE) /* nothing */
3035
#define PRE_UNOP(OPERAND_TYPE, OPERAND_VALUE) /* nothing */
3036
#define PRE_BINOP(OPERAND_TYPE, OPERAND_VALUE) /* nothing */
3037
#define PRE_CONVERT(OPERAND_TYPE, OPERAND_VALUE) /* nothing */
3038
#define PRE_CONVERT2(OPERAND_TYPE, OPERAND_VALUE) /* nothing */
3039
 
3040
#define PRE_SPECIAL(OPERAND_TYPE, INSTRUCTION) \
3041
  PRE_SPECIAL_##INSTRUCTION(OPERAND_TYPE)
3042
#define PRE_SPECIAL_IINC(OPERAND_TYPE) \
3043
  ((void) IMMEDIATE_u1, (void) IMMEDIATE_s1)
3044
#define PRE_SPECIAL_ENTER(IGNORE) /* nothing */
3045
#define PRE_SPECIAL_EXIT(IGNORE) /* nothing */
3046
#define PRE_SPECIAL_THROW(IGNORE) /* nothing */
3047
#define PRE_SPECIAL_BREAK(IGNORE) /* nothing */
3048
 
3049
/* two forms of wide instructions */
3050
#define PRE_SPECIAL_WIDE(IGNORE) \
3051
  { \
3052
    int modified_opcode = IMMEDIATE_u1; \
3053
    if (modified_opcode == OPCODE_iinc) \
3054
      { \
3055
        (void) IMMEDIATE_u2;    /* indexbyte1 and indexbyte2 */ \
3056
        (void) IMMEDIATE_s2;    /* constbyte1 and constbyte2 */ \
3057
      } \
3058
    else \
3059
      { \
3060
        (void) IMMEDIATE_u2;    /* indexbyte1 and indexbyte2 */ \
3061
      } \
3062
  }
3063
 
3064
#define PRE_IMPL(IGNORE1, IGNORE2) /* nothing */
3065
 
3066
#define PRE_MONITOR(OPERAND_TYPE, OPERAND_VALUE) /* nothing */
3067
 
3068
#define PRE_RETURN(OPERAND_TYPE, OPERAND_VALUE) /* nothing */
3069
#define PRE_ARRAY(OPERAND_TYPE, SUBOP) \
3070
          PRE_ARRAY_##SUBOP(OPERAND_TYPE)
3071
#define PRE_ARRAY_LOAD(TYPE) /* nothing */
3072
#define PRE_ARRAY_STORE(TYPE) /* nothing */
3073
#define PRE_ARRAY_LENGTH(TYPE) /* nothing */
3074
#define PRE_ARRAY_NEW(TYPE) PRE_ARRAY_NEW_##TYPE
3075
#define PRE_ARRAY_NEW_NUM ((void) IMMEDIATE_u1)
3076
#define PRE_ARRAY_NEW_PTR ((void) IMMEDIATE_u2)
3077
#define PRE_ARRAY_NEW_MULTI ((void) IMMEDIATE_u2, (void) IMMEDIATE_u1)
3078
 
3079
#define PRE_TEST(OPERAND_TYPE, OPERAND_VALUE) NOTE_LABEL (oldpc+IMMEDIATE_s2)
3080
#define PRE_COND(OPERAND_TYPE, OPERAND_VALUE) NOTE_LABEL (oldpc+IMMEDIATE_s2)
3081
#define PRE_BRANCH(OPERAND_TYPE, OPERAND_VALUE) \
3082
  saw_index = 0;  INT_temp = (OPERAND_VALUE); \
3083
  if (!saw_index)  NOTE_LABEL(oldpc + INT_temp);
3084
#define PRE_JSR(OPERAND_TYPE, OPERAND_VALUE) \
3085
  saw_index = 0;  INT_temp = (OPERAND_VALUE); \
3086
  NOTE_LABEL (PC); \
3087
  if (!saw_index)  NOTE_LABEL(oldpc + INT_temp);
3088
 
3089
#define PRE_RET(OPERAND_TYPE, OPERAND_VALUE)  (void)(OPERAND_VALUE)
3090
 
3091
#define PRE_SWITCH(OPERAND_TYPE, TABLE_OR_LOOKUP) \
3092
  PC = (PC + 3) / 4 * 4; PRE_##TABLE_OR_LOOKUP##_SWITCH
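/* Editorial note (not part of the original source): the rounding above
   skips the 0-3 padding bytes that the class file format inserts so
   that the 32-bit operands of tableswitch/lookupswitch start on a
   4-byte boundary relative to the beginning of the method's code.  */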
3093
 
3094
#define PRE_LOOKUP_SWITCH                                               \
3095
  { jint default_offset = IMMEDIATE_s4;  jint npairs = IMMEDIATE_s4;    \
3096
    NOTE_LABEL (default_offset+oldpc);                                  \
3097
    if (npairs >= 0)                                                     \
3098
      while (--npairs >= 0) {                                            \
3099
       jint match ATTRIBUTE_UNUSED = IMMEDIATE_s4;                      \
3100
       jint offset = IMMEDIATE_s4;                                      \
3101
       NOTE_LABEL (offset+oldpc); }                                     \
3102
  }
3103
 
3104
#define PRE_TABLE_SWITCH                                \
3105
  { jint default_offset = IMMEDIATE_s4;                 \
3106
    jint low = IMMEDIATE_s4; jint high = IMMEDIATE_s4;  \
3107
    NOTE_LABEL (default_offset+oldpc);                  \
3108
    if (low <= high)                                    \
3109
     while (low++ <= high) {                            \
3110
       jint offset = IMMEDIATE_s4;                      \
3111
       NOTE_LABEL (offset+oldpc); }                     \
3112
  }
3113
 
3114
#define PRE_FIELD(MAYBE_STATIC, PUT_OR_GET) (void)(IMMEDIATE_u2);
3115
#define PRE_OBJECT(MAYBE_STATIC, PUT_OR_GET) (void)(IMMEDIATE_u2);
3116
#define PRE_INVOKE(MAYBE_STATIC, IS_INTERFACE) \
3117
  (void)(IMMEDIATE_u2); \
3118
  PC += 2 * IS_INTERFACE /* for invokeinterface */;
3119
 
3120
#include "javaop.def"
3121
#undef JAVAOP
3122
        }
3123
    } /* for */
3124
}
3125
 
3126
void
3127
expand_byte_code (JCF *jcf, tree method)
3128
{
3129
  int PC;
3130
  int i;
3131
  const unsigned char *linenumber_pointer;
3132
  int dead_code_index = -1;
3133
  unsigned char* byte_ops;
3134
  long length = DECL_CODE_LENGTH (method);
3135
  location_t max_location = input_location;
3136
 
3137
  stack_pointer = 0;
3138
  JCF_SEEK (jcf, DECL_CODE_OFFSET (method));
3139
  byte_ops = jcf->read_ptr;
3140
 
3141
  /* We make an initial pass of the line number table, to note
3142
     which instructions have associated line number entries. */
3143
  linenumber_pointer = linenumber_table;
3144
  for (i = 0; i < linenumber_count; i++)
3145
    {
3146
      int pc = GET_u2 (linenumber_pointer);
3147
      linenumber_pointer += 4;
3148
      if (pc >= length)
3149
        warning (0, "invalid PC in line number table");
3150
      else
3151
        {
3152
          if ((instruction_bits[pc] & BCODE_HAS_LINENUMBER) != 0)
3153
            instruction_bits[pc] |= BCODE_HAS_MULTI_LINENUMBERS;
3154
          instruction_bits[pc] |= BCODE_HAS_LINENUMBER;
3155
        }
3156
    }
3157
 
3158
  if (! verify_jvm_instructions_new (jcf, byte_ops, length))
3159
    return;
3160
 
3161
  promote_arguments ();
3162
  cache_this_class_ref (method);
3163
  cache_cpool_data_ref ();
3164
 
3165
  /* Translate bytecodes.  */
3166
  linenumber_pointer = linenumber_table;
3167
  for (PC = 0; PC < length;)
3168
    {
3169
      if ((instruction_bits [PC] & BCODE_TARGET) != 0 || PC == 0)
3170
        {
3171
          tree label = lookup_label (PC);
3172
          flush_quick_stack ();
3173
          if ((instruction_bits [PC] & BCODE_TARGET) != 0)
3174
            java_add_stmt (build1 (LABEL_EXPR, void_type_node, label));
3175
          if ((instruction_bits[PC] & BCODE_VERIFIED) != 0)
3176
            load_type_state (PC);
3177
        }
3178
 
3179
      if (! (instruction_bits [PC] & BCODE_VERIFIED))
3180
        {
3181
          if (dead_code_index == -1)
3182
            {
3183
              /* This is the start of a region of unreachable bytecodes.
3184
                 They still need to be processed in order for EH ranges
3185
                 to get handled correctly.  However, we can simply
3186
                 replace these bytecodes with nops.  */
3187
              dead_code_index = PC;
3188
            }
3189
 
3190
          /* Turn this bytecode into a nop.  */
3191
          byte_ops[PC] = 0x0;
3192
        }
3193
       else
3194
        {
3195
          if (dead_code_index != -1)
3196
            {
3197
              /* We've just reached the end of a region of dead code.  */
3198
              if (extra_warnings)
3199
                warning (0, "unreachable bytecode from %d to before %d",
3200
                         dead_code_index, PC);
3201
              dead_code_index = -1;
3202
            }
3203
        }
3204
 
3205
      /* Handle possible line number entry for this PC.
3206
 
3207
         This code handles out-of-order and multiple line numbers per PC,
3208
         but is optimized for the case of line numbers increasing
3209
         monotonically with PC. */
3210
      if ((instruction_bits[PC] & BCODE_HAS_LINENUMBER) != 0)
3211
        {
3212
          if ((instruction_bits[PC] & BCODE_HAS_MULTI_LINENUMBERS) != 0
3213
              || GET_u2 (linenumber_pointer) != PC)
3214
            linenumber_pointer = linenumber_table;
3215
          while (linenumber_pointer < linenumber_table + linenumber_count * 4)
3216
            {
3217
              int pc = GET_u2 (linenumber_pointer);
3218
              linenumber_pointer += 4;
3219
              if (pc == PC)
3220
                {
3221
                  int line = GET_u2 (linenumber_pointer - 2);
3222
                  input_location = linemap_line_start (line_table, line, 1);
3223
                  if (input_location > max_location)
3224
                    max_location = input_location;
3225
                  if (!(instruction_bits[PC] & BCODE_HAS_MULTI_LINENUMBERS))
3226
                    break;
3227
                }
3228
            }
3229
        }
3230
      maybe_pushlevels (PC);
3231
      PC = process_jvm_instruction (PC, byte_ops, length);
3232
      maybe_poplevels (PC);
3233
    } /* for */
3234
 
3235
  uncache_this_class_ref (method);
3236
 
3237
  if (dead_code_index != -1)
3238
    {
3239
      /* We've just reached the end of a region of dead code.  */
3240
      if (extra_warnings)
3241
        warning (0, "unreachable bytecode from %d to the end of the method",
3242
                 dead_code_index);
3243
    }
3244
 
3245
  DECL_FUNCTION_LAST_LINE (method) = max_location;
3246
}
3247
 
3248
static void
3249
java_push_constant_from_pool (JCF *jcf, int index)
3250
{
3251
  tree c;
3252
  if (JPOOL_TAG (jcf, index) == CONSTANT_String)
3253
    {
3254
      tree name;
3255
      name = get_name_constant (jcf, JPOOL_USHORT1 (jcf, index));
3256
      index = alloc_name_constant (CONSTANT_String, name);
3257
      c = build_ref_from_constant_pool (index);
3258
      c = convert (promote_type (string_type_node), c);
3259
    }
3260
  else if (JPOOL_TAG (jcf, index) == CONSTANT_Class
3261
           || JPOOL_TAG (jcf, index) == CONSTANT_ResolvedClass)
3262
    {
3263
      tree record = get_class_constant (jcf, index);
3264
      c = build_class_ref (record);
3265
    }
3266
  else
3267
    c = get_constant (jcf, index);
3268
  push_value (c);
3269
}
3270
 
3271
int
3272
process_jvm_instruction (int PC, const unsigned char* byte_ops,
3273
                         long length ATTRIBUTE_UNUSED)
3274
{
3275
  const char *opname; /* Temporary ??? */
3276
  int oldpc = PC; /* PC at instruction start. */
3277
 
3278
  /* If the instruction is at the beginning of an exception handler,
3279
     replace the top of the stack with the thrown object reference.  */
3280
  if (instruction_bits [PC] & BCODE_EXCEPTION_TARGET)
3281
    {
3282
      /* Note that the verifier will not emit a type map at all for
3283
         dead exception handlers.  In this case we just ignore the
3284
         situation.  */
3285
      if ((instruction_bits[PC] & BCODE_VERIFIED) != 0)
3286
        {
3287
          tree type = pop_type (promote_type (throwable_type_node));
3288
          push_value (build_exception_object_ref (type));
3289
        }
3290
    }
3291
 
3292
  switch (byte_ops[PC++])
3293
    {
3294
#define JAVAOP(OPNAME, OPCODE, OPKIND, OPERAND_TYPE, OPERAND_VALUE) \
3295
    case OPCODE: \
3296
      opname = #OPNAME; \
3297
      OPKIND(OPERAND_TYPE, OPERAND_VALUE); \
3298
      break;
3299
 
3300
#define RET(OPERAND_TYPE, OPERAND_VALUE)                                \
3301
  {                                                                     \
3302
    int saw_index = 0;                                                   \
3303
    int index     = OPERAND_VALUE;                                      \
3304
    (void) saw_index; /* Avoid set but not used warning.  */            \
3305
    build_java_ret                                                      \
3306
      (find_local_variable (index, return_address_type_node, oldpc));   \
3307
  }
3308
 
3309
#define JSR(OPERAND_TYPE, OPERAND_VALUE) \
3310
  {                                                 \
3311
    /* OPERAND_VALUE may have side-effects on PC */ \
3312
    int opvalue = OPERAND_VALUE;                    \
3313
    build_java_jsr (oldpc + opvalue, PC);           \
3314
  }
3315
 
3316
/* Push a constant onto the stack. */
3317
#define PUSHC(OPERAND_TYPE, OPERAND_VALUE) \
3318
  { int saw_index = 0;  int ival = (OPERAND_VALUE); \
3319
    if (saw_index) java_push_constant_from_pool (current_jcf, ival); \
3320
    else expand_java_pushc (ival, OPERAND_TYPE##_type_node); }
3321
 
3322
/* internal macro added for use by the WIDE case */
3323
#define LOAD_INTERNAL(OPTYPE, OPVALUE) \
3324
  expand_load_internal (OPVALUE, type_map[OPVALUE], oldpc);
3325
 
3326
/* Push a local variable onto the operand stack. */
3327
#define LOAD(OPERAND_TYPE, OPERAND_VALUE) \
3328
  { \
3329
    /* have to do this since OPERAND_VALUE may have side-effects */ \
3330
    int opvalue = OPERAND_VALUE; \
3331
    LOAD_INTERNAL(OPERAND_TYPE##_type_node, opvalue); \
3332
  }
3333
 
3334
#define RETURN(OPERAND_TYPE, OPERAND_VALUE) \
3335
  expand_java_return (OPERAND_TYPE##_type_node)
3336
 
3337
#define REM_EXPR TRUNC_MOD_EXPR
3338
#define BINOP(OPERAND_TYPE, OPERAND_VALUE) \
3339
  expand_java_binop (OPERAND_TYPE##_type_node, OPERAND_VALUE##_EXPR)
3340
 
3341
#define FIELD(IS_STATIC, IS_PUT) \
3342
  expand_java_field_op (IS_STATIC, IS_PUT, IMMEDIATE_u2)
3343
 
3344
#define TEST(OPERAND_TYPE, CONDITION) \
3345
  expand_test (CONDITION##_EXPR, OPERAND_TYPE##_type_node, oldpc+IMMEDIATE_s2)
3346
 
3347
#define COND(OPERAND_TYPE, CONDITION) \
3348
  expand_cond (CONDITION##_EXPR, OPERAND_TYPE##_type_node, oldpc+IMMEDIATE_s2)
3349
 
3350
#define BRANCH(OPERAND_TYPE, OPERAND_VALUE) \
3351
  BRANCH_##OPERAND_TYPE (OPERAND_VALUE)
3352
 
3353
#define BRANCH_GOTO(OPERAND_VALUE) \
3354
  expand_java_goto (oldpc + OPERAND_VALUE)
3355
 
3356
#define BRANCH_CALL(OPERAND_VALUE) \
3357
  expand_java_call (oldpc + OPERAND_VALUE, oldpc)
3358
 
3359
#if 0
3360
#define BRANCH_RETURN(OPERAND_VALUE) \
3361
  { \
3362
    tree type = OPERAND_TYPE##_type_node; \
3363
    tree value = find_local_variable (OPERAND_VALUE, type, oldpc); \
3364
    expand_java_ret (value); \
3365
  }
3366
#endif
3367
 
3368
#define NOT_IMPL(OPERAND_TYPE, OPERAND_VALUE) \
3369
          fprintf (stderr, "%3d: %s ", oldpc, opname); \
3370
          fprintf (stderr, "(not implemented)\n")
3371
#define NOT_IMPL1(OPERAND_VALUE) \
3372
          fprintf (stderr, "%3d: %s ", oldpc, opname); \
3373
          fprintf (stderr, "(not implemented)\n")
3374
 
3375
#define BRANCH_RETURN(OPERAND_VALUE) NOT_IMPL1(OPERAND_VALUE)
3376
 
3377
#define STACK(SUBOP, COUNT) STACK_##SUBOP (COUNT)
3378
 
3379
#define STACK_POP(COUNT) java_stack_pop (COUNT)
3380
 
3381
#define STACK_SWAP(COUNT) java_stack_swap()
3382
 
3383
#define STACK_DUP(COUNT) java_stack_dup (COUNT, 0)
3384
#define STACK_DUPx1(COUNT) java_stack_dup (COUNT, 1)
3385
#define STACK_DUPx2(COUNT) java_stack_dup (COUNT, 2)
3386
 
3387
#define SWITCH(OPERAND_TYPE, TABLE_OR_LOOKUP) \
3388
  PC = (PC + 3) / 4 * 4; TABLE_OR_LOOKUP##_SWITCH
3389
 
3390
#define LOOKUP_SWITCH \
3391
  { jint default_offset = IMMEDIATE_s4;  jint npairs = IMMEDIATE_s4; \
3392
    tree selector = pop_value (INT_type_node); \
3393
    tree switch_expr = expand_java_switch (selector, oldpc + default_offset); \
3394
    while (--npairs >= 0) \
3395
      { \
3396
        jint match = IMMEDIATE_s4; jint offset = IMMEDIATE_s4; \
3397
        expand_java_add_case (switch_expr, match, oldpc + offset); \
3398
      } \
3399
  }
3400
 
3401
#define TABLE_SWITCH \
3402
  { jint default_offset = IMMEDIATE_s4; \
3403
    jint low = IMMEDIATE_s4; jint high = IMMEDIATE_s4; \
3404
    tree selector = pop_value (INT_type_node); \
3405
    tree switch_expr = expand_java_switch (selector, oldpc + default_offset); \
3406
    for (; low <= high; low++) \
3407
      { \
3408
        jint offset = IMMEDIATE_s4; \
3409
        expand_java_add_case (switch_expr, low, oldpc + offset); \
3410
      } \
3411
  }
3412
 
3413
#define INVOKE(MAYBE_STATIC, IS_INTERFACE) \
3414
  { int opcode = byte_ops[PC-1]; \
3415
    int method_ref_index = IMMEDIATE_u2; \
3416
    int nargs; \
3417
    if (IS_INTERFACE) { nargs = IMMEDIATE_u1;  (void) IMMEDIATE_u1; } \
3418
    else nargs = -1; \
3419
    expand_invoke (opcode, method_ref_index, nargs); \
3420
  }
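/* Editorial note (not part of the original source): for invokeinterface
   the first extra byte read above is the historical `count' operand and
   the second is the mandatory zero byte; the other invoke* opcodes carry
   only the two-byte constant-pool index.  */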
3421
 
3422
/* Handle new, checkcast, instanceof */
3423
#define OBJECT(TYPE, OP) \
3424
  expand_java_##OP (get_class_constant (current_jcf, IMMEDIATE_u2))
3425
 
3426
#define ARRAY(OPERAND_TYPE, SUBOP) ARRAY_##SUBOP(OPERAND_TYPE)
3427
 
3428
#define ARRAY_LOAD(OPERAND_TYPE)                        \
3429
  {                                                     \
3430
    expand_java_arrayload( OPERAND_TYPE##_type_node );  \
3431
  }
3432
 
3433
#define ARRAY_STORE(OPERAND_TYPE)                       \
3434
  {                                                     \
3435
    expand_java_arraystore( OPERAND_TYPE##_type_node ); \
3436
  }
3437
 
3438
#define ARRAY_LENGTH(OPERAND_TYPE) expand_java_array_length();
3439
#define ARRAY_NEW(OPERAND_TYPE) ARRAY_NEW_##OPERAND_TYPE()
3440
#define ARRAY_NEW_PTR()                                                 \
3441
    push_value (build_anewarray (get_class_constant (current_jcf,       \
3442
                                                     IMMEDIATE_u2),     \
3443
                                 pop_value (int_type_node)));
3444
#define ARRAY_NEW_NUM()                         \
3445
  {                                             \
3446
    int atype = IMMEDIATE_u1;                   \
3447
    push_value (build_newarray (atype, pop_value (int_type_node)));\
3448
  }
3449
#define ARRAY_NEW_MULTI()                                       \
3450
  {                                                             \
3451
    tree klass = get_class_constant (current_jcf, IMMEDIATE_u2 );       \
3452
    int  ndims = IMMEDIATE_u1;                                  \
3453
    expand_java_multianewarray( klass, ndims );                 \
3454
  }
3455
 
3456
#define UNOP(OPERAND_TYPE, OPERAND_VALUE) \
3457
  push_value (fold_build1 (NEGATE_EXPR, OPERAND_TYPE##_type_node, \
3458
                           pop_value (OPERAND_TYPE##_type_node)));
3459
 
3460
#define CONVERT2(FROM_TYPE, TO_TYPE)                                     \
3461
  {                                                                      \
3462
    push_value (build1 (NOP_EXPR, int_type_node,                         \
3463
                        (convert (TO_TYPE##_type_node,                   \
3464
                                  pop_value (FROM_TYPE##_type_node))))); \
3465
  }
3466
 
3467
#define CONVERT(FROM_TYPE, TO_TYPE)                             \
3468
  {                                                             \
3469
    push_value (convert (TO_TYPE##_type_node,                   \
3470
                         pop_value (FROM_TYPE##_type_node)));   \
3471
  }
3472
 
3473
/* internal macro added for use by the WIDE case
3474
   Added TREE_TYPE (decl) assignment, apbianco  */
3475
#define STORE_INTERNAL(OPTYPE, OPVALUE)                         \
3476
  {                                                             \
3477
    tree decl, value;                                           \
3478
    int index = OPVALUE;                                        \
3479
    tree type = OPTYPE;                                         \
3480
    value = pop_value (type);                                   \
3481
    type = TREE_TYPE (value);                                   \
3482
    decl = find_local_variable (index, type, oldpc);            \
3483
    set_local_type (index, type);                               \
3484
    java_add_stmt (build2 (MODIFY_EXPR, type, decl, value));    \
3485
  }
3486
 
3487
#define STORE(OPERAND_TYPE, OPERAND_VALUE) \
3488
  { \
3489
    /* have to do this since OPERAND_VALUE may have side-effects */ \
3490
    int opvalue = OPERAND_VALUE; \
3491
    STORE_INTERNAL(OPERAND_TYPE##_type_node, opvalue); \
3492
  }
3493
 
3494
#define SPECIAL(OPERAND_TYPE, INSTRUCTION) \
3495
  SPECIAL_##INSTRUCTION(OPERAND_TYPE)
3496
 
3497
#define SPECIAL_ENTER(IGNORED) MONITOR_OPERATION (soft_monitorenter_node)
3498
#define SPECIAL_EXIT(IGNORED)  MONITOR_OPERATION (soft_monitorexit_node)
3499
 
3500
#define MONITOR_OPERATION(call)                 \
3501
  {                                             \
3502
    tree o = pop_value (ptr_type_node);         \
3503
    tree c;                                     \
3504
    flush_quick_stack ();                       \
3505
    c = build_java_monitor (call, o);           \
3506
    TREE_SIDE_EFFECTS (c) = 1;                  \
3507
    java_add_stmt (c);                          \
3508
  }
3509
 
3510
#define SPECIAL_IINC(IGNORED) \
3511
  { \
3512
    unsigned int local_var_index = IMMEDIATE_u1; \
3513
    int ival = IMMEDIATE_s1; \
3514
    expand_iinc(local_var_index, ival, oldpc); \
3515
  }
3516
 
3517
#define SPECIAL_WIDE(IGNORED) \
3518
  { \
3519
    int modified_opcode = IMMEDIATE_u1; \
3520
    unsigned int local_var_index = IMMEDIATE_u2; \
3521
    switch (modified_opcode) \
3522
      { \
3523
      case OPCODE_iinc: \
3524
        { \
3525
          int ival = IMMEDIATE_s2; \
3526
          expand_iinc (local_var_index, ival, oldpc); \
3527
          break; \
3528
        } \
3529
      case OPCODE_iload: \
3530
      case OPCODE_lload: \
3531
      case OPCODE_fload: \
3532
      case OPCODE_dload: \
3533
      case OPCODE_aload: \
3534
        { \
3535
          /* duplicate code from LOAD macro */ \
3536
          LOAD_INTERNAL(operand_type[modified_opcode], local_var_index); \
3537
          break; \
3538
        } \
3539
      case OPCODE_istore: \
3540
      case OPCODE_lstore: \
3541
      case OPCODE_fstore: \
3542
      case OPCODE_dstore: \
3543
      case OPCODE_astore: \
3544
        { \
3545
          STORE_INTERNAL(operand_type[modified_opcode], local_var_index); \
3546
          break; \
3547
        } \
3548
      default: \
3549
        error ("unrecognized wide sub-instruction"); \
3550
      } \
3551
  }
3552
 
3553
#define SPECIAL_THROW(IGNORED) \
3554
  build_java_athrow (pop_value (throwable_type_node))
3555
 
3556
#define SPECIAL_BREAK NOT_IMPL1
3557
#define IMPL          NOT_IMPL
3558
 
3559
#include "javaop.def"
3560
#undef JAVAOP
3561
   default:
3562
    fprintf (stderr, "%3d: unknown(%3d)\n", oldpc, byte_ops[oldpc]);
3563
  }
3564
  return PC;
3565
}
3566
 
3567
/* Return the opcode at PC in the code section pointed to by
3568
   CODE_OFFSET.  */
3569
 
3570
static unsigned char
3571
peek_opcode_at_pc (JCF *jcf, int code_offset, int pc)
3572
{
3573
  unsigned char opcode;
3574
  long absolute_offset = (long)JCF_TELL (jcf);
3575
 
3576
  JCF_SEEK (jcf, code_offset);
3577
  opcode = jcf->read_ptr [pc];
3578
  JCF_SEEK (jcf, absolute_offset);
3579
  return opcode;
3580
}
3581
 
3582
/* Some bytecode compilers emit accurate LocalVariableTable
3583
   attributes. Here's an example:
3584
 
3585
     PC   <t>store_<n>
3586
     PC+1 ...
3587
 
3588
     Attribute "LocalVariableTable"
3589
     slot #<n>: ... (PC: PC+1 length: L)
3590
 
3591
   This is accurate because the local in slot <n> really exists after
3592
   the opcode at PC is executed, hence from PC+1 to PC+1+L.
3593
 
3594
   This procedure recognizes this situation and extends the live range
3595
   of the local in SLOT to START_PC-1 or START_PC-2 (depending on the
3596
   length of the store instruction).
3597
 
3598
   This function is used by `give_name_to_locals' so that a local's
3599
   DECL features a DECL_LOCAL_START_PC such that the first related
3600
   store operation will use DECL as a destination, not an unrelated
3601
   temporary created for the occasion.
3602
 
3603
   This function uses a global (instruction_bits) that `note_instructions' should
3604
   have allocated and filled properly.  */
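/* Editorial usage sketch (assumed, not part of the original source):
   a caller such as `give_name_to_locals' would typically adjust the
   recorded range before using it, along the lines of

       start_pc = maybe_adjust_start_pc (jcf, code_offset, start_pc, slot);
       DECL_LOCAL_START_PC (decl) = start_pc;

   so that the store initializing the slot is attributed to DECL rather
   than to a temporary.  */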
3605
 
3606
int
3607
maybe_adjust_start_pc (struct JCF *jcf, int code_offset,
3608
                       int start_pc, int slot)
3609
{
3610
  int first, index, opcode;
3611
  int pc, insn_pc;
3612
  int wide_found = 0;
3613
 
3614
  if (!start_pc)
3615
    return start_pc;
3616
 
3617
  first = index = -1;
3618
 
3619
  /* Find the last previous instruction and remember it.  */
3620
  for (pc = start_pc-1; pc; pc--)
3621
    if (instruction_bits [pc] & BCODE_INSTRUCTION_START)
3622
      break;
3623
  insn_pc = pc;
3624
 
3625
  /* Retrieve the instruction, handle `wide'. */
3626
  opcode = (int) peek_opcode_at_pc (jcf, code_offset, pc++);
3627
  if (opcode == OPCODE_wide)
3628
    {
3629
      wide_found = 1;
3630
      opcode = (int) peek_opcode_at_pc (jcf, code_offset, pc++);
3631
    }
3632
 
3633
  switch (opcode)
3634
    {
3635
    case OPCODE_astore_0:
3636
    case OPCODE_astore_1:
3637
    case OPCODE_astore_2:
3638
    case OPCODE_astore_3:
3639
      first = OPCODE_astore_0;
3640
      break;
3641
 
3642
    case OPCODE_istore_0:
3643
    case OPCODE_istore_1:
3644
    case OPCODE_istore_2:
3645
    case OPCODE_istore_3:
3646
      first = OPCODE_istore_0;
3647
      break;
3648
 
3649
    case OPCODE_lstore_0:
3650
    case OPCODE_lstore_1:
3651
    case OPCODE_lstore_2:
3652
    case OPCODE_lstore_3:
3653
      first = OPCODE_lstore_0;
3654
      break;
3655
 
3656
    case OPCODE_fstore_0:
3657
    case OPCODE_fstore_1:
3658
    case OPCODE_fstore_2:
3659
    case OPCODE_fstore_3:
3660
      first = OPCODE_fstore_0;
3661
      break;
3662
 
3663
    case OPCODE_dstore_0:
3664
    case OPCODE_dstore_1:
3665
    case OPCODE_dstore_2:
3666
    case OPCODE_dstore_3:
3667
      first = OPCODE_dstore_0;
3668
      break;
3669
 
3670
    case OPCODE_astore:
3671
    case OPCODE_istore:
3672
    case OPCODE_lstore:
3673
    case OPCODE_fstore:
3674
    case OPCODE_dstore:
3675
      index = peek_opcode_at_pc (jcf, code_offset, pc);
3676
      if (wide_found)
3677
        {
3678
          int other = peek_opcode_at_pc (jcf, code_offset, ++pc);
3679
          index = (other << 8) + index;
3680
        }
3681
      break;
3682
    }
3683
 
3684
  /* Now we decide: first >0 means we have a <t>store_<n>, index >0
3685
     means we have a <t>store. */
3686
  if ((first > 0 && opcode - first == slot) || (index > 0 && index == slot))
3687
    start_pc = insn_pc;
3688
 
3689
  return start_pc;
3690
}
3691
 
3692
/* Force the (direct) sub-operands of NODE to be evaluated in left-to-right
3693
   order, as specified by the Java Language Specification.
3694
 
3695
   The problem is that while expand_expr will evaluate its sub-operands in
3696
   left-to-right order, for variables it will just return an rtx (i.e.
3697
   an lvalue) for the variable (rather than an rvalue).  So it is possible
3698
   that a later sub-operand will change the register, and when the
3699
   actual operation is done, it will use the new value, when it should
3700
   have used the original value.
3701
 
3702
   We fix this by using save_expr.  This forces the sub-operand to be
3703
   copied into a fresh virtual register.
3704
 
3705
   For method invocation, we modify the arguments so that a
3706
   left-to-right evaluation order is enforced.  Saved expressions
3707
   will, in CALL_EXPR order, be reused when the call will be expanded.
3708
 
3709
   We also promote outgoing args if needed.  */
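/* Editorial illustration (a sketch, not part of the original source):
   for a Java call such as f(i, i++) the first argument must see the
   value of i from before the increment.  Wrapping each (recursively
   forced) argument in a SAVE_EXPR and chaining the saves in front of
   the call achieves this; schematically the loop below builds

       (save0, save1, f (save0, save1))

   out of COMPOUND_EXPRs, where save0 and save1 are the SAVE_EXPRs of
   the original arguments.  */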
3710
 
3711
tree
3712
force_evaluation_order (tree node)
3713
{
3714
  if (flag_syntax_only)
3715
    return node;
3716
  if (TREE_CODE (node) == CALL_EXPR
3717
      || (TREE_CODE (node) == COMPOUND_EXPR
3718
          && TREE_CODE (TREE_OPERAND (node, 0)) == CALL_EXPR
3719
          && TREE_CODE (TREE_OPERAND (node, 1)) == SAVE_EXPR))
3720
    {
3721
      tree call, cmp;
3722
      int i, nargs;
3723
 
3724
      /* Account for wrapped around ctors.  */
3725
      if (TREE_CODE (node) == COMPOUND_EXPR)
3726
        call = TREE_OPERAND (node, 0);
3727
      else
3728
        call = node;
3729
 
3730
      nargs = call_expr_nargs (call);
3731
 
3732
      /* This reverses the evaluation order. This is a desired effect. */
3733
      for (i = 0, cmp = NULL_TREE; i < nargs; i++)
3734
        {
3735
          tree arg = CALL_EXPR_ARG (call, i);
3736
          /* Promote types smaller than integer.  This is required by
3737
             some ABIs.  */
3738
          tree type = TREE_TYPE (arg);
3739
          tree saved;
3740
          if (targetm.calls.promote_prototypes (type)
3741
              && INTEGRAL_TYPE_P (type)
3742
              && INT_CST_LT_UNSIGNED (TYPE_SIZE (type),
3743
                                      TYPE_SIZE (integer_type_node)))
3744
            arg = fold_convert (integer_type_node, arg);
3745
 
3746
          saved = save_expr (force_evaluation_order (arg));
3747
          cmp = (cmp == NULL_TREE ? saved :
3748
                 build2 (COMPOUND_EXPR, void_type_node, cmp, saved));
3749
 
3750
          CALL_EXPR_ARG (call, i) = saved;
3751
        }
3752
 
3753
      if (cmp && TREE_CODE (cmp) == COMPOUND_EXPR)
3754
        TREE_SIDE_EFFECTS (cmp) = 1;
3755
 
3756
      if (cmp)
3757
        {
3758
          cmp = build2 (COMPOUND_EXPR, TREE_TYPE (node), cmp, node);
3759
          if (TREE_TYPE (cmp) != void_type_node)
3760
            cmp = save_expr (cmp);
3761
          TREE_SIDE_EFFECTS (cmp) = 1;
3762
          node = cmp;
3763
        }
3764
    }
3765
  return node;
3766
}
3767
 
3768
/* Build a node to represent empty statements and blocks. */
3769
 
3770
tree
3771
build_java_empty_stmt (void)
3772
{
3773
  tree t = build_empty_stmt (input_location);
3774
  return t;
3775
}
3776
 
3777
/* Promote all args of integral type before generating any code.  */
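/* Editorial illustration (a sketch, not part of the original source):
   for a static method such as int f(short s, long l, byte b), the loop
   below emits roughly

       local_0 = (int) s;   local_3 = (int) b;

   the long argument is left alone, and because it is wide the byte
   argument lives in slot 3 rather than slot 2.  */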
3778
 
3779
static void
3780
promote_arguments (void)
3781
{
3782
  int i;
3783
  tree arg;
3784
  for (arg = DECL_ARGUMENTS (current_function_decl), i = 0;
3785
       arg != NULL_TREE;  arg = DECL_CHAIN (arg), i++)
3786
    {
3787
      tree arg_type = TREE_TYPE (arg);
3788
      if (INTEGRAL_TYPE_P (arg_type)
3789
          && TYPE_PRECISION (arg_type) < 32)
3790
        {
3791
          tree copy = find_local_variable (i, integer_type_node, -1);
3792
          java_add_stmt (build2 (MODIFY_EXPR, integer_type_node,
3793
                                 copy,
3794
                                 fold_convert (integer_type_node, arg)));
3795
        }
3796
      if (TYPE_IS_WIDE (arg_type))
3797
        i++;
3798
    }
3799
}
3800
 
3801
/* Create a local variable that points to the constant pool.  */
3802
 
3803
static void
3804
cache_cpool_data_ref (void)
3805
{
3806
  if (optimize)
3807
    {
3808
      tree cpool;
3809
      tree d = build_constant_data_ref (flag_indirect_classes);
3810
      tree cpool_ptr = build_decl (input_location, VAR_DECL, NULL_TREE,
3811
                                   build_pointer_type (TREE_TYPE (d)));
3812
      java_add_local_var (cpool_ptr);
3813
      TREE_CONSTANT (cpool_ptr) = 1;
3814
 
3815
      java_add_stmt (build2 (MODIFY_EXPR, TREE_TYPE (cpool_ptr),
3816
                             cpool_ptr, build_address_of (d)));
3817
      cpool = build1 (INDIRECT_REF, TREE_TYPE (d), cpool_ptr);
3818
      TREE_THIS_NOTRAP (cpool) = 1;
3819
      TYPE_CPOOL_DATA_REF (output_class) = cpool;
3820
    }
3821
}
3822
 
3823
#include "gt-java-expr.h"
