OpenCores
URL https://opencores.org/ocsvn/openrisc/openrisc/trunk

Subversion Repositories openrisc

[/] [openrisc/] [trunk/] [gnu-old/] [gcc-4.2.2/] [gcc/] [stmt.c] - Blame information for rev 832

Go to most recent revision | Details | Compare with Previous | View Log

Line No. Rev Author Line
1 38 julius
/* Expands front end tree to back end RTL for GCC
2
   Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997,
3
   1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
4
   Free Software Foundation, Inc.
5
 
6
This file is part of GCC.
7
 
8
GCC is free software; you can redistribute it and/or modify it under
9
the terms of the GNU General Public License as published by the Free
10
Software Foundation; either version 3, or (at your option) any later
11
version.
12
 
13
GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14
WARRANTY; without even the implied warranty of MERCHANTABILITY or
15
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
16
for more details.
17
 
18
You should have received a copy of the GNU General Public License
19
along with GCC; see the file COPYING3.  If not see
20
<http://www.gnu.org/licenses/>.  */
21
 
22
/* This file handles the generation of rtl code from tree structure
23
   above the level of expressions, using subroutines in exp*.c and emit-rtl.c.
24
   The functions whose names start with `expand_' are called by the
25
   expander to generate RTL instructions for various kinds of constructs.  */
26
 
27
#include "config.h"
28
#include "system.h"
29
#include "coretypes.h"
30
#include "tm.h"
31
 
32
#include "rtl.h"
33
#include "hard-reg-set.h"
34
#include "tree.h"
35
#include "tm_p.h"
36
#include "flags.h"
37
#include "except.h"
38
#include "function.h"
39
#include "insn-config.h"
40
#include "expr.h"
41
#include "libfuncs.h"
42
#include "recog.h"
43
#include "machmode.h"
44
#include "toplev.h"
45
#include "output.h"
46
#include "ggc.h"
47
#include "langhooks.h"
48
#include "predict.h"
49
#include "optabs.h"
50
#include "target.h"
51
#include "regs.h"
52
 
53
/* Functions and data structures for expanding case statements.  */

/* Case label structure, used to hold info on labels within case
   statements.  We handle "range" labels; for a single-value label
   as in C, the high and low limits are the same.

   We start with a vector of case nodes sorted in ascending order, and
   the default label as the last element in the vector.  Before expanding
   to RTL, we transform this vector into a list linked via the RIGHT
   fields in the case_node struct.  Nodes with higher case values are
   later in the list.

   Switch statements can be output in three forms.  A branch table is
   used if there are more than a few labels and the labels are dense
   within the range between the smallest and largest case value.  If a
   branch table is used, no further manipulations are done with the case
   node chain.

   The alternative to the use of a branch table is to generate a series
   of compare and jump insns.  When that is done, we use the LEFT, RIGHT,
   and PARENT fields to hold a binary tree.  Initially the tree is
   totally unbalanced, with everything on the right.  We balance the tree
   with nodes on the left having lower case values than the parent
   and nodes on the right having higher values.  We then output the tree
   in order.

   For very small, suitable switch statements, we can generate a series
   of simple bit test and branches instead.  */

/* GTY(()) marks the struct for the garbage-collector type machinery.  */
struct case_node GTY(())
{
  struct case_node      *left;  /* Left son in binary tree */
  struct case_node      *right; /* Right son in binary tree; also node chain */
  struct case_node      *parent; /* Parent of node in binary tree */
  tree                  low;    /* Lowest index value for this label */
  tree                  high;   /* Highest index value for this label */
  tree                  code_label; /* Label to jump to when node matches */
};

typedef struct case_node case_node;
typedef struct case_node *case_node_ptr;
94
 
95
/* These are used by estimate_case_costs and balance_case_nodes.  */

/* This must be a signed type, and non-ANSI compilers lack signed char.  */
static short cost_table_[129];
static int use_cost_table;
static int cost_table_initialized;

/* Special care is needed because we allow -1, but TREE_INT_CST_LOW
   is unsigned.  The +1 bias maps the index range [-1, 127] onto the
   valid array range [0, 128].  */
#define COST_TABLE(I)  cost_table_[(unsigned HOST_WIDE_INT) ((I) + 1)]
105
 
106
static int n_occurrences (int, const char *);
107
static bool tree_conflicts_with_clobbers_p (tree, HARD_REG_SET *);
108
static void expand_nl_goto_receiver (void);
109
static bool check_operand_nalternatives (tree, tree);
110
static bool check_unique_operand_names (tree, tree);
111
static char *resolve_operand_name_1 (char *, tree, tree);
112
static void expand_null_return_1 (void);
113
static void expand_value_return (rtx);
114
static int estimate_case_costs (case_node_ptr);
115
static bool lshift_cheap_p (void);
116
static int case_bit_test_cmp (const void *, const void *);
117
static void emit_case_bit_tests (tree, tree, tree, tree, case_node_ptr, rtx);
118
static void balance_case_nodes (case_node_ptr *, case_node_ptr);
119
static int node_has_low_bound (case_node_ptr, tree);
120
static int node_has_high_bound (case_node_ptr, tree);
121
static int node_is_bounded (case_node_ptr, tree);
122
static void emit_case_nodes (rtx, case_node_ptr, rtx, tree);
123
static struct case_node *add_case_node (struct case_node *, tree,
124
                                        tree, tree, tree);
125
 
126
 
127
/* Return the rtx-label that corresponds to a LABEL_DECL,
128
   creating it if necessary.  */
129
 
130
rtx
131
label_rtx (tree label)
132
{
133
  gcc_assert (TREE_CODE (label) == LABEL_DECL);
134
 
135
  if (!DECL_RTL_SET_P (label))
136
    {
137
      rtx r = gen_label_rtx ();
138
      SET_DECL_RTL (label, r);
139
      if (FORCED_LABEL (label) || DECL_NONLOCAL (label))
140
        LABEL_PRESERVE_P (r) = 1;
141
    }
142
 
143
  return DECL_RTL (label);
144
}
145
 
146
/* As above, but also put it on the forced-reference list of the
147
   function that contains it.  */
148
rtx
149
force_label_rtx (tree label)
150
{
151
  rtx ref = label_rtx (label);
152
  tree function = decl_function_context (label);
153
  struct function *p;
154
 
155
  gcc_assert (function);
156
 
157
  if (function != current_function_decl)
158
    p = find_function_data (function);
159
  else
160
    p = cfun;
161
 
162
  p->expr->x_forced_labels = gen_rtx_EXPR_LIST (VOIDmode, ref,
163
                                                p->expr->x_forced_labels);
164
  return ref;
165
}
166
 
167
/* Add an unconditional jump to LABEL as the next sequential instruction.
   Flushes any pending stack adjustment first, and emits a barrier after
   the jump to mark the end of the fall-through path.  */

void
emit_jump (rtx label)
{
  do_pending_stack_adjust ();
  emit_jump_insn (gen_jump (label));
  emit_barrier ();
}
176
 
177
/* Emit code to jump to the address
178
   specified by the pointer expression EXP.  */
179
 
180
void
181
expand_computed_goto (tree exp)
182
{
183
  rtx x = expand_normal (exp);
184
 
185
  x = convert_memory_address (Pmode, x);
186
 
187
  do_pending_stack_adjust ();
188
  emit_indirect_jump (x);
189
}
190
 
191
/* Handle goto statements and the labels that they can go to.  */
192
 
193
/* Specify the location in the RTL code of a label LABEL,
194
   which is a LABEL_DECL tree node.
195
 
196
   This is used for the kind of label that the user can jump to with a
197
   goto statement, and for alternatives of a switch or case statement.
198
   RTL labels generated for loops and conditionals don't go through here;
199
   they are generated directly at the RTL level, by other functions below.
200
 
201
   Note that this has nothing to do with defining label *names*.
202
   Languages vary in how they do that and what that even means.  */
203
 
204
void
205
expand_label (tree label)
206
{
207
  rtx label_r = label_rtx (label);
208
 
209
  do_pending_stack_adjust ();
210
  emit_label (label_r);
211
  if (DECL_NAME (label))
212
    LABEL_NAME (DECL_RTL (label)) = IDENTIFIER_POINTER (DECL_NAME (label));
213
 
214
  if (DECL_NONLOCAL (label))
215
    {
216
      expand_nl_goto_receiver ();
217
      nonlocal_goto_handler_labels
218
        = gen_rtx_EXPR_LIST (VOIDmode, label_r,
219
                             nonlocal_goto_handler_labels);
220
    }
221
 
222
  if (FORCED_LABEL (label))
223
    forced_labels = gen_rtx_EXPR_LIST (VOIDmode, label_r, forced_labels);
224
 
225
  if (DECL_NONLOCAL (label) || FORCED_LABEL (label))
226
    maybe_set_first_label_num (label_r);
227
}
228
 
229
/* Generate RTL code for a `goto' statement with target label LABEL.
230
   LABEL should be a LABEL_DECL tree node that was or will later be
231
   defined with `expand_label'.  */
232
 
233
void
234
expand_goto (tree label)
235
{
236
#ifdef ENABLE_CHECKING
237
  /* Check for a nonlocal goto to a containing function.  Should have
238
     gotten translated to __builtin_nonlocal_goto.  */
239
  tree context = decl_function_context (label);
240
  gcc_assert (!context || context == current_function_decl);
241
#endif
242
 
243
  emit_jump (label_rtx (label));
244
}
245
 
246
/* Return the number of times character C occurs in string S.  */
static int
n_occurrences (int c, const char *s)
{
  int count = 0;

  for (; *s != '\0'; s++)
    if (*s == c)
      count++;

  return count;
}
255
 
256
/* Generate RTL for an asm statement (explicit assembler code).
257
   STRING is a STRING_CST node containing the assembler code text,
258
   or an ADDR_EXPR containing a STRING_CST.  VOL nonzero means the
259
   insn is volatile; don't optimize it.  */
260
 
261
static void
262
expand_asm (tree string, int vol)
263
{
264
  rtx body;
265
 
266
  if (TREE_CODE (string) == ADDR_EXPR)
267
    string = TREE_OPERAND (string, 0);
268
 
269
  body = gen_rtx_ASM_INPUT (VOIDmode,
270
                            ggc_strdup (TREE_STRING_POINTER (string)));
271
 
272
  MEM_VOLATILE_P (body) = vol;
273
 
274
  emit_insn (body);
275
}
276
 
277
/* Parse the output constraint pointed to by *CONSTRAINT_P.  It is the
   OPERAND_NUMth output operand, indexed from zero.  There are NINPUTS
   inputs and NOUTPUTS outputs to this extended-asm.  Upon return,
   *ALLOWS_MEM will be TRUE iff the constraint allows the use of a
   memory operand.  Similarly, *ALLOWS_REG will be TRUE iff the
   constraint allows the use of a register operand.  And, *IS_INOUT
   will be true if the operand is read-write, i.e., if it is used as
   an input as well as an output.  If *CONSTRAINT_P is not in
   canonical form, it will be made canonical.  (Note that `+' will be
   replaced with `=' as part of this process.)

   Returns TRUE if all went well; FALSE if an error occurred.  */

bool
parse_output_constraint (const char **constraint_p, int operand_num,
                         int ninputs, int noutputs, bool *allows_mem,
                         bool *allows_reg, bool *is_inout)
{
  const char *constraint = *constraint_p;
  const char *p;

  /* Assume the constraint doesn't allow the use of either a register
     or memory.  */
  *allows_mem = false;
  *allows_reg = false;

  /* Allow the `=' or `+' to not be at the beginning of the string,
     since it wasn't explicitly documented that way, and there is a
     large body of code that puts it last.  Swap the character to
     the front, so as not to uglify any place else.  */
  p = strchr (constraint, '=');
  if (!p)
    p = strchr (constraint, '+');

  /* If the string doesn't contain an `=', issue an error
     message.  */
  if (!p)
    {
      error ("output operand constraint lacks %<=%>");
      return false;
    }

  /* If the constraint begins with `+', then the operand is both read
     from and written to.  */
  *is_inout = (*p == '+');

  /* Canonicalize the output constraint so that it begins with `='.  */
  if (p != constraint || *is_inout)
    {
      char *buf;
      size_t c_len = strlen (constraint);

      if (p != constraint)
        warning (0, "output constraint %qc for operand %d "
                 "is not at the beginning",
                 *p, operand_num);

      /* Make a copy of the constraint.  */
      buf = alloca (c_len + 1);
      strcpy (buf, constraint);
      /* Swap the first character and the `=' or `+'.  */
      buf[p - constraint] = buf[0];
      /* Make sure the first character is an `='.  (Until we do this,
         it might be a `+'.)  */
      buf[0] = '=';
      /* Replace the constraint with the canonicalized string.  */
      *constraint_p = ggc_alloc_string (buf, c_len);
      constraint = *constraint_p;
    }

  /* Loop through the constraint string, skipping the leading `='.
     CONSTRAINT_LEN handles multi-character constraints.  */
  for (p = constraint + 1; *p; p += CONSTRAINT_LEN (*p, p))
    switch (*p)
      {
      case '+':
      case '=':
        error ("operand constraint contains incorrectly positioned "
               "%<+%> or %<=%>");
        return false;

      case '%':
        if (operand_num + 1 == ninputs + noutputs)
          {
            error ("%<%%%> constraint used with last operand");
            return false;
          }
        break;

      case 'V':  case 'm':  case 'o':
        *allows_mem = true;
        break;

      case '?':  case '!':  case '*':  case '&':  case '#':
      case 'E':  case 'F':  case 'G':  case 'H':
      case 's':  case 'i':  case 'n':
      case 'I':  case 'J':  case 'K':  case 'L':  case 'M':
      case 'N':  case 'O':  case 'P':  case ',':
        break;

      case '0':  case '1':  case '2':  case '3':  case '4':
      case '5':  case '6':  case '7':  case '8':  case '9':
      case '[':
        error ("matching constraint not valid in output operand");
        return false;

      case '<':  case '>':
        /* ??? Before flow, auto inc/dec insns are not supposed to exist,
           excepting those that expand_call created.  So match memory
           and hope.  */
        *allows_mem = true;
        break;

      case 'g':  case 'X':
        *allows_reg = true;
        *allows_mem = true;
        break;

      case 'p': case 'r':
        *allows_reg = true;
        break;

      default:
        if (!ISALPHA (*p))
          break;
        /* Machine-specific constraint letters: consult the backend.  */
        if (REG_CLASS_FROM_CONSTRAINT (*p, p) != NO_REGS)
          *allows_reg = true;
#ifdef EXTRA_CONSTRAINT_STR
        else if (EXTRA_ADDRESS_CONSTRAINT (*p, p))
          *allows_reg = true;
        else if (EXTRA_MEMORY_CONSTRAINT (*p, p))
          *allows_mem = true;
        else
          {
            /* Otherwise we can't assume anything about the nature of
               the constraint except that it isn't purely registers.
               Treat it like "g" and hope for the best.  */
            *allows_reg = true;
            *allows_mem = true;
          }
#endif
        break;
      }

  return true;
}
422
 
423
/* Similar, but for input constraints.  *CONSTRAINT_P is the INPUT_NUMth
   input constraint; CONSTRAINTS is the array of all constraint strings
   (outputs first), used to resolve matching ("0".."9") constraints.
   NINOUT is the number of read-write operands.  Sets *ALLOWS_MEM and
   *ALLOWS_REG as for parse_output_constraint.

   Returns TRUE if all went well; FALSE if an error occurred.  */

bool
parse_input_constraint (const char **constraint_p, int input_num,
                        int ninputs, int noutputs, int ninout,
                        const char * const * constraints,
                        bool *allows_mem, bool *allows_reg)
{
  const char *constraint = *constraint_p;
  const char *orig_constraint = constraint;
  size_t c_len = strlen (constraint);
  size_t j;
  bool saw_match = false;

  /* Assume the constraint doesn't allow the use of either
     a register or memory.  */
  *allows_mem = false;
  *allows_reg = false;

  /* Make sure constraint has neither `=', `+', nor '&'.  */

  for (j = 0; j < c_len; j += CONSTRAINT_LEN (constraint[j], constraint+j))
    switch (constraint[j])
      {
      case '+':  case '=':  case '&':
        /* Only diagnose these in the user's own constraint, not in one
           we substituted while resolving a matching constraint.  */
        if (constraint == orig_constraint)
          {
            error ("input operand constraint contains %qc", constraint[j]);
            return false;
          }
        break;

      case '%':
        if (constraint == orig_constraint
            && input_num + 1 == ninputs - ninout)
          {
            error ("%<%%%> constraint used with last operand");
            return false;
          }
        break;

      case 'V':  case 'm':  case 'o':
        *allows_mem = true;
        break;

      case '<':  case '>':
      case '?':  case '!':  case '*':  case '#':
      case 'E':  case 'F':  case 'G':  case 'H':
      case 's':  case 'i':  case 'n':
      case 'I':  case 'J':  case 'K':  case 'L':  case 'M':
      case 'N':  case 'O':  case 'P':  case ',':
        break;

        /* Whether or not a numeric constraint allows a register is
           decided by the matching constraint, and so there is no need
           to do anything special with them.  We must handle them in
           the default case, so that we don't unnecessarily force
           operands to memory.  */
      case '0':  case '1':  case '2':  case '3':  case '4':
      case '5':  case '6':  case '7':  case '8':  case '9':
        {
          char *end;
          unsigned long match;

          saw_match = true;

          match = strtoul (constraint + j, &end, 10);
          if (match >= (unsigned long) noutputs)
            {
              error ("matching constraint references invalid operand number");
              return false;
            }

          /* Try and find the real constraint for this dup.  Only do this
             if the matching constraint is the only alternative.  */
          if (*end == '\0'
              && (j == 0 || (j == 1 && constraint[0] == '%')))
            {
              /* Restart the scan over the matched output constraint.  */
              constraint = constraints[match];
              *constraint_p = constraint;
              c_len = strlen (constraint);
              j = 0;
              /* ??? At the end of the loop, we will skip the first part of
                 the matched constraint.  This assumes not only that the
                 other constraint is an output constraint, but also that
                 the '=' or '+' come first.  */
              break;
            }
          else
            j = end - constraint;
          /* Anticipate increment at end of loop.  */
          j--;
        }
        /* Fall through.  */

      case 'p':  case 'r':
        *allows_reg = true;
        break;

      case 'g':  case 'X':
        *allows_reg = true;
        *allows_mem = true;
        break;

      default:
        if (! ISALPHA (constraint[j]))
          {
            error ("invalid punctuation %qc in constraint", constraint[j]);
            return false;
          }
        /* Machine-specific constraint letters: consult the backend.  */
        if (REG_CLASS_FROM_CONSTRAINT (constraint[j], constraint + j)
            != NO_REGS)
          *allows_reg = true;
#ifdef EXTRA_CONSTRAINT_STR
        else if (EXTRA_ADDRESS_CONSTRAINT (constraint[j], constraint + j))
          *allows_reg = true;
        else if (EXTRA_MEMORY_CONSTRAINT (constraint[j], constraint + j))
          *allows_mem = true;
        else
          {
            /* Otherwise we can't assume anything about the nature of
               the constraint except that it isn't purely registers.
               Treat it like "g" and hope for the best.  */
            *allows_reg = true;
            *allows_mem = true;
          }
#endif
        break;
      }

  if (saw_match && !*allows_reg)
    warning (0, "matching constraint does not allow a register");

  return true;
}
558
 
559
/* Return DECL iff there's an overlap between *REGS and DECL, where DECL
560
   can be an asm-declared register.  Called via walk_tree.  */
561
 
562
static tree
563
decl_overlaps_hard_reg_set_p (tree *declp, int *walk_subtrees ATTRIBUTE_UNUSED,
564
                              void *data)
565
{
566
  tree decl = *declp;
567
  const HARD_REG_SET *regs = data;
568
 
569
  if (TREE_CODE (decl) == VAR_DECL)
570
    {
571
      if (DECL_HARD_REGISTER (decl)
572
          && REG_P (DECL_RTL (decl))
573
          && REGNO (DECL_RTL (decl)) < FIRST_PSEUDO_REGISTER)
574
        {
575
          rtx reg = DECL_RTL (decl);
576
          unsigned int regno;
577
 
578
          for (regno = REGNO (reg);
579
               regno < (REGNO (reg)
580
                        + hard_regno_nregs[REGNO (reg)][GET_MODE (reg)]);
581
               regno++)
582
            if (TEST_HARD_REG_BIT (*regs, regno))
583
              return decl;
584
        }
585
      walk_subtrees = 0;
586
    }
587
  else if (TYPE_P (decl) || TREE_CODE (decl) == PARM_DECL)
588
    walk_subtrees = 0;
589
  return NULL_TREE;
590
}
591
 
592
/* If there is an overlap between *REGS and DECL, return the first overlap
593
   found.  */
594
tree
595
tree_overlaps_hard_reg_set (tree decl, HARD_REG_SET *regs)
596
{
597
  return walk_tree (&decl, decl_overlaps_hard_reg_set_p, regs, NULL);
598
}
599
 
600
/* Check for overlap between registers marked in CLOBBERED_REGS and
601
   anything inappropriate in T.  Emit error and return the register
602
   variable definition for error, NULL_TREE for ok.  */
603
 
604
static bool
605
tree_conflicts_with_clobbers_p (tree t, HARD_REG_SET *clobbered_regs)
606
{
607
  /* Conflicts between asm-declared register variables and the clobber
608
     list are not allowed.  */
609
  tree overlap = tree_overlaps_hard_reg_set (t, clobbered_regs);
610
 
611
  if (overlap)
612
    {
613
      error ("asm-specifier for variable %qs conflicts with asm clobber list",
614
             IDENTIFIER_POINTER (DECL_NAME (overlap)));
615
 
616
      /* Reset registerness to stop multiple errors emitted for a single
617
         variable.  */
618
      DECL_REGISTER (overlap) = 0;
619
      return true;
620
    }
621
 
622
  return false;
623
}
624
 
625
/* Generate RTL for an asm statement with arguments.
626
   STRING is the instruction template.
627
   OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
628
   Each output or input has an expression in the TREE_VALUE and
629
   and a tree list in TREE_PURPOSE which in turn contains a constraint
630
   name in TREE_VALUE (or NULL_TREE) and a constraint string
631
   in TREE_PURPOSE.
632
   CLOBBERS is a list of STRING_CST nodes each naming a hard register
633
   that is clobbered by this insn.
634
 
635
   Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
636
   Some elements of OUTPUTS may be replaced with trees representing temporary
637
   values.  The caller should copy those temporary values to the originally
638
   specified lvalues.
639
 
640
   VOL nonzero means the insn is volatile; don't optimize it.  */
641
 
642
static void
643
expand_asm_operands (tree string, tree outputs, tree inputs,
644
                     tree clobbers, int vol, location_t locus)
645
{
646
  rtvec argvec, constraintvec;
647
  rtx body;
648
  int ninputs = list_length (inputs);
649
  int noutputs = list_length (outputs);
650
  int ninout;
651
  int nclobbers;
652
  HARD_REG_SET clobbered_regs;
653
  int clobber_conflict_found = 0;
654
  tree tail;
655
  tree t;
656
  int i;
657
  /* Vector of RTX's of evaluated output operands.  */
658
  rtx *output_rtx = alloca (noutputs * sizeof (rtx));
659
  int *inout_opnum = alloca (noutputs * sizeof (int));
660
  rtx *real_output_rtx = alloca (noutputs * sizeof (rtx));
661
  enum machine_mode *inout_mode
662
    = alloca (noutputs * sizeof (enum machine_mode));
663
  const char **constraints
664
    = alloca ((noutputs + ninputs) * sizeof (const char *));
665
  int old_generating_concat_p = generating_concat_p;
666
 
667
  /* An ASM with no outputs needs to be treated as volatile, for now.  */
668
  if (noutputs == 0)
669
    vol = 1;
670
 
671
  if (! check_operand_nalternatives (outputs, inputs))
672
    return;
673
 
674
  string = resolve_asm_operand_names (string, outputs, inputs);
675
 
676
  /* Collect constraints.  */
677
  i = 0;
678
  for (t = outputs; t ; t = TREE_CHAIN (t), i++)
679
    constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
680
  for (t = inputs; t ; t = TREE_CHAIN (t), i++)
681
    constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
682
 
683
  /* Sometimes we wish to automatically clobber registers across an asm.
684
     Case in point is when the i386 backend moved from cc0 to a hard reg --
685
     maintaining source-level compatibility means automatically clobbering
686
     the flags register.  */
687
  clobbers = targetm.md_asm_clobbers (outputs, inputs, clobbers);
688
 
689
  /* Count the number of meaningful clobbered registers, ignoring what
690
     we would ignore later.  */
691
  nclobbers = 0;
692
  CLEAR_HARD_REG_SET (clobbered_regs);
693
  for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
694
    {
695
      const char *regname;
696
 
697
      if (TREE_VALUE (tail) == error_mark_node)
698
        return;
699
      regname = TREE_STRING_POINTER (TREE_VALUE (tail));
700
 
701
      i = decode_reg_name (regname);
702
      if (i >= 0 || i == -4)
703
        ++nclobbers;
704
      else if (i == -2)
705
        error ("unknown register name %qs in %<asm%>", regname);
706
 
707
      /* Mark clobbered registers.  */
708
      if (i >= 0)
709
        {
710
          /* Clobbering the PIC register is an error.  */
711
          if (i == (int) PIC_OFFSET_TABLE_REGNUM)
712
            {
713
              error ("PIC register %qs clobbered in %<asm%>", regname);
714
              return;
715
            }
716
 
717
          SET_HARD_REG_BIT (clobbered_regs, i);
718
        }
719
    }
720
 
721
  /* First pass over inputs and outputs checks validity and sets
722
     mark_addressable if needed.  */
723
 
724
  ninout = 0;
725
  for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
726
    {
727
      tree val = TREE_VALUE (tail);
728
      tree type = TREE_TYPE (val);
729
      const char *constraint;
730
      bool is_inout;
731
      bool allows_reg;
732
      bool allows_mem;
733
 
734
      /* If there's an erroneous arg, emit no insn.  */
735
      if (type == error_mark_node)
736
        return;
737
 
738
      /* Try to parse the output constraint.  If that fails, there's
739
         no point in going further.  */
740
      constraint = constraints[i];
741
      if (!parse_output_constraint (&constraint, i, ninputs, noutputs,
742
                                    &allows_mem, &allows_reg, &is_inout))
743
        return;
744
 
745
      if (! allows_reg
746
          && (allows_mem
747
              || is_inout
748
              || (DECL_P (val)
749
                  && REG_P (DECL_RTL (val))
750
                  && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type))))
751
        lang_hooks.mark_addressable (val);
752
 
753
      if (is_inout)
754
        ninout++;
755
    }
756
 
757
  ninputs += ninout;
758
  if (ninputs + noutputs > MAX_RECOG_OPERANDS)
759
    {
760
      error ("more than %d operands in %<asm%>", MAX_RECOG_OPERANDS);
761
      return;
762
    }
763
 
764
  for (i = 0, tail = inputs; tail; i++, tail = TREE_CHAIN (tail))
765
    {
766
      bool allows_reg, allows_mem;
767
      const char *constraint;
768
 
769
      /* If there's an erroneous arg, emit no insn, because the ASM_INPUT
770
         would get VOIDmode and that could cause a crash in reload.  */
771
      if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)
772
        return;
773
 
774
      constraint = constraints[i + noutputs];
775
      if (! parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
776
                                    constraints, &allows_mem, &allows_reg))
777
        return;
778
 
779
      if (! allows_reg && allows_mem)
780
        lang_hooks.mark_addressable (TREE_VALUE (tail));
781
    }
782
 
783
  /* Second pass evaluates arguments.  */
784
 
785
  ninout = 0;
786
  for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
787
    {
788
      tree val = TREE_VALUE (tail);
789
      tree type = TREE_TYPE (val);
790
      bool is_inout;
791
      bool allows_reg;
792
      bool allows_mem;
793
      rtx op;
794
      bool ok;
795
 
796
      ok = parse_output_constraint (&constraints[i], i, ninputs,
797
                                    noutputs, &allows_mem, &allows_reg,
798
                                    &is_inout);
799
      gcc_assert (ok);
800
 
801
      /* If an output operand is not a decl or indirect ref and our constraint
802
         allows a register, make a temporary to act as an intermediate.
803
         Make the asm insn write into that, then our caller will copy it to
804
         the real output operand.  Likewise for promoted variables.  */
805
 
806
      generating_concat_p = 0;
807
 
808
      real_output_rtx[i] = NULL_RTX;
809
      if ((TREE_CODE (val) == INDIRECT_REF
810
           && allows_mem)
811
          || (DECL_P (val)
812
              && (allows_mem || REG_P (DECL_RTL (val)))
813
              && ! (REG_P (DECL_RTL (val))
814
                    && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
815
          || ! allows_reg
816
          || is_inout)
817
        {
818
          op = expand_expr (val, NULL_RTX, VOIDmode, EXPAND_WRITE);
819
          if (MEM_P (op))
820
            op = validize_mem (op);
821
 
822
          if (! allows_reg && !MEM_P (op))
823
            error ("output number %d not directly addressable", i);
824
          if ((! allows_mem && MEM_P (op))
825
              || GET_CODE (op) == CONCAT)
826
            {
827
              real_output_rtx[i] = op;
828
              op = gen_reg_rtx (GET_MODE (op));
829
              if (is_inout)
830
                emit_move_insn (op, real_output_rtx[i]);
831
            }
832
        }
833
      else
834
        {
835
          op = assign_temp (type, 0, 0, 1);
836
          op = validize_mem (op);
837
          TREE_VALUE (tail) = make_tree (type, op);
838
        }
839
      output_rtx[i] = op;
840
 
841
      generating_concat_p = old_generating_concat_p;
842
 
843
      if (is_inout)
844
        {
845
          inout_mode[ninout] = TYPE_MODE (type);
846
          inout_opnum[ninout++] = i;
847
        }
848
 
849
      if (tree_conflicts_with_clobbers_p (val, &clobbered_regs))
850
        clobber_conflict_found = 1;
851
    }
852
 
853
  /* Make vectors for the expression-rtx, constraint strings,
854
     and named operands.  */
855
 
856
  argvec = rtvec_alloc (ninputs);
857
  constraintvec = rtvec_alloc (ninputs);
858
 
859
  body = gen_rtx_ASM_OPERANDS ((noutputs == 0 ? VOIDmode
860
                                : GET_MODE (output_rtx[0])),
861
                               ggc_strdup (TREE_STRING_POINTER (string)),
862
                               empty_string, 0, argvec, constraintvec,
863
                               locus);
864
 
865
  MEM_VOLATILE_P (body) = vol;
866
 
867
  /* Eval the inputs and put them into ARGVEC.
868
     Put their constraints into ASM_INPUTs and store in CONSTRAINTS.  */
869
 
870
  for (i = 0, tail = inputs; tail; tail = TREE_CHAIN (tail), ++i)
871
    {
872
      bool allows_reg, allows_mem;
873
      const char *constraint;
874
      tree val, type;
875
      rtx op;
876
      bool ok;
877
 
878
      constraint = constraints[i + noutputs];
879
      ok = parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
880
                                   constraints, &allows_mem, &allows_reg);
881
      gcc_assert (ok);
882
 
883
      generating_concat_p = 0;
884
 
885
      val = TREE_VALUE (tail);
886
      type = TREE_TYPE (val);
887
      /* EXPAND_INITIALIZER will not generate code for valid initializer
888
         constants, but will still generate code for other types of operand.
889
         This is the behavior we want for constant constraints.  */
890
      op = expand_expr (val, NULL_RTX, VOIDmode,
891
                        allows_reg ? EXPAND_NORMAL
892
                        : allows_mem ? EXPAND_MEMORY
893
                        : EXPAND_INITIALIZER);
894
 
895
      /* Never pass a CONCAT to an ASM.  */
896
      if (GET_CODE (op) == CONCAT)
897
        op = force_reg (GET_MODE (op), op);
898
      else if (MEM_P (op))
899
        op = validize_mem (op);
900
 
901
      if (asm_operand_ok (op, constraint) <= 0)
902
        {
903
          if (allows_reg && TYPE_MODE (type) != BLKmode)
904
            op = force_reg (TYPE_MODE (type), op);
905
          else if (!allows_mem)
906
            warning (0, "asm operand %d probably doesn%'t match constraints",
907
                     i + noutputs);
908
          else if (MEM_P (op))
909
            {
910
              /* We won't recognize either volatile memory or memory
911
                 with a queued address as available a memory_operand
912
                 at this point.  Ignore it: clearly this *is* a memory.  */
913
            }
914
          else
915
            {
916
              warning (0, "use of memory input without lvalue in "
917
                       "asm operand %d is deprecated", i + noutputs);
918
 
919
              if (CONSTANT_P (op))
920
                {
921
                  rtx mem = force_const_mem (TYPE_MODE (type), op);
922
                  if (mem)
923
                    op = validize_mem (mem);
924
                  else
925
                    op = force_reg (TYPE_MODE (type), op);
926
                }
927
              if (REG_P (op)
928
                  || GET_CODE (op) == SUBREG
929
                  || GET_CODE (op) == CONCAT)
930
                {
931
                  tree qual_type = build_qualified_type (type,
932
                                                         (TYPE_QUALS (type)
933
                                                          | TYPE_QUAL_CONST));
934
                  rtx memloc = assign_temp (qual_type, 1, 1, 1);
935
                  memloc = validize_mem (memloc);
936
                  emit_move_insn (memloc, op);
937
                  op = memloc;
938
                }
939
            }
940
        }
941
 
942
      generating_concat_p = old_generating_concat_p;
943
      ASM_OPERANDS_INPUT (body, i) = op;
944
 
945
      ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, i)
946
        = gen_rtx_ASM_INPUT (TYPE_MODE (type),
947
                             ggc_strdup (constraints[i + noutputs]));
948
 
949
      if (tree_conflicts_with_clobbers_p (val, &clobbered_regs))
950
        clobber_conflict_found = 1;
951
    }
952
 
953
  /* Protect all the operands from the queue now that they have all been
954
     evaluated.  */
955
 
956
  generating_concat_p = 0;
957
 
958
  /* For in-out operands, copy output rtx to input rtx.  */
959
  for (i = 0; i < ninout; i++)
960
    {
961
      int j = inout_opnum[i];
962
      char buffer[16];
963
 
964
      ASM_OPERANDS_INPUT (body, ninputs - ninout + i)
965
        = output_rtx[j];
966
 
967
      sprintf (buffer, "%d", j);
968
      ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, ninputs - ninout + i)
969
        = gen_rtx_ASM_INPUT (inout_mode[i], ggc_strdup (buffer));
970
    }
971
 
972
  generating_concat_p = old_generating_concat_p;
973
 
974
  /* Now, for each output, construct an rtx
975
     (set OUTPUT (asm_operands INSN OUTPUTCONSTRAINT OUTPUTNUMBER
976
                               ARGVEC CONSTRAINTS OPNAMES))
977
     If there is more than one, put them inside a PARALLEL.  */
978
 
979
  if (noutputs == 1 && nclobbers == 0)
980
    {
981
      ASM_OPERANDS_OUTPUT_CONSTRAINT (body) = ggc_strdup (constraints[0]);
982
      emit_insn (gen_rtx_SET (VOIDmode, output_rtx[0], body));
983
    }
984
 
985
  else if (noutputs == 0 && nclobbers == 0)
986
    {
987
      /* No output operands: put in a raw ASM_OPERANDS rtx.  */
988
      emit_insn (body);
989
    }
990
 
991
  else
992
    {
993
      rtx obody = body;
994
      int num = noutputs;
995
 
996
      if (num == 0)
997
        num = 1;
998
 
999
      body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num + nclobbers));
1000
 
1001
      /* For each output operand, store a SET.  */
1002
      for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1003
        {
1004
          XVECEXP (body, 0, i)
1005
            = gen_rtx_SET (VOIDmode,
1006
                           output_rtx[i],
1007
                           gen_rtx_ASM_OPERANDS
1008
                           (GET_MODE (output_rtx[i]),
1009
                            ggc_strdup (TREE_STRING_POINTER (string)),
1010
                            ggc_strdup (constraints[i]),
1011
                            i, argvec, constraintvec, locus));
1012
 
1013
          MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;
1014
        }
1015
 
1016
      /* If there are no outputs (but there are some clobbers)
1017
         store the bare ASM_OPERANDS into the PARALLEL.  */
1018
 
1019
      if (i == 0)
1020
        XVECEXP (body, 0, i++) = obody;
1021
 
1022
      /* Store (clobber REG) for each clobbered register specified.  */
1023
 
1024
      for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
1025
        {
1026
          const char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
1027
          int j = decode_reg_name (regname);
1028
          rtx clobbered_reg;
1029
 
1030
          if (j < 0)
1031
            {
1032
              if (j == -3)      /* `cc', which is not a register */
1033
                continue;
1034
 
1035
              if (j == -4)      /* `memory', don't cache memory across asm */
1036
                {
1037
                  XVECEXP (body, 0, i++)
1038
                    = gen_rtx_CLOBBER (VOIDmode,
1039
                                       gen_rtx_MEM
1040
                                       (BLKmode,
1041
                                        gen_rtx_SCRATCH (VOIDmode)));
1042
                  continue;
1043
                }
1044
 
1045
              /* Ignore unknown register, error already signaled.  */
1046
              continue;
1047
            }
1048
 
1049
          /* Use QImode since that's guaranteed to clobber just one reg.  */
1050
          clobbered_reg = gen_rtx_REG (QImode, j);
1051
 
1052
          /* Do sanity check for overlap between clobbers and respectively
1053
             input and outputs that hasn't been handled.  Such overlap
1054
             should have been detected and reported above.  */
1055
          if (!clobber_conflict_found)
1056
            {
1057
              int opno;
1058
 
1059
              /* We test the old body (obody) contents to avoid tripping
1060
                 over the under-construction body.  */
1061
              for (opno = 0; opno < noutputs; opno++)
1062
                if (reg_overlap_mentioned_p (clobbered_reg, output_rtx[opno]))
1063
                  internal_error ("asm clobber conflict with output operand");
1064
 
1065
              for (opno = 0; opno < ninputs - ninout; opno++)
1066
                if (reg_overlap_mentioned_p (clobbered_reg,
1067
                                             ASM_OPERANDS_INPUT (obody, opno)))
1068
                  internal_error ("asm clobber conflict with input operand");
1069
            }
1070
 
1071
          XVECEXP (body, 0, i++)
1072
            = gen_rtx_CLOBBER (VOIDmode, clobbered_reg);
1073
        }
1074
 
1075
      emit_insn (body);
1076
    }
1077
 
1078
  /* For any outputs that needed reloading into registers, spill them
1079
     back to where they belong.  */
1080
  for (i = 0; i < noutputs; ++i)
1081
    if (real_output_rtx[i])
1082
      emit_move_insn (real_output_rtx[i], output_rtx[i]);
1083
 
1084
  free_temp_slots ();
1085
}
1086
 
1087
void
1088
expand_asm_expr (tree exp)
1089
{
1090
  int noutputs, i;
1091
  tree outputs, tail;
1092
  tree *o;
1093
 
1094
  if (ASM_INPUT_P (exp))
1095
    {
1096
      expand_asm (ASM_STRING (exp), ASM_VOLATILE_P (exp));
1097
      return;
1098
    }
1099
 
1100
  outputs = ASM_OUTPUTS (exp);
1101
  noutputs = list_length (outputs);
1102
  /* o[I] is the place that output number I should be written.  */
1103
  o = (tree *) alloca (noutputs * sizeof (tree));
1104
 
1105
  /* Record the contents of OUTPUTS before it is modified.  */
1106
  for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1107
    o[i] = TREE_VALUE (tail);
1108
 
1109
  /* Generate the ASM_OPERANDS insn; store into the TREE_VALUEs of
1110
     OUTPUTS some trees for where the values were actually stored.  */
1111
  expand_asm_operands (ASM_STRING (exp), outputs, ASM_INPUTS (exp),
1112
                       ASM_CLOBBERS (exp), ASM_VOLATILE_P (exp),
1113
                       input_location);
1114
 
1115
  /* Copy all the intermediate outputs into the specified outputs.  */
1116
  for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1117
    {
1118
      if (o[i] != TREE_VALUE (tail))
1119
        {
1120
          expand_assignment (o[i], TREE_VALUE (tail));
1121
          free_temp_slots ();
1122
 
1123
          /* Restore the original value so that it's correct the next
1124
             time we expand this function.  */
1125
          TREE_VALUE (tail) = o[i];
1126
        }
1127
    }
1128
}
1129
 
1130
/* A subroutine of expand_asm_operands.  Check that all operands have
1131
   the same number of alternatives.  Return true if so.  */
1132
 
1133
static bool
1134
check_operand_nalternatives (tree outputs, tree inputs)
1135
{
1136
  if (outputs || inputs)
1137
    {
1138
      tree tmp = TREE_PURPOSE (outputs ? outputs : inputs);
1139
      int nalternatives
1140
        = n_occurrences (',', TREE_STRING_POINTER (TREE_VALUE (tmp)));
1141
      tree next = inputs;
1142
 
1143
      if (nalternatives + 1 > MAX_RECOG_ALTERNATIVES)
1144
        {
1145
          error ("too many alternatives in %<asm%>");
1146
          return false;
1147
        }
1148
 
1149
      tmp = outputs;
1150
      while (tmp)
1151
        {
1152
          const char *constraint
1153
            = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (tmp)));
1154
 
1155
          if (n_occurrences (',', constraint) != nalternatives)
1156
            {
1157
              error ("operand constraints for %<asm%> differ "
1158
                     "in number of alternatives");
1159
              return false;
1160
            }
1161
 
1162
          if (TREE_CHAIN (tmp))
1163
            tmp = TREE_CHAIN (tmp);
1164
          else
1165
            tmp = next, next = 0;
1166
        }
1167
    }
1168
 
1169
  return true;
1170
}
1171
 
1172
/* A subroutine of expand_asm_operands.  Check that all operand names
1173
   are unique.  Return true if so.  We rely on the fact that these names
1174
   are identifiers, and so have been canonicalized by get_identifier,
1175
   so all we need are pointer comparisons.  */
1176
 
1177
static bool
1178
check_unique_operand_names (tree outputs, tree inputs)
1179
{
1180
  tree i, j;
1181
 
1182
  for (i = outputs; i ; i = TREE_CHAIN (i))
1183
    {
1184
      tree i_name = TREE_PURPOSE (TREE_PURPOSE (i));
1185
      if (! i_name)
1186
        continue;
1187
 
1188
      for (j = TREE_CHAIN (i); j ; j = TREE_CHAIN (j))
1189
        if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
1190
          goto failure;
1191
    }
1192
 
1193
  for (i = inputs; i ; i = TREE_CHAIN (i))
1194
    {
1195
      tree i_name = TREE_PURPOSE (TREE_PURPOSE (i));
1196
      if (! i_name)
1197
        continue;
1198
 
1199
      for (j = TREE_CHAIN (i); j ; j = TREE_CHAIN (j))
1200
        if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
1201
          goto failure;
1202
      for (j = outputs; j ; j = TREE_CHAIN (j))
1203
        if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
1204
          goto failure;
1205
    }
1206
 
1207
  return true;
1208
 
1209
 failure:
1210
  error ("duplicate asm operand name %qs",
1211
         TREE_STRING_POINTER (TREE_PURPOSE (TREE_PURPOSE (i))));
1212
  return false;
1213
}
1214
 
1215
/* A subroutine of expand_asm_operands.  Resolve the names of the operands
1216
   in *POUTPUTS and *PINPUTS to numbers, and replace the name expansions in
1217
   STRING and in the constraints to those numbers.  */
1218
 
1219
tree
1220
resolve_asm_operand_names (tree string, tree outputs, tree inputs)
1221
{
1222
  char *buffer;
1223
  char *p;
1224
  const char *c;
1225
  tree t;
1226
 
1227
  check_unique_operand_names (outputs, inputs);
1228
 
1229
  /* Substitute [<name>] in input constraint strings.  There should be no
1230
     named operands in output constraints.  */
1231
  for (t = inputs; t ; t = TREE_CHAIN (t))
1232
    {
1233
      c = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
1234
      if (strchr (c, '[') != NULL)
1235
        {
1236
          p = buffer = xstrdup (c);
1237
          while ((p = strchr (p, '[')) != NULL)
1238
            p = resolve_operand_name_1 (p, outputs, inputs);
1239
          TREE_VALUE (TREE_PURPOSE (t))
1240
            = build_string (strlen (buffer), buffer);
1241
          free (buffer);
1242
        }
1243
    }
1244
 
1245
  /* Now check for any needed substitutions in the template.  */
1246
  c = TREE_STRING_POINTER (string);
1247
  while ((c = strchr (c, '%')) != NULL)
1248
    {
1249
      if (c[1] == '[')
1250
        break;
1251
      else if (ISALPHA (c[1]) && c[2] == '[')
1252
        break;
1253
      else
1254
        {
1255
          c += 1;
1256
          continue;
1257
        }
1258
    }
1259
 
1260
  if (c)
1261
    {
1262
      /* OK, we need to make a copy so we can perform the substitutions.
1263
         Assume that we will not need extra space--we get to remove '['
1264
         and ']', which means we cannot have a problem until we have more
1265
         than 999 operands.  */
1266
      buffer = xstrdup (TREE_STRING_POINTER (string));
1267
      p = buffer + (c - TREE_STRING_POINTER (string));
1268
 
1269
      while ((p = strchr (p, '%')) != NULL)
1270
        {
1271
          if (p[1] == '[')
1272
            p += 1;
1273
          else if (ISALPHA (p[1]) && p[2] == '[')
1274
            p += 2;
1275
          else
1276
            {
1277
              p += 1;
1278
              continue;
1279
            }
1280
 
1281
          p = resolve_operand_name_1 (p, outputs, inputs);
1282
        }
1283
 
1284
      string = build_string (strlen (buffer), buffer);
1285
      free (buffer);
1286
    }
1287
 
1288
  return string;
1289
}
1290
 
1291
/* A subroutine of resolve_operand_names.  P points to the '[' for a
1292
   potential named operand of the form [<name>].  In place, replace
1293
   the name and brackets with a number.  Return a pointer to the
1294
   balance of the string after substitution.  */
1295
 
1296
static char *
1297
resolve_operand_name_1 (char *p, tree outputs, tree inputs)
1298
{
1299
  char *q;
1300
  int op;
1301
  tree t;
1302
  size_t len;
1303
 
1304
  /* Collect the operand name.  */
1305
  q = strchr (p, ']');
1306
  if (!q)
1307
    {
1308
      error ("missing close brace for named operand");
1309
      return strchr (p, '\0');
1310
    }
1311
  len = q - p - 1;
1312
 
1313
  /* Resolve the name to a number.  */
1314
  for (op = 0, t = outputs; t ; t = TREE_CHAIN (t), op++)
1315
    {
1316
      tree name = TREE_PURPOSE (TREE_PURPOSE (t));
1317
      if (name)
1318
        {
1319
          const char *c = TREE_STRING_POINTER (name);
1320
          if (strncmp (c, p + 1, len) == 0 && c[len] == '\0')
1321
            goto found;
1322
        }
1323
    }
1324
  for (t = inputs; t ; t = TREE_CHAIN (t), op++)
1325
    {
1326
      tree name = TREE_PURPOSE (TREE_PURPOSE (t));
1327
      if (name)
1328
        {
1329
          const char *c = TREE_STRING_POINTER (name);
1330
          if (strncmp (c, p + 1, len) == 0 && c[len] == '\0')
1331
            goto found;
1332
        }
1333
    }
1334
 
1335
  *q = '\0';
1336
  error ("undefined named operand %qs", p + 1);
1337
  op = 0;
1338
 found:
1339
 
1340
  /* Replace the name with the number.  Unfortunately, not all libraries
1341
     get the return value of sprintf correct, so search for the end of the
1342
     generated string by hand.  */
1343
  sprintf (p, "%d", op);
1344
  p = strchr (p, '\0');
1345
 
1346
  /* Verify the no extra buffer space assumption.  */
1347
  gcc_assert (p <= q);
1348
 
1349
  /* Shift the rest of the buffer down to fill the gap.  */
1350
  memmove (p, q + 1, strlen (q + 1) + 1);
1351
 
1352
  return p;
1353
}
1354
 
1355
/* Generate RTL to evaluate the expression EXP.  */
1356
 
1357
void
1358
expand_expr_stmt (tree exp)
1359
{
1360
  rtx value;
1361
  tree type;
1362
 
1363
  value = expand_expr (exp, const0_rtx, VOIDmode, 0);
1364
  type = TREE_TYPE (exp);
1365
 
1366
  /* If all we do is reference a volatile value in memory,
1367
     copy it to a register to be sure it is actually touched.  */
1368
  if (value && MEM_P (value) && TREE_THIS_VOLATILE (exp))
1369
    {
1370
      if (TYPE_MODE (type) == VOIDmode)
1371
        ;
1372
      else if (TYPE_MODE (type) != BLKmode)
1373
        value = copy_to_reg (value);
1374
      else
1375
        {
1376
          rtx lab = gen_label_rtx ();
1377
 
1378
          /* Compare the value with itself to reference it.  */
1379
          emit_cmp_and_jump_insns (value, value, EQ,
1380
                                   expand_normal (TYPE_SIZE (type)),
1381
                                   BLKmode, 0, lab);
1382
          emit_label (lab);
1383
        }
1384
    }
1385
 
1386
  /* Free any temporaries used to evaluate this expression.  */
1387
  free_temp_slots ();
1388
}
1389
 
1390
/* Warn if EXP contains any computations whose results are not used.
1391
   Return 1 if a warning is printed; 0 otherwise.  LOCUS is the
1392
   (potential) location of the expression.  */
1393
 
1394
int
1395
warn_if_unused_value (tree exp, location_t locus)
1396
{
1397
 restart:
1398
  if (TREE_USED (exp) || TREE_NO_WARNING (exp))
1399
    return 0;
1400
 
1401
  /* Don't warn about void constructs.  This includes casting to void,
1402
     void function calls, and statement expressions with a final cast
1403
     to void.  */
1404
  if (VOID_TYPE_P (TREE_TYPE (exp)))
1405
    return 0;
1406
 
1407
  if (EXPR_HAS_LOCATION (exp))
1408
    locus = EXPR_LOCATION (exp);
1409
 
1410
  switch (TREE_CODE (exp))
1411
    {
1412
    case PREINCREMENT_EXPR:
1413
    case POSTINCREMENT_EXPR:
1414
    case PREDECREMENT_EXPR:
1415
    case POSTDECREMENT_EXPR:
1416
    case MODIFY_EXPR:
1417
    case INIT_EXPR:
1418
    case TARGET_EXPR:
1419
    case CALL_EXPR:
1420
    case TRY_CATCH_EXPR:
1421
    case WITH_CLEANUP_EXPR:
1422
    case EXIT_EXPR:
1423
    case VA_ARG_EXPR:
1424
      return 0;
1425
 
1426
    case BIND_EXPR:
1427
      /* For a binding, warn if no side effect within it.  */
1428
      exp = BIND_EXPR_BODY (exp);
1429
      goto restart;
1430
 
1431
    case SAVE_EXPR:
1432
      exp = TREE_OPERAND (exp, 0);
1433
      goto restart;
1434
 
1435
    case TRUTH_ORIF_EXPR:
1436
    case TRUTH_ANDIF_EXPR:
1437
      /* In && or ||, warn if 2nd operand has no side effect.  */
1438
      exp = TREE_OPERAND (exp, 1);
1439
      goto restart;
1440
 
1441
    case COMPOUND_EXPR:
1442
      if (warn_if_unused_value (TREE_OPERAND (exp, 0), locus))
1443
        return 1;
1444
      /* Let people do `(foo (), 0)' without a warning.  */
1445
      if (TREE_CONSTANT (TREE_OPERAND (exp, 1)))
1446
        return 0;
1447
      exp = TREE_OPERAND (exp, 1);
1448
      goto restart;
1449
 
1450
    case COND_EXPR:
1451
      /* If this is an expression with side effects, don't warn; this
1452
         case commonly appears in macro expansions.  */
1453
      if (TREE_SIDE_EFFECTS (exp))
1454
        return 0;
1455
      goto warn;
1456
 
1457
    case INDIRECT_REF:
1458
      /* Don't warn about automatic dereferencing of references, since
1459
         the user cannot control it.  */
1460
      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == REFERENCE_TYPE)
1461
        {
1462
          exp = TREE_OPERAND (exp, 0);
1463
          goto restart;
1464
        }
1465
      /* Fall through.  */
1466
 
1467
    default:
1468
      /* Referencing a volatile value is a side effect, so don't warn.  */
1469
      if ((DECL_P (exp) || REFERENCE_CLASS_P (exp))
1470
          && TREE_THIS_VOLATILE (exp))
1471
        return 0;
1472
 
1473
      /* If this is an expression which has no operands, there is no value
1474
         to be unused.  There are no such language-independent codes,
1475
         but front ends may define such.  */
1476
      if (EXPRESSION_CLASS_P (exp) && TREE_CODE_LENGTH (TREE_CODE (exp)) == 0)
1477
        return 0;
1478
 
1479
    warn:
1480
      warning (0, "%Hvalue computed is not used", &locus);
1481
      return 1;
1482
    }
1483
}
1484
 
1485
 
1486
/* Generate RTL to return from the current function, with no value.
1487
   (That is, we do not do anything about returning any value.)  */
1488
 
1489
/* Generate RTL to return from the current function, with no value.
   (That is, we do not do anything about returning any value.)  */

void
expand_null_return (void)
{
  /* If this function was declared to return a value, but we
     didn't, clobber the return registers so that they are not
     propagated live to the rest of the function.  */
  clobber_return_register ();

  expand_null_return_1 ();
}
1499
 
1500
/* Generate RTL to return directly from the current function.
1501
   (That is, we bypass any return value.)  */
1502
 
1503
void
1504
expand_naked_return (void)
1505
{
1506
  rtx end_label;
1507
 
1508
  clear_pending_stack_adjust ();
1509
  do_pending_stack_adjust ();
1510
 
1511
  end_label = naked_return_label;
1512
  if (end_label == 0)
1513
    end_label = naked_return_label = gen_label_rtx ();
1514
 
1515
  emit_jump (end_label);
1516
}
1517
 
1518
/* Generate RTL to return from the current function, with value VAL.  */
1519
 
1520
static void
1521
expand_value_return (rtx val)
1522
{
1523
  /* Copy the value to the return location
1524
     unless it's already there.  */
1525
 
1526
  rtx return_reg = DECL_RTL (DECL_RESULT (current_function_decl));
1527
  if (return_reg != val)
1528
    {
1529
      tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
1530
      if (targetm.calls.promote_function_return (TREE_TYPE (current_function_decl)))
1531
      {
1532
        int unsignedp = TYPE_UNSIGNED (type);
1533
        enum machine_mode old_mode
1534
          = DECL_MODE (DECL_RESULT (current_function_decl));
1535
        enum machine_mode mode
1536
          = promote_mode (type, old_mode, &unsignedp, 1);
1537
 
1538
        if (mode != old_mode)
1539
          val = convert_modes (mode, old_mode, val, unsignedp);
1540
      }
1541
      if (GET_CODE (return_reg) == PARALLEL)
1542
        emit_group_load (return_reg, val, type, int_size_in_bytes (type));
1543
      else
1544
        emit_move_insn (return_reg, val);
1545
    }
1546
 
1547
  expand_null_return_1 ();
1548
}
1549
 
1550
/* Output a return with no value.  */
1551
 
1552
static void
1553
expand_null_return_1 (void)
1554
{
1555
  clear_pending_stack_adjust ();
1556
  do_pending_stack_adjust ();
1557
  emit_jump (return_label);
1558
}
1559
 
1560
/* Generate RTL to evaluate the expression RETVAL and return it
1561
   from the current function.  */
1562
 
1563
void
1564
expand_return (tree retval)
1565
{
1566
  rtx result_rtl;
1567
  rtx val = 0;
1568
  tree retval_rhs;
1569
 
1570
  /* If function wants no value, give it none.  */
1571
  if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
1572
    {
1573
      expand_normal (retval);
1574
      expand_null_return ();
1575
      return;
1576
    }
1577
 
1578
  if (retval == error_mark_node)
1579
    {
1580
      /* Treat this like a return of no value from a function that
1581
         returns a value.  */
1582
      expand_null_return ();
1583
      return;
1584
    }
1585
  else if ((TREE_CODE (retval) == MODIFY_EXPR
1586
            || TREE_CODE (retval) == INIT_EXPR)
1587
           && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
1588
    retval_rhs = TREE_OPERAND (retval, 1);
1589
  else
1590
    retval_rhs = retval;
1591
 
1592
  result_rtl = DECL_RTL (DECL_RESULT (current_function_decl));
1593
 
1594
  /* If we are returning the RESULT_DECL, then the value has already
1595
     been stored into it, so we don't have to do anything special.  */
1596
  if (TREE_CODE (retval_rhs) == RESULT_DECL)
1597
    expand_value_return (result_rtl);
1598
 
1599
  /* If the result is an aggregate that is being returned in one (or more)
1600
     registers, load the registers here.  The compiler currently can't handle
1601
     copying a BLKmode value into registers.  We could put this code in a
1602
     more general area (for use by everyone instead of just function
1603
     call/return), but until this feature is generally usable it is kept here
1604
     (and in expand_call).  */
1605
 
1606
  else if (retval_rhs != 0
1607
           && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
1608
           && REG_P (result_rtl))
1609
    {
1610
      int i;
1611
      unsigned HOST_WIDE_INT bitpos, xbitpos;
1612
      unsigned HOST_WIDE_INT padding_correction = 0;
1613
      unsigned HOST_WIDE_INT bytes
1614
        = int_size_in_bytes (TREE_TYPE (retval_rhs));
1615
      int n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
1616
      unsigned int bitsize
1617
        = MIN (TYPE_ALIGN (TREE_TYPE (retval_rhs)), BITS_PER_WORD);
1618
      rtx *result_pseudos = alloca (sizeof (rtx) * n_regs);
1619
      rtx result_reg, src = NULL_RTX, dst = NULL_RTX;
1620
      rtx result_val = expand_normal (retval_rhs);
1621
      enum machine_mode tmpmode, result_reg_mode;
1622
 
1623
      if (bytes == 0)
1624
        {
1625
          expand_null_return ();
1626
          return;
1627
        }
1628
 
1629
      /* If the structure doesn't take up a whole number of words, see
1630
         whether the register value should be padded on the left or on
1631
         the right.  Set PADDING_CORRECTION to the number of padding
1632
         bits needed on the left side.
1633
 
1634
         In most ABIs, the structure will be returned at the least end of
1635
         the register, which translates to right padding on little-endian
1636
         targets and left padding on big-endian targets.  The opposite
1637
         holds if the structure is returned at the most significant
1638
         end of the register.  */
1639
      if (bytes % UNITS_PER_WORD != 0
1640
          && (targetm.calls.return_in_msb (TREE_TYPE (retval_rhs))
1641
              ? !BYTES_BIG_ENDIAN
1642
              : BYTES_BIG_ENDIAN))
1643
        padding_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
1644
                                               * BITS_PER_UNIT));
1645
 
1646
      /* Copy the structure BITSIZE bits at a time.  */
1647
      for (bitpos = 0, xbitpos = padding_correction;
1648
           bitpos < bytes * BITS_PER_UNIT;
1649
           bitpos += bitsize, xbitpos += bitsize)
1650
        {
1651
          /* We need a new destination pseudo each time xbitpos is
1652
             on a word boundary and when xbitpos == padding_correction
1653
             (the first time through).  */
1654
          if (xbitpos % BITS_PER_WORD == 0
1655
              || xbitpos == padding_correction)
1656
            {
1657
              /* Generate an appropriate register.  */
1658
              dst = gen_reg_rtx (word_mode);
1659
              result_pseudos[xbitpos / BITS_PER_WORD] = dst;
1660
 
1661
              /* Clear the destination before we move anything into it.  */
1662
              emit_move_insn (dst, CONST0_RTX (GET_MODE (dst)));
1663
            }
1664
 
1665
          /* We need a new source operand each time bitpos is on a word
1666
             boundary.  */
1667
          if (bitpos % BITS_PER_WORD == 0)
1668
            src = operand_subword_force (result_val,
1669
                                         bitpos / BITS_PER_WORD,
1670
                                         BLKmode);
1671
 
1672
          /* Use bitpos for the source extraction (left justified) and
1673
             xbitpos for the destination store (right justified).  */
1674
          store_bit_field (dst, bitsize, xbitpos % BITS_PER_WORD, word_mode,
1675
                           extract_bit_field (src, bitsize,
1676
                                              bitpos % BITS_PER_WORD, 1,
1677
                                              NULL_RTX, word_mode, word_mode));
1678
        }
1679
 
1680
      tmpmode = GET_MODE (result_rtl);
1681
      if (tmpmode == BLKmode)
1682
        {
1683
          /* Find the smallest integer mode large enough to hold the
1684
             entire structure and use that mode instead of BLKmode
1685
             on the USE insn for the return register.  */
1686
          for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1687
               tmpmode != VOIDmode;
1688
               tmpmode = GET_MODE_WIDER_MODE (tmpmode))
1689
            /* Have we found a large enough mode?  */
1690
            if (GET_MODE_SIZE (tmpmode) >= bytes)
1691
              break;
1692
 
1693
          /* A suitable mode should have been found.  */
1694
          gcc_assert (tmpmode != VOIDmode);
1695
 
1696
          PUT_MODE (result_rtl, tmpmode);
1697
        }
1698
 
1699
      if (GET_MODE_SIZE (tmpmode) < GET_MODE_SIZE (word_mode))
1700
        result_reg_mode = word_mode;
1701
      else
1702
        result_reg_mode = tmpmode;
1703
      result_reg = gen_reg_rtx (result_reg_mode);
1704
 
1705
      for (i = 0; i < n_regs; i++)
1706
        emit_move_insn (operand_subword (result_reg, i, 0, result_reg_mode),
1707
                        result_pseudos[i]);
1708
 
1709
      if (tmpmode != result_reg_mode)
1710
        result_reg = gen_lowpart (tmpmode, result_reg);
1711
 
1712
      expand_value_return (result_reg);
1713
    }
1714
  else if (retval_rhs != 0
1715
           && !VOID_TYPE_P (TREE_TYPE (retval_rhs))
1716
           && (REG_P (result_rtl)
1717
               || (GET_CODE (result_rtl) == PARALLEL)))
1718
    {
1719
      /* Calculate the return value into a temporary (usually a pseudo
1720
         reg).  */
1721
      tree ot = TREE_TYPE (DECL_RESULT (current_function_decl));
1722
      tree nt = build_qualified_type (ot, TYPE_QUALS (ot) | TYPE_QUAL_CONST);
1723
 
1724
      val = assign_temp (nt, 0, 0, 1);
1725
      val = expand_expr (retval_rhs, val, GET_MODE (val), 0);
1726
      val = force_not_mem (val);
1727
      /* Return the calculated value.  */
1728
      expand_value_return (val);
1729
    }
1730
  else
1731
    {
1732
      /* No hard reg used; calculate value into hard return reg.  */
1733
      expand_expr (retval, const0_rtx, VOIDmode, 0);
1734
      expand_value_return (result_rtl);
1735
    }
1736
}
1737
 
1738
/* Given a pointer to a BLOCK node return nonzero if (and only if) the node
1739
   in question represents the outermost pair of curly braces (i.e. the "body
1740
   block") of a function or method.
1741
 
1742
   For any BLOCK node representing a "body block" of a function or method, the
1743
   BLOCK_SUPERCONTEXT of the node will point to another BLOCK node which
1744
   represents the outermost (function) scope for the function or method (i.e.
1745
   the one which includes the formal parameters).  The BLOCK_SUPERCONTEXT of
1746
   *that* node in turn will point to the relevant FUNCTION_DECL node.  */
1747
 
1748
int
1749
is_body_block (tree stmt)
1750
{
1751
  if (lang_hooks.no_body_blocks)
1752
    return 0;
1753
 
1754
  if (TREE_CODE (stmt) == BLOCK)
1755
    {
1756
      tree parent = BLOCK_SUPERCONTEXT (stmt);
1757
 
1758
      if (parent && TREE_CODE (parent) == BLOCK)
1759
        {
1760
          tree grandparent = BLOCK_SUPERCONTEXT (parent);
1761
 
1762
          if (grandparent && TREE_CODE (grandparent) == FUNCTION_DECL)
1763
            return 1;
1764
        }
1765
    }
1766
 
1767
  return 0;
1768
}
1769
 
1770
/* Emit code to restore vital registers at the beginning of a nonlocal goto
   handler.  The frame pointer, the argument pointer (where needed) and,
   optionally, a target-specific receiver pattern are restored; a blockage
   insn keeps the scheduler from reordering any of it.  */
static void
expand_nl_goto_receiver (void)
{
  /* Clobber the FP when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  emit_insn (gen_rtx_CLOBBER (VOIDmode, static_chain_rtx));

#ifdef HAVE_nonlocal_goto
  if (! HAVE_nonlocal_goto)
#endif
    /* First adjust our frame pointer to its actual value.  It was
       previously set to the start of the virtual area corresponding to
       the stacked variables when we branched here and now needs to be
       adjusted to the actual hardware fp value.

       Assignments to virtual registers are converted by
       instantiate_virtual_regs into the corresponding assignment
       to the underlying register (fp in this case) that makes
       the original assignment true.
       So the following insn will actually be
       decrementing fp by STARTING_FRAME_OFFSET.  */
    emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);

#if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
  if (fixed_regs[ARG_POINTER_REGNUM])
    {
#ifdef ELIMINABLE_REGS
      /* If the argument pointer can be eliminated in favor of the
         frame pointer, we don't need to restore it.  We assume here
         that if such an elimination is present, it can always be used.
         This is the case on all known machines; if we don't make this
         assumption, we do unnecessary saving on many machines.  */
      static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
      size_t i;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
        if (elim_regs[i].from == ARG_POINTER_REGNUM
            && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
          break;

      if (i == ARRAY_SIZE (elim_regs))
#endif
        {
          /* Now restore our arg pointer from the address at which it
             was saved in our stack frame.  */
          emit_move_insn (virtual_incoming_args_rtx,
                          copy_to_reg (get_arg_pointer_save_area (cfun)));
        }
    }
#endif

  /* Let the target emit any additional fixup it requires at a
     nonlocal-goto landing point.  */
#ifdef HAVE_nonlocal_goto_receiver
  if (HAVE_nonlocal_goto_receiver)
    emit_insn (gen_nonlocal_goto_receiver ());
#endif

  /* @@@ This is a kludge.  Not all machine descriptions define a blockage
     insn, but we must not allow the code we just generated to be reordered
     by scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  So emit an ASM_INPUT to act as blockage
     insn.  */
  emit_insn (gen_rtx_ASM_INPUT (VOIDmode, ""));
}
1839
 
1840
/* Generate RTL for the automatic variable declaration DECL.
   (Other kinds of declarations are simply ignored if seen here.)

   Four cases are handled: error-typed or incomplete-typed variables get
   placeholder MEMs, register candidates get a pseudo, fixed-size stack
   variables get a stack slot, and dynamically-sized variables get space
   pushed on the stack at run time.  */

void
expand_decl (tree decl)
{
  tree type;

  type = TREE_TYPE (decl);

  /* For a CONST_DECL, set mode, alignment, and sizes from those of the
     type in case this node is used in a reference.  */
  if (TREE_CODE (decl) == CONST_DECL)
    {
      DECL_MODE (decl) = TYPE_MODE (type);
      DECL_ALIGN (decl) = TYPE_ALIGN (type);
      DECL_SIZE (decl) = TYPE_SIZE (type);
      DECL_SIZE_UNIT (decl) = TYPE_SIZE_UNIT (type);
      return;
    }

  /* Otherwise, only automatic variables need any expansion done.  Static and
     external variables, and external functions, will be handled by
     `assemble_variable' (called from finish_decl).  TYPE_DECL requires
     nothing.  PARM_DECLs are handled in `assign_parms'.  */
  if (TREE_CODE (decl) != VAR_DECL)
    return;

  if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
    return;

  /* Create the RTL representation for the variable.  */

  if (type == error_mark_node)
    SET_DECL_RTL (decl, gen_rtx_MEM (BLKmode, const0_rtx));

  else if (DECL_SIZE (decl) == 0)
    /* Variable with incomplete type.  */
    {
      rtx x;
      if (DECL_INITIAL (decl) == 0)
        /* Error message was already done; now avoid a crash.  */
        x = gen_rtx_MEM (BLKmode, const0_rtx);
      else
        /* An initializer is going to decide the size of this array.
           Until we know the size, represent its address with a reg.  */
        x = gen_rtx_MEM (BLKmode, gen_reg_rtx (Pmode));

      set_mem_attributes (x, decl, 1);
      SET_DECL_RTL (decl, x);
    }
  else if (use_register_for_decl (decl))
    {
      /* Automatic variable that can go in a register.  */
      int unsignedp = TYPE_UNSIGNED (type);
      enum machine_mode reg_mode
        = promote_mode (type, DECL_MODE (decl), &unsignedp, 0);

      SET_DECL_RTL (decl, gen_reg_rtx (reg_mode));

      /* Note if the object is a user variable.  */
      if (!DECL_ARTIFICIAL (decl))
        {
          mark_user_reg (DECL_RTL (decl));

          /* Trust user variables which have a pointer type to really
             be pointers.  Do not trust compiler generated temporaries
             as our type system is totally busted as it relates to
             pointer arithmetic which translates into lots of compiler
             generated objects with pointer types, but which are not really
             pointers.  */
          if (POINTER_TYPE_P (type))
            mark_reg_pointer (DECL_RTL (decl),
                              TYPE_ALIGN (TREE_TYPE (TREE_TYPE (decl))));
        }
    }

  else if (TREE_CODE (DECL_SIZE_UNIT (decl)) == INTEGER_CST
           && ! (flag_stack_check && ! STACK_CHECK_BUILTIN
                 && 0 < compare_tree_int (DECL_SIZE_UNIT (decl),
                                          STACK_CHECK_MAX_VAR_SIZE)))
    {
      /* Variable of fixed size that goes on the stack.  */
      rtx oldaddr = 0;
      rtx addr;
      rtx x;

      /* If we previously made RTL for this decl, it must be an array
         whose size was determined by the initializer.
         The old address was a register; set that register now
         to the proper address.  */
      if (DECL_RTL_SET_P (decl))
        {
          gcc_assert (MEM_P (DECL_RTL (decl)));
          gcc_assert (REG_P (XEXP (DECL_RTL (decl), 0)));
          oldaddr = XEXP (DECL_RTL (decl), 0);
        }

      /* Set alignment we actually gave this decl.  */
      DECL_ALIGN (decl) = (DECL_MODE (decl) == BLKmode ? BIGGEST_ALIGNMENT
                           : GET_MODE_BITSIZE (DECL_MODE (decl)));
      DECL_USER_ALIGN (decl) = 0;

      /* Allocate the variable's home on the stack.  */
      x = assign_temp (decl, 1, 1, 1);
      set_mem_attributes (x, decl, 1);
      SET_DECL_RTL (decl, x);

      if (oldaddr)
        {
          addr = force_operand (XEXP (DECL_RTL (decl), 0), oldaddr);
          if (addr != oldaddr)
            emit_move_insn (oldaddr, addr);
        }
    }
  else
    /* Dynamic-size object: must push space on the stack.  */
    {
      rtx address, size, x;

      /* Record the stack pointer on entry to block, if we have
         not already done so.  */
      do_pending_stack_adjust ();

      /* Compute the variable's size, in bytes.  This will expand any
         needed SAVE_EXPRs for the first time.  */
      size = expand_normal (DECL_SIZE_UNIT (decl));
      free_temp_slots ();

      /* Allocate space on the stack for the variable.  Note that
         DECL_ALIGN says how the variable is to be aligned and we
         cannot use it to conclude anything about the alignment of
         the size.  */
      address = allocate_dynamic_stack_space (size, NULL_RTX,
                                              TYPE_ALIGN (TREE_TYPE (decl)));

      /* Reference the variable indirect through that rtx.  */
      x = gen_rtx_MEM (DECL_MODE (decl), address);
      set_mem_attributes (x, decl, 1);
      SET_DECL_RTL (decl, x);


      /* Indicate the alignment we actually gave this variable.  */
#ifdef STACK_BOUNDARY
      DECL_ALIGN (decl) = STACK_BOUNDARY;
#else
      DECL_ALIGN (decl) = BIGGEST_ALIGNMENT;
#endif
      DECL_USER_ALIGN (decl) = 0;
    }
}
1990
 
1991
/* Emit code to save the current value of stack.  */
1992
rtx
1993
expand_stack_save (void)
1994
{
1995
  rtx ret = NULL_RTX;
1996
 
1997
  do_pending_stack_adjust ();
1998
  emit_stack_save (SAVE_BLOCK, &ret, NULL_RTX);
1999
  return ret;
2000
}
2001
 
2002
/* Emit code to restore the current value of stack.  */
2003
void
2004
expand_stack_restore (tree var)
2005
{
2006
  rtx sa = DECL_RTL (var);
2007
 
2008
  emit_stack_restore (SAVE_BLOCK, sa, NULL_RTX);
2009
}
2010
 
2011
/* DECL is an anonymous union.  CLEANUP is a cleanup for DECL.
   DECL_ELTS is the list of elements that belong to DECL's type.
   In each, the TREE_VALUE is a VAR_DECL, and the TREE_PURPOSE a cleanup.

   Expands the union itself, then gives each member VAR_DECL an rtx that
   aliases the union's storage in the member's own mode.  */

void
expand_anon_union_decl (tree decl, tree cleanup ATTRIBUTE_UNUSED,
                        tree decl_elts)
{
  rtx x;
  tree t;

  /* If any of the elements are addressable, so is the entire union.  */
  for (t = decl_elts; t; t = TREE_CHAIN (t))
    if (TREE_ADDRESSABLE (TREE_VALUE (t)))
      {
        TREE_ADDRESSABLE (decl) = 1;
        break;
      }

  expand_decl (decl);
  x = DECL_RTL (decl);

  /* Go through the elements, assigning RTL to each.  */
  for (t = decl_elts; t; t = TREE_CHAIN (t))
    {
      tree decl_elt = TREE_VALUE (t);
      enum machine_mode mode = TYPE_MODE (TREE_TYPE (decl_elt));
      rtx decl_rtl;

      /* If any of the elements are used, so is the entire union.  */
      if (TREE_USED (decl_elt))
        TREE_USED (decl) = 1;

      /* Propagate the union's alignment to the elements.  */
      DECL_ALIGN (decl_elt) = DECL_ALIGN (decl);
      DECL_USER_ALIGN (decl_elt) = DECL_USER_ALIGN (decl);

      /* If the element has BLKmode and the union doesn't, the union is
         aligned such that the element doesn't need to have BLKmode, so
         change the element's mode to the appropriate one for its size.  */
      if (mode == BLKmode && DECL_MODE (decl) != BLKmode)
        DECL_MODE (decl_elt) = mode
          = mode_for_size_tree (DECL_SIZE (decl_elt), MODE_INT, 1);

      if (mode == GET_MODE (x))
        decl_rtl = x;
      else if (MEM_P (x))
        /* (SUBREG (MEM ...)) at RTL generation time is invalid, so we
           instead create a new MEM rtx with the proper mode.  */
        decl_rtl = adjust_address_nv (x, mode, 0);
      else
        {
          gcc_assert (REG_P (x));
          decl_rtl = gen_lowpart_SUBREG (mode, x);
        }
      SET_DECL_RTL (decl_elt, decl_rtl);
    }
}
2070
 
2071
/* Do the insertion of a case label into case_list.  The labels are
   fed to us in descending order from the sorted vector of case labels used
   in the tree part of the middle end.  So the list we construct is
   sorted in ascending order.  The bounds on the case range, LOW and HIGH,
   are converted to case's index type TYPE.

   HEAD is the current list head; the (possibly unchanged) new head is
   returned.  Cases entirely outside TYPE's value range are dropped,
   and ranges are clamped to [TYPE_MIN_VALUE, TYPE_MAX_VALUE].  */

static struct case_node *
add_case_node (struct case_node *head, tree type, tree low, tree high,
               tree label)
{
  tree min_value, max_value;
  struct case_node *r;

  gcc_assert (TREE_CODE (low) == INTEGER_CST);
  gcc_assert (!high || TREE_CODE (high) == INTEGER_CST);

  /* TYPE_MIN_VALUE/TYPE_MAX_VALUE may be non-constant (or absent) for
     some index types; the INTEGER_CST checks below guard for that.  */
  min_value = TYPE_MIN_VALUE (type);
  max_value = TYPE_MAX_VALUE (type);

  /* If there's no HIGH value, then this is not a case range; it's
     just a simple case label.  But that's just a degenerate case
     range.
     If the bounds are equal, turn this into the one-value case.  */
  if (!high || tree_int_cst_equal (low, high))
    {
      /* If the simple case value is unreachable, ignore it.  */
      if ((TREE_CODE (min_value) == INTEGER_CST
            && tree_int_cst_compare (low, min_value) < 0)
          || (TREE_CODE (max_value) == INTEGER_CST
              && tree_int_cst_compare (low, max_value) > 0))
        return head;
      low = fold_convert (type, low);
      high = low;
    }
  else
    {
      /* If the entire case range is unreachable, ignore it.  */
      if ((TREE_CODE (min_value) == INTEGER_CST
            && tree_int_cst_compare (high, min_value) < 0)
          || (TREE_CODE (max_value) == INTEGER_CST
              && tree_int_cst_compare (low, max_value) > 0))
        return head;

      /* If the lower bound is less than the index type's minimum
         value, truncate the range bounds.  */
      if (TREE_CODE (min_value) == INTEGER_CST
            && tree_int_cst_compare (low, min_value) < 0)
        low = min_value;
      low = fold_convert (type, low);

      /* If the upper bound is greater than the index type's maximum
         value, truncate the range bounds.  */
      if (TREE_CODE (max_value) == INTEGER_CST
          && tree_int_cst_compare (high, max_value) > 0)
        high = max_value;
      high = fold_convert (type, high);
    }


  /* Add this label to the chain.  Make sure to drop overflow flags.  */
  r = ggc_alloc (sizeof (struct case_node));
  r->low = build_int_cst_wide (TREE_TYPE (low), TREE_INT_CST_LOW (low),
                               TREE_INT_CST_HIGH (low));
  r->high = build_int_cst_wide (TREE_TYPE (high), TREE_INT_CST_LOW (high),
                                TREE_INT_CST_HIGH (high));
  r->code_label = label;
  r->parent = r->left = NULL;
  /* Prepend: callers feed labels in descending order, so the resulting
     list ends up sorted ascending.  */
  r->right = head;
  return r;
}
2141
 
2142
/* Maximum number of case bit tests.  */
2143
#define MAX_CASE_BIT_TESTS  3
2144
 
2145
/* By default, enable case bit tests on targets with ashlsi3.  */
2146
#ifndef CASE_USE_BIT_TESTS
2147
#define CASE_USE_BIT_TESTS  (ashl_optab->handlers[word_mode].insn_code \
2148
                             != CODE_FOR_nothing)
2149
#endif
2150
 
2151
 
2152
/* A case_bit_test represents a set of case nodes that may be
   selected from using a bit-wise comparison.  HI and LO hold
   the integer to be tested against, LABEL contains the label
   to jump to upon success and BITS counts the number of case
   nodes handled by this test, typically the number of bits
   set in HI:LO.  */

struct case_bit_test
{
  HOST_WIDE_INT hi;   /* High word of the constant bitmask.  */
  HOST_WIDE_INT lo;   /* Low word of the constant bitmask.  */
  rtx label;          /* Target label shared by the covered cases.  */
  int bits;           /* Number of case nodes covered by this test.  */
};
2166
 
2167
/* Determine whether "1 << x" is relatively cheap in word_mode.  */
2168
 
2169
static
2170
bool lshift_cheap_p (void)
2171
{
2172
  static bool init = false;
2173
  static bool cheap = true;
2174
 
2175
  if (!init)
2176
    {
2177
      rtx reg = gen_rtx_REG (word_mode, 10000);
2178
      int cost = rtx_cost (gen_rtx_ASHIFT (word_mode, const1_rtx, reg), SET);
2179
      cheap = cost < COSTS_N_INSNS (3);
2180
      init = true;
2181
    }
2182
 
2183
  return cheap;
2184
}
2185
 
2186
/* Comparison function for qsort to order bit tests by decreasing
2187
   number of case nodes, i.e. the node with the most cases gets
2188
   tested first.  */
2189
 
2190
static int
2191
case_bit_test_cmp (const void *p1, const void *p2)
2192
{
2193
  const struct case_bit_test *d1 = p1;
2194
  const struct case_bit_test *d2 = p2;
2195
 
2196
  if (d2->bits != d1->bits)
2197
    return d2->bits - d1->bits;
2198
 
2199
  /* Stabilize the sort.  */
2200
  return CODE_LABEL_NUMBER (d2->label) - CODE_LABEL_NUMBER (d1->label);
2201
}
2202
 
2203
/*  Expand a switch statement by a short sequence of bit-wise
2204
    comparisons.  "switch(x)" is effectively converted into
2205
    "if ((1 << (x-MINVAL)) & CST)" where CST and MINVAL are
2206
    integer constants.
2207
 
2208
    INDEX_EXPR is the value being switched on, which is of
2209
    type INDEX_TYPE.  MINVAL is the lowest case value of in
2210
    the case nodes, of INDEX_TYPE type, and RANGE is highest
2211
    value minus MINVAL, also of type INDEX_TYPE.  NODES is
2212
    the set of case nodes, and DEFAULT_LABEL is the label to
2213
    branch to should none of the cases match.
2214
 
2215
    There *MUST* be MAX_CASE_BIT_TESTS or less unique case
2216
    node targets.  */
2217
 
2218
static void
2219
emit_case_bit_tests (tree index_type, tree index_expr, tree minval,
2220
                     tree range, case_node_ptr nodes, rtx default_label)
2221
{
2222
  struct case_bit_test test[MAX_CASE_BIT_TESTS];
2223
  enum machine_mode mode;
2224
  rtx expr, index, label;
2225
  unsigned int i,j,lo,hi;
2226
  struct case_node *n;
2227
  unsigned int count;
2228
 
2229
  count = 0;
2230
  for (n = nodes; n; n = n->right)
2231
    {
2232
      label = label_rtx (n->code_label);
2233
      for (i = 0; i < count; i++)
2234
        if (label == test[i].label)
2235
          break;
2236
 
2237
      if (i == count)
2238
        {
2239
          gcc_assert (count < MAX_CASE_BIT_TESTS);
2240
          test[i].hi = 0;
2241
          test[i].lo = 0;
2242
          test[i].label = label;
2243
          test[i].bits = 1;
2244
          count++;
2245
        }
2246
      else
2247
        test[i].bits++;
2248
 
2249
      lo = tree_low_cst (fold_build2 (MINUS_EXPR, index_type,
2250
                                      n->low, minval), 1);
2251
      hi = tree_low_cst (fold_build2 (MINUS_EXPR, index_type,
2252
                                      n->high, minval), 1);
2253
      for (j = lo; j <= hi; j++)
2254
        if (j >= HOST_BITS_PER_WIDE_INT)
2255
          test[i].hi |= (HOST_WIDE_INT) 1 << (j - HOST_BITS_PER_INT);
2256
        else
2257
          test[i].lo |= (HOST_WIDE_INT) 1 << j;
2258
    }
2259
 
2260
  qsort (test, count, sizeof(*test), case_bit_test_cmp);
2261
 
2262
  index_expr = fold_build2 (MINUS_EXPR, index_type,
2263
                            fold_convert (index_type, index_expr),
2264
                            fold_convert (index_type, minval));
2265
  index = expand_normal (index_expr);
2266
  do_pending_stack_adjust ();
2267
 
2268
  mode = TYPE_MODE (index_type);
2269
  expr = expand_normal (range);
2270
  emit_cmp_and_jump_insns (index, expr, GTU, NULL_RTX, mode, 1,
2271
                           default_label);
2272
 
2273
  index = convert_to_mode (word_mode, index, 0);
2274
  index = expand_binop (word_mode, ashl_optab, const1_rtx,
2275
                        index, NULL_RTX, 1, OPTAB_WIDEN);
2276
 
2277
  for (i = 0; i < count; i++)
2278
    {
2279
      expr = immed_double_const (test[i].lo, test[i].hi, word_mode);
2280
      expr = expand_binop (word_mode, and_optab, index, expr,
2281
                           NULL_RTX, 1, OPTAB_WIDEN);
2282
      emit_cmp_and_jump_insns (expr, const0_rtx, NE, NULL_RTX,
2283
                               word_mode, 1, test[i].label);
2284
    }
2285
 
2286
  emit_jump (default_label);
2287
}
2288
 
2289
#ifndef HAVE_casesi
2290
#define HAVE_casesi 0
2291
#endif
2292
 
2293
#ifndef HAVE_tablejump
2294
#define HAVE_tablejump 0
2295
#endif
2296
 
2297
/* Terminate a case (Pascal/Ada) or switch (C) statement
   in which ORIG_INDEX is the expression to be tested.
   If ORIG_TYPE is not NULL, it is the original ORIG_INDEX
   type as given in the source before any compiler conversions.
   Generate the code to test it and jump to the right place.

   Three expansion strategies are tried, in order: a short sequence
   of bit-mask tests, a balanced binary tree of compare-and-branch
   insns, and a jump table (casesi or tablejump).  */

void
expand_case (tree exp)
{
  tree minval = NULL_TREE, maxval = NULL_TREE, range = NULL_TREE;
  rtx default_label = 0;
  struct case_node *n;
  unsigned int count, uniq;
  rtx index;
  rtx table_label;
  int ncases;
  rtx *labelvec;
  int i, fail;
  rtx before_case, end, lab;

  tree vec = SWITCH_LABELS (exp);
  tree orig_type = TREE_TYPE (exp);
  tree index_expr = SWITCH_COND (exp);
  tree index_type = TREE_TYPE (index_expr);
  int unsignedp = TYPE_UNSIGNED (index_type);

  /* The insn after which the case dispatch should finally
     be emitted.  Zero for a dummy.  */
  rtx start;

  /* A list of case labels; it is first built as a list and it may then
     be rearranged into a nearly balanced binary tree.  */
  struct case_node *case_list = 0;

  /* Label to jump to if no case matches.  */
  tree default_label_decl;

  /* The switch body is lowered in gimplify.c, we should never have
     switches with a non-NULL SWITCH_BODY here.  */
  gcc_assert (!SWITCH_BODY (exp));
  gcc_assert (SWITCH_LABELS (exp));

  do_pending_stack_adjust ();

  /* An ERROR_MARK occurs for various reasons including invalid data type.  */
  if (index_type != error_mark_node)
    {
      tree elt;
      bitmap label_bitmap;

      /* cleanup_tree_cfg removes all SWITCH_EXPR with their index
         expressions being INTEGER_CST.  */
      gcc_assert (TREE_CODE (index_expr) != INTEGER_CST);

      /* The default case is at the end of TREE_VEC.  */
      elt = TREE_VEC_ELT (vec, TREE_VEC_LENGTH (vec) - 1);
      gcc_assert (!CASE_HIGH (elt));
      gcc_assert (!CASE_LOW (elt));
      default_label_decl = CASE_LABEL (elt);

      /* Build CASE_LIST from the label vector (skipping the default,
         which was peeled off above).  */
      for (i = TREE_VEC_LENGTH (vec) - 1; --i >= 0; )
        {
          tree low, high;
          elt = TREE_VEC_ELT (vec, i);

          low = CASE_LOW (elt);
          gcc_assert (low);
          high = CASE_HIGH (elt);

          /* Discard empty ranges.  */
          if (high && INT_CST_LT (high, low))
            continue;

          case_list = add_case_node (case_list, index_type, low, high,
                                     CASE_LABEL (elt));
        }


      before_case = start = get_last_insn ();
      default_label = label_rtx (default_label_decl);

      /* Get upper and lower bounds of case values.  */

      uniq = 0;
      count = 0;
      label_bitmap = BITMAP_ALLOC (NULL);
      for (n = case_list; n; n = n->right)
        {
          /* Count the elements and track the largest and smallest
             of them (treating them as signed even if they are not).  */
          if (count++ == 0)
            {
              minval = n->low;
              maxval = n->high;
            }
          else
            {
              if (INT_CST_LT (n->low, minval))
                minval = n->low;
              if (INT_CST_LT (maxval, n->high))
                maxval = n->high;
            }
          /* A range counts double, since it requires two compares.  */
          if (! tree_int_cst_equal (n->low, n->high))
            count++;

          /* If we have not seen this label yet, then increase the
             number of unique case node targets seen.  */
          lab = label_rtx (n->code_label);
          if (!bitmap_bit_p (label_bitmap, CODE_LABEL_NUMBER (lab)))
            {
              bitmap_set_bit (label_bitmap, CODE_LABEL_NUMBER (lab));
              uniq++;
            }
        }

      BITMAP_FREE (label_bitmap);

      /* cleanup_tree_cfg removes all SWITCH_EXPR with a single
         destination, such as one with a default case only.  However,
         it doesn't remove cases that are out of range for the switch
         type, so we may still get a zero here.  */
      if (count == 0)
        {
          emit_jump (default_label);
          return;
        }

      /* Compute span of values.  */
      range = fold_build2 (MINUS_EXPR, index_type, maxval, minval);

      /* Try implementing this switch statement by a short sequence of
         bit-wise comparisons.  However, we let the binary-tree case
         below handle constant index expressions.  */
      if (CASE_USE_BIT_TESTS
          && ! TREE_CONSTANT (index_expr)
          && compare_tree_int (range, GET_MODE_BITSIZE (word_mode)) < 0
          && compare_tree_int (range, 0) > 0
          && lshift_cheap_p ()
          && ((uniq == 1 && count >= 3)
              || (uniq == 2 && count >= 5)
              || (uniq == 3 && count >= 6)))
        {
          /* Optimize the case where all the case values fit in a
             word without having to subtract MINVAL.  In this case,
             we can optimize away the subtraction.  */
          if (compare_tree_int (minval, 0) > 0
              && compare_tree_int (maxval, GET_MODE_BITSIZE (word_mode)) < 0)
            {
              minval = build_int_cst (index_type, 0);
              range = maxval;
            }
          emit_case_bit_tests (index_type, index_expr, minval, range,
                               case_list, default_label);
        }

      /* If range of values is much bigger than number of values,
         make a sequence of conditional branches instead of a dispatch.
         If the switch-index is a constant, do it this way
         because we can optimize it.  */

      else if (count < case_values_threshold ()
               || compare_tree_int (range,
                                    (optimize_size ? 3 : 10) * count) > 0
               /* RANGE may be signed, and really large ranges will show up
                  as negative numbers.  */
               || compare_tree_int (range, 0) < 0
#ifndef ASM_OUTPUT_ADDR_DIFF_ELT
               || flag_pic
#endif
               || !flag_jump_tables
               || TREE_CONSTANT (index_expr)
               /* If neither casesi or tablejump is available, we can
                  only go this way.  */
               || (!HAVE_casesi && !HAVE_tablejump))
        {
          index = expand_normal (index_expr);

          /* If the index is a short or char that we do not have
             an insn to handle comparisons directly, convert it to
             a full integer now, rather than letting each comparison
             generate the conversion.  */

          if (GET_MODE_CLASS (GET_MODE (index)) == MODE_INT
              && ! have_insn_for (COMPARE, GET_MODE (index)))
            {
              enum machine_mode wider_mode;
              for (wider_mode = GET_MODE (index); wider_mode != VOIDmode;
                   wider_mode = GET_MODE_WIDER_MODE (wider_mode))
                if (have_insn_for (COMPARE, wider_mode))
                  {
                    index = convert_to_mode (wider_mode, index, unsignedp);
                    break;
                  }
            }

          do_pending_stack_adjust ();

          if (MEM_P (index))
            index = copy_to_reg (index);

          /* We generate a binary decision tree to select the
             appropriate target code.  This is done as follows:

             The list of cases is rearranged into a binary tree,
             nearly optimal assuming equal probability for each case.

             The tree is transformed into RTL, eliminating
             redundant test conditions at the same time.

             If program flow could reach the end of the
             decision tree an unconditional jump to the
             default code is emitted.  */

          use_cost_table
            = (TREE_CODE (orig_type) != ENUMERAL_TYPE
               && estimate_case_costs (case_list));
          balance_case_nodes (&case_list, NULL);
          emit_case_nodes (index, case_list, default_label, index_type);
          emit_jump (default_label);
        }
      else
        {
          /* Jump-table dispatch: prefer the casesi pattern, falling
             back to an explicit tablejump.  */
          table_label = gen_label_rtx ();
          if (! try_casesi (index_type, index_expr, minval, range,
                            table_label, default_label))
            {
              bool ok;

              /* Index jumptables from zero for suitable values of
                 minval to avoid a subtraction.  */
              if (! optimize_size
                  && compare_tree_int (minval, 0) > 0
                  && compare_tree_int (minval, 3) < 0)
                {
                  minval = build_int_cst (index_type, 0);
                  range = maxval;
                }

              ok = try_tablejump (index_type, index_expr, minval, range,
                                  table_label, default_label);
              gcc_assert (ok);
            }

          /* Get table of labels to jump to, in order of case index.  */

          ncases = tree_low_cst (range, 0) + 1;
          labelvec = alloca (ncases * sizeof (rtx));
          memset (labelvec, 0, ncases * sizeof (rtx));

          for (n = case_list; n; n = n->right)
            {
              /* Compute the low and high bounds relative to the minimum
                 value since that should fit in a HOST_WIDE_INT while the
                 actual values may not.  */
              HOST_WIDE_INT i_low
                = tree_low_cst (fold_build2 (MINUS_EXPR, index_type,
                                             n->low, minval), 1);
              HOST_WIDE_INT i_high
                = tree_low_cst (fold_build2 (MINUS_EXPR, index_type,
                                             n->high, minval), 1);
              HOST_WIDE_INT i;

              for (i = i_low; i <= i_high; i ++)
                labelvec[i]
                  = gen_rtx_LABEL_REF (Pmode, label_rtx (n->code_label));
            }

          /* Fill in the gaps with the default.  */
          for (i = 0; i < ncases; i++)
            if (labelvec[i] == 0)
              labelvec[i] = gen_rtx_LABEL_REF (Pmode, default_label);

          /* Output the table.  */
          emit_label (table_label);

          if (CASE_VECTOR_PC_RELATIVE || flag_pic)
            emit_jump_insn (gen_rtx_ADDR_DIFF_VEC (CASE_VECTOR_MODE,
                                                   gen_rtx_LABEL_REF (Pmode, table_label),
                                                   gen_rtvec_v (ncases, labelvec),
                                                   const0_rtx, const0_rtx));
          else
            emit_jump_insn (gen_rtx_ADDR_VEC (CASE_VECTOR_MODE,
                                              gen_rtvec_v (ncases, labelvec)));

          /* Record no drop-through after the table.  */
          emit_barrier ();
        }

      /* Move the dispatch code just generated to after START, squeezing
         out any notes in between.  */
      before_case = NEXT_INSN (before_case);
      end = get_last_insn ();
      fail = squeeze_notes (&before_case, &end);
      gcc_assert (!fail);
      reorder_insns (before_case, end, start);
    }

  free_temp_slots ();
}
2595
 
2596
/* Generate code to jump to LABEL if OP0 and OP1 are equal in mode MODE.
   UNSIGNEDP is forwarded to do_compare_rtx_and_jump and marks the
   comparison as unsigned.  */

static void
do_jump_if_equal (enum machine_mode mode, rtx op0, rtx op1, rtx label,
                  int unsignedp)
{
  do_compare_rtx_and_jump (op0, op1, EQ, unsignedp, mode,
                           NULL_RTX, NULL_RTX, label);
}
2605
 
2606
/* Not all case values are encountered equally.  This function
2607
   uses a heuristic to weight case labels, in cases where that
2608
   looks like a reasonable thing to do.
2609
 
2610
   Right now, all we try to guess is text, and we establish the
2611
   following weights:
2612
 
2613
        chars above space:      16
2614
        digits:                 16
2615
        default:                12
2616
        space, punct:           8
2617
        tab:                    4
2618
        newline:                2
2619
        other "\" chars:        1
2620
        remaining chars:        0
2621
 
2622
   If we find any cases in the switch that are not either -1 or in the range
2623
   of valid ASCII characters, or are control characters other than those
2624
   commonly used with "\", don't treat this switch scanning text.
2625
 
2626
   Return 1 if these nodes are suitable for cost estimation, otherwise
2627
   return 0.  */
2628
 
2629
static int
2630
estimate_case_costs (case_node_ptr node)
2631
{
2632
  tree min_ascii = integer_minus_one_node;
2633
  tree max_ascii = build_int_cst (TREE_TYPE (node->high), 127);
2634
  case_node_ptr n;
2635
  int i;
2636
 
2637
  /* If we haven't already made the cost table, make it now.  Note that the
2638
     lower bound of the table is -1, not zero.  */
2639
 
2640
  if (! cost_table_initialized)
2641
    {
2642
      cost_table_initialized = 1;
2643
 
2644
      for (i = 0; i < 128; i++)
2645
        {
2646
          if (ISALNUM (i))
2647
            COST_TABLE (i) = 16;
2648
          else if (ISPUNCT (i))
2649
            COST_TABLE (i) = 8;
2650
          else if (ISCNTRL (i))
2651
            COST_TABLE (i) = -1;
2652
        }
2653
 
2654
      COST_TABLE (' ') = 8;
2655
      COST_TABLE ('\t') = 4;
2656
      COST_TABLE ('\0') = 4;
2657
      COST_TABLE ('\n') = 2;
2658
      COST_TABLE ('\f') = 1;
2659
      COST_TABLE ('\v') = 1;
2660
      COST_TABLE ('\b') = 1;
2661
    }
2662
 
2663
  /* See if all the case expressions look like text.  It is text if the
2664
     constant is >= -1 and the highest constant is <= 127.  Do all comparisons
2665
     as signed arithmetic since we don't want to ever access cost_table with a
2666
     value less than -1.  Also check that none of the constants in a range
2667
     are strange control characters.  */
2668
 
2669
  for (n = node; n; n = n->right)
2670
    {
2671
      if ((INT_CST_LT (n->low, min_ascii)) || INT_CST_LT (max_ascii, n->high))
2672
        return 0;
2673
 
2674
      for (i = (HOST_WIDE_INT) TREE_INT_CST_LOW (n->low);
2675
           i <= (HOST_WIDE_INT) TREE_INT_CST_LOW (n->high); i++)
2676
        if (COST_TABLE (i) < 0)
2677
          return 0;
2678
    }
2679
 
2680
  /* All interesting values are within the range of interesting
2681
     ASCII characters.  */
2682
  return 1;
2683
}
2684
 
2685
/* Take an ordered list of case nodes
   and transform them into a near optimal binary tree,
   on the assumption that any target code selection value is as
   likely as any other.

   The transformation is performed by splitting the ordered
   list into two equal sections plus a pivot.  The parts are
   then attached to the pivot as left and right branches.  Each
   branch is then transformed recursively.

   HEAD points to the list slot holding the first node (modified in
   place); PARENT becomes the `parent' field of the subtree root.  */

static void
balance_case_nodes (case_node_ptr *head, case_node_ptr parent)
{
  case_node_ptr np;

  np = *head;
  if (np)
    {
      int cost = 0;
      int i = 0;
      int ranges = 0;
      case_node_ptr *npp;
      case_node_ptr left;

      /* Count the number of entries on branch.  Also count the ranges.  */

      while (np)
        {
          if (!tree_int_cst_equal (np->low, np->high))
            {
              ranges++;
              if (use_cost_table)
                cost += COST_TABLE (TREE_INT_CST_LOW (np->high));
            }

          if (use_cost_table)
            cost += COST_TABLE (TREE_INT_CST_LOW (np->low));

          i++;
          np = np->right;
        }
      /* At this point I is the node count, RANGES the number of
         multi-value nodes, COST the summed table weights, and NP is
         NULL (it is reused below).  */

      if (i > 2)
        {
          /* Split this list if it is long enough for that to help.  */
          npp = head;
          left = *npp;
          /* NPP walks the `right' link slots so the pivot can be
             spliced out in O(1) by storing through it.  */
          if (use_cost_table)
            {
              /* Find the place in the list that bisects the list's total cost,
                 Here I gets half the total cost.  */
              int n_moved = 0;
              i = (cost + 1) / 2;
              while (1)
                {
                  /* Skip nodes while their cost does not reach that amount.  */
                  if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
                    i -= COST_TABLE (TREE_INT_CST_LOW ((*npp)->high));
                  i -= COST_TABLE (TREE_INT_CST_LOW ((*npp)->low));
                  if (i <= 0)
                    break;
                  npp = &(*npp)->right;
                  n_moved += 1;
                }
              if (n_moved == 0)
                {
                  /* The very first node already holds half the cost.
                     Leave this branch lopsided, but optimize left-hand
                     side and fill in `parent' fields for right-hand side.  */
                  np = *head;
                  np->parent = parent;
                  balance_case_nodes (&np->left, np);
                  for (; np->right; np = np->right)
                    np->right->parent = np;
                  return;
                }
            }
          /* If there are just three nodes, split at the middle one.
             NOTE: this `else if' pairs with `if (use_cost_table)' above,
             not with the n_moved check.  */
          else if (i == 3)
            npp = &(*npp)->right;
          else
            {
              /* Find the place in the list that bisects the list's total cost,
                 where ranges count as 2.
                 Here I gets half the total cost.  */
              i = (i + ranges + 1) / 2;
              while (1)
                {
                  /* Skip nodes while their cost does not reach that amount.  */
                  if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
                    i--;
                  i--;
                  if (i <= 0)
                    break;
                  npp = &(*npp)->right;
                }
            }
          /* Splice: the node after *NPP's slot becomes the pivot (new
             subtree root); everything before it becomes the left list.  */
          *head = np = *npp;
          *npp = 0;
          np->parent = parent;
          np->left = left;

          /* Optimize each of the two split parts.  */
          balance_case_nodes (&np->left, np);
          balance_case_nodes (&np->right, np);
        }
      else
        {
          /* Else leave this branch as one level,
             but fill in `parent' fields.  */
          np = *head;
          np->parent = parent;
          for (; np->right; np = np->right)
            np->right->parent = np;
        }
    }
}
2801
 
2802
/* Search the parent sections of the case node tree
2803
   to see if a test for the lower bound of NODE would be redundant.
2804
   INDEX_TYPE is the type of the index expression.
2805
 
2806
   The instructions to generate the case decision tree are
2807
   output in the same order as nodes are processed so it is
2808
   known that if a parent node checks the range of the current
2809
   node minus one that the current node is bounded at its lower
2810
   span.  Thus the test would be redundant.  */
2811
 
2812
static int
2813
node_has_low_bound (case_node_ptr node, tree index_type)
2814
{
2815
  tree low_minus_one;
2816
  case_node_ptr pnode;
2817
 
2818
  /* If the lower bound of this node is the lowest value in the index type,
2819
     we need not test it.  */
2820
 
2821
  if (tree_int_cst_equal (node->low, TYPE_MIN_VALUE (index_type)))
2822
    return 1;
2823
 
2824
  /* If this node has a left branch, the value at the left must be less
2825
     than that at this node, so it cannot be bounded at the bottom and
2826
     we need not bother testing any further.  */
2827
 
2828
  if (node->left)
2829
    return 0;
2830
 
2831
  low_minus_one = fold_build2 (MINUS_EXPR, TREE_TYPE (node->low),
2832
                               node->low,
2833
                               build_int_cst (TREE_TYPE (node->low), 1));
2834
 
2835
  /* If the subtraction above overflowed, we can't verify anything.
2836
     Otherwise, look for a parent that tests our value - 1.  */
2837
 
2838
  if (! tree_int_cst_lt (low_minus_one, node->low))
2839
    return 0;
2840
 
2841
  for (pnode = node->parent; pnode; pnode = pnode->parent)
2842
    if (tree_int_cst_equal (low_minus_one, pnode->high))
2843
      return 1;
2844
 
2845
  return 0;
2846
}
2847
 
2848
/* Search the parent sections of the case node tree
2849
   to see if a test for the upper bound of NODE would be redundant.
2850
   INDEX_TYPE is the type of the index expression.
2851
 
2852
   The instructions to generate the case decision tree are
2853
   output in the same order as nodes are processed so it is
2854
   known that if a parent node checks the range of the current
2855
   node plus one that the current node is bounded at its upper
2856
   span.  Thus the test would be redundant.  */
2857
 
2858
static int
2859
node_has_high_bound (case_node_ptr node, tree index_type)
2860
{
2861
  tree high_plus_one;
2862
  case_node_ptr pnode;
2863
 
2864
  /* If there is no upper bound, obviously no test is needed.  */
2865
 
2866
  if (TYPE_MAX_VALUE (index_type) == NULL)
2867
    return 1;
2868
 
2869
  /* If the upper bound of this node is the highest value in the type
2870
     of the index expression, we need not test against it.  */
2871
 
2872
  if (tree_int_cst_equal (node->high, TYPE_MAX_VALUE (index_type)))
2873
    return 1;
2874
 
2875
  /* If this node has a right branch, the value at the right must be greater
2876
     than that at this node, so it cannot be bounded at the top and
2877
     we need not bother testing any further.  */
2878
 
2879
  if (node->right)
2880
    return 0;
2881
 
2882
  high_plus_one = fold_build2 (PLUS_EXPR, TREE_TYPE (node->high),
2883
                               node->high,
2884
                               build_int_cst (TREE_TYPE (node->high), 1));
2885
 
2886
  /* If the addition above overflowed, we can't verify anything.
2887
     Otherwise, look for a parent that tests our value + 1.  */
2888
 
2889
  if (! tree_int_cst_lt (node->high, high_plus_one))
2890
    return 0;
2891
 
2892
  for (pnode = node->parent; pnode; pnode = pnode->parent)
2893
    if (tree_int_cst_equal (high_plus_one, pnode->low))
2894
      return 1;
2895
 
2896
  return 0;
2897
}
2898
 
2899
/* Search the parent sections of the
2900
   case node tree to see if both tests for the upper and lower
2901
   bounds of NODE would be redundant.  */
2902
 
2903
static int
2904
node_is_bounded (case_node_ptr node, tree index_type)
2905
{
2906
  return (node_has_low_bound (node, index_type)
2907
          && node_has_high_bound (node, index_type));
2908
}
2909
 
2910
/* Emit step-by-step code to select a case for the value of INDEX.
   The thus generated decision tree follows the form of the
   case-node binary tree NODE, whose nodes represent test conditions.
   INDEX_TYPE is the type of the index of the switch.
   DEFAULT_LABEL is jumped to when no case matches.

   Care is taken to prune redundant tests from the decision tree
   by detecting any boundary conditions already checked by
   emitted rtx.  (See node_has_high_bound, node_has_low_bound
   and node_is_bounded, above.)

   Where the test conditions can be shown to be redundant we emit
   an unconditional jump to the target code.  As a further
   optimization, the subordinates of a tree node are examined to
   check for bounded nodes.  In this case conditional and/or
   unconditional jumps as a result of the boundary check for the
   current node are arranged to target the subordinates associated
   code for out of bound conditions on the current node.

   We can assume that when control reaches the code generated here,
   the index value has already been compared with the parents
   of this node, and determined to be on the same side of each parent
   as this node is.  Thus, if this node tests for the value 51,
   and a parent tested for 52, we don't need to consider
   the possibility of a value greater than 51.  If another parent
   tests for the value 50, then this node need not test anything.  */

static void
emit_case_nodes (rtx index, case_node_ptr node, rtx default_label,
                 tree index_type)
{
  /* If INDEX has an unsigned type, we must make unsigned branches.  */
  int unsignedp = TYPE_UNSIGNED (index_type);
  enum machine_mode mode = GET_MODE (index);
  enum machine_mode imode = TYPE_MODE (index_type);

  /* Handle indices detected as constant during RTL expansion.  */
  if (mode == VOIDmode)
    mode = imode;

  /* See if our parents have already tested everything for us.
     If they have, emit an unconditional jump for this node.  */
  if (node_is_bounded (node, index_type))
    emit_jump (label_rtx (node->code_label));

  else if (tree_int_cst_equal (node->low, node->high))
    {
      /* Node is single valued.  First see if the index expression matches
         this node and then check our children, if any.  */

      do_jump_if_equal (mode, index,
                        convert_modes (mode, imode,
                                       expand_normal (node->low),
                                       unsignedp),
                        label_rtx (node->code_label), unsignedp);

      if (node->right != 0 && node->left != 0)
        {
          /* This node has children on both sides.
             Dispatch to one side or the other
             by comparing the index value with this node's value.
             If one subtree is bounded, check that one first,
             so we can avoid real branches in the tree.  */

          if (node_is_bounded (node->right, index_type))
            {
              /* Right subtree needs no further tests: branch straight
                 to its code if index > node->high, else recurse left.  */
              emit_cmp_and_jump_insns (index,
                                       convert_modes
                                       (mode, imode,
                                        expand_normal (node->high),
                                        unsignedp),
                                       GT, NULL_RTX, mode, unsignedp,
                                       label_rtx (node->right->code_label));
              emit_case_nodes (index, node->left, default_label, index_type);
            }

          else if (node_is_bounded (node->left, index_type))
            {
              /* Mirror image: left subtree is fully bounded.  */
              emit_cmp_and_jump_insns (index,
                                       convert_modes
                                       (mode, imode,
                                        expand_normal (node->high),
                                        unsignedp),
                                       LT, NULL_RTX, mode, unsignedp,
                                       label_rtx (node->left->code_label));
              emit_case_nodes (index, node->right, default_label, index_type);
            }

          /* If both children are single-valued cases with no
             children, finish up all the work.  This way, we can save
             one ordered comparison.  */
          else if (tree_int_cst_equal (node->right->low, node->right->high)
                   && node->right->left == 0
                   && node->right->right == 0
                   && tree_int_cst_equal (node->left->low, node->left->high)
                   && node->left->left == 0
                   && node->left->right == 0)
            {
              /* Neither node is bounded.  First distinguish the two sides;
                 then emit the code for one side at a time.  */

              /* See if the value matches what the right hand side
                 wants.  */
              do_jump_if_equal (mode, index,
                                convert_modes (mode, imode,
                                               expand_normal (node->right->low),
                                               unsignedp),
                                label_rtx (node->right->code_label),
                                unsignedp);

              /* See if the value matches what the left hand side
                 wants.  */
              do_jump_if_equal (mode, index,
                                convert_modes (mode, imode,
                                               expand_normal (node->left->low),
                                               unsignedp),
                                label_rtx (node->left->code_label),
                                unsignedp);
            }

          else
            {
              /* Neither node is bounded.  First distinguish the two sides;
                 then emit the code for one side at a time.  */

              tree test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);

              /* See if the value is on the right.  */
              emit_cmp_and_jump_insns (index,
                                       convert_modes
                                       (mode, imode,
                                        expand_normal (node->high),
                                        unsignedp),
                                       GT, NULL_RTX, mode, unsignedp,
                                       label_rtx (test_label));

              /* Value must be on the left.
                 Handle the left-hand subtree.  */
              emit_case_nodes (index, node->left, default_label, index_type);
              /* If left-hand subtree does nothing,
                 go to default.  */
              emit_jump (default_label);

              /* Code branches here for the right-hand subtree.  */
              expand_label (test_label);
              emit_case_nodes (index, node->right, default_label, index_type);
            }
        }

      else if (node->right != 0 && node->left == 0)
        {
          /* Here we have a right child but no left so we issue a conditional
             branch to default and process the right child.

             Omit the conditional branch to default if the right child
             does not have any children and is single valued; it would
             cost too much space to save so little time.  */

          if (node->right->right || node->right->left
              || !tree_int_cst_equal (node->right->low, node->right->high))
            {
              if (!node_has_low_bound (node, index_type))
                {
                  emit_cmp_and_jump_insns (index,
                                           convert_modes
                                           (mode, imode,
                                            expand_normal (node->high),
                                            unsignedp),
                                           LT, NULL_RTX, mode, unsignedp,
                                           default_label);
                }

              emit_case_nodes (index, node->right, default_label, index_type);
            }
          else
            /* We cannot process node->right normally
               since we haven't ruled out the numbers less than
               this node's value.  So handle node->right explicitly.  */
            do_jump_if_equal (mode, index,
                              convert_modes
                              (mode, imode,
                               expand_normal (node->right->low),
                               unsignedp),
                              label_rtx (node->right->code_label), unsignedp);
        }

      else if (node->right == 0 && node->left != 0)
        {
          /* Just one subtree, on the left.  */
          if (node->left->left || node->left->right
              || !tree_int_cst_equal (node->left->low, node->left->high))
            {
              if (!node_has_high_bound (node, index_type))
                {
                  emit_cmp_and_jump_insns (index,
                                           convert_modes
                                           (mode, imode,
                                            expand_normal (node->high),
                                            unsignedp),
                                           GT, NULL_RTX, mode, unsignedp,
                                           default_label);
                }

              emit_case_nodes (index, node->left, default_label, index_type);
            }
          else
            /* We cannot process node->left normally
               since we haven't ruled out the numbers less than
               this node's value.  So handle node->left explicitly.  */
            do_jump_if_equal (mode, index,
                              convert_modes
                              (mode, imode,
                               expand_normal (node->left->low),
                               unsignedp),
                              label_rtx (node->left->code_label), unsignedp);
        }
    }
  else
    {
      /* Node is a range.  These cases are very similar to those for a single
         value, except that we do not start by testing whether this node
         is the one to branch to.  */

      if (node->right != 0 && node->left != 0)
        {
          /* Node has subtrees on both sides.
             If the right-hand subtree is bounded,
             test for it first, since we can go straight there.
             Otherwise, we need to make a branch in the control structure,
             then handle the two subtrees.  */
          tree test_label = 0;

          if (node_is_bounded (node->right, index_type))
            /* Right hand node is fully bounded so we can eliminate any
               testing and branch directly to the target code.  */
            emit_cmp_and_jump_insns (index,
                                     convert_modes
                                     (mode, imode,
                                      expand_normal (node->high),
                                      unsignedp),
                                     GT, NULL_RTX, mode, unsignedp,
                                     label_rtx (node->right->code_label));
          else
            {
              /* Right hand node requires testing.
                 Branch to a label where we will handle it later.  */

              test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
              emit_cmp_and_jump_insns (index,
                                       convert_modes
                                       (mode, imode,
                                        expand_normal (node->high),
                                        unsignedp),
                                       GT, NULL_RTX, mode, unsignedp,
                                       label_rtx (test_label));
            }

          /* Value belongs to this node or to the left-hand subtree.  */

          emit_cmp_and_jump_insns (index,
                                   convert_modes
                                   (mode, imode,
                                    expand_normal (node->low),
                                    unsignedp),
                                   GE, NULL_RTX, mode, unsignedp,
                                   label_rtx (node->code_label));

          /* Handle the left-hand subtree.  */
          emit_case_nodes (index, node->left, default_label, index_type);

          /* If right node had to be handled later, do that now.  */

          if (test_label)
            {
              /* If the left-hand subtree fell through,
                 don't let it fall into the right-hand subtree.  */
              emit_jump (default_label);

              expand_label (test_label);
              emit_case_nodes (index, node->right, default_label, index_type);
            }
        }

      else if (node->right != 0 && node->left == 0)
        {
          /* Deal with values to the left of this node,
             if they are possible.  */
          if (!node_has_low_bound (node, index_type))
            {
              emit_cmp_and_jump_insns (index,
                                       convert_modes
                                       (mode, imode,
                                        expand_normal (node->low),
                                        unsignedp),
                                       LT, NULL_RTX, mode, unsignedp,
                                       default_label);
            }

          /* Value belongs to this node or to the right-hand subtree.  */

          emit_cmp_and_jump_insns (index,
                                   convert_modes
                                   (mode, imode,
                                    expand_normal (node->high),
                                    unsignedp),
                                   LE, NULL_RTX, mode, unsignedp,
                                   label_rtx (node->code_label));

          emit_case_nodes (index, node->right, default_label, index_type);
        }

      else if (node->right == 0 && node->left != 0)
        {
          /* Deal with values to the right of this node,
             if they are possible.  */
          if (!node_has_high_bound (node, index_type))
            {
              emit_cmp_and_jump_insns (index,
                                       convert_modes
                                       (mode, imode,
                                        expand_normal (node->high),
                                        unsignedp),
                                       GT, NULL_RTX, mode, unsignedp,
                                       default_label);
            }

          /* Value belongs to this node or to the left-hand subtree.  */

          emit_cmp_and_jump_insns (index,
                                   convert_modes
                                   (mode, imode,
                                    expand_normal (node->low),
                                    unsignedp),
                                   GE, NULL_RTX, mode, unsignedp,
                                   label_rtx (node->code_label));

          emit_case_nodes (index, node->left, default_label, index_type);
        }

      else
        {
          /* Node has no children so we check low and high bounds to remove
             redundant tests.  Only one of the bounds can exist,
             since otherwise this node is bounded--a case tested already.  */
          int high_bound = node_has_high_bound (node, index_type);
          int low_bound = node_has_low_bound (node, index_type);

          if (!high_bound && low_bound)
            {
              /* Only the upper end of the range needs checking.  */
              emit_cmp_and_jump_insns (index,
                                       convert_modes
                                       (mode, imode,
                                        expand_normal (node->high),
                                        unsignedp),
                                       GT, NULL_RTX, mode, unsignedp,
                                       default_label);
            }

          else if (!low_bound && high_bound)
            {
              /* Only the lower end of the range needs checking.  */
              emit_cmp_and_jump_insns (index,
                                       convert_modes
                                       (mode, imode,
                                        expand_normal (node->low),
                                        unsignedp),
                                       LT, NULL_RTX, mode, unsignedp,
                                       default_label);
            }
          else if (!low_bound && !high_bound)
            {
              /* Widen LOW and HIGH to the same width as INDEX.  */
              tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
              tree low = build1 (CONVERT_EXPR, type, node->low);
              tree high = build1 (CONVERT_EXPR, type, node->high);
              rtx low_rtx, new_index, new_bound;

              /* Instead of doing two branches, emit one unsigned branch for
                 (index-low) > (high-low).  */
              low_rtx = expand_expr (low, NULL_RTX, mode, EXPAND_NORMAL);
              new_index = expand_simple_binop (mode, MINUS, index, low_rtx,
                                               NULL_RTX, unsignedp,
                                               OPTAB_WIDEN);
              new_bound = expand_expr (fold_build2 (MINUS_EXPR, type,
                                                    high, low),
                                       NULL_RTX, mode, EXPAND_NORMAL);

              emit_cmp_and_jump_insns (new_index, new_bound, GT, NULL_RTX,
                                       mode, 1, default_label);
            }

          emit_jump (label_rtx (node->code_label));
        }
    }
}

powered by: WebSVN 2.1.0

© copyright 1999-2024 OpenCores.org, equivalent to Oliscience, all rights reserved. OpenCores®, registered trademark.