OpenCores
URL https://opencores.org/ocsvn/scarts/scarts/trunk

Subversion Repositories scarts

scarts/trunk/toolchain/scarts-gcc/gcc-4.1.1/gcc/stmt.c (blame for rev 20; lines last changed in rev 12 by jlechner)

/* Expands front end tree to back end RTL for GCC
   Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997,
   1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */

/* This file handles the generation of rtl code from tree structure
   above the level of expressions, using subroutines in exp*.c and emit-rtl.c.
   The functions whose names start with `expand_' are called by the
   expander to generate RTL instructions for various kinds of constructs.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"

#include "rtl.h"
#include "hard-reg-set.h"
#include "tree.h"
#include "tm_p.h"
#include "flags.h"
#include "except.h"
#include "function.h"
#include "insn-config.h"
#include "expr.h"
#include "libfuncs.h"
#include "recog.h"
#include "machmode.h"
#include "toplev.h"
#include "output.h"
#include "ggc.h"
#include "langhooks.h"
#include "predict.h"
#include "optabs.h"
#include "target.h"
#include "regs.h"

/* Functions and data structures for expanding case statements.  */

/* Case label structure, used to hold info on labels within case
   statements.  We handle "range" labels; for a single-value label
   as in C, the high and low limits are the same.

   We start with a vector of case nodes sorted in ascending order, and
   the default label as the last element in the vector.  Before expanding
   to RTL, we transform this vector into a list linked via the RIGHT
   fields in the case_node struct.  Nodes with higher case values are
   later in the list.

   Switch statements can be output in three forms.  A branch table is
   used if there are more than a few labels and the labels are dense
   within the range between the smallest and largest case value.  If a
   branch table is used, no further manipulations are done with the case
   node chain.

   The alternative to the use of a branch table is to generate a series
   of compare and jump insns.  When that is done, we use the LEFT, RIGHT,
   and PARENT fields to hold a binary tree.  Initially the tree is
   totally unbalanced, with everything on the right.  We balance the tree
   with nodes on the left having lower case values than the parent
   and nodes on the right having higher values.  We then output the tree
   in order.

   For very small, suitable switch statements, we can generate a series
   of simple bit test and branches instead.  */
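
/* Illustrative example (the final strategy is chosen when the switch is
   expanded, based on the number and density of the labels): a switch
   such as

       switch (c) { case 0: ... case 1: ... case 2: ... case 15: ... }

   is dense and would normally be emitted as a branch table, whereas

       switch (n) { case 1: ... case 1000: ... case 100000: ... }

   is sparse and would be emitted as the balanced compare-and-jump tree
   described above.  */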

struct case_node GTY(())
{
  struct case_node      *left;  /* Left son in binary tree */
  struct case_node      *right; /* Right son in binary tree; also node chain */
  struct case_node      *parent; /* Parent of node in binary tree */
  tree                  low;    /* Lowest index value for this label */
  tree                  high;   /* Highest index value for this label */
  tree                  code_label; /* Label to jump to when node matches */
};

typedef struct case_node case_node;
typedef struct case_node *case_node_ptr;

/* These are used by estimate_case_costs and balance_case_nodes.  */

/* This must be a signed type, and non-ANSI compilers lack signed char.  */
static short cost_table_[129];
static int use_cost_table;
static int cost_table_initialized;

/* Special care is needed because we allow -1, but TREE_INT_CST_LOW
   is unsigned.  */
#define COST_TABLE(I)  cost_table_[(unsigned HOST_WIDE_INT) ((I) + 1)]
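
/* For example, COST_TABLE (-1) expands to cost_table_[0] and
   COST_TABLE (127) to cost_table_[128], so the indices -1 through 127
   all map onto the 129 entries of cost_table_.  */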

static int n_occurrences (int, const char *);
static bool tree_conflicts_with_clobbers_p (tree, HARD_REG_SET *);
static void expand_nl_goto_receiver (void);
static bool check_operand_nalternatives (tree, tree);
static bool check_unique_operand_names (tree, tree);
static char *resolve_operand_name_1 (char *, tree, tree);
static void expand_null_return_1 (void);
static void expand_value_return (rtx);
static void do_jump_if_equal (rtx, rtx, rtx, int);
static int estimate_case_costs (case_node_ptr);
static bool lshift_cheap_p (void);
static int case_bit_test_cmp (const void *, const void *);
static void emit_case_bit_tests (tree, tree, tree, tree, case_node_ptr, rtx);
static void balance_case_nodes (case_node_ptr *, case_node_ptr);
static int node_has_low_bound (case_node_ptr, tree);
static int node_has_high_bound (case_node_ptr, tree);
static int node_is_bounded (case_node_ptr, tree);
static void emit_case_nodes (rtx, case_node_ptr, rtx, tree);
static struct case_node *add_case_node (struct case_node *, tree,
                                        tree, tree, tree);


/* Return the rtx-label that corresponds to a LABEL_DECL,
   creating it if necessary.  */

rtx
label_rtx (tree label)
{
  gcc_assert (TREE_CODE (label) == LABEL_DECL);

  if (!DECL_RTL_SET_P (label))
    {
      rtx r = gen_label_rtx ();
      SET_DECL_RTL (label, r);
      if (FORCED_LABEL (label) || DECL_NONLOCAL (label))
        LABEL_PRESERVE_P (r) = 1;
    }

  return DECL_RTL (label);
}

/* As above, but also put it on the forced-reference list of the
   function that contains it.  */
rtx
force_label_rtx (tree label)
{
  rtx ref = label_rtx (label);
  tree function = decl_function_context (label);
  struct function *p;

  gcc_assert (function);

  if (function != current_function_decl)
    p = find_function_data (function);
  else
    p = cfun;

  p->expr->x_forced_labels = gen_rtx_EXPR_LIST (VOIDmode, ref,
                                                p->expr->x_forced_labels);
  return ref;
}

/* Add an unconditional jump to LABEL as the next sequential instruction.  */

void
emit_jump (rtx label)
{
  do_pending_stack_adjust ();
  emit_jump_insn (gen_jump (label));
  emit_barrier ();
}

/* Emit code to jump to the address
   specified by the pointer expression EXP.  */

void
expand_computed_goto (tree exp)
{
  rtx x = expand_expr (exp, NULL_RTX, VOIDmode, 0);

  x = convert_memory_address (Pmode, x);

  do_pending_stack_adjust ();
  emit_indirect_jump (x);
}
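
/* Example of what reaches expand_computed_goto: the GNU C computed-goto
   extension

       void *labels[] = { &&l0, &&l1 };
       goto *labels[i];

   where EXP is the pointer expression `labels[i]'.  */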

/* Handle goto statements and the labels that they can go to.  */

/* Specify the location in the RTL code of a label LABEL,
   which is a LABEL_DECL tree node.

   This is used for the kind of label that the user can jump to with a
   goto statement, and for alternatives of a switch or case statement.
   RTL labels generated for loops and conditionals don't go through here;
   they are generated directly at the RTL level, by other functions below.

   Note that this has nothing to do with defining label *names*.
   Languages vary in how they do that and what that even means.  */

void
expand_label (tree label)
{
  rtx label_r = label_rtx (label);

  do_pending_stack_adjust ();
  emit_label (label_r);
  if (DECL_NAME (label))
    LABEL_NAME (DECL_RTL (label)) = IDENTIFIER_POINTER (DECL_NAME (label));

  if (DECL_NONLOCAL (label))
    {
      expand_nl_goto_receiver ();
      nonlocal_goto_handler_labels
        = gen_rtx_EXPR_LIST (VOIDmode, label_r,
                             nonlocal_goto_handler_labels);
    }

  if (FORCED_LABEL (label))
    forced_labels = gen_rtx_EXPR_LIST (VOIDmode, label_r, forced_labels);

  if (DECL_NONLOCAL (label) || FORCED_LABEL (label))
    maybe_set_first_label_num (label_r);
}

/* Generate RTL code for a `goto' statement with target label LABEL.
   LABEL should be a LABEL_DECL tree node that was or will later be
   defined with `expand_label'.  */

void
expand_goto (tree label)
{
#ifdef ENABLE_CHECKING
  /* Check for a nonlocal goto to a containing function.  Should have
     gotten translated to __builtin_nonlocal_goto.  */
  tree context = decl_function_context (label);
  gcc_assert (!context || context == current_function_decl);
#endif

  emit_jump (label_rtx (label));
}

/* Return the number of times character C occurs in string S.  */
static int
n_occurrences (int c, const char *s)
{
  int n = 0;
  while (*s)
    n += (*s++ == c);
  return n;
}

/* Generate RTL for an asm statement (explicit assembler code).
   STRING is a STRING_CST node containing the assembler code text,
   or an ADDR_EXPR containing a STRING_CST.  VOL nonzero means the
   insn is volatile; don't optimize it.  */

static void
expand_asm (tree string, int vol)
{
  rtx body;

  if (TREE_CODE (string) == ADDR_EXPR)
    string = TREE_OPERAND (string, 0);

  body = gen_rtx_ASM_INPUT (VOIDmode,
                            ggc_strdup (TREE_STRING_POINTER (string)));

  MEM_VOLATILE_P (body) = vol;

  emit_insn (body);
}
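
/* Example of what expand_asm handles: a basic asm without operands,

       asm volatile ("nop");

   The template is emitted as a single ASM_INPUT body; an asm with
   outputs, inputs or clobbers goes through expand_asm_operands below
   instead.  */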
 
279
/* Parse the output constraint pointed to by *CONSTRAINT_P.  It is the
280
   OPERAND_NUMth output operand, indexed from zero.  There are NINPUTS
281
   inputs and NOUTPUTS outputs to this extended-asm.  Upon return,
282
   *ALLOWS_MEM will be TRUE iff the constraint allows the use of a
283
   memory operand.  Similarly, *ALLOWS_REG will be TRUE iff the
284
   constraint allows the use of a register operand.  And, *IS_INOUT
285
   will be true if the operand is read-write, i.e., if it is used as
286
   an input as well as an output.  If *CONSTRAINT_P is not in
287
   canonical form, it will be made canonical.  (Note that `+' will be
288
   replaced with `=' as part of this process.)
289
 
290
   Returns TRUE if all went well; FALSE if an error occurred.  */
291
 
292
bool
293
parse_output_constraint (const char **constraint_p, int operand_num,
294
                         int ninputs, int noutputs, bool *allows_mem,
295
                         bool *allows_reg, bool *is_inout)
296
{
297
  const char *constraint = *constraint_p;
298
  const char *p;
299
 
300
  /* Assume the constraint doesn't allow the use of either a register
301
     or memory.  */
302
  *allows_mem = false;
303
  *allows_reg = false;
304
 
305
  /* Allow the `=' or `+' to not be at the beginning of the string,
306
     since it wasn't explicitly documented that way, and there is a
307
     large body of code that puts it last.  Swap the character to
308
     the front, so as not to uglify any place else.  */
309
  p = strchr (constraint, '=');
310
  if (!p)
311
    p = strchr (constraint, '+');
312
 
313
  /* If the string doesn't contain an `=', issue an error
314
     message.  */
315
  if (!p)
316
    {
317
      error ("output operand constraint lacks %<=%>");
318
      return false;
319
    }
320
 
321
  /* If the constraint begins with `+', then the operand is both read
322
     from and written to.  */
323
  *is_inout = (*p == '+');
324
 
325
  /* Canonicalize the output constraint so that it begins with `='.  */
326
  if (p != constraint || *is_inout)
327
    {
328
      char *buf;
329
      size_t c_len = strlen (constraint);
330
 
331
      if (p != constraint)
332
        warning (0, "output constraint %qc for operand %d "
333
                 "is not at the beginning",
334
                 *p, operand_num);
335
 
336
      /* Make a copy of the constraint.  */
337
      buf = alloca (c_len + 1);
338
      strcpy (buf, constraint);
339
      /* Swap the first character and the `=' or `+'.  */
340
      buf[p - constraint] = buf[0];
341
      /* Make sure the first character is an `='.  (Until we do this,
342
         it might be a `+'.)  */
343
      buf[0] = '=';
344
      /* Replace the constraint with the canonicalized string.  */
345
      *constraint_p = ggc_alloc_string (buf, c_len);
346
      constraint = *constraint_p;
347
    }
348
 
349
  /* Loop through the constraint string.  */
350
  for (p = constraint + 1; *p; p += CONSTRAINT_LEN (*p, p))
351
    switch (*p)
352
      {
353
      case '+':
354
      case '=':
355
        error ("operand constraint contains incorrectly positioned "
356
               "%<+%> or %<=%>");
357
        return false;
358
 
359
      case '%':
360
        if (operand_num + 1 == ninputs + noutputs)
361
          {
362
            error ("%<%%%> constraint used with last operand");
363
            return false;
364
          }
365
        break;
366
 
367
      case 'V':  case 'm':  case 'o':
368
        *allows_mem = true;
369
        break;
370
 
371
      case '?':  case '!':  case '*':  case '&':  case '#':
372
      case 'E':  case 'F':  case 'G':  case 'H':
373
      case 's':  case 'i':  case 'n':
374
      case 'I':  case 'J':  case 'K':  case 'L':  case 'M':
375
      case 'N':  case 'O':  case 'P':  case ',':
376
        break;
377
 
378
      case '0':  case '1':  case '2':  case '3':  case '4':
379
      case '5':  case '6':  case '7':  case '8':  case '9':
380
      case '[':
381
        error ("matching constraint not valid in output operand");
382
        return false;
383
 
384
      case '<':  case '>':
385
        /* ??? Before flow, auto inc/dec insns are not supposed to exist,
386
           excepting those that expand_call created.  So match memory
387
           and hope.  */
388
        *allows_mem = true;
389
        break;
390
 
391
      case 'g':  case 'X':
392
        *allows_reg = true;
393
        *allows_mem = true;
394
        break;
395
 
396
      case 'p': case 'r':
397
        *allows_reg = true;
398
        break;
399
 
400
      default:
401
        if (!ISALPHA (*p))
402
          break;
403
        if (REG_CLASS_FROM_CONSTRAINT (*p, p) != NO_REGS)
404
          *allows_reg = true;
405
#ifdef EXTRA_CONSTRAINT_STR
406
        else if (EXTRA_ADDRESS_CONSTRAINT (*p, p))
407
          *allows_reg = true;
408
        else if (EXTRA_MEMORY_CONSTRAINT (*p, p))
409
          *allows_mem = true;
410
        else
411
          {
412
            /* Otherwise we can't assume anything about the nature of
413
               the constraint except that it isn't purely registers.
414
               Treat it like "g" and hope for the best.  */
415
            *allows_reg = true;
416
            *allows_mem = true;
417
          }
418
#endif
419
        break;
420
      }
421
 
422
  return true;
423
}
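
/* Example: for the statement

       asm ("incl %0" : "+r" (x));

   the output constraint "+r" is canonicalized above to "=r", with
   *IS_INOUT set so that the operand is later treated as an input as
   well.  */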
424
 
425
/* Similar, but for input constraints.  */
426
 
427
bool
428
parse_input_constraint (const char **constraint_p, int input_num,
429
                        int ninputs, int noutputs, int ninout,
430
                        const char * const * constraints,
431
                        bool *allows_mem, bool *allows_reg)
432
{
433
  const char *constraint = *constraint_p;
434
  const char *orig_constraint = constraint;
435
  size_t c_len = strlen (constraint);
436
  size_t j;
437
  bool saw_match = false;
438
 
439
  /* Assume the constraint doesn't allow the use of either
440
     a register or memory.  */
441
  *allows_mem = false;
442
  *allows_reg = false;
443
 
444
  /* Make sure constraint has neither `=', `+', nor '&'.  */
445
 
446
  for (j = 0; j < c_len; j += CONSTRAINT_LEN (constraint[j], constraint+j))
447
    switch (constraint[j])
448
      {
449
      case '+':  case '=':  case '&':
450
        if (constraint == orig_constraint)
451
          {
452
            error ("input operand constraint contains %qc", constraint[j]);
453
            return false;
454
          }
455
        break;
456
 
457
      case '%':
458
        if (constraint == orig_constraint
459
            && input_num + 1 == ninputs - ninout)
460
          {
461
            error ("%<%%%> constraint used with last operand");
462
            return false;
463
          }
464
        break;
465
 
466
      case 'V':  case 'm':  case 'o':
467
        *allows_mem = true;
468
        break;
469
 
470
      case '<':  case '>':
471
      case '?':  case '!':  case '*':  case '#':
472
      case 'E':  case 'F':  case 'G':  case 'H':
473
      case 's':  case 'i':  case 'n':
474
      case 'I':  case 'J':  case 'K':  case 'L':  case 'M':
475
      case 'N':  case 'O':  case 'P':  case ',':
476
        break;
477
 
478
        /* Whether or not a numeric constraint allows a register is
479
           decided by the matching constraint, and so there is no need
480
           to do anything special with them.  We must handle them in
481
           the default case, so that we don't unnecessarily force
482
           operands to memory.  */
483
      case '0':  case '1':  case '2':  case '3':  case '4':
484
      case '5':  case '6':  case '7':  case '8':  case '9':
485
        {
486
          char *end;
487
          unsigned long match;
488
 
489
          saw_match = true;
490
 
491
          match = strtoul (constraint + j, &end, 10);
492
          if (match >= (unsigned long) noutputs)
493
            {
494
              error ("matching constraint references invalid operand number");
495
              return false;
496
            }
497
 
498
          /* Try and find the real constraint for this dup.  Only do this
499
             if the matching constraint is the only alternative.  */
500
          if (*end == '\0'
501
              && (j == 0 || (j == 1 && constraint[0] == '%')))
502
            {
503
              constraint = constraints[match];
504
              *constraint_p = constraint;
505
              c_len = strlen (constraint);
506
              j = 0;
507
              /* ??? At the end of the loop, we will skip the first part of
508
                 the matched constraint.  This assumes not only that the
509
                 other constraint is an output constraint, but also that
510
                 the '=' or '+' come first.  */
511
              break;
512
            }
513
          else
514
            j = end - constraint;
515
          /* Anticipate increment at end of loop.  */
516
          j--;
517
        }
518
        /* Fall through.  */
519
 
520
      case 'p':  case 'r':
521
        *allows_reg = true;
522
        break;
523
 
524
      case 'g':  case 'X':
525
        *allows_reg = true;
526
        *allows_mem = true;
527
        break;
528
 
529
      default:
530
        if (! ISALPHA (constraint[j]))
531
          {
532
            error ("invalid punctuation %qc in constraint", constraint[j]);
533
            return false;
534
          }
535
        if (REG_CLASS_FROM_CONSTRAINT (constraint[j], constraint + j)
536
            != NO_REGS)
537
          *allows_reg = true;
538
#ifdef EXTRA_CONSTRAINT_STR
539
        else if (EXTRA_ADDRESS_CONSTRAINT (constraint[j], constraint + j))
540
          *allows_reg = true;
541
        else if (EXTRA_MEMORY_CONSTRAINT (constraint[j], constraint + j))
542
          *allows_mem = true;
543
        else
544
          {
545
            /* Otherwise we can't assume anything about the nature of
546
               the constraint except that it isn't purely registers.
547
               Treat it like "g" and hope for the best.  */
548
            *allows_reg = true;
549
            *allows_mem = true;
550
          }
551
#endif
552
        break;
553
      }
554
 
555
  if (saw_match && !*allows_reg)
556
    warning (0, "matching constraint does not allow a register");
557
 
558
  return true;
559
}
560
 
561
/* Return DECL iff there's an overlap between *REGS and DECL, where DECL
562
   can be an asm-declared register.  Called via walk_tree.  */
563
 
564
static tree
565
decl_overlaps_hard_reg_set_p (tree *declp, int *walk_subtrees ATTRIBUTE_UNUSED,
566
                              void *data)
567
{
568
  tree decl = *declp;
569
  const HARD_REG_SET *regs = data;
570
 
571
  if (TREE_CODE (decl) == VAR_DECL)
572
    {
573
      if (DECL_HARD_REGISTER (decl)
574
          && REG_P (DECL_RTL (decl))
575
          && REGNO (DECL_RTL (decl)) < FIRST_PSEUDO_REGISTER)
576
        {
577
          rtx reg = DECL_RTL (decl);
578
          unsigned int regno;
579
 
580
          for (regno = REGNO (reg);
581
               regno < (REGNO (reg)
582
                        + hard_regno_nregs[REGNO (reg)][GET_MODE (reg)]);
583
               regno++)
584
            if (TEST_HARD_REG_BIT (*regs, regno))
585
              return decl;
586
        }
587
      walk_subtrees = 0;
588
    }
589
  else if (TYPE_P (decl) || TREE_CODE (decl) == PARM_DECL)
590
    walk_subtrees = 0;
591
  return NULL_TREE;
592
}
593
 
594
/* If there is an overlap between *REGS and DECL, return the first overlap
595
   found.  */
596
tree
597
tree_overlaps_hard_reg_set (tree decl, HARD_REG_SET *regs)
598
{
599
  return walk_tree (&decl, decl_overlaps_hard_reg_set_p, regs, NULL);
600
}
601
 
602
/* Check for overlap between registers marked in CLOBBERED_REGS and
603
   anything inappropriate in T.  Emit an error and return true if a
   conflict is found, false otherwise.  */
605
 
606
static bool
607
tree_conflicts_with_clobbers_p (tree t, HARD_REG_SET *clobbered_regs)
608
{
609
  /* Conflicts between asm-declared register variables and the clobber
610
     list are not allowed.  */
611
  tree overlap = tree_overlaps_hard_reg_set (t, clobbered_regs);
612
 
613
  if (overlap)
614
    {
615
      error ("asm-specifier for variable %qs conflicts with asm clobber list",
616
             IDENTIFIER_POINTER (DECL_NAME (overlap)));
617
 
618
      /* Reset registerness to stop multiple errors emitted for a single
619
         variable.  */
620
      DECL_REGISTER (overlap) = 0;
621
      return true;
622
    }
623
 
624
  return false;
625
}
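
/* Example of the conflict detected above (the register name is only
   illustrative; valid names are target-specific):

       register int v asm ("r0");
       asm ("..." : "=r" (v) : : "r0");

   The asm-declared register variable overlaps the clobber list, so an
   error is emitted and DECL_REGISTER is cleared to suppress duplicate
   diagnostics.  */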
626
 
627
/* Generate RTL for an asm statement with arguments.
628
   STRING is the instruction template.
629
   OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
630
   Each output or input has an expression in the TREE_VALUE and
631
   a tree list in TREE_PURPOSE which in turn contains a constraint
632
   name in TREE_VALUE (or NULL_TREE) and a constraint string
633
   in TREE_PURPOSE.
634
   CLOBBERS is a list of STRING_CST nodes each naming a hard register
635
   that is clobbered by this insn.
636
 
637
   Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
638
   Some elements of OUTPUTS may be replaced with trees representing temporary
639
   values.  The caller should copy those temporary values to the originally
640
   specified lvalues.
641
 
642
   VOL nonzero means the insn is volatile; don't optimize it.  */
643
 
644
static void
645
expand_asm_operands (tree string, tree outputs, tree inputs,
646
                     tree clobbers, int vol, location_t locus)
647
{
648
  rtvec argvec, constraintvec;
649
  rtx body;
650
  int ninputs = list_length (inputs);
651
  int noutputs = list_length (outputs);
652
  int ninout;
653
  int nclobbers;
654
  HARD_REG_SET clobbered_regs;
655
  int clobber_conflict_found = 0;
656
  tree tail;
657
  tree t;
658
  int i;
659
  /* Vector of RTX's of evaluated output operands.  */
660
  rtx *output_rtx = alloca (noutputs * sizeof (rtx));
661
  int *inout_opnum = alloca (noutputs * sizeof (int));
662
  rtx *real_output_rtx = alloca (noutputs * sizeof (rtx));
663
  enum machine_mode *inout_mode
664
    = alloca (noutputs * sizeof (enum machine_mode));
665
  const char **constraints
666
    = alloca ((noutputs + ninputs) * sizeof (const char *));
667
  int old_generating_concat_p = generating_concat_p;
668
 
669
  /* An ASM with no outputs needs to be treated as volatile, for now.  */
670
  if (noutputs == 0)
671
    vol = 1;
672
 
673
  if (! check_operand_nalternatives (outputs, inputs))
674
    return;
675
 
676
  string = resolve_asm_operand_names (string, outputs, inputs);
677
 
678
  /* Collect constraints.  */
679
  i = 0;
680
  for (t = outputs; t ; t = TREE_CHAIN (t), i++)
681
    constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
682
  for (t = inputs; t ; t = TREE_CHAIN (t), i++)
683
    constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
684
 
685
  /* Sometimes we wish to automatically clobber registers across an asm.
686
     Case in point is when the i386 backend moved from cc0 to a hard reg --
687
     maintaining source-level compatibility means automatically clobbering
688
     the flags register.  */
689
  clobbers = targetm.md_asm_clobbers (outputs, inputs, clobbers);
690
 
691
  /* Count the number of meaningful clobbered registers, ignoring what
692
     we would ignore later.  */
693
  nclobbers = 0;
694
  CLEAR_HARD_REG_SET (clobbered_regs);
695
  for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
696
    {
697
      const char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
698
 
699
      i = decode_reg_name (regname);
700
      if (i >= 0 || i == -4)
701
        ++nclobbers;
702
      else if (i == -2)
703
        error ("unknown register name %qs in %<asm%>", regname);
704
 
705
      /* Mark clobbered registers.  */
706
      if (i >= 0)
707
        {
708
          /* Clobbering the PIC register is an error.  */
709
          if (i == (int) PIC_OFFSET_TABLE_REGNUM)
710
            {
711
              error ("PIC register %qs clobbered in %<asm%>", regname);
712
              return;
713
            }
714
 
715
          SET_HARD_REG_BIT (clobbered_regs, i);
716
        }
717
    }
718
 
719
  /* First pass over inputs and outputs checks validity and sets
720
     mark_addressable if needed.  */
721
 
722
  ninout = 0;
723
  for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
724
    {
725
      tree val = TREE_VALUE (tail);
726
      tree type = TREE_TYPE (val);
727
      const char *constraint;
728
      bool is_inout;
729
      bool allows_reg;
730
      bool allows_mem;
731
 
732
      /* If there's an erroneous arg, emit no insn.  */
733
      if (type == error_mark_node)
734
        return;
735
 
736
      /* Try to parse the output constraint.  If that fails, there's
737
         no point in going further.  */
738
      constraint = constraints[i];
739
      if (!parse_output_constraint (&constraint, i, ninputs, noutputs,
740
                                    &allows_mem, &allows_reg, &is_inout))
741
        return;
742
 
743
      if (! allows_reg
744
          && (allows_mem
745
              || is_inout
746
              || (DECL_P (val)
747
                  && REG_P (DECL_RTL (val))
748
                  && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type))))
749
        lang_hooks.mark_addressable (val);
750
 
751
      if (is_inout)
752
        ninout++;
753
    }
754
 
755
  ninputs += ninout;
756
  if (ninputs + noutputs > MAX_RECOG_OPERANDS)
757
    {
758
      error ("more than %d operands in %<asm%>", MAX_RECOG_OPERANDS);
759
      return;
760
    }
761
 
762
  for (i = 0, tail = inputs; tail; i++, tail = TREE_CHAIN (tail))
763
    {
764
      bool allows_reg, allows_mem;
765
      const char *constraint;
766
 
767
      /* If there's an erroneous arg, emit no insn, because the ASM_INPUT
768
         would get VOIDmode and that could cause a crash in reload.  */
769
      if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)
770
        return;
771
 
772
      constraint = constraints[i + noutputs];
773
      if (! parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
774
                                    constraints, &allows_mem, &allows_reg))
775
        return;
776
 
777
      if (! allows_reg && allows_mem)
778
        lang_hooks.mark_addressable (TREE_VALUE (tail));
779
    }
780
 
781
  /* Second pass evaluates arguments.  */
782
 
783
  ninout = 0;
784
  for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
785
    {
786
      tree val = TREE_VALUE (tail);
787
      tree type = TREE_TYPE (val);
788
      bool is_inout;
789
      bool allows_reg;
790
      bool allows_mem;
791
      rtx op;
792
      bool ok;
793
 
794
      ok = parse_output_constraint (&constraints[i], i, ninputs,
795
                                    noutputs, &allows_mem, &allows_reg,
796
                                    &is_inout);
797
      gcc_assert (ok);
798
 
799
      /* If an output operand is not a decl or indirect ref and our constraint
800
         allows a register, make a temporary to act as an intermediate.
801
         Make the asm insn write into that, then our caller will copy it to
802
         the real output operand.  Likewise for promoted variables.  */
803
 
804
      generating_concat_p = 0;
805
 
806
      real_output_rtx[i] = NULL_RTX;
807
      if ((TREE_CODE (val) == INDIRECT_REF
808
           && allows_mem)
809
          || (DECL_P (val)
810
              && (allows_mem || REG_P (DECL_RTL (val)))
811
              && ! (REG_P (DECL_RTL (val))
812
                    && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
813
          || ! allows_reg
814
          || is_inout)
815
        {
816
          op = expand_expr (val, NULL_RTX, VOIDmode, EXPAND_WRITE);
817
          if (MEM_P (op))
818
            op = validize_mem (op);
819
 
820
          if (! allows_reg && !MEM_P (op))
821
            error ("output number %d not directly addressable", i);
822
          if ((! allows_mem && MEM_P (op))
823
              || GET_CODE (op) == CONCAT)
824
            {
825
              real_output_rtx[i] = op;
826
              op = gen_reg_rtx (GET_MODE (op));
827
              if (is_inout)
828
                emit_move_insn (op, real_output_rtx[i]);
829
            }
830
        }
831
      else
832
        {
833
          op = assign_temp (type, 0, 0, 1);
834
          op = validize_mem (op);
835
          TREE_VALUE (tail) = make_tree (type, op);
836
        }
837
      output_rtx[i] = op;
838
 
839
      generating_concat_p = old_generating_concat_p;
840
 
841
      if (is_inout)
842
        {
843
          inout_mode[ninout] = TYPE_MODE (type);
844
          inout_opnum[ninout++] = i;
845
        }
846
 
847
      if (tree_conflicts_with_clobbers_p (val, &clobbered_regs))
848
        clobber_conflict_found = 1;
849
    }
850
 
851
  /* Make vectors for the expression-rtx, constraint strings,
852
     and named operands.  */
853
 
854
  argvec = rtvec_alloc (ninputs);
855
  constraintvec = rtvec_alloc (ninputs);
856
 
857
  body = gen_rtx_ASM_OPERANDS ((noutputs == 0 ? VOIDmode
858
                                : GET_MODE (output_rtx[0])),
859
                               ggc_strdup (TREE_STRING_POINTER (string)),
860
                               empty_string, 0, argvec, constraintvec,
861
                               locus);
862
 
863
  MEM_VOLATILE_P (body) = vol;
864
 
865
  /* Eval the inputs and put them into ARGVEC.
866
     Put their constraints into ASM_INPUTs and store in CONSTRAINTS.  */
867
 
868
  for (i = 0, tail = inputs; tail; tail = TREE_CHAIN (tail), ++i)
869
    {
870
      bool allows_reg, allows_mem;
871
      const char *constraint;
872
      tree val, type;
873
      rtx op;
874
      bool ok;
875
 
876
      constraint = constraints[i + noutputs];
877
      ok = parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
878
                                   constraints, &allows_mem, &allows_reg);
879
      gcc_assert (ok);
880
 
881
      generating_concat_p = 0;
882
 
883
      val = TREE_VALUE (tail);
884
      type = TREE_TYPE (val);
885
      op = expand_expr (val, NULL_RTX, VOIDmode,
886
                        (allows_mem && !allows_reg
887
                         ? EXPAND_MEMORY : EXPAND_NORMAL));
888
 
889
      /* Never pass a CONCAT to an ASM.  */
890
      if (GET_CODE (op) == CONCAT)
891
        op = force_reg (GET_MODE (op), op);
892
      else if (MEM_P (op))
893
        op = validize_mem (op);
894
 
895
      if (asm_operand_ok (op, constraint) <= 0)
896
        {
897
          if (allows_reg && TYPE_MODE (type) != BLKmode)
898
            op = force_reg (TYPE_MODE (type), op);
899
          else if (!allows_mem)
900
            warning (0, "asm operand %d probably doesn%'t match constraints",
901
                     i + noutputs);
902
          else if (MEM_P (op))
903
            {
904
              /* We won't recognize either volatile memory or memory
905
                 with a queued address as available a memory_operand
906
                 at this point.  Ignore it: clearly this *is* a memory.  */
907
            }
908
          else
909
            {
910
              warning (0, "use of memory input without lvalue in "
911
                       "asm operand %d is deprecated", i + noutputs);
912
 
913
              if (CONSTANT_P (op))
914
                {
915
                  rtx mem = force_const_mem (TYPE_MODE (type), op);
916
                  if (mem)
917
                    op = validize_mem (mem);
918
                  else
919
                    op = force_reg (TYPE_MODE (type), op);
920
                }
921
              if (REG_P (op)
922
                  || GET_CODE (op) == SUBREG
923
                  || GET_CODE (op) == CONCAT)
924
                {
925
                  tree qual_type = build_qualified_type (type,
926
                                                         (TYPE_QUALS (type)
927
                                                          | TYPE_QUAL_CONST));
928
                  rtx memloc = assign_temp (qual_type, 1, 1, 1);
929
                  memloc = validize_mem (memloc);
930
                  emit_move_insn (memloc, op);
931
                  op = memloc;
932
                }
933
            }
934
        }
935
 
936
      generating_concat_p = old_generating_concat_p;
937
      ASM_OPERANDS_INPUT (body, i) = op;
938
 
939
      ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, i)
940
        = gen_rtx_ASM_INPUT (TYPE_MODE (type),
941
                             ggc_strdup (constraints[i + noutputs]));
942
 
943
      if (tree_conflicts_with_clobbers_p (val, &clobbered_regs))
944
        clobber_conflict_found = 1;
945
    }
946
 
947
  /* Protect all the operands from the queue now that they have all been
948
     evaluated.  */
949
 
950
  generating_concat_p = 0;
951
 
952
  /* For in-out operands, copy output rtx to input rtx.  */
953
  for (i = 0; i < ninout; i++)
954
    {
955
      int j = inout_opnum[i];
956
      char buffer[16];
957
 
958
      ASM_OPERANDS_INPUT (body, ninputs - ninout + i)
959
        = output_rtx[j];
960
 
961
      sprintf (buffer, "%d", j);
962
      ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, ninputs - ninout + i)
963
        = gen_rtx_ASM_INPUT (inout_mode[i], ggc_strdup (buffer));
964
    }
965
 
966
  generating_concat_p = old_generating_concat_p;
967
 
968
  /* Now, for each output, construct an rtx
969
     (set OUTPUT (asm_operands INSN OUTPUTCONSTRAINT OUTPUTNUMBER
970
                               ARGVEC CONSTRAINTS OPNAMES))
971
     If there is more than one, put them inside a PARALLEL.  */
972
 
973
  if (noutputs == 1 && nclobbers == 0)
974
    {
975
      ASM_OPERANDS_OUTPUT_CONSTRAINT (body) = ggc_strdup (constraints[0]);
976
      emit_insn (gen_rtx_SET (VOIDmode, output_rtx[0], body));
977
    }
978
 
979
  else if (noutputs == 0 && nclobbers == 0)
980
    {
981
      /* No output operands: put in a raw ASM_OPERANDS rtx.  */
982
      emit_insn (body);
983
    }
984
 
985
  else
986
    {
987
      rtx obody = body;
988
      int num = noutputs;
989
 
990
      if (num == 0)
991
        num = 1;
992
 
993
      body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num + nclobbers));
994
 
995
      /* For each output operand, store a SET.  */
996
      for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
997
        {
998
          XVECEXP (body, 0, i)
999
            = gen_rtx_SET (VOIDmode,
1000
                           output_rtx[i],
1001
                           gen_rtx_ASM_OPERANDS
1002
                           (GET_MODE (output_rtx[i]),
1003
                            ggc_strdup (TREE_STRING_POINTER (string)),
1004
                            ggc_strdup (constraints[i]),
1005
                            i, argvec, constraintvec, locus));
1006
 
1007
          MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;
1008
        }
1009
 
1010
      /* If there are no outputs (but there are some clobbers)
1011
         store the bare ASM_OPERANDS into the PARALLEL.  */
1012
 
1013
      if (i == 0)
1014
        XVECEXP (body, 0, i++) = obody;
1015
 
1016
      /* Store (clobber REG) for each clobbered register specified.  */
1017
 
1018
      for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
1019
        {
1020
          const char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
1021
          int j = decode_reg_name (regname);
1022
          rtx clobbered_reg;
1023
 
1024
          if (j < 0)
1025
            {
1026
              if (j == -3)      /* `cc', which is not a register */
1027
                continue;
1028
 
1029
              if (j == -4)      /* `memory', don't cache memory across asm */
1030
                {
1031
                  XVECEXP (body, 0, i++)
1032
                    = gen_rtx_CLOBBER (VOIDmode,
1033
                                       gen_rtx_MEM
1034
                                       (BLKmode,
1035
                                        gen_rtx_SCRATCH (VOIDmode)));
1036
                  continue;
1037
                }
1038
 
1039
              /* Ignore unknown register, error already signaled.  */
1040
              continue;
1041
            }
1042
 
1043
          /* Use QImode since that's guaranteed to clobber just one reg.  */
1044
          clobbered_reg = gen_rtx_REG (QImode, j);
1045
 
1046
          /* Do a sanity check for overlap between clobbers and any inputs
             or outputs that have not been handled above.  Such an overlap
             should already have been detected and reported.  */
1049
          if (!clobber_conflict_found)
1050
            {
1051
              int opno;
1052
 
1053
              /* We test the old body (obody) contents to avoid tripping
1054
                 over the under-construction body.  */
1055
              for (opno = 0; opno < noutputs; opno++)
1056
                if (reg_overlap_mentioned_p (clobbered_reg, output_rtx[opno]))
1057
                  internal_error ("asm clobber conflict with output operand");
1058
 
1059
              for (opno = 0; opno < ninputs - ninout; opno++)
1060
                if (reg_overlap_mentioned_p (clobbered_reg,
1061
                                             ASM_OPERANDS_INPUT (obody, opno)))
1062
                  internal_error ("asm clobber conflict with input operand");
1063
            }
1064
 
1065
          XVECEXP (body, 0, i++)
1066
            = gen_rtx_CLOBBER (VOIDmode, clobbered_reg);
1067
        }
1068
 
1069
      emit_insn (body);
1070
    }
1071
 
1072
  /* For any outputs that needed reloading into registers, spill them
1073
     back to where they belong.  */
1074
  for (i = 0; i < noutputs; ++i)
1075
    if (real_output_rtx[i])
1076
      emit_move_insn (real_output_rtx[i], output_rtx[i]);
1077
 
1078
  free_temp_slots ();
1079
}
1080
 
1081
void
1082
expand_asm_expr (tree exp)
1083
{
1084
  int noutputs, i;
1085
  tree outputs, tail;
1086
  tree *o;
1087
 
1088
  if (ASM_INPUT_P (exp))
1089
    {
1090
      expand_asm (ASM_STRING (exp), ASM_VOLATILE_P (exp));
1091
      return;
1092
    }
1093
 
1094
  outputs = ASM_OUTPUTS (exp);
1095
  noutputs = list_length (outputs);
1096
  /* o[I] is the place that output number I should be written.  */
1097
  o = (tree *) alloca (noutputs * sizeof (tree));
1098
 
1099
  /* Record the contents of OUTPUTS before it is modified.  */
1100
  for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1101
    o[i] = TREE_VALUE (tail);
1102
 
1103
  /* Generate the ASM_OPERANDS insn; store into the TREE_VALUEs of
1104
     OUTPUTS some trees for where the values were actually stored.  */
1105
  expand_asm_operands (ASM_STRING (exp), outputs, ASM_INPUTS (exp),
1106
                       ASM_CLOBBERS (exp), ASM_VOLATILE_P (exp),
1107
                       input_location);
1108
 
1109
  /* Copy all the intermediate outputs into the specified outputs.  */
1110
  for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1111
    {
1112
      if (o[i] != TREE_VALUE (tail))
1113
        {
1114
          expand_assignment (o[i], TREE_VALUE (tail));
1115
          free_temp_slots ();
1116
 
1117
          /* Restore the original value so that it's correct the next
1118
             time we expand this function.  */
1119
          TREE_VALUE (tail) = o[i];
1120
        }
1121
    }
1122
}
1123
 
1124
/* A subroutine of expand_asm_operands.  Check that all operands have
1125
   the same number of alternatives.  Return true if so.  */
1126
 
1127
static bool
1128
check_operand_nalternatives (tree outputs, tree inputs)
1129
{
1130
  if (outputs || inputs)
1131
    {
1132
      tree tmp = TREE_PURPOSE (outputs ? outputs : inputs);
1133
      int nalternatives
1134
        = n_occurrences (',', TREE_STRING_POINTER (TREE_VALUE (tmp)));
1135
      tree next = inputs;
1136
 
1137
      if (nalternatives + 1 > MAX_RECOG_ALTERNATIVES)
1138
        {
1139
          error ("too many alternatives in %<asm%>");
1140
          return false;
1141
        }
1142
 
1143
      tmp = outputs;
1144
      while (tmp)
1145
        {
1146
          const char *constraint
1147
            = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (tmp)));
1148
 
1149
          if (n_occurrences (',', constraint) != nalternatives)
1150
            {
1151
              error ("operand constraints for %<asm%> differ "
1152
                     "in number of alternatives");
1153
              return false;
1154
            }
1155
 
1156
          if (TREE_CHAIN (tmp))
1157
            tmp = TREE_CHAIN (tmp);
1158
          else
1159
            tmp = next, next = 0;
1160
        }
1161
    }
1162
 
1163
  return true;
1164
}
1165
 
1166
/* A subroutine of expand_asm_operands.  Check that all operand names
1167
   are unique.  Return true if so.  We rely on the fact that these names
1168
   are identifiers, and so have been canonicalized by get_identifier,
1169
   so all we need are pointer comparisons.  */
1170
 
1171
static bool
1172
check_unique_operand_names (tree outputs, tree inputs)
1173
{
1174
  tree i, j;
1175
 
1176
  for (i = outputs; i ; i = TREE_CHAIN (i))
1177
    {
1178
      tree i_name = TREE_PURPOSE (TREE_PURPOSE (i));
1179
      if (! i_name)
1180
        continue;
1181
 
1182
      for (j = TREE_CHAIN (i); j ; j = TREE_CHAIN (j))
1183
        if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
1184
          goto failure;
1185
    }
1186
 
1187
  for (i = inputs; i ; i = TREE_CHAIN (i))
1188
    {
1189
      tree i_name = TREE_PURPOSE (TREE_PURPOSE (i));
1190
      if (! i_name)
1191
        continue;
1192
 
1193
      for (j = TREE_CHAIN (i); j ; j = TREE_CHAIN (j))
1194
        if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
1195
          goto failure;
1196
      for (j = outputs; j ; j = TREE_CHAIN (j))
1197
        if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
1198
          goto failure;
1199
    }
1200
 
1201
  return true;
1202
 
1203
 failure:
1204
  error ("duplicate asm operand name %qs",
1205
         TREE_STRING_POINTER (TREE_PURPOSE (TREE_PURPOSE (i))));
1206
  return false;
1207
}
1208
 
1209
/* A subroutine of expand_asm_operands.  Resolve the names of the operands
1210
   in *POUTPUTS and *PINPUTS to numbers, and replace the name expansions in
1211
   STRING and in the constraints to those numbers.  */
1212
 
1213
tree
1214
resolve_asm_operand_names (tree string, tree outputs, tree inputs)
1215
{
1216
  char *buffer;
1217
  char *p;
1218
  const char *c;
1219
  tree t;
1220
 
1221
  check_unique_operand_names (outputs, inputs);
1222
 
1223
  /* Substitute [<name>] in input constraint strings.  There should be no
1224
     named operands in output constraints.  */
1225
  for (t = inputs; t ; t = TREE_CHAIN (t))
1226
    {
1227
      c = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
1228
      if (strchr (c, '[') != NULL)
1229
        {
1230
          p = buffer = xstrdup (c);
1231
          while ((p = strchr (p, '[')) != NULL)
1232
            p = resolve_operand_name_1 (p, outputs, inputs);
1233
          TREE_VALUE (TREE_PURPOSE (t))
1234
            = build_string (strlen (buffer), buffer);
1235
          free (buffer);
1236
        }
1237
    }
1238
 
1239
  /* Now check for any needed substitutions in the template.  */
1240
  c = TREE_STRING_POINTER (string);
1241
  while ((c = strchr (c, '%')) != NULL)
1242
    {
1243
      if (c[1] == '[')
1244
        break;
1245
      else if (ISALPHA (c[1]) && c[2] == '[')
1246
        break;
1247
      else
1248
        {
1249
          c += 1;
1250
          continue;
1251
        }
1252
    }
1253
 
1254
  if (c)
1255
    {
1256
      /* OK, we need to make a copy so we can perform the substitutions.
1257
         Assume that we will not need extra space--we get to remove '['
1258
         and ']', which means we cannot have a problem until we have more
1259
         than 999 operands.  */
1260
      buffer = xstrdup (TREE_STRING_POINTER (string));
1261
      p = buffer + (c - TREE_STRING_POINTER (string));
1262
 
1263
      while ((p = strchr (p, '%')) != NULL)
1264
        {
1265
          if (p[1] == '[')
1266
            p += 1;
1267
          else if (ISALPHA (p[1]) && p[2] == '[')
1268
            p += 2;
1269
          else
1270
            {
1271
              p += 1;
1272
              continue;
1273
            }
1274
 
1275
          p = resolve_operand_name_1 (p, outputs, inputs);
1276
        }
1277
 
1278
      string = build_string (strlen (buffer), buffer);
1279
      free (buffer);
1280
    }
1281
 
1282
  return string;
1283
}
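
/* Example of the substitution performed here: given

       asm ("add %[inc], %[acc]" : [acc] "+r" (a) : [inc] "r" (b));

   the template is rewritten to "add %1, %0", and an input constraint
   written as "[acc]" would likewise be rewritten to "0".  */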
1284
 
1285
/* A subroutine of resolve_operand_names.  P points to the '[' for a
1286
   potential named operand of the form [<name>].  In place, replace
1287
   the name and brackets with a number.  Return a pointer to the
1288
   balance of the string after substitution.  */
1289
 
1290
static char *
1291
resolve_operand_name_1 (char *p, tree outputs, tree inputs)
1292
{
1293
  char *q;
1294
  int op;
1295
  tree t;
1296
  size_t len;
1297
 
1298
  /* Collect the operand name.  */
1299
  q = strchr (p, ']');
1300
  if (!q)
1301
    {
1302
      error ("missing close brace for named operand");
1303
      return strchr (p, '\0');
1304
    }
1305
  len = q - p - 1;
1306
 
1307
  /* Resolve the name to a number.  */
1308
  for (op = 0, t = outputs; t ; t = TREE_CHAIN (t), op++)
1309
    {
1310
      tree name = TREE_PURPOSE (TREE_PURPOSE (t));
1311
      if (name)
1312
        {
1313
          const char *c = TREE_STRING_POINTER (name);
1314
          if (strncmp (c, p + 1, len) == 0 && c[len] == '\0')
1315
            goto found;
1316
        }
1317
    }
1318
  for (t = inputs; t ; t = TREE_CHAIN (t), op++)
1319
    {
1320
      tree name = TREE_PURPOSE (TREE_PURPOSE (t));
1321
      if (name)
1322
        {
1323
          const char *c = TREE_STRING_POINTER (name);
1324
          if (strncmp (c, p + 1, len) == 0 && c[len] == '\0')
1325
            goto found;
1326
        }
1327
    }
1328
 
1329
  *q = '\0';
1330
  error ("undefined named operand %qs", p + 1);
1331
  op = 0;
1332
 found:
1333
 
1334
  /* Replace the name with the number.  Unfortunately, not all libraries
1335
     get the return value of sprintf correct, so search for the end of the
1336
     generated string by hand.  */
1337
  sprintf (p, "%d", op);
1338
  p = strchr (p, '\0');
1339
 
1340
  /* Verify the no extra buffer space assumption.  */
1341
  gcc_assert (p <= q);
1342
 
1343
  /* Shift the rest of the buffer down to fill the gap.  */
1344
  memmove (p, q + 1, strlen (q + 1) + 1);
1345
 
1346
  return p;
1347
}
1348
 
1349
/* Generate RTL to evaluate the expression EXP.  */
1350
 
1351
void
1352
expand_expr_stmt (tree exp)
1353
{
1354
  rtx value;
1355
  tree type;
1356
 
1357
  value = expand_expr (exp, const0_rtx, VOIDmode, 0);
1358
  type = TREE_TYPE (exp);
1359
 
1360
  /* If all we do is reference a volatile value in memory,
1361
     copy it to a register to be sure it is actually touched.  */
1362
  if (value && MEM_P (value) && TREE_THIS_VOLATILE (exp))
1363
    {
1364
      if (TYPE_MODE (type) == VOIDmode)
1365
        ;
1366
      else if (TYPE_MODE (type) != BLKmode)
1367
        value = copy_to_reg (value);
1368
      else
1369
        {
1370
          rtx lab = gen_label_rtx ();
1371
 
1372
          /* Compare the value with itself to reference it.  */
1373
          emit_cmp_and_jump_insns (value, value, EQ,
1374
                                   expand_expr (TYPE_SIZE (type),
1375
                                                NULL_RTX, VOIDmode, 0),
1376
                                   BLKmode, 0, lab);
1377
          emit_label (lab);
1378
        }
1379
    }
1380
 
1381
  /* Free any temporaries used to evaluate this expression.  */
1382
  free_temp_slots ();
1383
}
1384
 
1385
/* Warn if EXP contains any computations whose results are not used.
1386
   Return 1 if a warning is printed; 0 otherwise.  LOCUS is the
1387
   (potential) location of the expression.  */
1388
 
1389
int
1390
warn_if_unused_value (tree exp, location_t locus)
1391
{
1392
 restart:
1393
  if (TREE_USED (exp) || TREE_NO_WARNING (exp))
1394
    return 0;
1395
 
1396
  /* Don't warn about void constructs.  This includes casting to void,
1397
     void function calls, and statement expressions with a final cast
1398
     to void.  */
1399
  if (VOID_TYPE_P (TREE_TYPE (exp)))
1400
    return 0;
1401
 
1402
  if (EXPR_HAS_LOCATION (exp))
1403
    locus = EXPR_LOCATION (exp);
1404
 
1405
  switch (TREE_CODE (exp))
1406
    {
1407
    case PREINCREMENT_EXPR:
1408
    case POSTINCREMENT_EXPR:
1409
    case PREDECREMENT_EXPR:
1410
    case POSTDECREMENT_EXPR:
1411
    case MODIFY_EXPR:
1412
    case INIT_EXPR:
1413
    case TARGET_EXPR:
1414
    case CALL_EXPR:
1415
    case TRY_CATCH_EXPR:
1416
    case WITH_CLEANUP_EXPR:
1417
    case EXIT_EXPR:
1418
      return 0;
1419
 
1420
    case BIND_EXPR:
1421
      /* For a binding, warn if no side effect within it.  */
1422
      exp = BIND_EXPR_BODY (exp);
1423
      goto restart;
1424
 
1425
    case SAVE_EXPR:
1426
      exp = TREE_OPERAND (exp, 0);
1427
      goto restart;
1428
 
1429
    case TRUTH_ORIF_EXPR:
1430
    case TRUTH_ANDIF_EXPR:
1431
      /* In && or ||, warn if 2nd operand has no side effect.  */
1432
      exp = TREE_OPERAND (exp, 1);
1433
      goto restart;
1434
 
1435
    case COMPOUND_EXPR:
1436
      if (warn_if_unused_value (TREE_OPERAND (exp, 0), locus))
1437
        return 1;
1438
      /* Let people do `(foo (), 0)' without a warning.  */
1439
      if (TREE_CONSTANT (TREE_OPERAND (exp, 1)))
1440
        return 0;
1441
      exp = TREE_OPERAND (exp, 1);
1442
      goto restart;
1443
 
1444
    case COND_EXPR:
1445
      /* If this is an expression with side effects, don't warn; this
1446
         case commonly appears in macro expansions.  */
1447
      if (TREE_SIDE_EFFECTS (exp))
1448
        return 0;
1449
      goto warn;
1450
 
1451
    case INDIRECT_REF:
1452
      /* Don't warn about automatic dereferencing of references, since
1453
         the user cannot control it.  */
1454
      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == REFERENCE_TYPE)
1455
        {
1456
          exp = TREE_OPERAND (exp, 0);
1457
          goto restart;
1458
        }
1459
      /* Fall through.  */
1460
 
1461
    default:
1462
      /* Referencing a volatile value is a side effect, so don't warn.  */
1463
      if ((DECL_P (exp) || REFERENCE_CLASS_P (exp))
1464
          && TREE_THIS_VOLATILE (exp))
1465
        return 0;
1466
 
1467
      /* If this is an expression which has no operands, there is no value
1468
         to be unused.  There are no such language-independent codes,
1469
         but front ends may define such.  */
1470
      if (EXPRESSION_CLASS_P (exp) && TREE_CODE_LENGTH (TREE_CODE (exp)) == 0)
1471
        return 0;
1472
 
1473
    warn:
1474
      warning (0, "%Hvalue computed is not used", &locus);
1475
      return 1;
1476
    }
1477
}
1478
 
1479
 
1480
/* Generate RTL to return from the current function, with no value.
1481
   (That is, we do not do anything about returning any value.)  */
1482
 
1483
void
1484
expand_null_return (void)
1485
{
1486
  /* If this function was declared to return a value, but we
1487
     didn't, clobber the return registers so that they are not
1488
     propagated live to the rest of the function.  */
1489
  clobber_return_register ();
1490
 
1491
  expand_null_return_1 ();
1492
}
1493
 
1494
/* Generate RTL to return directly from the current function.
1495
   (That is, we bypass any return value.)  */
1496
 
1497
void
1498
expand_naked_return (void)
1499
{
1500
  rtx end_label;
1501
 
1502
  clear_pending_stack_adjust ();
1503
  do_pending_stack_adjust ();
1504
 
1505
  end_label = naked_return_label;
1506
  if (end_label == 0)
1507
    end_label = naked_return_label = gen_label_rtx ();
1508
 
1509
  emit_jump (end_label);
1510
}
1511
 
1512
/* Generate RTL to return from the current function, with value VAL.  */
1513
 
1514
static void
1515
expand_value_return (rtx val)
1516
{
1517
  /* Copy the value to the return location
1518
     unless it's already there.  */
1519
 
1520
  rtx return_reg = DECL_RTL (DECL_RESULT (current_function_decl));
1521
  if (return_reg != val)
1522
    {
1523
      tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
1524
      if (targetm.calls.promote_function_return (TREE_TYPE (current_function_decl)))
1525
      {
1526
        int unsignedp = TYPE_UNSIGNED (type);
1527
        enum machine_mode old_mode
1528
          = DECL_MODE (DECL_RESULT (current_function_decl));
1529
        enum machine_mode mode
1530
          = promote_mode (type, old_mode, &unsignedp, 1);
1531
 
1532
        if (mode != old_mode)
1533
          val = convert_modes (mode, old_mode, val, unsignedp);
1534
      }
1535
      if (GET_CODE (return_reg) == PARALLEL)
1536
        emit_group_load (return_reg, val, type, int_size_in_bytes (type));
1537
      else
1538
        emit_move_insn (return_reg, val);
1539
    }
1540
 
1541
  expand_null_return_1 ();
1542
}
1543
 
1544
/* Output a return with no value.  */
1545
 
1546
static void
1547
expand_null_return_1 (void)
1548
{
1549
  clear_pending_stack_adjust ();
1550
  do_pending_stack_adjust ();
1551
  emit_jump (return_label);
1552
}
1553
 
1554
/* Generate RTL to evaluate the expression RETVAL and return it
1555
   from the current function.  */
1556
 
1557
void
1558
expand_return (tree retval)
1559
{
1560
  rtx result_rtl;
1561
  rtx val = 0;
1562
  tree retval_rhs;
1563
 
1564
  /* If function wants no value, give it none.  */
1565
  if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
1566
    {
1567
      expand_expr (retval, NULL_RTX, VOIDmode, 0);
1568
      expand_null_return ();
1569
      return;
1570
    }
1571
 
1572
  if (retval == error_mark_node)
1573
    {
1574
      /* Treat this like a return of no value from a function that
1575
         returns a value.  */
1576
      expand_null_return ();
1577
      return;
1578
    }
1579
  else if ((TREE_CODE (retval) == MODIFY_EXPR
1580
            || TREE_CODE (retval) == INIT_EXPR)
1581
           && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
1582
    retval_rhs = TREE_OPERAND (retval, 1);
1583
  else
1584
    retval_rhs = retval;
1585
 
1586
  result_rtl = DECL_RTL (DECL_RESULT (current_function_decl));
1587
 
1588
  /* If we are returning the RESULT_DECL, then the value has already
1589
     been stored into it, so we don't have to do anything special.  */
1590
  if (TREE_CODE (retval_rhs) == RESULT_DECL)
1591
    expand_value_return (result_rtl);
1592
 
1593
  /* If the result is an aggregate that is being returned in one (or more)
1594
     registers, load the registers here.  The compiler currently can't handle
1595
     copying a BLKmode value into registers.  We could put this code in a
1596
     more general area (for use by everyone instead of just function
1597
     call/return), but until this feature is generally usable it is kept here
1598
     (and in expand_call).  */
1599
 
1600
  else if (retval_rhs != 0
1601
           && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
1602
           && REG_P (result_rtl))
1603
    {
1604
      int i;
1605
      unsigned HOST_WIDE_INT bitpos, xbitpos;
1606
      unsigned HOST_WIDE_INT padding_correction = 0;
1607
      unsigned HOST_WIDE_INT bytes
1608
        = int_size_in_bytes (TREE_TYPE (retval_rhs));
1609
      int n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
1610
      unsigned int bitsize
1611
        = MIN (TYPE_ALIGN (TREE_TYPE (retval_rhs)), BITS_PER_WORD);
1612
      rtx *result_pseudos = alloca (sizeof (rtx) * n_regs);
1613
      rtx result_reg, src = NULL_RTX, dst = NULL_RTX;
1614
      rtx result_val = expand_expr (retval_rhs, NULL_RTX, VOIDmode, 0);
1615
      enum machine_mode tmpmode, result_reg_mode;
1616
 
1617
      if (bytes == 0)
1618
        {
1619
          expand_null_return ();
1620
          return;
1621
        }
1622
 
1623
      /* If the structure doesn't take up a whole number of words, see
1624
         whether the register value should be padded on the left or on
1625
         the right.  Set PADDING_CORRECTION to the number of padding
1626
         bits needed on the left side.
1627
 
1628
         In most ABIs, the structure will be returned at the least significant end of
1629
         the register, which translates to right padding on little-endian
1630
         targets and left padding on big-endian targets.  The opposite
1631
         holds if the structure is returned at the most significant
1632
         end of the register.  */
1633
      if (bytes % UNITS_PER_WORD != 0
1634
          && (targetm.calls.return_in_msb (TREE_TYPE (retval_rhs))
1635
              ? !BYTES_BIG_ENDIAN
1636
              : BYTES_BIG_ENDIAN))
1637
        padding_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
1638
                                               * BITS_PER_UNIT));
1639
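      /* Worked example (illustrative, not in the original sources): with
         BITS_PER_WORD == 32, UNITS_PER_WORD == 4 and bytes == 6 on a
         big-endian target returning at the least significant end,
         bytes % UNITS_PER_WORD == 2, so padding_correction
         = 32 - 2 * 8 = 16 bits of padding on the left.  */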
 
1640
      /* Copy the structure BITSIZE bits at a time.  */
1641
      for (bitpos = 0, xbitpos = padding_correction;
1642
           bitpos < bytes * BITS_PER_UNIT;
1643
           bitpos += bitsize, xbitpos += bitsize)
1644
        {
1645
          /* We need a new destination pseudo each time xbitpos is
1646
             on a word boundary and when xbitpos == padding_correction
1647
             (the first time through).  */
1648
          if (xbitpos % BITS_PER_WORD == 0
1649
              || xbitpos == padding_correction)
1650
            {
1651
              /* Generate an appropriate register.  */
1652
              dst = gen_reg_rtx (word_mode);
1653
              result_pseudos[xbitpos / BITS_PER_WORD] = dst;
1654
 
1655
              /* Clear the destination before we move anything into it.  */
1656
              emit_move_insn (dst, CONST0_RTX (GET_MODE (dst)));
1657
            }
1658
 
1659
          /* We need a new source operand each time bitpos is on a word
1660
             boundary.  */
1661
          if (bitpos % BITS_PER_WORD == 0)
1662
            src = operand_subword_force (result_val,
1663
                                         bitpos / BITS_PER_WORD,
1664
                                         BLKmode);
1665
 
1666
          /* Use bitpos for the source extraction (left justified) and
1667
             xbitpos for the destination store (right justified).  */
1668
          store_bit_field (dst, bitsize, xbitpos % BITS_PER_WORD, word_mode,
1669
                           extract_bit_field (src, bitsize,
1670
                                              bitpos % BITS_PER_WORD, 1,
1671
                                              NULL_RTX, word_mode, word_mode));
1672
        }
1673
 
1674
      tmpmode = GET_MODE (result_rtl);
1675
      if (tmpmode == BLKmode)
1676
        {
1677
          /* Find the smallest integer mode large enough to hold the
1678
             entire structure and use that mode instead of BLKmode
1679
             on the USE insn for the return register.  */
1680
          for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1681
               tmpmode != VOIDmode;
1682
               tmpmode = GET_MODE_WIDER_MODE (tmpmode))
1683
            /* Have we found a large enough mode?  */
1684
            if (GET_MODE_SIZE (tmpmode) >= bytes)
1685
              break;
1686
 
1687
          /* A suitable mode should have been found.  */
1688
          gcc_assert (tmpmode != VOIDmode);
1689
 
1690
          PUT_MODE (result_rtl, tmpmode);
1691
        }
1692
 
1693
      if (GET_MODE_SIZE (tmpmode) < GET_MODE_SIZE (word_mode))
1694
        result_reg_mode = word_mode;
1695
      else
1696
        result_reg_mode = tmpmode;
1697
      result_reg = gen_reg_rtx (result_reg_mode);
1698
 
1699
      for (i = 0; i < n_regs; i++)
1700
        emit_move_insn (operand_subword (result_reg, i, 0, result_reg_mode),
1701
                        result_pseudos[i]);
1702
 
1703
      if (tmpmode != result_reg_mode)
1704
        result_reg = gen_lowpart (tmpmode, result_reg);
1705
 
1706
      expand_value_return (result_reg);
1707
    }
1708
  else if (retval_rhs != 0
1709
           && !VOID_TYPE_P (TREE_TYPE (retval_rhs))
1710
           && (REG_P (result_rtl)
1711
               || (GET_CODE (result_rtl) == PARALLEL)))
1712
    {
1713
      /* Calculate the return value into a temporary (usually a pseudo
1714
         reg).  */
1715
      tree ot = TREE_TYPE (DECL_RESULT (current_function_decl));
1716
      tree nt = build_qualified_type (ot, TYPE_QUALS (ot) | TYPE_QUAL_CONST);
1717
 
1718
      val = assign_temp (nt, 0, 0, 1);
1719
      val = expand_expr (retval_rhs, val, GET_MODE (val), 0);
1720
      val = force_not_mem (val);
1721
      /* Return the calculated value.  */
1722
      expand_value_return (val);
1723
    }
1724
  else
1725
    {
1726
      /* No hard reg used; calculate value into hard return reg.  */
1727
      expand_expr (retval, const0_rtx, VOIDmode, 0);
1728
      expand_value_return (result_rtl);
1729
    }
1730
}
1731
 
1732
/* Given a pointer to a BLOCK node return nonzero if (and only if) the node
1733
   in question represents the outermost pair of curly braces (i.e. the "body
1734
   block") of a function or method.
1735
 
1736
   For any BLOCK node representing a "body block" of a function or method, the
1737
   BLOCK_SUPERCONTEXT of the node will point to another BLOCK node which
1738
   represents the outermost (function) scope for the function or method (i.e.
1739
   the one which includes the formal parameters).  The BLOCK_SUPERCONTEXT of
1740
   *that* node in turn will point to the relevant FUNCTION_DECL node.  */
1741
 
1742
int
1743
is_body_block (tree stmt)
1744
{
1745
  if (lang_hooks.no_body_blocks)
1746
    return 0;
1747
 
1748
  if (TREE_CODE (stmt) == BLOCK)
1749
    {
1750
      tree parent = BLOCK_SUPERCONTEXT (stmt);
1751
 
1752
      if (parent && TREE_CODE (parent) == BLOCK)
1753
        {
1754
          tree grandparent = BLOCK_SUPERCONTEXT (parent);
1755
 
1756
          if (grandparent && TREE_CODE (grandparent) == FUNCTION_DECL)
1757
            return 1;
1758
        }
1759
    }
1760
 
1761
  return 0;
1762
}
1763
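/* Illustration (not part of the original file): for

     int f (int x)
     {                 <- the "body block" tested for above
       int y = 0;
       return x + y;
     }

   the body block's BLOCK_SUPERCONTEXT is the BLOCK for the outermost
   (function) scope that holds the parameter X, and that BLOCK's
   BLOCK_SUPERCONTEXT is the FUNCTION_DECL for f, which is exactly the
   chain is_body_block checks.  */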
 
1764
/* Emit code to restore vital registers at the beginning of a nonlocal goto
1765
   handler.  */
1766
static void
1767
expand_nl_goto_receiver (void)
1768
{
1769
  /* The frame pointer is clobbered when we get here, so we have to make
1770
     sure it's marked as used by this function.  */
1771
  emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
1772
 
1773
  /* Mark the static chain as clobbered here so life information
1774
     doesn't get messed up for it.  */
1775
  emit_insn (gen_rtx_CLOBBER (VOIDmode, static_chain_rtx));
1776
 
1777
#ifdef HAVE_nonlocal_goto
1778
  if (! HAVE_nonlocal_goto)
1779
#endif
1780
    /* First adjust our frame pointer to its actual value.  It was
1781
       previously set to the start of the virtual area corresponding to
1782
       the stacked variables when we branched here and now needs to be
1783
       adjusted to the actual hardware fp value.
1784
 
1785
       Assignments to virtual registers are converted by
1786
       instantiate_virtual_regs into the corresponding assignment
1787
       to the underlying register (fp in this case) that makes
1788
       the original assignment true.
1789
       So the following insn will actually be
1790
       decrementing fp by STARTING_FRAME_OFFSET.  */
1791
    emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
1792
 
1793
#if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
1794
  if (fixed_regs[ARG_POINTER_REGNUM])
1795
    {
1796
#ifdef ELIMINABLE_REGS
1797
      /* If the argument pointer can be eliminated in favor of the
1798
         frame pointer, we don't need to restore it.  We assume here
1799
         that if such an elimination is present, it can always be used.
1800
         This is the case on all known machines; if we don't make this
1801
         assumption, we do unnecessary saving on many machines.  */
1802
      static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
1803
      size_t i;
1804
 
1805
      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
1806
        if (elim_regs[i].from == ARG_POINTER_REGNUM
1807
            && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
1808
          break;
1809
 
1810
      if (i == ARRAY_SIZE (elim_regs))
1811
#endif
1812
        {
1813
          /* Now restore our arg pointer from the address at which it
1814
             was saved in our stack frame.  */
1815
          emit_move_insn (virtual_incoming_args_rtx,
1816
                          copy_to_reg (get_arg_pointer_save_area (cfun)));
1817
        }
1818
    }
1819
#endif
1820
 
1821
#ifdef HAVE_nonlocal_goto_receiver
1822
  if (HAVE_nonlocal_goto_receiver)
1823
    emit_insn (gen_nonlocal_goto_receiver ());
1824
#endif
1825
 
1826
  /* @@@ This is a kludge.  Not all machine descriptions define a blockage
1827
     insn, but we must not allow the code we just generated to be reordered
1828
     by scheduling.  Specifically, the update of the frame pointer must
1829
     happen immediately, not later.  So emit an ASM_INPUT to act as a blockage
1830
     insn.  */
1831
  emit_insn (gen_rtx_ASM_INPUT (VOIDmode, ""));
1832
}
1833
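/* Illustrative sketch, not part of the original file: the receiver code
   above is emitted at the target of a nonlocal goto, e.g. the label OUT
   in this GNU C fragment, before any user code at the label runs.  */
#if 0
int
outer (int n)
{
  __label__ out;                   /* label visible to nested functions */
  void inner (void) { if (n) goto out; }
  inner ();
  return 0;
 out:
  return 1;                        /* frame/arg pointers restored first */
}
#endif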
 
1834
/* Generate RTL for the automatic variable declaration DECL.
1835
   (Other kinds of declarations are simply ignored if seen here.)  */
1836
 
1837
void
1838
expand_decl (tree decl)
1839
{
1840
  tree type;
1841
 
1842
  type = TREE_TYPE (decl);
1843
 
1844
  /* For a CONST_DECL, set mode, alignment, and sizes from those of the
1845
     type in case this node is used in a reference.  */
1846
  if (TREE_CODE (decl) == CONST_DECL)
1847
    {
1848
      DECL_MODE (decl) = TYPE_MODE (type);
1849
      DECL_ALIGN (decl) = TYPE_ALIGN (type);
1850
      DECL_SIZE (decl) = TYPE_SIZE (type);
1851
      DECL_SIZE_UNIT (decl) = TYPE_SIZE_UNIT (type);
1852
      return;
1853
    }
1854
 
1855
  /* Otherwise, only automatic variables need any expansion done.  Static and
1856
     external variables, and external functions, will be handled by
1857
     `assemble_variable' (called from finish_decl).  TYPE_DECL requires
1858
     nothing.  PARM_DECLs are handled in `assign_parms'.  */
1859
  if (TREE_CODE (decl) != VAR_DECL)
1860
    return;
1861
 
1862
  if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
1863
    return;
1864
 
1865
  /* Create the RTL representation for the variable.  */
1866
 
1867
  if (type == error_mark_node)
1868
    SET_DECL_RTL (decl, gen_rtx_MEM (BLKmode, const0_rtx));
1869
 
1870
  else if (DECL_SIZE (decl) == 0)
1871
    /* Variable with incomplete type.  */
1872
    {
1873
      rtx x;
1874
      if (DECL_INITIAL (decl) == 0)
1875
        /* Error message was already done; now avoid a crash.  */
1876
        x = gen_rtx_MEM (BLKmode, const0_rtx);
1877
      else
1878
        /* An initializer is going to decide the size of this array.
1879
           Until we know the size, represent its address with a reg.  */
1880
        x = gen_rtx_MEM (BLKmode, gen_reg_rtx (Pmode));
1881
 
1882
      set_mem_attributes (x, decl, 1);
1883
      SET_DECL_RTL (decl, x);
1884
    }
1885
  else if (use_register_for_decl (decl))
1886
    {
1887
      /* Automatic variable that can go in a register.  */
1888
      int unsignedp = TYPE_UNSIGNED (type);
1889
      enum machine_mode reg_mode
1890
        = promote_mode (type, DECL_MODE (decl), &unsignedp, 0);
1891
 
1892
      SET_DECL_RTL (decl, gen_reg_rtx (reg_mode));
1893
 
1894
      /* Note if the object is a user variable.  */
1895
      if (!DECL_ARTIFICIAL (decl))
1896
        {
1897
          mark_user_reg (DECL_RTL (decl));
1898
 
1899
          /* Trust user variables which have a pointer type to really
1900
             be pointers.  Do not trust compiler generated temporaries
1901
             as our type system is totally busted as it relates to
1902
             pointer arithmetic which translates into lots of compiler
1903
             generated objects with pointer types, but which are not really
1904
             pointers.  */
1905
          if (POINTER_TYPE_P (type))
1906
            mark_reg_pointer (DECL_RTL (decl),
1907
                              TYPE_ALIGN (TREE_TYPE (TREE_TYPE (decl))));
1908
        }
1909
    }
1910
 
1911
  else if (TREE_CODE (DECL_SIZE_UNIT (decl)) == INTEGER_CST
1912
           && ! (flag_stack_check && ! STACK_CHECK_BUILTIN
1913
                 && 0 < compare_tree_int (DECL_SIZE_UNIT (decl),
1914
                                          STACK_CHECK_MAX_VAR_SIZE)))
1915
    {
1916
      /* Variable of fixed size that goes on the stack.  */
1917
      rtx oldaddr = 0;
1918
      rtx addr;
1919
      rtx x;
1920
 
1921
      /* If we previously made RTL for this decl, it must be an array
1922
         whose size was determined by the initializer.
1923
         The old address was a register; set that register now
1924
         to the proper address.  */
1925
      if (DECL_RTL_SET_P (decl))
1926
        {
1927
          gcc_assert (MEM_P (DECL_RTL (decl)));
1928
          gcc_assert (REG_P (XEXP (DECL_RTL (decl), 0)));
1929
          oldaddr = XEXP (DECL_RTL (decl), 0);
1930
        }
1931
 
1932
      /* Set alignment we actually gave this decl.  */
1933
      DECL_ALIGN (decl) = (DECL_MODE (decl) == BLKmode ? BIGGEST_ALIGNMENT
1934
                           : GET_MODE_BITSIZE (DECL_MODE (decl)));
1935
      DECL_USER_ALIGN (decl) = 0;
1936
 
1937
      x = assign_temp (decl, 1, 1, 1);
1938
      set_mem_attributes (x, decl, 1);
1939
      SET_DECL_RTL (decl, x);
1940
 
1941
      if (oldaddr)
1942
        {
1943
          addr = force_operand (XEXP (DECL_RTL (decl), 0), oldaddr);
1944
          if (addr != oldaddr)
1945
            emit_move_insn (oldaddr, addr);
1946
        }
1947
    }
1948
  else
1949
    /* Dynamic-size object: must push space on the stack.  */
1950
    {
1951
      rtx address, size, x;
1952
 
1953
      /* Record the stack pointer on entry to the block, if we have
1954
         not already done so.  */
1955
      do_pending_stack_adjust ();
1956
 
1957
      /* Compute the variable's size, in bytes.  This will expand any
1958
         needed SAVE_EXPRs for the first time.  */
1959
      size = expand_expr (DECL_SIZE_UNIT (decl), NULL_RTX, VOIDmode, 0);
1960
      free_temp_slots ();
1961
 
1962
      /* Allocate space on the stack for the variable.  Note that
1963
         DECL_ALIGN says how the variable is to be aligned and we
1964
         cannot use it to conclude anything about the alignment of
1965
         the size.  */
1966
      address = allocate_dynamic_stack_space (size, NULL_RTX,
1967
                                              TYPE_ALIGN (TREE_TYPE (decl)));
1968
 
1969
      /* Reference the variable indirectly through that rtx.  */
1970
      x = gen_rtx_MEM (DECL_MODE (decl), address);
1971
      set_mem_attributes (x, decl, 1);
1972
      SET_DECL_RTL (decl, x);
1973
 
1974
 
1975
      /* Indicate the alignment we actually gave this variable.  */
1976
#ifdef STACK_BOUNDARY
1977
      DECL_ALIGN (decl) = STACK_BOUNDARY;
1978
#else
1979
      DECL_ALIGN (decl) = BIGGEST_ALIGNMENT;
1980
#endif
1981
      DECL_USER_ALIGN (decl) = 0;
1982
    }
1983
}
1984
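/* Illustrative sketch, not part of the original file: the main paths of
   expand_decl above, in terms of C source.  Whether I really lands in a
   register also depends on use_register_for_decl.  */
#if 0
void
example (int n)
{
  int i = 0;        /* small scalar: usually gets a pseudo register   */
  char buf[64];     /* fixed size: stack slot from assign_temp        */
  char vla[n];      /* dynamic size: allocate_dynamic_stack_space     */

  buf[0] = vla[0] = (char) i;
}
#endif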
 
1985
/* Emit code to save the current value of stack.  */
1986
rtx
1987
expand_stack_save (void)
1988
{
1989
  rtx ret = NULL_RTX;
1990
 
1991
  do_pending_stack_adjust ();
1992
  emit_stack_save (SAVE_BLOCK, &ret, NULL_RTX);
1993
  return ret;
1994
}
1995
 
1996
/* Emit code to restore the current value of stack.  */
1997
void
1998
expand_stack_restore (tree var)
1999
{
2000
  rtx sa = DECL_RTL (var);
2001
 
2002
  emit_stack_restore (SAVE_BLOCK, sa, NULL_RTX);
2003
}
2004
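/* Illustrative sketch, not part of the original file: the gimplifier
   brackets a block that declares a variable-length array with a
   __builtin_stack_save / __builtin_stack_restore pair, which is what
   ends up in expand_stack_save and expand_stack_restore above.  */
#if 0
void
use_vla (int n)
{
  {                       /* stack pointer saved on entry      */
    char tmp[n];
    tmp[0] = 0;
  }                       /* saved stack pointer restored here */
}
#endif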
 
2005
/* DECL is an anonymous union.  CLEANUP is a cleanup for DECL.
2006
   DECL_ELTS is the list of elements that belong to DECL's type.
2007
   In each, the TREE_VALUE is a VAR_DECL, and the TREE_PURPOSE a cleanup.  */
2008
 
2009
void
2010
expand_anon_union_decl (tree decl, tree cleanup ATTRIBUTE_UNUSED,
2011
                        tree decl_elts)
2012
{
2013
  rtx x;
2014
  tree t;
2015
 
2016
  /* If any of the elements are addressable, so is the entire union.  */
2017
  for (t = decl_elts; t; t = TREE_CHAIN (t))
2018
    if (TREE_ADDRESSABLE (TREE_VALUE (t)))
2019
      {
2020
        TREE_ADDRESSABLE (decl) = 1;
2021
        break;
2022
      }
2023
 
2024
  expand_decl (decl);
2025
  x = DECL_RTL (decl);
2026
 
2027
  /* Go through the elements, assigning RTL to each.  */
2028
  for (t = decl_elts; t; t = TREE_CHAIN (t))
2029
    {
2030
      tree decl_elt = TREE_VALUE (t);
2031
      enum machine_mode mode = TYPE_MODE (TREE_TYPE (decl_elt));
2032
      rtx decl_rtl;
2033
 
2034
      /* If any of the elements are used, so is the entire
2035
         union.  */
2036
      if (TREE_USED (decl_elt))
2037
        TREE_USED (decl) = 1;
2038
 
2039
      /* Propagate the union's alignment to the elements.  */
2040
      DECL_ALIGN (decl_elt) = DECL_ALIGN (decl);
2041
      DECL_USER_ALIGN (decl_elt) = DECL_USER_ALIGN (decl);
2042
 
2043
      /* If the element has BLKmode and the union doesn't, the union is
2044
         aligned such that the element doesn't need to have BLKmode, so
2045
         change the element's mode to the appropriate one for its size.  */
2046
      if (mode == BLKmode && DECL_MODE (decl) != BLKmode)
2047
        DECL_MODE (decl_elt) = mode
2048
          = mode_for_size_tree (DECL_SIZE (decl_elt), MODE_INT, 1);
2049
 
2050
      if (mode == GET_MODE (x))
2051
        decl_rtl = x;
2052
      else if (MEM_P (x))
2053
        /* (SUBREG (MEM ...)) at RTL generation time is invalid, so we
2054
           instead create a new MEM rtx with the proper mode.  */
2055
        decl_rtl = adjust_address_nv (x, mode, 0);
2056
      else
2057
        {
2058
          gcc_assert (REG_P (x));
2059
          decl_rtl = gen_lowpart_SUBREG (mode, x);
2060
        }
2061
      SET_DECL_RTL (decl_elt, decl_rtl);
2062
    }
2063
}
2064
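/* Illustrative sketch, not part of the original file: the kind of
   declaration handled above, as written in C++ at block scope.  The
   union gets one slot and each member decl is given RTL that views
   that slot in the member's own mode.  */
#if 0
void
example ()
{
  union { int i; float f; };    /* anonymous union: i and f share storage */
  i = 3;
  f = 1.5f;
}
#endif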
 
2065
/* Do the insertion of a case label into case_list.  The labels are
2066
   fed to us in descending order from the sorted vector of case labels used
2067
   in the tree part of the middle end.  So the list we construct is
2068
   sorted in ascending order.  The bounds on the case range, LOW and HIGH,
2069
   are converted to case's index type TYPE.  */
2070
 
2071
static struct case_node *
2072
add_case_node (struct case_node *head, tree type, tree low, tree high,
2073
               tree label)
2074
{
2075
  tree min_value, max_value;
2076
  struct case_node *r;
2077
 
2078
  gcc_assert (TREE_CODE (low) == INTEGER_CST);
2079
  gcc_assert (!high || TREE_CODE (high) == INTEGER_CST);
2080
 
2081
  min_value = TYPE_MIN_VALUE (type);
2082
  max_value = TYPE_MAX_VALUE (type);
2083
 
2084
  /* If there's no HIGH value, then this is not a case range; it's
2085
     just a simple case label.  But that's just a degenerate case
2086
     range.
2087
     If the bounds are equal, turn this into the one-value case.  */
2088
  if (!high || tree_int_cst_equal (low, high))
2089
    {
2090
      /* If the simple case value is unreachable, ignore it.  */
2091
      if ((TREE_CODE (min_value) == INTEGER_CST
2092
            && tree_int_cst_compare (low, min_value) < 0)
2093
          || (TREE_CODE (max_value) == INTEGER_CST
2094
              && tree_int_cst_compare (low, max_value) > 0))
2095
        return head;
2096
      low = fold_convert (type, low);
2097
      high = low;
2098
    }
2099
  else
2100
    {
2101
      /* If the entire case range is unreachable, ignore it.  */
2102
      if ((TREE_CODE (min_value) == INTEGER_CST
2103
            && tree_int_cst_compare (high, min_value) < 0)
2104
          || (TREE_CODE (max_value) == INTEGER_CST
2105
              && tree_int_cst_compare (low, max_value) > 0))
2106
        return head;
2107
 
2108
      /* If the lower bound is less than the index type's minimum
2109
         value, truncate the range bounds.  */
2110
      if (TREE_CODE (min_value) == INTEGER_CST
2111
            && tree_int_cst_compare (low, min_value) < 0)
2112
        low = min_value;
2113
      low = fold_convert (type, low);
2114
 
2115
      /* If the upper bound is greater than the index type's maximum
2116
         value, truncate the range bounds.  */
2117
      if (TREE_CODE (max_value) == INTEGER_CST
2118
          && tree_int_cst_compare (high, max_value) > 0)
2119
        high = max_value;
2120
      high = fold_convert (type, high);
2121
    }
2122
 
2123
 
2124
  /* Add this label to the chain.  Make sure to drop overflow flags.  */
2125
  r = ggc_alloc (sizeof (struct case_node));
2126
  r->low = build_int_cst_wide (TREE_TYPE (low), TREE_INT_CST_LOW (low),
2127
                               TREE_INT_CST_HIGH (low));
2128
  r->high = build_int_cst_wide (TREE_TYPE (high), TREE_INT_CST_LOW (high),
2129
                                TREE_INT_CST_HIGH (high));
2130
  r->code_label = label;
2131
  r->parent = r->left = NULL;
2132
  r->right = head;
2133
  return r;
2134
}
2135
 
2136
/* Maximum number of case bit tests.  */
2137
#define MAX_CASE_BIT_TESTS  3
2138
 
2139
/* By default, enable case bit tests on targets with ashlsi3.  */
2140
#ifndef CASE_USE_BIT_TESTS
2141
#define CASE_USE_BIT_TESTS  (ashl_optab->handlers[word_mode].insn_code \
2142
                             != CODE_FOR_nothing)
2143
#endif
2144
 
2145
 
2146
/* A case_bit_test represents a set of case nodes that may be
2147
   selected from using a bit-wise comparison.  HI and LO hold
2148
   the integer to be tested against, LABEL contains the label
2149
   to jump to upon success and BITS counts the number of case
2150
   nodes handled by this test, typically the number of bits
2151
   set in HI:LO.  */
2152
 
2153
struct case_bit_test
2154
{
2155
  HOST_WIDE_INT hi;
2156
  HOST_WIDE_INT lo;
2157
  rtx label;
2158
  int bits;
2159
};
2160
 
2161
/* Determine whether "1 << x" is relatively cheap in word_mode.  */
2162
 
2163
static
2164
bool lshift_cheap_p (void)
2165
{
2166
  static bool init = false;
2167
  static bool cheap = true;
2168
 
2169
  if (!init)
2170
    {
2171
      rtx reg = gen_rtx_REG (word_mode, 10000);
2172
      int cost = rtx_cost (gen_rtx_ASHIFT (word_mode, const1_rtx, reg), SET);
2173
      cheap = cost < COSTS_N_INSNS (3);
2174
      init = true;
2175
    }
2176
 
2177
  return cheap;
2178
}
2179
 
2180
/* Comparison function for qsort to order bit tests by decreasing
2181
   number of case nodes, i.e. the node with the most cases gets
2182
   tested first.  */
2183
 
2184
static int
2185
case_bit_test_cmp (const void *p1, const void *p2)
2186
{
2187
  const struct case_bit_test *d1 = p1;
2188
  const struct case_bit_test *d2 = p2;
2189
 
2190
  return d2->bits - d1->bits;
2191
}
2192
 
2193
/*  Expand a switch statement by a short sequence of bit-wise
2194
    comparisons.  "switch(x)" is effectively converted into
2195
    "if ((1 << (x-MINVAL)) & CST)" where CST and MINVAL are
2196
    integer constants.
2197
 
2198
    INDEX_EXPR is the value being switched on, which is of
2199
    type INDEX_TYPE.  MINVAL is the lowest case value in
2200
    the case nodes, of INDEX_TYPE type, and RANGE is the highest
2201
    value minus MINVAL, also of type INDEX_TYPE.  NODES is
2202
    the set of case nodes, and DEFAULT_LABEL is the label to
2203
    branch to should none of the cases match.
2204
 
2205
    There *MUST* be MAX_CASE_BIT_TESTS or fewer unique case
2206
    node targets.  */
2207
 
2208
static void
2209
emit_case_bit_tests (tree index_type, tree index_expr, tree minval,
2210
                     tree range, case_node_ptr nodes, rtx default_label)
2211
{
2212
  struct case_bit_test test[MAX_CASE_BIT_TESTS];
2213
  enum machine_mode mode;
2214
  rtx expr, index, label;
2215
  unsigned int i,j,lo,hi;
2216
  struct case_node *n;
2217
  unsigned int count;
2218
 
2219
  count = 0;
2220
  for (n = nodes; n; n = n->right)
2221
    {
2222
      label = label_rtx (n->code_label);
2223
      for (i = 0; i < count; i++)
2224
        if (label == test[i].label)
2225
          break;
2226
 
2227
      if (i == count)
2228
        {
2229
          gcc_assert (count < MAX_CASE_BIT_TESTS);
2230
          test[i].hi = 0;
2231
          test[i].lo = 0;
2232
          test[i].label = label;
2233
          test[i].bits = 1;
2234
          count++;
2235
        }
2236
      else
2237
        test[i].bits++;
2238
 
2239
      lo = tree_low_cst (fold_build2 (MINUS_EXPR, index_type,
2240
                                      n->low, minval), 1);
2241
      hi = tree_low_cst (fold_build2 (MINUS_EXPR, index_type,
2242
                                      n->high, minval), 1);
2243
      for (j = lo; j <= hi; j++)
2244
        if (j >= HOST_BITS_PER_WIDE_INT)
2245
          test[i].hi |= (HOST_WIDE_INT) 1 << (j - HOST_BITS_PER_WIDE_INT);
2246
        else
2247
          test[i].lo |= (HOST_WIDE_INT) 1 << j;
2248
    }
2249
 
2250
  qsort (test, count, sizeof(*test), case_bit_test_cmp);
2251
 
2252
  index_expr = fold_build2 (MINUS_EXPR, index_type,
2253
                            fold_convert (index_type, index_expr),
2254
                            fold_convert (index_type, minval));
2255
  index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
2256
  do_pending_stack_adjust ();
2257
 
2258
  mode = TYPE_MODE (index_type);
2259
  expr = expand_expr (range, NULL_RTX, VOIDmode, 0);
2260
  emit_cmp_and_jump_insns (index, expr, GTU, NULL_RTX, mode, 1,
2261
                           default_label);
2262
 
2263
  index = convert_to_mode (word_mode, index, 0);
2264
  index = expand_binop (word_mode, ashl_optab, const1_rtx,
2265
                        index, NULL_RTX, 1, OPTAB_WIDEN);
2266
 
2267
  for (i = 0; i < count; i++)
2268
    {
2269
      expr = immed_double_const (test[i].lo, test[i].hi, word_mode);
2270
      expr = expand_binop (word_mode, and_optab, index, expr,
2271
                           NULL_RTX, 1, OPTAB_WIDEN);
2272
      emit_cmp_and_jump_insns (expr, const0_rtx, NE, NULL_RTX,
2273
                               word_mode, 1, test[i].label);
2274
    }
2275
 
2276
  emit_jump (default_label);
2277
}
2278
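/* Concrete illustration of the transformation above (a sketch, not part
   of the original file): the five case values below share one target,
   so uniq == 1, count == 5 and range == 'u' - 'a' == 20, and the switch
   can be emitted as one shift, AND and conditional branch.  */
#if 0
int
is_ascii_lower_vowel (int c)
{
  switch (c)
    {
    case 'a': case 'e': case 'i': case 'o': case 'u':
      return 1;
    default:
      return 0;
    }
  /* Roughly:  unsigned w = (unsigned) c - 'a';
               if (w <= 20
                   && ((1u << w) & ((1u << 0) | (1u << 4) | (1u << 8)
                                    | (1u << 14) | (1u << 20))))
                 return 1;
               return 0;                                          */
}
#endif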
 
2279
#ifndef HAVE_casesi
2280
#define HAVE_casesi 0
2281
#endif
2282
 
2283
#ifndef HAVE_tablejump
2284
#define HAVE_tablejump 0
2285
#endif
2286
 
2287
/* Terminate a case (Pascal/Ada) or switch (C) statement EXP, in
2288
   which SWITCH_COND (EXP) is the expression to be tested and
2289
   SWITCH_LABELS (EXP) is the vector of case labels, with the
2290
   default label as its last element.
2291
   Generate the code to test it and jump to the right place.  */
2292
 
2293
void
2294
expand_case (tree exp)
2295
{
2296
  tree minval = NULL_TREE, maxval = NULL_TREE, range = NULL_TREE;
2297
  rtx default_label = 0;
2298
  struct case_node *n;
2299
  unsigned int count, uniq;
2300
  rtx index;
2301
  rtx table_label;
2302
  int ncases;
2303
  rtx *labelvec;
2304
  int i, fail;
2305
  rtx before_case, end, lab;
2306
 
2307
  tree vec = SWITCH_LABELS (exp);
2308
  tree orig_type = TREE_TYPE (exp);
2309
  tree index_expr = SWITCH_COND (exp);
2310
  tree index_type = TREE_TYPE (index_expr);
2311
  int unsignedp = TYPE_UNSIGNED (index_type);
2312
 
2313
  /* The insn after which the case dispatch should finally
2314
     be emitted.  Zero for a dummy.  */
2315
  rtx start;
2316
 
2317
  /* A list of case labels; it is first built as a list and it may then
2318
     be rearranged into a nearly balanced binary tree.  */
2319
  struct case_node *case_list = 0;
2320
 
2321
  /* Label to jump to if no case matches.  */
2322
  tree default_label_decl;
2323
 
2324
  /* The switch body is lowered in gimplify.c, we should never have
2325
     switches with a non-NULL SWITCH_BODY here.  */
2326
  gcc_assert (!SWITCH_BODY (exp));
2327
  gcc_assert (SWITCH_LABELS (exp));
2328
 
2329
  do_pending_stack_adjust ();
2330
 
2331
  /* An ERROR_MARK occurs for various reasons including invalid data type.  */
2332
  if (index_type != error_mark_node)
2333
    {
2334
      tree elt;
2335
      bitmap label_bitmap;
2336
 
2337
      /* cleanup_tree_cfg removes all SWITCH_EXPRs whose index
2339
         expressions are INTEGER_CST.  */
2339
      gcc_assert (TREE_CODE (index_expr) != INTEGER_CST);
2340
 
2341
      /* The default case is at the end of TREE_VEC.  */
2342
      elt = TREE_VEC_ELT (vec, TREE_VEC_LENGTH (vec) - 1);
2343
      gcc_assert (!CASE_HIGH (elt));
2344
      gcc_assert (!CASE_LOW (elt));
2345
      default_label_decl = CASE_LABEL (elt);
2346
 
2347
      for (i = TREE_VEC_LENGTH (vec) - 1; --i >= 0; )
2348
        {
2349
          tree low, high;
2350
          elt = TREE_VEC_ELT (vec, i);
2351
 
2352
          low = CASE_LOW (elt);
2353
          gcc_assert (low);
2354
          high = CASE_HIGH (elt);
2355
 
2356
          /* Discard empty ranges.  */
2357
          if (high && INT_CST_LT (high, low))
2358
            continue;
2359
 
2360
          case_list = add_case_node (case_list, index_type, low, high,
2361
                                     CASE_LABEL (elt));
2362
        }
2363
 
2364
 
2365
      /* Make sure start points to something that won't need any
2366
         transformation before the end of this function.  */
2367
      start = get_last_insn ();
2368
      if (! NOTE_P (start))
2369
        {
2370
          emit_note (NOTE_INSN_DELETED);
2371
          start = get_last_insn ();
2372
        }
2373
 
2374
      default_label = label_rtx (default_label_decl);
2375
 
2376
      before_case = get_last_insn ();
2377
 
2378
      /* Get upper and lower bounds of case values.  */
2379
 
2380
      uniq = 0;
2381
      count = 0;
2382
      label_bitmap = BITMAP_ALLOC (NULL);
2383
      for (n = case_list; n; n = n->right)
2384
        {
2385
          /* Count the elements and track the largest and smallest
2386
             of them (treating them as signed even if they are not).  */
2387
          if (count++ == 0)
2388
            {
2389
              minval = n->low;
2390
              maxval = n->high;
2391
            }
2392
          else
2393
            {
2394
              if (INT_CST_LT (n->low, minval))
2395
                minval = n->low;
2396
              if (INT_CST_LT (maxval, n->high))
2397
                maxval = n->high;
2398
            }
2399
          /* A range counts double, since it requires two compares.  */
2400
          if (! tree_int_cst_equal (n->low, n->high))
2401
            count++;
2402
 
2403
          /* If we have not seen this label yet, then increase the
2404
             number of unique case node targets seen.  */
2405
          lab = label_rtx (n->code_label);
2406
          if (!bitmap_bit_p (label_bitmap, CODE_LABEL_NUMBER (lab)))
2407
            {
2408
              bitmap_set_bit (label_bitmap, CODE_LABEL_NUMBER (lab));
2409
              uniq++;
2410
            }
2411
        }
2412
 
2413
      BITMAP_FREE (label_bitmap);
2414
 
2415
      /* cleanup_tree_cfg removes all SWITCH_EXPRs with a single
2416
         destination, such as one with a default case only.  However,
2417
         it doesn't remove cases that are out of range for the switch
2418
         type, so we may still get a zero here.  */
2419
      if (count == 0)
2420
        {
2421
          emit_jump (default_label);
2422
          return;
2423
        }
2424
 
2425
      /* Compute span of values.  */
2426
      range = fold_build2 (MINUS_EXPR, index_type, maxval, minval);
2427
 
2428
      /* Try implementing this switch statement by a short sequence of
2429
         bit-wise comparisons.  However, we let the binary-tree case
2430
         below handle constant index expressions.  */
2431
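      /* Example (illustrative): five case values 0, 4, 8, 14 and 20 that
         all branch to the same label give uniq == 1, count == 5 and
         range == 20, which is well below GET_MODE_BITSIZE (word_mode),
         so this path is taken whenever the shift is cheap.  */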
      if (CASE_USE_BIT_TESTS
2432
          && ! TREE_CONSTANT (index_expr)
2433
          && compare_tree_int (range, GET_MODE_BITSIZE (word_mode)) < 0
2434
          && compare_tree_int (range, 0) > 0
2435
          && lshift_cheap_p ()
2436
          && ((uniq == 1 && count >= 3)
2437
              || (uniq == 2 && count >= 5)
2438
              || (uniq == 3 && count >= 6)))
2439
        {
2440
          /* Optimize the case where all the case values fit in a
2441
             word without having to subtract MINVAL.  In this case,
2442
             we can optimize away the subtraction.  */
2443
          if (compare_tree_int (minval, 0) > 0
2444
              && compare_tree_int (maxval, GET_MODE_BITSIZE (word_mode)) < 0)
2445
            {
2446
              minval = build_int_cst (index_type, 0);
2447
              range = maxval;
2448
            }
2449
          emit_case_bit_tests (index_type, index_expr, minval, range,
2450
                               case_list, default_label);
2451
        }
2452
 
2453
      /* If range of values is much bigger than number of values,
2454
         make a sequence of conditional branches instead of a dispatch.
2455
         If the switch-index is a constant, do it this way
2456
         because we can optimize it.  */
2457
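      /* Example (illustrative): four case values spread over a range of
         1000 give range > 10 * count when not optimizing for size, so a
         chain of compare-and-branch insns is emitted here instead of a
         1001-entry dispatch table.  */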
 
2458
      else if (count < case_values_threshold ()
2459
               || compare_tree_int (range,
2460
                                    (optimize_size ? 3 : 10) * count) > 0
2461
               /* RANGE may be signed, and really large ranges will show up
2462
                  as negative numbers.  */
2463
               || compare_tree_int (range, 0) < 0
2464
#ifndef ASM_OUTPUT_ADDR_DIFF_ELT
2465
               || flag_pic
2466
#endif
2467
               || !flag_jump_tables
2468
               || TREE_CONSTANT (index_expr)
2469
               /* If neither casesi nor tablejump is available, we can
2470
                  only go this way.  */
2471
               || (!HAVE_casesi && !HAVE_tablejump))
2472
        {
2473
          index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
2474
 
2475
          /* If the index is a short or char for which we do not have
2476
             an insn to handle comparisons directly, convert it to
2477
             a full integer now, rather than letting each comparison
2478
             generate the conversion.  */
2479
 
2480
          if (GET_MODE_CLASS (GET_MODE (index)) == MODE_INT
2481
              && ! have_insn_for (COMPARE, GET_MODE (index)))
2482
            {
2483
              enum machine_mode wider_mode;
2484
              for (wider_mode = GET_MODE (index); wider_mode != VOIDmode;
2485
                   wider_mode = GET_MODE_WIDER_MODE (wider_mode))
2486
                if (have_insn_for (COMPARE, wider_mode))
2487
                  {
2488
                    index = convert_to_mode (wider_mode, index, unsignedp);
2489
                    break;
2490
                  }
2491
            }
2492
 
2493
          do_pending_stack_adjust ();
2494
 
2495
          if (MEM_P (index))
2496
            index = copy_to_reg (index);
2497
 
2498
          /* We generate a binary decision tree to select the
2499
             appropriate target code.  This is done as follows:
2500
 
2501
             The list of cases is rearranged into a binary tree,
2502
             nearly optimal assuming equal probability for each case.
2503
 
2504
             The tree is transformed into RTL, eliminating
2505
             redundant test conditions at the same time.
2506
 
2507
             If program flow could reach the end of the
2508
             decision tree an unconditional jump to the
2509
             default code is emitted.  */
2510
 
2511
          use_cost_table
2512
            = (TREE_CODE (orig_type) != ENUMERAL_TYPE
2513
               && estimate_case_costs (case_list));
2514
          balance_case_nodes (&case_list, NULL);
2515
          emit_case_nodes (index, case_list, default_label, index_type);
2516
          emit_jump (default_label);
2517
        }
2518
      else
2519
        {
2520
          table_label = gen_label_rtx ();
2521
          if (! try_casesi (index_type, index_expr, minval, range,
2522
                            table_label, default_label))
2523
            {
2524
              bool ok;
2525
 
2526
              /* Index jumptables from zero for suitable values of
2527
                 minval to avoid a subtraction.  */
2528
              if (! optimize_size
2529
                  && compare_tree_int (minval, 0) > 0
2530
                  && compare_tree_int (minval, 3) < 0)
2531
                {
2532
                  minval = build_int_cst (index_type, 0);
2533
                  range = maxval;
2534
                }
2535
 
2536
              ok = try_tablejump (index_type, index_expr, minval, range,
2537
                                  table_label, default_label);
2538
              gcc_assert (ok);
2539
            }
2540
 
2541
          /* Get table of labels to jump to, in order of case index.  */
2542
 
2543
          ncases = tree_low_cst (range, 0) + 1;
2544
          labelvec = alloca (ncases * sizeof (rtx));
2545
          memset (labelvec, 0, ncases * sizeof (rtx));
2546
 
2547
          for (n = case_list; n; n = n->right)
2548
            {
2549
              /* Compute the low and high bounds relative to the minimum
2550
                 value since that should fit in a HOST_WIDE_INT while the
2551
                 actual values may not.  */
2552
              HOST_WIDE_INT i_low
2553
                = tree_low_cst (fold_build2 (MINUS_EXPR, index_type,
2554
                                             n->low, minval), 1);
2555
              HOST_WIDE_INT i_high
2556
                = tree_low_cst (fold_build2 (MINUS_EXPR, index_type,
2557
                                             n->high, minval), 1);
2558
              HOST_WIDE_INT i;
2559
 
2560
              for (i = i_low; i <= i_high; i ++)
2561
                labelvec[i]
2562
                  = gen_rtx_LABEL_REF (Pmode, label_rtx (n->code_label));
2563
            }
2564
 
2565
          /* Fill in the gaps with the default.  */
2566
          for (i = 0; i < ncases; i++)
2567
            if (labelvec[i] == 0)
2568
              labelvec[i] = gen_rtx_LABEL_REF (Pmode, default_label);
2569
 
2570
          /* Output the table.  */
2571
          emit_label (table_label);
2572
 
2573
          if (CASE_VECTOR_PC_RELATIVE || flag_pic)
2574
            emit_jump_insn (gen_rtx_ADDR_DIFF_VEC (CASE_VECTOR_MODE,
2575
                                                   gen_rtx_LABEL_REF (Pmode, table_label),
2576
                                                   gen_rtvec_v (ncases, labelvec),
2577
                                                   const0_rtx, const0_rtx));
2578
          else
2579
            emit_jump_insn (gen_rtx_ADDR_VEC (CASE_VECTOR_MODE,
2580
                                              gen_rtvec_v (ncases, labelvec)));
2581
 
2582
          /* Record no drop-through after the table.  */
2583
          emit_barrier ();
2584
        }
2585
 
2586
      before_case = NEXT_INSN (before_case);
2587
      end = get_last_insn ();
2588
      fail = squeeze_notes (&before_case, &end);
2589
      gcc_assert (!fail);
2590
      reorder_insns (before_case, end, start);
2591
    }
2592
 
2593
  free_temp_slots ();
2594
}
2595
 
2596
/* Generate code to jump to LABEL if OP1 and OP2 are equal.  */
2597
 
2598
static void
2599
do_jump_if_equal (rtx op1, rtx op2, rtx label, int unsignedp)
2600
{
2601
  if (GET_CODE (op1) == CONST_INT && GET_CODE (op2) == CONST_INT)
2602
    {
2603
      if (op1 == op2)
2604
        emit_jump (label);
2605
    }
2606
  else
2607
    emit_cmp_and_jump_insns (op1, op2, EQ, NULL_RTX,
2608
                             (GET_MODE (op1) == VOIDmode
2609
                             ? GET_MODE (op2) : GET_MODE (op1)),
2610
                             unsignedp, label);
2611
}
2612
 
2613
/* Not all case values are encountered equally.  This function
2614
   uses a heuristic to weight case labels, in cases where that
2615
   looks like a reasonable thing to do.
2616
 
2617
   Right now, all we try to guess is text, and we establish the
2618
   following weights:
2619
 
2620
        chars above space:      16
2621
        digits:                 16
2622
        default:                12
2623
        space, punct:           8
2624
        tab:                    4
2625
        newline:                2
2626
        other "\" chars:        1
2627
        remaining chars:        0
2628
 
2629
   If we find any cases in the switch that are not either -1 or in the range
2630
   of valid ASCII characters, or are control characters other than those
2631
   commonly used with "\", don't treat this switch as scanning text.
2632
 
2633
   Return 1 if these nodes are suitable for cost estimation, otherwise
2634
   return 0.  */
2635
 
2636
static int
2637
estimate_case_costs (case_node_ptr node)
2638
{
2639
  tree min_ascii = integer_minus_one_node;
2640
  tree max_ascii = build_int_cst (TREE_TYPE (node->high), 127);
2641
  case_node_ptr n;
2642
  int i;
2643
 
2644
  /* If we haven't already made the cost table, make it now.  Note that the
2645
     lower bound of the table is -1, not zero.  */
2646
 
2647
  if (! cost_table_initialized)
2648
    {
2649
      cost_table_initialized = 1;
2650
 
2651
      for (i = 0; i < 128; i++)
2652
        {
2653
          if (ISALNUM (i))
2654
            COST_TABLE (i) = 16;
2655
          else if (ISPUNCT (i))
2656
            COST_TABLE (i) = 8;
2657
          else if (ISCNTRL (i))
2658
            COST_TABLE (i) = -1;
2659
        }
2660
 
2661
      COST_TABLE (' ') = 8;
2662
      COST_TABLE ('\t') = 4;
2663
      COST_TABLE ('\0') = 4;
2664
      COST_TABLE ('\n') = 2;
2665
      COST_TABLE ('\f') = 1;
2666
      COST_TABLE ('\v') = 1;
2667
      COST_TABLE ('\b') = 1;
2668
    }
2669
 
2670
  /* See if all the case expressions look like text.  It is text if the
2671
     constant is >= -1 and the highest constant is <= 127.  Do all comparisons
2672
     as signed arithmetic since we don't want to ever access cost_table with a
2673
     value less than -1.  Also check that none of the constants in a range
2674
     are strange control characters.  */
2675
 
2676
  for (n = node; n; n = n->right)
2677
    {
2678
      if ((INT_CST_LT (n->low, min_ascii)) || INT_CST_LT (max_ascii, n->high))
2679
        return 0;
2680
 
2681
      for (i = (HOST_WIDE_INT) TREE_INT_CST_LOW (n->low);
2682
           i <= (HOST_WIDE_INT) TREE_INT_CST_LOW (n->high); i++)
2683
        if (COST_TABLE (i) < 0)
2684
          return 0;
2685
    }
2686
 
2687
  /* All interesting values are within the range of interesting
2688
     ASCII characters.  */
2689
  return 1;
2690
}
2691
 
2692
/* Take an ordered list of case nodes
2693
   and transform them into a near optimal binary tree,
2694
   on the assumption that any target code selection value is as
2695
   likely as any other.
2696
 
2697
   The transformation is performed by splitting the ordered
2698
   list into two equal sections plus a pivot.  The parts are
2699
   then attached to the pivot as left and right branches.  Each
2700
   branch is then transformed recursively.  */
2701
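/* Illustration (not part of the original file): the ordered chain
   1 -> 3 -> 5 -> 7 -> 9 is split at the pivot 5, which becomes the root,
   with {1, 3} attached as the left branch and {7, 9} as the right; each
   branch is then balanced recursively in the same way.  */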
 
2702
static void
2703
balance_case_nodes (case_node_ptr *head, case_node_ptr parent)
2704
{
2705
  case_node_ptr np;
2706
 
2707
  np = *head;
2708
  if (np)
2709
    {
2710
      int cost = 0;
2711
      int i = 0;
2712
      int ranges = 0;
2713
      case_node_ptr *npp;
2714
      case_node_ptr left;
2715
 
2716
      /* Count the number of entries on branch.  Also count the ranges.  */
2717
 
2718
      while (np)
2719
        {
2720
          if (!tree_int_cst_equal (np->low, np->high))
2721
            {
2722
              ranges++;
2723
              if (use_cost_table)
2724
                cost += COST_TABLE (TREE_INT_CST_LOW (np->high));
2725
            }
2726
 
2727
          if (use_cost_table)
2728
            cost += COST_TABLE (TREE_INT_CST_LOW (np->low));
2729
 
2730
          i++;
2731
          np = np->right;
2732
        }
2733
 
2734
      if (i > 2)
2735
        {
2736
          /* Split this list if it is long enough for that to help.  */
2737
          npp = head;
2738
          left = *npp;
2739
          if (use_cost_table)
2740
            {
2741
              /* Find the place in the list that bisects the list's total cost;
2742
                 here I gets half the total cost.  */
2743
              int n_moved = 0;
2744
              i = (cost + 1) / 2;
2745
              while (1)
2746
                {
2747
                  /* Skip nodes while their cost does not reach that amount.  */
2748
                  if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
2749
                    i -= COST_TABLE (TREE_INT_CST_LOW ((*npp)->high));
2750
                  i -= COST_TABLE (TREE_INT_CST_LOW ((*npp)->low));
2751
                  if (i <= 0)
2752
                    break;
2753
                  npp = &(*npp)->right;
2754
                  n_moved += 1;
2755
                }
2756
              if (n_moved == 0)
2757
                {
2758
                  /* Leave this branch lopsided, but optimize left-hand
2759
                     side and fill in `parent' fields for right-hand side.  */
2760
                  np = *head;
2761
                  np->parent = parent;
2762
                  balance_case_nodes (&np->left, np);
2763
                  for (; np->right; np = np->right)
2764
                    np->right->parent = np;
2765
                  return;
2766
                }
2767
            }
2768
          /* If there are just three nodes, split at the middle one.  */
2769
          else if (i == 3)
2770
            npp = &(*npp)->right;
2771
          else
2772
            {
2773
              /* Find the place in the list that bisects the list's total cost,
2774
                 where ranges count as 2.
2775
                 Here I gets half the total cost.  */
2776
              i = (i + ranges + 1) / 2;
2777
              while (1)
2778
                {
2779
                  /* Skip nodes while their cost does not reach that amount.  */
2780
                  if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
2781
                    i--;
2782
                  i--;
2783
                  if (i <= 0)
2784
                    break;
2785
                  npp = &(*npp)->right;
2786
                }
2787
            }
2788
          *head = np = *npp;
2789
          *npp = 0;
2790
          np->parent = parent;
2791
          np->left = left;
2792
 
2793
          /* Optimize each of the two split parts.  */
2794
          balance_case_nodes (&np->left, np);
2795
          balance_case_nodes (&np->right, np);
2796
        }
2797
      else
2798
        {
2799
          /* Else leave this branch as one level,
2800
             but fill in `parent' fields.  */
2801
          np = *head;
2802
          np->parent = parent;
2803
          for (; np->right; np = np->right)
2804
            np->right->parent = np;
2805
        }
2806
    }
2807
}
2808
 
2809
/* Search the parent sections of the case node tree
2810
   to see if a test for the lower bound of NODE would be redundant.
2811
   INDEX_TYPE is the type of the index expression.
2812
 
2813
   The instructions to generate the case decision tree are
2814
   output in the same order as nodes are processed so it is
2815
   known that if a parent node checks the range of the current
2816
   node minus one that the current node is bounded at its lower
2817
   span.  Thus the test would be redundant.  */
2818
 
2819
static int
2820
node_has_low_bound (case_node_ptr node, tree index_type)
2821
{
2822
  tree low_minus_one;
2823
  case_node_ptr pnode;
2824
 
2825
  /* If the lower bound of this node is the lowest value in the index type,
2826
     we need not test it.  */
2827
 
2828
  if (tree_int_cst_equal (node->low, TYPE_MIN_VALUE (index_type)))
2829
    return 1;
2830
 
2831
  /* If this node has a left branch, the value at the left must be less
2832
     than that at this node, so it cannot be bounded at the bottom and
2833
     we need not bother testing any further.  */
2834
 
2835
  if (node->left)
2836
    return 0;
2837
 
2838
  low_minus_one = fold_build2 (MINUS_EXPR, TREE_TYPE (node->low),
2839
                               node->low,
2840
                               build_int_cst (TREE_TYPE (node->low), 1));
2841
 
2842
  /* If the subtraction above overflowed, we can't verify anything.
2843
     Otherwise, look for a parent that tests our value - 1.  */
2844
 
2845
  if (! tree_int_cst_lt (low_minus_one, node->low))
2846
    return 0;
2847
 
2848
  for (pnode = node->parent; pnode; pnode = pnode->parent)
2849
    if (tree_int_cst_equal (low_minus_one, pnode->high))
2850
      return 1;
2851
 
2852
  return 0;
2853
}
2854
 
2855
/* Search the parent sections of the case node tree
2856
   to see if a test for the upper bound of NODE would be redundant.
2857
   INDEX_TYPE is the type of the index expression.
2858
 
2859
   The instructions to generate the case decision tree are
2860
   output in the same order as nodes are processed so it is
2861
   known that if a parent node checks the range of the current
2862
   node plus one that the current node is bounded at its upper
2863
   span.  Thus the test would be redundant.  */
2864
 
2865
static int
2866
node_has_high_bound (case_node_ptr node, tree index_type)
2867
{
2868
  tree high_plus_one;
2869
  case_node_ptr pnode;
2870
 
2871
  /* If there is no upper bound, obviously no test is needed.  */
2872
 
2873
  if (TYPE_MAX_VALUE (index_type) == NULL)
2874
    return 1;
2875
 
2876
  /* If the upper bound of this node is the highest value in the type
2877
     of the index expression, we need not test against it.  */
2878
 
2879
  if (tree_int_cst_equal (node->high, TYPE_MAX_VALUE (index_type)))
2880
    return 1;
2881
 
2882
  /* If this node has a right branch, the value at the right must be greater
2883
     than that at this node, so it cannot be bounded at the top and
2884
     we need not bother testing any further.  */
2885
 
2886
  if (node->right)
2887
    return 0;
2888
 
2889
  high_plus_one = fold_build2 (PLUS_EXPR, TREE_TYPE (node->high),
2890
                               node->high,
2891
                               build_int_cst (TREE_TYPE (node->high), 1));
2892
 
2893
  /* If the addition above overflowed, we can't verify anything.
2894
     Otherwise, look for a parent that tests our value + 1.  */
2895
 
2896
  if (! tree_int_cst_lt (node->high, high_plus_one))
2897
    return 0;
2898
 
2899
  for (pnode = node->parent; pnode; pnode = pnode->parent)
2900
    if (tree_int_cst_equal (high_plus_one, pnode->low))
2901
      return 1;
2902
 
2903
  return 0;
2904
}
2905
 
2906
/* Search the parent sections of the
2907
   case node tree to see if both tests for the upper and lower
2908
   bounds of NODE would be redundant.  */
2909
 
2910
static int
2911
node_is_bounded (case_node_ptr node, tree index_type)
2912
{
2913
  return (node_has_low_bound (node, index_type)
2914
          && node_has_high_bound (node, index_type));
2915
}
2916
 
2917
/* Emit step-by-step code to select a case for the value of INDEX.
2918
   The thus generated decision tree follows the form of the
2919
   case-node binary tree NODE, whose nodes represent test conditions.
2920
   INDEX_TYPE is the type of the index of the switch.
2921
 
2922
   Care is taken to prune redundant tests from the decision tree
2923
   by detecting any boundary conditions already checked by
2924
   emitted rtx.  (See node_has_high_bound, node_has_low_bound
2925
   and node_is_bounded, above.)
2926
 
2927
   Where the test conditions can be shown to be redundant we emit
2928
   an unconditional jump to the target code.  As a further
2929
   optimization, the subordinates of a tree node are examined to
2930
   check for bounded nodes.  In this case conditional and/or
2931
   unconditional jumps as a result of the boundary check for the
2932
   current node are arranged to target the subordinates' associated
2933
   code for out of bound conditions on the current node.
2934
 
2935
   We can assume that when control reaches the code generated here,
2936
   the index value has already been compared with the parents
2937
   of this node, and determined to be on the same side of each parent
2938
   as this node is.  Thus, if this node tests for the value 51,
2939
   and a parent tested for 52, we don't need to consider
2940
   the possibility of a value greater than 51.  If another parent
2941
   tests for the value 50, then this node need not test anything.  */
2942
 
2943
static void
2944
emit_case_nodes (rtx index, case_node_ptr node, rtx default_label,
2945
                 tree index_type)
2946
{
2947
  /* If INDEX has an unsigned type, we must make unsigned branches.  */
2948
  int unsignedp = TYPE_UNSIGNED (index_type);
2949
  enum machine_mode mode = GET_MODE (index);
2950
  enum machine_mode imode = TYPE_MODE (index_type);
2951
 
2952
  /* Handle indices detected as constant during RTL expansion.  */
2953
  if (mode == VOIDmode)
2954
    mode = imode;
2955
 
2956
  /* See if our parents have already tested everything for us.
2957
     If they have, emit an unconditional jump for this node.  */
2958
  if (node_is_bounded (node, index_type))
2959
    emit_jump (label_rtx (node->code_label));
2960
 
2961
  else if (tree_int_cst_equal (node->low, node->high))
2962
    {
2963
      /* Node is single valued.  First see if the index expression matches
2964
         this node and then check our children, if any.  */
2965
 
2966
      do_jump_if_equal (index,
2967
                        convert_modes (mode, imode,
2968
                                       expand_expr (node->low, NULL_RTX,
2969
                                                    VOIDmode, 0),
2970
                                       unsignedp),
2971
                        label_rtx (node->code_label), unsignedp);
2972
 
      if (node->right != 0 && node->left != 0)
        {
          /* This node has children on both sides.
             Dispatch to one side or the other
             by comparing the index value with this node's value.
             If one subtree is bounded, check that one first,
             so we can avoid real branches in the tree.  */

          if (node_is_bounded (node->right, index_type))
            {
              emit_cmp_and_jump_insns (index,
                                       convert_modes
                                       (mode, imode,
                                        expand_expr (node->high, NULL_RTX,
                                                     VOIDmode, 0),
                                        unsignedp),
                                       GT, NULL_RTX, mode, unsignedp,
                                       label_rtx (node->right->code_label));
              emit_case_nodes (index, node->left, default_label, index_type);
            }

          else if (node_is_bounded (node->left, index_type))
            {
              emit_cmp_and_jump_insns (index,
                                       convert_modes
                                       (mode, imode,
                                        expand_expr (node->high, NULL_RTX,
                                                     VOIDmode, 0),
                                        unsignedp),
                                       LT, NULL_RTX, mode, unsignedp,
                                       label_rtx (node->left->code_label));
              emit_case_nodes (index, node->right, default_label, index_type);
            }

          /* If both children are single-valued cases with no
             children, finish up all the work.  This way, we can save
             one ordered comparison.  */
          else if (tree_int_cst_equal (node->right->low, node->right->high)
                   && node->right->left == 0
                   && node->right->right == 0
                   && tree_int_cst_equal (node->left->low, node->left->high)
                   && node->left->left == 0
                   && node->left->right == 0)
            {
              /* Neither node is bounded.  First distinguish the two sides;
                 then emit the code for one side at a time.  */

              /* See if the value matches what the right hand side
                 wants.  */
              do_jump_if_equal (index,
                                convert_modes (mode, imode,
                                               expand_expr (node->right->low,
                                                            NULL_RTX,
                                                            VOIDmode, 0),
                                               unsignedp),
                                label_rtx (node->right->code_label),
                                unsignedp);

              /* See if the value matches what the left hand side
                 wants.  */
              do_jump_if_equal (index,
                                convert_modes (mode, imode,
                                               expand_expr (node->left->low,
                                                            NULL_RTX,
                                                            VOIDmode, 0),
                                               unsignedp),
                                label_rtx (node->left->code_label),
                                unsignedp);
            }

          else
            {
              /* Neither node is bounded.  First distinguish the two sides;
                 then emit the code for one side at a time.  */

              tree test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);

              /* See if the value is on the right.  */
              emit_cmp_and_jump_insns (index,
                                       convert_modes
                                       (mode, imode,
                                        expand_expr (node->high, NULL_RTX,
                                                     VOIDmode, 0),
                                        unsignedp),
                                       GT, NULL_RTX, mode, unsignedp,
                                       label_rtx (test_label));

              /* Value must be on the left.
                 Handle the left-hand subtree.  */
              emit_case_nodes (index, node->left, default_label, index_type);
              /* If left-hand subtree does nothing,
                 go to default.  */
              emit_jump (default_label);

              /* Code branches here for the right-hand subtree.  */
              expand_label (test_label);
              emit_case_nodes (index, node->right, default_label, index_type);
            }
        }

      else if (node->right != 0 && node->left == 0)
        {
          /* Here we have a right child but no left so we issue a conditional
             branch to default and process the right child.

             Omit the conditional branch to default if the right child
             does not have any children and is single valued; it would
             cost too much space to save so little time.  */

          if (node->right->right || node->right->left
              || !tree_int_cst_equal (node->right->low, node->right->high))
            {
              if (!node_has_low_bound (node, index_type))
                {
                  emit_cmp_and_jump_insns (index,
                                           convert_modes
                                           (mode, imode,
                                            expand_expr (node->high, NULL_RTX,
                                                         VOIDmode, 0),
                                            unsignedp),
                                           LT, NULL_RTX, mode, unsignedp,
                                           default_label);
                }

              emit_case_nodes (index, node->right, default_label, index_type);
            }
          else
            /* We cannot process node->right normally
               since we haven't ruled out the numbers less than
               this node's value.  So handle node->right explicitly.  */
            do_jump_if_equal (index,
                              convert_modes
                              (mode, imode,
                               expand_expr (node->right->low, NULL_RTX,
                                            VOIDmode, 0),
                               unsignedp),
                              label_rtx (node->right->code_label), unsignedp);
        }

      else if (node->right == 0 && node->left != 0)
        {
          /* Just one subtree, on the left.  */
          if (node->left->left || node->left->right
              || !tree_int_cst_equal (node->left->low, node->left->high))
            {
              if (!node_has_high_bound (node, index_type))
                {
                  emit_cmp_and_jump_insns (index,
                                           convert_modes
                                           (mode, imode,
                                            expand_expr (node->high, NULL_RTX,
                                                         VOIDmode, 0),
                                            unsignedp),
                                           GT, NULL_RTX, mode, unsignedp,
                                           default_label);
                }

              emit_case_nodes (index, node->left, default_label, index_type);
            }
          else
            /* We cannot process node->left normally
               since we haven't ruled out the numbers greater than
               this node's value.  So handle node->left explicitly.  */
            do_jump_if_equal (index,
                              convert_modes
                              (mode, imode,
                               expand_expr (node->left->low, NULL_RTX,
                                            VOIDmode, 0),
                               unsignedp),
                              label_rtx (node->left->code_label), unsignedp);
        }
    }
  else
    {
      /* Node is a range.  These cases are very similar to those for a single
         value, except that we do not start by testing whether this node
         is the one to branch to.  */

      if (node->right != 0 && node->left != 0)
        {
          /* Node has subtrees on both sides.
             If the right-hand subtree is bounded,
             test for it first, since we can go straight there.
             Otherwise, we need to make a branch in the control structure,
             then handle the two subtrees.  */
          tree test_label = 0;

          if (node_is_bounded (node->right, index_type))
            /* Right hand node is fully bounded so we can eliminate any
               testing and branch directly to the target code.  */
            emit_cmp_and_jump_insns (index,
                                     convert_modes
                                     (mode, imode,
                                      expand_expr (node->high, NULL_RTX,
                                                   VOIDmode, 0),
                                      unsignedp),
                                     GT, NULL_RTX, mode, unsignedp,
                                     label_rtx (node->right->code_label));
          else
            {
              /* Right hand node requires testing.
                 Branch to a label where we will handle it later.  */

              test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
              emit_cmp_and_jump_insns (index,
                                       convert_modes
                                       (mode, imode,
                                        expand_expr (node->high, NULL_RTX,
                                                     VOIDmode, 0),
                                        unsignedp),
                                       GT, NULL_RTX, mode, unsignedp,
                                       label_rtx (test_label));
            }

          /* Value belongs to this node or to the left-hand subtree.  */

          emit_cmp_and_jump_insns (index,
                                   convert_modes
                                   (mode, imode,
                                    expand_expr (node->low, NULL_RTX,
                                                 VOIDmode, 0),
                                    unsignedp),
                                   GE, NULL_RTX, mode, unsignedp,
                                   label_rtx (node->code_label));

          /* Handle the left-hand subtree.  */
          emit_case_nodes (index, node->left, default_label, index_type);

          /* If right node had to be handled later, do that now.  */

          if (test_label)
            {
              /* If the left-hand subtree fell through,
                 don't let it fall into the right-hand subtree.  */
              emit_jump (default_label);

              expand_label (test_label);
              emit_case_nodes (index, node->right, default_label, index_type);
            }
        }

      else if (node->right != 0 && node->left == 0)
        {
          /* Deal with values to the left of this node,
             if they are possible.  */
          if (!node_has_low_bound (node, index_type))
            {
              emit_cmp_and_jump_insns (index,
                                       convert_modes
                                       (mode, imode,
                                        expand_expr (node->low, NULL_RTX,
                                                     VOIDmode, 0),
                                        unsignedp),
                                       LT, NULL_RTX, mode, unsignedp,
                                       default_label);
            }

          /* Value belongs to this node or to the right-hand subtree.  */

          emit_cmp_and_jump_insns (index,
                                   convert_modes
                                   (mode, imode,
                                    expand_expr (node->high, NULL_RTX,
                                                 VOIDmode, 0),
                                    unsignedp),
                                   LE, NULL_RTX, mode, unsignedp,
                                   label_rtx (node->code_label));

          emit_case_nodes (index, node->right, default_label, index_type);
        }

      else if (node->right == 0 && node->left != 0)
        {
          /* Deal with values to the right of this node,
             if they are possible.  */
          if (!node_has_high_bound (node, index_type))
            {
              emit_cmp_and_jump_insns (index,
                                       convert_modes
                                       (mode, imode,
                                        expand_expr (node->high, NULL_RTX,
                                                     VOIDmode, 0),
                                        unsignedp),
                                       GT, NULL_RTX, mode, unsignedp,
                                       default_label);
            }

          /* Value belongs to this node or to the left-hand subtree.  */

          emit_cmp_and_jump_insns (index,
                                   convert_modes
                                   (mode, imode,
                                    expand_expr (node->low, NULL_RTX,
                                                 VOIDmode, 0),
                                    unsignedp),
                                   GE, NULL_RTX, mode, unsignedp,
                                   label_rtx (node->code_label));

          emit_case_nodes (index, node->left, default_label, index_type);
        }
3273
 
3274
      else
3275
        {
3276
          /* Node has no children so we check low and high bounds to remove
3277
             redundant tests.  Only one of the bounds can exist,
3278
             since otherwise this node is bounded--a case tested already.  */
3279
          int high_bound = node_has_high_bound (node, index_type);
3280
          int low_bound = node_has_low_bound (node, index_type);
3281
 
3282
          if (!high_bound && low_bound)
3283
            {
3284
              emit_cmp_and_jump_insns (index,
3285
                                       convert_modes
3286
                                       (mode, imode,
3287
                                        expand_expr (node->high, NULL_RTX,
3288
                                                     VOIDmode, 0),
3289
                                        unsignedp),
3290
                                       GT, NULL_RTX, mode, unsignedp,
3291
                                       default_label);
3292
            }
3293
 
3294
          else if (!low_bound && high_bound)
3295
            {
3296
              emit_cmp_and_jump_insns (index,
3297
                                       convert_modes
3298
                                       (mode, imode,
3299
                                        expand_expr (node->low, NULL_RTX,
3300
                                                     VOIDmode, 0),
3301
                                        unsignedp),
3302
                                       LT, NULL_RTX, mode, unsignedp,
3303
                                       default_label);
3304
            }
3305
          else if (!low_bound && !high_bound)
3306
            {
3307
              /* Widen LOW and HIGH to the same width as INDEX.  */
3308
              tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
3309
              tree low = build1 (CONVERT_EXPR, type, node->low);
3310
              tree high = build1 (CONVERT_EXPR, type, node->high);
3311
              rtx low_rtx, new_index, new_bound;
3312
 
3313
              /* Instead of doing two branches, emit one unsigned branch for
3314
                 (index-low) > (high-low).  */
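              /* For example, a range label covering 10 ... 20 with neither
                 bound established by a parent collapses into the single
                 unsigned test (index - 10) > 10: the subtraction wraps
                 around for index < 10, so every out-of-range value compares
                 greater than the width of the range.  */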
              low_rtx = expand_expr (low, NULL_RTX, mode, 0);
              new_index = expand_simple_binop (mode, MINUS, index, low_rtx,
                                               NULL_RTX, unsignedp,
                                               OPTAB_WIDEN);
              new_bound = expand_expr (fold_build2 (MINUS_EXPR, type,
                                                    high, low),
                                       NULL_RTX, mode, 0);

              emit_cmp_and_jump_insns (new_index, new_bound, GT, NULL_RTX,
                                       mode, 1, default_label);
            }

          emit_jump (label_rtx (node->code_label));
        }
    }
}
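
/* To make the recursion above concrete, the fragment below is a
   hand-written C rendering of the compare-and-jump sequence that
   emit_case_nodes produces for a switch over the values 1, 3, 5, 7
   and 9, assuming the balancing pass rooted the case_node tree at 5
   with 3 and 7 as children and 1 and 9 as their leaves, and no parent
   test has bounded the index.  The function name is made up; this is
   an illustrative sketch of the technique only and is excluded from
   compilation.  */
#if 0
static void
case_tree_sketch (int x)
{
  if (x == 5) goto L5;          /* root: single-valued equality test   */
  if (x > 5) goto right_side;   /* dispatch to the right-hand subtree  */

  /* Left-hand subtree: only values below 5 can reach this point.  */
  if (x == 3) goto L3;
  if (x == 1) goto L1;          /* single-valued leaf handled explicitly  */
  goto L_default;               /* left subtree exhausted                 */

 right_side:
  /* Right-hand subtree: only values above 5 can reach this point.  */
  if (x == 7) goto L7;
  if (x == 9) goto L9;          /* single-valued leaf handled explicitly  */
  /* Otherwise fall through to the default code emitted after the tree.  */

  /* The case bodies would stand at these labels.  */
 L1: L3: L5: L7: L9: L_default: ;
}
#endif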
