gcc-4.5.1/gcc/stmt.c (openrisc_me trunk, rev 324)

/* Expands front end tree to back end RTL for GCC
   Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997,
   1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009,
   2010 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* This file handles the generation of rtl code from tree structure
   above the level of expressions, using subroutines in exp*.c and emit-rtl.c.
   The functions whose names start with `expand_' are called by the
   expander to generate RTL instructions for various kinds of constructs.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"

#include "rtl.h"
#include "hard-reg-set.h"
#include "tree.h"
#include "tm_p.h"
#include "flags.h"
#include "except.h"
#include "function.h"
#include "insn-config.h"
#include "expr.h"
#include "libfuncs.h"
#include "recog.h"
#include "machmode.h"
#include "toplev.h"
#include "output.h"
#include "ggc.h"
#include "langhooks.h"
#include "predict.h"
#include "optabs.h"
#include "target.h"
#include "gimple.h"
#include "regs.h"
#include "alloc-pool.h"
#include "pretty-print.h"

/* Functions and data structures for expanding case statements.  */

/* Case label structure, used to hold info on labels within case
   statements.  We handle "range" labels; for a single-value label
   as in C, the high and low limits are the same.

   We start with a vector of case nodes sorted in ascending order, and
   the default label as the last element in the vector.  Before expanding
   to RTL, we transform this vector into a list linked via the RIGHT
   fields in the case_node struct.  Nodes with higher case values are
   later in the list.

   Switch statements can be output in three forms.  A branch table is
   used if there are more than a few labels and the labels are dense
   within the range between the smallest and largest case value.  If a
   branch table is used, no further manipulations are done with the case
   node chain.

   The alternative to the use of a branch table is to generate a series
   of compare and jump insns.  When that is done, we use the LEFT, RIGHT,
   and PARENT fields to hold a binary tree.  Initially the tree is
   totally unbalanced, with everything on the right.  We balance the tree
   with nodes on the left having lower case values than the parent
   and nodes on the right having higher values.  We then output the tree
   in order.

   For very small, suitable switch statements, we can generate a series
   of simple bit test and branches instead.  */

struct case_node
{
  struct case_node      *left;  /* Left son in binary tree */
  struct case_node      *right; /* Right son in binary tree; also node chain */
  struct case_node      *parent; /* Parent of node in binary tree */
  tree                  low;    /* Lowest index value for this label */
  tree                  high;   /* Highest index value for this label */
  tree                  code_label; /* Label to jump to when node matches */
};

typedef struct case_node case_node;
typedef struct case_node *case_node_ptr;

/* These are used by estimate_case_costs and balance_case_nodes.  */

/* This must be a signed type, and non-ANSI compilers lack signed char.  */
static short cost_table_[129];
static int use_cost_table;
static int cost_table_initialized;

/* Special care is needed because we allow -1, but TREE_INT_CST_LOW
   is unsigned.  */
#define COST_TABLE(I)  cost_table_[(unsigned HOST_WIDE_INT) ((I) + 1)]

static int n_occurrences (int, const char *);
static bool tree_conflicts_with_clobbers_p (tree, HARD_REG_SET *);
static void expand_nl_goto_receiver (void);
static bool check_operand_nalternatives (tree, tree);
static bool check_unique_operand_names (tree, tree, tree);
static char *resolve_operand_name_1 (char *, tree, tree, tree);
static void expand_null_return_1 (void);
static void expand_value_return (rtx);
static int estimate_case_costs (case_node_ptr);
static bool lshift_cheap_p (void);
static int case_bit_test_cmp (const void *, const void *);
static void emit_case_bit_tests (tree, tree, tree, tree, case_node_ptr, rtx);
static void balance_case_nodes (case_node_ptr *, case_node_ptr);
static int node_has_low_bound (case_node_ptr, tree);
static int node_has_high_bound (case_node_ptr, tree);
static int node_is_bounded (case_node_ptr, tree);
static void emit_case_nodes (rtx, case_node_ptr, rtx, tree);
static struct case_node *add_case_node (struct case_node *, tree,
                                        tree, tree, tree, alloc_pool);


/* Return the rtx-label that corresponds to a LABEL_DECL,
   creating it if necessary.  */

rtx
label_rtx (tree label)
{
  gcc_assert (TREE_CODE (label) == LABEL_DECL);

  if (!DECL_RTL_SET_P (label))
    {
      rtx r = gen_label_rtx ();
      SET_DECL_RTL (label, r);
      if (FORCED_LABEL (label) || DECL_NONLOCAL (label))
        LABEL_PRESERVE_P (r) = 1;
    }

  return DECL_RTL (label);
}

/* As above, but also put it on the forced-reference list of the
   function that contains it.  */
rtx
force_label_rtx (tree label)
{
  rtx ref = label_rtx (label);
  tree function = decl_function_context (label);

  gcc_assert (function);

  forced_labels = gen_rtx_EXPR_LIST (VOIDmode, ref, forced_labels);
  return ref;
}

/* Add an unconditional jump to LABEL as the next sequential instruction.  */

void
emit_jump (rtx label)
{
  do_pending_stack_adjust ();
  emit_jump_insn (gen_jump (label));
  emit_barrier ();
}

/* Emit code to jump to the address
   specified by the pointer expression EXP.  */

void
expand_computed_goto (tree exp)
{
  rtx x = expand_normal (exp);

  x = convert_memory_address (Pmode, x);

  do_pending_stack_adjust ();
  emit_indirect_jump (x);
}

/* Handle goto statements and the labels that they can go to.  */

/* Specify the location in the RTL code of a label LABEL,
   which is a LABEL_DECL tree node.

   This is used for the kind of label that the user can jump to with a
   goto statement, and for alternatives of a switch or case statement.
   RTL labels generated for loops and conditionals don't go through here;
   they are generated directly at the RTL level, by other functions below.

   Note that this has nothing to do with defining label *names*.
   Languages vary in how they do that and what that even means.  */

void
expand_label (tree label)
{
  rtx label_r = label_rtx (label);

  do_pending_stack_adjust ();
  emit_label (label_r);
  if (DECL_NAME (label))
    LABEL_NAME (DECL_RTL (label)) = IDENTIFIER_POINTER (DECL_NAME (label));

  if (DECL_NONLOCAL (label))
    {
      expand_nl_goto_receiver ();
      nonlocal_goto_handler_labels
        = gen_rtx_EXPR_LIST (VOIDmode, label_r,
                             nonlocal_goto_handler_labels);
    }

  if (FORCED_LABEL (label))
    forced_labels = gen_rtx_EXPR_LIST (VOIDmode, label_r, forced_labels);

  if (DECL_NONLOCAL (label) || FORCED_LABEL (label))
    maybe_set_first_label_num (label_r);
}

/* Generate RTL code for a `goto' statement with target label LABEL.
   LABEL should be a LABEL_DECL tree node that was or will later be
   defined with `expand_label'.  */

void
expand_goto (tree label)
{
#ifdef ENABLE_CHECKING
  /* Check for a nonlocal goto to a containing function.  Should have
     gotten translated to __builtin_nonlocal_goto.  */
  tree context = decl_function_context (label);
  gcc_assert (!context || context == current_function_decl);
#endif

  emit_jump (label_rtx (label));
}

/* Return the number of times character C occurs in string S.  */
static int
n_occurrences (int c, const char *s)
{
  int n = 0;
  while (*s)
    n += (*s++ == c);
  return n;
}
251
 
252
/* Generate RTL for an asm statement (explicit assembler code).
253
   STRING is a STRING_CST node containing the assembler code text,
254
   or an ADDR_EXPR containing a STRING_CST.  VOL nonzero means the
255
   insn is volatile; don't optimize it.  */
256
 
257
static void
258
expand_asm_loc (tree string, int vol, location_t locus)
259
{
260
  rtx body;
261
 
262
  if (TREE_CODE (string) == ADDR_EXPR)
263
    string = TREE_OPERAND (string, 0);
264
 
265
  body = gen_rtx_ASM_INPUT_loc (VOIDmode,
266
                                ggc_strdup (TREE_STRING_POINTER (string)),
267
                                locus);
268
 
269
  MEM_VOLATILE_P (body) = vol;
270
 
271
  emit_insn (body);
272
}
273
 
274
/* Parse the output constraint pointed to by *CONSTRAINT_P.  It is the
275
   OPERAND_NUMth output operand, indexed from zero.  There are NINPUTS
276
   inputs and NOUTPUTS outputs to this extended-asm.  Upon return,
277
   *ALLOWS_MEM will be TRUE iff the constraint allows the use of a
278
   memory operand.  Similarly, *ALLOWS_REG will be TRUE iff the
279
   constraint allows the use of a register operand.  And, *IS_INOUT
280
   will be true if the operand is read-write, i.e., if it is used as
281
   an input as well as an output.  If *CONSTRAINT_P is not in
282
   canonical form, it will be made canonical.  (Note that `+' will be
283
   replaced with `=' as part of this process.)
284
 
285
   Returns TRUE if all went well; FALSE if an error occurred.  */
286
 
287
bool
288
parse_output_constraint (const char **constraint_p, int operand_num,
289
                         int ninputs, int noutputs, bool *allows_mem,
290
                         bool *allows_reg, bool *is_inout)
291
{
292
  const char *constraint = *constraint_p;
293
  const char *p;
294
 
295
  /* Assume the constraint doesn't allow the use of either a register
296
     or memory.  */
297
  *allows_mem = false;
298
  *allows_reg = false;
299
 
300
  /* Allow the `=' or `+' to not be at the beginning of the string,
301
     since it wasn't explicitly documented that way, and there is a
302
     large body of code that puts it last.  Swap the character to
303
     the front, so as not to uglify any place else.  */
304
  p = strchr (constraint, '=');
305
  if (!p)
306
    p = strchr (constraint, '+');
307
 
308
  /* If the string doesn't contain an `=', issue an error
309
     message.  */
310
  if (!p)
311
    {
312
      error ("output operand constraint lacks %<=%>");
313
      return false;
314
    }
315
 
316
  /* If the constraint begins with `+', then the operand is both read
317
     from and written to.  */
318
  *is_inout = (*p == '+');
319
 
320
  /* Canonicalize the output constraint so that it begins with `='.  */
321
  if (p != constraint || *is_inout)
322
    {
323
      char *buf;
324
      size_t c_len = strlen (constraint);
325
 
326
      if (p != constraint)
327
        warning (0, "output constraint %qc for operand %d "
328
                 "is not at the beginning",
329
                 *p, operand_num);
330
 
331
      /* Make a copy of the constraint.  */
332
      buf = XALLOCAVEC (char, c_len + 1);
333
      strcpy (buf, constraint);
334
      /* Swap the first character and the `=' or `+'.  */
335
      buf[p - constraint] = buf[0];
336
      /* Make sure the first character is an `='.  (Until we do this,
337
         it might be a `+'.)  */
338
      buf[0] = '=';
339
      /* Replace the constraint with the canonicalized string.  */
340
      *constraint_p = ggc_alloc_string (buf, c_len);
341
      constraint = *constraint_p;
342
    }
343
 
344
  /* Loop through the constraint string.  */
345
  for (p = constraint + 1; *p; p += CONSTRAINT_LEN (*p, p))
346
    switch (*p)
347
      {
348
      case '+':
349
      case '=':
350
        error ("operand constraint contains incorrectly positioned "
351
               "%<+%> or %<=%>");
352
        return false;
353
 
354
      case '%':
355
        if (operand_num + 1 == ninputs + noutputs)
356
          {
357
            error ("%<%%%> constraint used with last operand");
358
            return false;
359
          }
360
        break;
361
 
362
      case 'V':  case TARGET_MEM_CONSTRAINT:  case 'o':
363
        *allows_mem = true;
364
        break;
365
 
366
      case '?':  case '!':  case '*':  case '&':  case '#':
367
      case 'E':  case 'F':  case 'G':  case 'H':
368
      case 's':  case 'i':  case 'n':
369
      case 'I':  case 'J':  case 'K':  case 'L':  case 'M':
370
      case 'N':  case 'O':  case 'P':  case ',':
371
        break;
372
 
373
      case '0':  case '1':  case '2':  case '3':  case '4':
374
      case '5':  case '6':  case '7':  case '8':  case '9':
375
      case '[':
376
        error ("matching constraint not valid in output operand");
377
        return false;
378
 
379
      case '<':  case '>':
380
        /* ??? Before flow, auto inc/dec insns are not supposed to exist,
381
           excepting those that expand_call created.  So match memory
382
           and hope.  */
383
        *allows_mem = true;
384
        break;
385
 
386
      case 'g':  case 'X':
387
        *allows_reg = true;
388
        *allows_mem = true;
389
        break;
390
 
391
      case 'p': case 'r':
392
        *allows_reg = true;
393
        break;
394
 
395
      default:
396
        if (!ISALPHA (*p))
397
          break;
398
        if (REG_CLASS_FROM_CONSTRAINT (*p, p) != NO_REGS)
399
          *allows_reg = true;
400
#ifdef EXTRA_CONSTRAINT_STR
401
        else if (EXTRA_ADDRESS_CONSTRAINT (*p, p))
402
          *allows_reg = true;
403
        else if (EXTRA_MEMORY_CONSTRAINT (*p, p))
404
          *allows_mem = true;
405
        else
406
          {
407
            /* Otherwise we can't assume anything about the nature of
408
               the constraint except that it isn't purely registers.
409
               Treat it like "g" and hope for the best.  */
410
            *allows_reg = true;
411
            *allows_mem = true;
412
          }
413
#endif
414
        break;
415
      }
416
 
417
  return true;
418
}
419
 
420
/* Similar, but for input constraints.  */
421
 
422
bool
423
parse_input_constraint (const char **constraint_p, int input_num,
424
                        int ninputs, int noutputs, int ninout,
425
                        const char * const * constraints,
426
                        bool *allows_mem, bool *allows_reg)
427
{
428
  const char *constraint = *constraint_p;
429
  const char *orig_constraint = constraint;
430
  size_t c_len = strlen (constraint);
431
  size_t j;
432
  bool saw_match = false;
433
 
434
  /* Assume the constraint doesn't allow the use of either
435
     a register or memory.  */
436
  *allows_mem = false;
437
  *allows_reg = false;
438
 
439
  /* Make sure constraint has neither `=', `+', nor '&'.  */
440
 
441
  for (j = 0; j < c_len; j += CONSTRAINT_LEN (constraint[j], constraint+j))
442
    switch (constraint[j])
443
      {
444
      case '+':  case '=':  case '&':
445
        if (constraint == orig_constraint)
446
          {
447
            error ("input operand constraint contains %qc", constraint[j]);
448
            return false;
449
          }
450
        break;
451
 
452
      case '%':
453
        if (constraint == orig_constraint
454
            && input_num + 1 == ninputs - ninout)
455
          {
456
            error ("%<%%%> constraint used with last operand");
457
            return false;
458
          }
459
        break;
460
 
461
      case 'V':  case TARGET_MEM_CONSTRAINT:  case 'o':
462
        *allows_mem = true;
463
        break;
464
 
465
      case '<':  case '>':
466
      case '?':  case '!':  case '*':  case '#':
467
      case 'E':  case 'F':  case 'G':  case 'H':
468
      case 's':  case 'i':  case 'n':
469
      case 'I':  case 'J':  case 'K':  case 'L':  case 'M':
470
      case 'N':  case 'O':  case 'P':  case ',':
471
        break;
472
 
473
        /* Whether or not a numeric constraint allows a register is
474
           decided by the matching constraint, and so there is no need
475
           to do anything special with them.  We must handle them in
476
           the default case, so that we don't unnecessarily force
477
           operands to memory.  */
478
      case '0':  case '1':  case '2':  case '3':  case '4':
479
      case '5':  case '6':  case '7':  case '8':  case '9':
480
        {
481
          char *end;
482
          unsigned long match;
483
 
484
          saw_match = true;
485
 
486
          match = strtoul (constraint + j, &end, 10);
487
          if (match >= (unsigned long) noutputs)
488
            {
489
              error ("matching constraint references invalid operand number");
490
              return false;
491
            }
492
 
493
          /* Try and find the real constraint for this dup.  Only do this
494
             if the matching constraint is the only alternative.  */
495
          if (*end == '\0'
496
              && (j == 0 || (j == 1 && constraint[0] == '%')))
497
            {
498
              constraint = constraints[match];
499
              *constraint_p = constraint;
500
              c_len = strlen (constraint);
501
              j = 0;
502
              /* ??? At the end of the loop, we will skip the first part of
503
                 the matched constraint.  This assumes not only that the
504
                 other constraint is an output constraint, but also that
505
                 the '=' or '+' come first.  */
506
              break;
507
            }
508
          else
509
            j = end - constraint;
510
          /* Anticipate increment at end of loop.  */
511
          j--;
512
        }
513
        /* Fall through.  */
514
 
515
      case 'p':  case 'r':
516
        *allows_reg = true;
517
        break;
518
 
519
      case 'g':  case 'X':
520
        *allows_reg = true;
521
        *allows_mem = true;
522
        break;
523
 
524
      default:
525
        if (! ISALPHA (constraint[j]))
526
          {
527
            error ("invalid punctuation %qc in constraint", constraint[j]);
528
            return false;
529
          }
530
        if (REG_CLASS_FROM_CONSTRAINT (constraint[j], constraint + j)
531
            != NO_REGS)
532
          *allows_reg = true;
533
#ifdef EXTRA_CONSTRAINT_STR
534
        else if (EXTRA_ADDRESS_CONSTRAINT (constraint[j], constraint + j))
535
          *allows_reg = true;
536
        else if (EXTRA_MEMORY_CONSTRAINT (constraint[j], constraint + j))
537
          *allows_mem = true;
538
        else
539
          {
540
            /* Otherwise we can't assume anything about the nature of
541
               the constraint except that it isn't purely registers.
542
               Treat it like "g" and hope for the best.  */
543
            *allows_reg = true;
544
            *allows_mem = true;
545
          }
546
#endif
547
        break;
548
      }
549
 
550
  if (saw_match && !*allows_reg)
551
    warning (0, "matching constraint does not allow a register");
552
 
553
  return true;
554
}
555
 
556
/* Return DECL iff there's an overlap between *REGS and DECL, where DECL
557
   can be an asm-declared register.  Called via walk_tree.  */
558
 
559
static tree
560
decl_overlaps_hard_reg_set_p (tree *declp, int *walk_subtrees ATTRIBUTE_UNUSED,
561
                              void *data)
562
{
563
  tree decl = *declp;
564
  const HARD_REG_SET *const regs = (const HARD_REG_SET *) data;
565
 
566
  if (TREE_CODE (decl) == VAR_DECL)
567
    {
568
      if (DECL_HARD_REGISTER (decl)
569
          && REG_P (DECL_RTL (decl))
570
          && REGNO (DECL_RTL (decl)) < FIRST_PSEUDO_REGISTER)
571
        {
572
          rtx reg = DECL_RTL (decl);
573
 
574
          if (overlaps_hard_reg_set_p (*regs, GET_MODE (reg), REGNO (reg)))
575
            return decl;
576
        }
577
      walk_subtrees = 0;
578
    }
579
  else if (TYPE_P (decl) || TREE_CODE (decl) == PARM_DECL)
580
    walk_subtrees = 0;
581
  return NULL_TREE;
582
}
583
 
584
/* If there is an overlap between *REGS and DECL, return the first overlap
585
   found.  */
586
tree
587
tree_overlaps_hard_reg_set (tree decl, HARD_REG_SET *regs)
588
{
589
  return walk_tree (&decl, decl_overlaps_hard_reg_set_p, regs, NULL);
590
}
591
 
592
/* Check for overlap between registers marked in CLOBBERED_REGS and
593
   anything inappropriate in T.  Emit error and return the register
594
   variable definition for error, NULL_TREE for ok.  */
595
 
596
static bool
597
tree_conflicts_with_clobbers_p (tree t, HARD_REG_SET *clobbered_regs)
598
{
599
  /* Conflicts between asm-declared register variables and the clobber
600
     list are not allowed.  */
601
  tree overlap = tree_overlaps_hard_reg_set (t, clobbered_regs);
602
 
603
  if (overlap)
604
    {
605
      error ("asm-specifier for variable %qE conflicts with asm clobber list",
606
             DECL_NAME (overlap));
607
 
608
      /* Reset registerness to stop multiple errors emitted for a single
609
         variable.  */
610
      DECL_REGISTER (overlap) = 0;
611
      return true;
612
    }
613
 
614
  return false;
615
}
616
 
617
/* Generate RTL for an asm statement with arguments.
618
   STRING is the instruction template.
619
   OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
620
   Each output or input has an expression in the TREE_VALUE and
621
   a tree list in TREE_PURPOSE which in turn contains a constraint
622
   name in TREE_VALUE (or NULL_TREE) and a constraint string
623
   in TREE_PURPOSE.
624
   CLOBBERS is a list of STRING_CST nodes each naming a hard register
625
   that is clobbered by this insn.
626
 
627
   Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
628
   Some elements of OUTPUTS may be replaced with trees representing temporary
629
   values.  The caller should copy those temporary values to the originally
630
   specified lvalues.
631
 
632
   VOL nonzero means the insn is volatile; don't optimize it.  */
633
 
634
static void
635
expand_asm_operands (tree string, tree outputs, tree inputs,
636
                     tree clobbers, tree labels, int vol, location_t locus)
637
{
638
  rtvec argvec, constraintvec, labelvec;
639
  rtx body;
640
  int ninputs = list_length (inputs);
641
  int noutputs = list_length (outputs);
642
  int nlabels = list_length (labels);
643
  int ninout;
644
  int nclobbers;
645
  HARD_REG_SET clobbered_regs;
646
  int clobber_conflict_found = 0;
647
  tree tail;
648
  tree t;
649
  int i;
650
  /* Vector of RTX's of evaluated output operands.  */
651
  rtx *output_rtx = XALLOCAVEC (rtx, noutputs);
652
  int *inout_opnum = XALLOCAVEC (int, noutputs);
653
  rtx *real_output_rtx = XALLOCAVEC (rtx, noutputs);
654
  enum machine_mode *inout_mode = XALLOCAVEC (enum machine_mode, noutputs);
655
  const char **constraints = XALLOCAVEC (const char *, noutputs + ninputs);
656
  int old_generating_concat_p = generating_concat_p;
657
 
658
  /* An ASM with no outputs needs to be treated as volatile, for now.  */
659
  if (noutputs == 0)
660
    vol = 1;
661
 
662
  if (! check_operand_nalternatives (outputs, inputs))
663
    return;
664
 
665
  string = resolve_asm_operand_names (string, outputs, inputs, labels);
666
 
667
  /* Collect constraints.  */
668
  i = 0;
669
  for (t = outputs; t ; t = TREE_CHAIN (t), i++)
670
    constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
671
  for (t = inputs; t ; t = TREE_CHAIN (t), i++)
672
    constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
673
 
674
  /* Sometimes we wish to automatically clobber registers across an asm.
675
     Case in point is when the i386 backend moved from cc0 to a hard reg --
676
     maintaining source-level compatibility means automatically clobbering
677
     the flags register.  */
678
  clobbers = targetm.md_asm_clobbers (outputs, inputs, clobbers);
679
 
680
  /* Count the number of meaningful clobbered registers, ignoring what
681
     we would ignore later.  */
682
  nclobbers = 0;
683
  CLEAR_HARD_REG_SET (clobbered_regs);
684
  for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
685
    {
686
      const char *regname;
687
 
688
      if (TREE_VALUE (tail) == error_mark_node)
689
        return;
690
      regname = TREE_STRING_POINTER (TREE_VALUE (tail));
691
 
692
      i = decode_reg_name (regname);
693
      if (i >= 0 || i == -4)
694
        ++nclobbers;
695
      else if (i == -2)
696
        error ("unknown register name %qs in %<asm%>", regname);
697
 
698
      /* Mark clobbered registers.  */
699
      if (i >= 0)
700
        {
701
          /* Clobbering the PIC register is an error.  */
702
          if (i == (int) PIC_OFFSET_TABLE_REGNUM)
703
            {
704
              error ("PIC register %qs clobbered in %<asm%>", regname);
705
              return;
706
            }
707
 
708
          SET_HARD_REG_BIT (clobbered_regs, i);
709
        }
710
    }
711
 
712
  /* First pass over inputs and outputs checks validity and sets
713
     mark_addressable if needed.  */
714
 
715
  ninout = 0;
716
  for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
717
    {
718
      tree val = TREE_VALUE (tail);
719
      tree type = TREE_TYPE (val);
720
      const char *constraint;
721
      bool is_inout;
722
      bool allows_reg;
723
      bool allows_mem;
724
 
725
      /* If there's an erroneous arg, emit no insn.  */
726
      if (type == error_mark_node)
727
        return;
728
 
729
      /* Try to parse the output constraint.  If that fails, there's
730
         no point in going further.  */
731
      constraint = constraints[i];
732
      if (!parse_output_constraint (&constraint, i, ninputs, noutputs,
733
                                    &allows_mem, &allows_reg, &is_inout))
734
        return;
735
 
736
      if (! allows_reg
737
          && (allows_mem
738
              || is_inout
739
              || (DECL_P (val)
740
                  && REG_P (DECL_RTL (val))
741
                  && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type))))
742
        mark_addressable (val);
743
 
744
      if (is_inout)
745
        ninout++;
746
    }
747
 
748
  ninputs += ninout;
749
  if (ninputs + noutputs > MAX_RECOG_OPERANDS)
750
    {
751
      error ("more than %d operands in %<asm%>", MAX_RECOG_OPERANDS);
752
      return;
753
    }
754
 
755
  for (i = 0, tail = inputs; tail; i++, tail = TREE_CHAIN (tail))
756
    {
757
      bool allows_reg, allows_mem;
758
      const char *constraint;
759
 
760
      /* If there's an erroneous arg, emit no insn, because the ASM_INPUT
761
         would get VOIDmode and that could cause a crash in reload.  */
762
      if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)
763
        return;
764
 
765
      constraint = constraints[i + noutputs];
766
      if (! parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
767
                                    constraints, &allows_mem, &allows_reg))
768
        return;
769
 
770
      if (! allows_reg && allows_mem)
771
        mark_addressable (TREE_VALUE (tail));
772
    }
773
 
774
  /* Second pass evaluates arguments.  */
775
 
776
  ninout = 0;
777
  for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
778
    {
779
      tree val = TREE_VALUE (tail);
780
      tree type = TREE_TYPE (val);
781
      bool is_inout;
782
      bool allows_reg;
783
      bool allows_mem;
784
      rtx op;
785
      bool ok;
786
 
787
      ok = parse_output_constraint (&constraints[i], i, ninputs,
788
                                    noutputs, &allows_mem, &allows_reg,
789
                                    &is_inout);
790
      gcc_assert (ok);
791
 
792
      /* If an output operand is not a decl or indirect ref and our constraint
793
         allows a register, make a temporary to act as an intermediate.
794
         Make the asm insn write into that, then our caller will copy it to
795
         the real output operand.  Likewise for promoted variables.  */
796
 
797
      generating_concat_p = 0;
798
 
799
      real_output_rtx[i] = NULL_RTX;
800
      if ((TREE_CODE (val) == INDIRECT_REF
801
           && allows_mem)
802
          || (DECL_P (val)
803
              && (allows_mem || REG_P (DECL_RTL (val)))
804
              && ! (REG_P (DECL_RTL (val))
805
                    && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
806
          || ! allows_reg
807
          || is_inout)
808
        {
809
          op = expand_expr (val, NULL_RTX, VOIDmode, EXPAND_WRITE);
810
          if (MEM_P (op))
811
            op = validize_mem (op);
812
 
813
          if (! allows_reg && !MEM_P (op))
814
            error ("output number %d not directly addressable", i);
815
          if ((! allows_mem && MEM_P (op))
816
              || GET_CODE (op) == CONCAT)
817
            {
818
              real_output_rtx[i] = op;
819
              op = gen_reg_rtx (GET_MODE (op));
820
              if (is_inout)
821
                emit_move_insn (op, real_output_rtx[i]);
822
            }
823
        }
824
      else
825
        {
826
          op = assign_temp (type, 0, 0, 1);
827
          op = validize_mem (op);
828
          if (!MEM_P (op) && TREE_CODE (TREE_VALUE (tail)) == SSA_NAME)
829
            set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (TREE_VALUE (tail)), op);
830
          TREE_VALUE (tail) = make_tree (type, op);
831
        }
832
      output_rtx[i] = op;
833
 
834
      generating_concat_p = old_generating_concat_p;
835
 
836
      if (is_inout)
837
        {
838
          inout_mode[ninout] = TYPE_MODE (type);
839
          inout_opnum[ninout++] = i;
840
        }
841
 
842
      if (tree_conflicts_with_clobbers_p (val, &clobbered_regs))
843
        clobber_conflict_found = 1;
844
    }
845
 
846
  /* Make vectors for the expression-rtx, constraint strings,
847
     and named operands.  */
848
 
849
  argvec = rtvec_alloc (ninputs);
850
  constraintvec = rtvec_alloc (ninputs);
851
  labelvec = rtvec_alloc (nlabels);
852
 
853
  body = gen_rtx_ASM_OPERANDS ((noutputs == 0 ? VOIDmode
854
                                : GET_MODE (output_rtx[0])),
855
                               ggc_strdup (TREE_STRING_POINTER (string)),
856
                               empty_string, 0, argvec, constraintvec,
857
                               labelvec, locus);
858
 
859
  MEM_VOLATILE_P (body) = vol;
860
 
861
  /* Eval the inputs and put them into ARGVEC.
862
     Put their constraints into ASM_INPUTs and store in CONSTRAINTS.  */
863
 
864
  for (i = 0, tail = inputs; tail; tail = TREE_CHAIN (tail), ++i)
865
    {
866
      bool allows_reg, allows_mem;
867
      const char *constraint;
868
      tree val, type;
869
      rtx op;
870
      bool ok;
871
 
872
      constraint = constraints[i + noutputs];
873
      ok = parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
874
                                   constraints, &allows_mem, &allows_reg);
875
      gcc_assert (ok);
876
 
877
      generating_concat_p = 0;
878
 
879
      val = TREE_VALUE (tail);
880
      type = TREE_TYPE (val);
881
      /* EXPAND_INITIALIZER will not generate code for valid initializer
882
         constants, but will still generate code for other types of operand.
883
         This is the behavior we want for constant constraints.  */
884
      op = expand_expr (val, NULL_RTX, VOIDmode,
885
                        allows_reg ? EXPAND_NORMAL
886
                        : allows_mem ? EXPAND_MEMORY
887
                        : EXPAND_INITIALIZER);
888
 
889
      /* Never pass a CONCAT to an ASM.  */
890
      if (GET_CODE (op) == CONCAT)
891
        op = force_reg (GET_MODE (op), op);
892
      else if (MEM_P (op))
893
        op = validize_mem (op);
894
 
895
      if (asm_operand_ok (op, constraint, NULL) <= 0)
896
        {
897
          if (allows_reg && TYPE_MODE (type) != BLKmode)
898
            op = force_reg (TYPE_MODE (type), op);
899
          else if (!allows_mem)
900
            warning (0, "asm operand %d probably doesn%'t match constraints",
901
                     i + noutputs);
902
          else if (MEM_P (op))
903
            {
904
              /* We won't recognize either volatile memory or memory
905
                 with a queued address as available a memory_operand
906
                 at this point.  Ignore it: clearly this *is* a memory.  */
907
            }
908
          else
909
            {
910
              warning (0, "use of memory input without lvalue in "
911
                       "asm operand %d is deprecated", i + noutputs);
912
 
913
              if (CONSTANT_P (op))
914
                {
915
                  rtx mem = force_const_mem (TYPE_MODE (type), op);
916
                  if (mem)
917
                    op = validize_mem (mem);
918
                  else
919
                    op = force_reg (TYPE_MODE (type), op);
920
                }
921
              if (REG_P (op)
922
                  || GET_CODE (op) == SUBREG
923
                  || GET_CODE (op) == CONCAT)
924
                {
925
                  tree qual_type = build_qualified_type (type,
926
                                                         (TYPE_QUALS (type)
927
                                                          | TYPE_QUAL_CONST));
928
                  rtx memloc = assign_temp (qual_type, 1, 1, 1);
929
                  memloc = validize_mem (memloc);
930
                  emit_move_insn (memloc, op);
931
                  op = memloc;
932
                }
933
            }
934
        }
935
 
936
      generating_concat_p = old_generating_concat_p;
937
      ASM_OPERANDS_INPUT (body, i) = op;
938
 
939
      ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, i)
940
        = gen_rtx_ASM_INPUT (TYPE_MODE (type),
941
                             ggc_strdup (constraints[i + noutputs]));
942
 
943
      if (tree_conflicts_with_clobbers_p (val, &clobbered_regs))
944
        clobber_conflict_found = 1;
945
    }
946
 
947
  /* Protect all the operands from the queue now that they have all been
948
     evaluated.  */
949
 
950
  generating_concat_p = 0;
951
 
952
  /* For in-out operands, copy output rtx to input rtx.  */
953
  for (i = 0; i < ninout; i++)
954
    {
955
      int j = inout_opnum[i];
956
      char buffer[16];
957
 
958
      ASM_OPERANDS_INPUT (body, ninputs - ninout + i)
959
        = output_rtx[j];
960
 
961
      sprintf (buffer, "%d", j);
962
      ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, ninputs - ninout + i)
963
        = gen_rtx_ASM_INPUT (inout_mode[i], ggc_strdup (buffer));
964
    }
965
 
966
  /* Copy labels to the vector.  */
967
  for (i = 0, tail = labels; i < nlabels; ++i, tail = TREE_CHAIN (tail))
968
    ASM_OPERANDS_LABEL (body, i)
969
      = gen_rtx_LABEL_REF (Pmode, label_rtx (TREE_VALUE (tail)));
970
 
971
  generating_concat_p = old_generating_concat_p;
972
 
973
  /* Now, for each output, construct an rtx
974
     (set OUTPUT (asm_operands INSN OUTPUTCONSTRAINT OUTPUTNUMBER
975
                               ARGVEC CONSTRAINTS OPNAMES))
976
     If there is more than one, put them inside a PARALLEL.  */
977
 
978
  if (nlabels > 0 && nclobbers == 0)
979
    {
980
      gcc_assert (noutputs == 0);
981
      emit_jump_insn (body);
982
    }
983
  else if (noutputs == 0 && nclobbers == 0)
984
    {
985
      /* No output operands: put in a raw ASM_OPERANDS rtx.  */
986
      emit_insn (body);
987
    }
988
  else if (noutputs == 1 && nclobbers == 0)
989
    {
990
      ASM_OPERANDS_OUTPUT_CONSTRAINT (body) = ggc_strdup (constraints[0]);
991
      emit_insn (gen_rtx_SET (VOIDmode, output_rtx[0], body));
992
    }
993
  else
994
    {
995
      rtx obody = body;
996
      int num = noutputs;
997
 
998
      if (num == 0)
999
        num = 1;
1000
 
1001
      body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num + nclobbers));
1002
 
1003
      /* For each output operand, store a SET.  */
1004
      for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1005
        {
1006
          XVECEXP (body, 0, i)
1007
            = gen_rtx_SET (VOIDmode,
1008
                           output_rtx[i],
1009
                           gen_rtx_ASM_OPERANDS
1010
                           (GET_MODE (output_rtx[i]),
1011
                            ggc_strdup (TREE_STRING_POINTER (string)),
1012
                            ggc_strdup (constraints[i]),
1013
                            i, argvec, constraintvec, labelvec, locus));
1014
 
1015
          MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;
1016
        }
1017
 
1018
      /* If there are no outputs (but there are some clobbers)
1019
         store the bare ASM_OPERANDS into the PARALLEL.  */
1020
 
1021
      if (i == 0)
1022
        XVECEXP (body, 0, i++) = obody;
1023
 
1024
      /* Store (clobber REG) for each clobbered register specified.  */
1025
 
1026
      for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
1027
        {
1028
          const char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
1029
          int j = decode_reg_name (regname);
1030
          rtx clobbered_reg;
1031
 
1032
          if (j < 0)
1033
            {
1034
              if (j == -3)      /* `cc', which is not a register */
1035
                continue;
1036
 
1037
              if (j == -4)      /* `memory', don't cache memory across asm */
1038
                {
1039
                  XVECEXP (body, 0, i++)
1040
                    = gen_rtx_CLOBBER (VOIDmode,
1041
                                       gen_rtx_MEM
1042
                                       (BLKmode,
1043
                                        gen_rtx_SCRATCH (VOIDmode)));
1044
                  continue;
1045
                }
1046
 
1047
              /* Ignore unknown register, error already signaled.  */
1048
              continue;
1049
            }
1050
 
1051
          /* Use QImode since that's guaranteed to clobber just one reg.  */
1052
          clobbered_reg = gen_rtx_REG (QImode, j);
1053
 
1054
          /* Do sanity check for overlap between clobbers and respectively
1055
             input and outputs that hasn't been handled.  Such overlap
1056
             should have been detected and reported above.  */
1057
          if (!clobber_conflict_found)
1058
            {
1059
              int opno;
1060
 
1061
              /* We test the old body (obody) contents to avoid tripping
1062
                 over the under-construction body.  */
1063
              for (opno = 0; opno < noutputs; opno++)
1064
                if (reg_overlap_mentioned_p (clobbered_reg, output_rtx[opno]))
1065
                  internal_error ("asm clobber conflict with output operand");
1066
 
1067
              for (opno = 0; opno < ninputs - ninout; opno++)
1068
                if (reg_overlap_mentioned_p (clobbered_reg,
1069
                                             ASM_OPERANDS_INPUT (obody, opno)))
1070
                  internal_error ("asm clobber conflict with input operand");
1071
            }
1072
 
1073
          XVECEXP (body, 0, i++)
1074
            = gen_rtx_CLOBBER (VOIDmode, clobbered_reg);
1075
        }
1076
 
1077
      if (nlabels > 0)
1078
        emit_jump_insn (body);
1079
      else
1080
        emit_insn (body);
1081
    }
1082
 
1083
  /* For any outputs that needed reloading into registers, spill them
1084
     back to where they belong.  */
1085
  for (i = 0; i < noutputs; ++i)
1086
    if (real_output_rtx[i])
1087
      emit_move_insn (real_output_rtx[i], output_rtx[i]);
1088
 
1089
  crtl->has_asm_statement = 1;
1090
  free_temp_slots ();
1091
}
1092
 
1093
void
1094
expand_asm_stmt (gimple stmt)
1095
{
1096
  int noutputs;
1097
  tree outputs, tail, t;
1098
  tree *o;
1099
  size_t i, n;
1100
  const char *s;
1101
  tree str, out, in, cl, labels;
1102
  location_t locus = gimple_location (stmt);
1103
 
1104
  /* Meh... convert the gimple asm operands into real tree lists.
1105
     Eventually we should make all routines work on the vectors instead
1106
     of relying on TREE_CHAIN.  */
1107
  out = NULL_TREE;
1108
  n = gimple_asm_noutputs (stmt);
1109
  if (n > 0)
1110
    {
1111
      t = out = gimple_asm_output_op (stmt, 0);
1112
      for (i = 1; i < n; i++)
1113
        t = TREE_CHAIN (t) = gimple_asm_output_op (stmt, i);
1114
    }
1115
 
1116
  in = NULL_TREE;
1117
  n = gimple_asm_ninputs (stmt);
1118
  if (n > 0)
1119
    {
1120
      t = in = gimple_asm_input_op (stmt, 0);
1121
      for (i = 1; i < n; i++)
1122
        t = TREE_CHAIN (t) = gimple_asm_input_op (stmt, i);
1123
    }
1124
 
1125
  cl = NULL_TREE;
1126
  n = gimple_asm_nclobbers (stmt);
1127
  if (n > 0)
1128
    {
1129
      t = cl = gimple_asm_clobber_op (stmt, 0);
1130
      for (i = 1; i < n; i++)
1131
        t = TREE_CHAIN (t) = gimple_asm_clobber_op (stmt, i);
1132
    }
1133
 
1134
  labels = NULL_TREE;
1135
  n = gimple_asm_nlabels (stmt);
1136
  if (n > 0)
1137
    {
1138
      t = labels = gimple_asm_label_op (stmt, 0);
1139
      for (i = 1; i < n; i++)
1140
        t = TREE_CHAIN (t) = gimple_asm_label_op (stmt, i);
1141
    }
1142
 
1143
  s = gimple_asm_string (stmt);
1144
  str = build_string (strlen (s), s);
1145
 
1146
  if (gimple_asm_input_p (stmt))
1147
    {
1148
      expand_asm_loc (str, gimple_asm_volatile_p (stmt), locus);
1149
      return;
1150
    }
1151
 
1152
  outputs = out;
1153
  noutputs = gimple_asm_noutputs (stmt);
1154
  /* o[I] is the place that output number I should be written.  */
1155
  o = (tree *) alloca (noutputs * sizeof (tree));
1156
 
1157
  /* Record the contents of OUTPUTS before it is modified.  */
1158
  for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1159
    o[i] = TREE_VALUE (tail);
1160
 
1161
  /* Generate the ASM_OPERANDS insn; store into the TREE_VALUEs of
1162
     OUTPUTS some trees for where the values were actually stored.  */
1163
  expand_asm_operands (str, outputs, in, cl, labels,
1164
                       gimple_asm_volatile_p (stmt), locus);
1165
 
1166
  /* Copy all the intermediate outputs into the specified outputs.  */
1167
  for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1168
    {
1169
      if (o[i] != TREE_VALUE (tail))
1170
        {
1171
          expand_assignment (o[i], TREE_VALUE (tail), false);
1172
          free_temp_slots ();
1173
 
1174
          /* Restore the original value so that it's correct the next
1175
             time we expand this function.  */
1176
          TREE_VALUE (tail) = o[i];
1177
        }
1178
    }
1179
}
1180
 
1181
/* A subroutine of expand_asm_operands.  Check that all operands have
1182
   the same number of alternatives.  Return true if so.  */
1183
 
1184
static bool
1185
check_operand_nalternatives (tree outputs, tree inputs)
1186
{
1187
  if (outputs || inputs)
1188
    {
1189
      tree tmp = TREE_PURPOSE (outputs ? outputs : inputs);
1190
      int nalternatives
1191
        = n_occurrences (',', TREE_STRING_POINTER (TREE_VALUE (tmp)));
1192
      tree next = inputs;
1193
 
1194
      if (nalternatives + 1 > MAX_RECOG_ALTERNATIVES)
1195
        {
1196
          error ("too many alternatives in %<asm%>");
1197
          return false;
1198
        }
1199
 
1200
      tmp = outputs;
1201
      while (tmp)
1202
        {
1203
          const char *constraint
1204
            = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (tmp)));
1205
 
1206
          if (n_occurrences (',', constraint) != nalternatives)
1207
            {
1208
              error ("operand constraints for %<asm%> differ "
1209
                     "in number of alternatives");
1210
              return false;
1211
            }
1212
 
1213
          if (TREE_CHAIN (tmp))
1214
            tmp = TREE_CHAIN (tmp);
1215
          else
1216
            tmp = next, next = 0;
1217
        }
1218
    }
1219
 
1220
  return true;
1221
}
1222
 
1223
/* A subroutine of expand_asm_operands.  Check that all operand names
1224
   are unique.  Return true if so.  We rely on the fact that these names
1225
   are identifiers, and so have been canonicalized by get_identifier,
1226
   so all we need are pointer comparisons.  */
1227
 
1228
static bool
1229
check_unique_operand_names (tree outputs, tree inputs, tree labels)
1230
{
1231
  tree i, j;
1232
 
1233
  for (i = outputs; i ; i = TREE_CHAIN (i))
1234
    {
1235
      tree i_name = TREE_PURPOSE (TREE_PURPOSE (i));
1236
      if (! i_name)
1237
        continue;
1238
 
1239
      for (j = TREE_CHAIN (i); j ; j = TREE_CHAIN (j))
1240
        if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
1241
          goto failure;
1242
    }
1243
 
1244
  for (i = inputs; i ; i = TREE_CHAIN (i))
1245
    {
1246
      tree i_name = TREE_PURPOSE (TREE_PURPOSE (i));
1247
      if (! i_name)
1248
        continue;
1249
 
1250
      for (j = TREE_CHAIN (i); j ; j = TREE_CHAIN (j))
1251
        if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
1252
          goto failure;
1253
      for (j = outputs; j ; j = TREE_CHAIN (j))
1254
        if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
1255
          goto failure;
1256
    }
1257
 
1258
  for (i = labels; i ; i = TREE_CHAIN (i))
1259
    {
1260
      tree i_name = TREE_PURPOSE (i);
1261
      if (! i_name)
1262
        continue;
1263
 
1264
      for (j = TREE_CHAIN (i); j ; j = TREE_CHAIN (j))
1265
        if (simple_cst_equal (i_name, TREE_PURPOSE (j)))
1266
          goto failure;
1267
      for (j = inputs; j ; j = TREE_CHAIN (j))
1268
        if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
1269
          goto failure;
1270
    }
1271
 
1272
  return true;
1273
 
1274
 failure:
1275
  error ("duplicate asm operand name %qs",
1276
         TREE_STRING_POINTER (TREE_PURPOSE (TREE_PURPOSE (i))));
1277
  return false;
1278
}
1279
 
1280
/* A subroutine of expand_asm_operands.  Resolve the names of the operands
1281
   in *POUTPUTS and *PINPUTS to numbers, and replace the name expansions in
1282
   STRING and in the constraints to those numbers.  */
1283
 
1284
tree
1285
resolve_asm_operand_names (tree string, tree outputs, tree inputs, tree labels)
1286
{
1287
  char *buffer;
1288
  char *p;
1289
  const char *c;
1290
  tree t;
1291
 
1292
  check_unique_operand_names (outputs, inputs, labels);
1293
 
1294
  /* Substitute [<name>] in input constraint strings.  There should be no
1295
     named operands in output constraints.  */
1296
  for (t = inputs; t ; t = TREE_CHAIN (t))
1297
    {
1298
      c = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
1299
      if (strchr (c, '[') != NULL)
1300
        {
1301
          p = buffer = xstrdup (c);
1302
          while ((p = strchr (p, '[')) != NULL)
1303
            p = resolve_operand_name_1 (p, outputs, inputs, NULL);
1304
          TREE_VALUE (TREE_PURPOSE (t))
1305
            = build_string (strlen (buffer), buffer);
1306
          free (buffer);
1307
        }
1308
    }
1309
 
1310
  /* Now check for any needed substitutions in the template.  */
1311
  c = TREE_STRING_POINTER (string);
1312
  while ((c = strchr (c, '%')) != NULL)
1313
    {
1314
      if (c[1] == '[')
1315
        break;
1316
      else if (ISALPHA (c[1]) && c[2] == '[')
1317
        break;
1318
      else
1319
        {
1320
          c += 1;
1321
          continue;
1322
        }
1323
    }
1324
 
1325
  if (c)
1326
    {
1327
      /* OK, we need to make a copy so we can perform the substitutions.
1328
         Assume that we will not need extra space--we get to remove '['
1329
         and ']', which means we cannot have a problem until we have more
1330
         than 999 operands.  */
1331
      buffer = xstrdup (TREE_STRING_POINTER (string));
1332
      p = buffer + (c - TREE_STRING_POINTER (string));
1333
 
1334
      while ((p = strchr (p, '%')) != NULL)
1335
        {
1336
          if (p[1] == '[')
1337
            p += 1;
1338
          else if (ISALPHA (p[1]) && p[2] == '[')
1339
            p += 2;
1340
          else
1341
            {
1342
              p += 1;
1343
              continue;
1344
            }
1345
 
1346
          p = resolve_operand_name_1 (p, outputs, inputs, labels);
1347
        }
1348
 
1349
      string = build_string (strlen (buffer), buffer);
1350
      free (buffer);
1351
    }
1352
 
1353
  return string;
1354
}
1355
 
1356
/* A subroutine of resolve_operand_names.  P points to the '[' for a
1357
   potential named operand of the form [<name>].  In place, replace
1358
   the name and brackets with a number.  Return a pointer to the
1359
   balance of the string after substitution.  */
1360
 
1361
static char *
1362
resolve_operand_name_1 (char *p, tree outputs, tree inputs, tree labels)
1363
{
1364
  char *q;
1365
  int op;
1366
  tree t;
1367
 
1368
  /* Collect the operand name.  */
1369
  q = strchr (++p, ']');
1370
  if (!q)
1371
    {
1372
      error ("missing close brace for named operand");
1373
      return strchr (p, '\0');
1374
    }
1375
  *q = '\0';
1376
 
1377
  /* Resolve the name to a number.  */
1378
  for (op = 0, t = outputs; t ; t = TREE_CHAIN (t), op++)
1379
    {
1380
      tree name = TREE_PURPOSE (TREE_PURPOSE (t));
1381
      if (name && strcmp (TREE_STRING_POINTER (name), p) == 0)
1382
        goto found;
1383
    }
1384
  for (t = inputs; t ; t = TREE_CHAIN (t), op++)
1385
    {
1386
      tree name = TREE_PURPOSE (TREE_PURPOSE (t));
1387
      if (name && strcmp (TREE_STRING_POINTER (name), p) == 0)
1388
        goto found;
1389
    }
1390
  for (t = labels; t ; t = TREE_CHAIN (t), op++)
1391
    {
1392
      tree name = TREE_PURPOSE (t);
1393
      if (name && strcmp (TREE_STRING_POINTER (name), p) == 0)
1394
        goto found;
1395
    }
1396
 
1397
  error ("undefined named operand %qs", identifier_to_locale (p));
1398
  op = 0;
1399
 
1400
 found:
1401
  /* Replace the name with the number.  Unfortunately, not all libraries
1402
     get the return value of sprintf correct, so search for the end of the
1403
     generated string by hand.  */
1404
  sprintf (--p, "%d", op);
1405
  p = strchr (p, '\0');
1406
 
1407
  /* Verify the no extra buffer space assumption.  */
1408
  gcc_assert (p <= q);
1409
 
1410
  /* Shift the rest of the buffer down to fill the gap.  */
1411
  memmove (p, q + 1, strlen (q + 1) + 1);
1412
 
1413
  return p;
1414
}
1415
 
1416
/* Generate RTL to evaluate the expression EXP.  */
1417
 
1418
void
1419
expand_expr_stmt (tree exp)
1420
{
1421
  rtx value;
1422
  tree type;
1423
 
1424
  value = expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
1425
  type = TREE_TYPE (exp);
1426
 
1427
  /* If all we do is reference a volatile value in memory,
1428
     copy it to a register to be sure it is actually touched.  */
1429
  if (value && MEM_P (value) && TREE_THIS_VOLATILE (exp))
1430
    {
1431
      if (TYPE_MODE (type) == VOIDmode)
1432
        ;
1433
      else if (TYPE_MODE (type) != BLKmode)
1434
        value = copy_to_reg (value);
1435
      else
1436
        {
1437
          rtx lab = gen_label_rtx ();
1438
 
1439
          /* Compare the value with itself to reference it.  */
1440
          emit_cmp_and_jump_insns (value, value, EQ,
1441
                                   expand_normal (TYPE_SIZE (type)),
1442
                                   BLKmode, 0, lab);
1443
          emit_label (lab);
1444
        }
1445
    }
1446
 
1447
  /* Free any temporaries used to evaluate this expression.  */
1448
  free_temp_slots ();
1449
}
1450
 
1451
/* Warn if EXP contains any computations whose results are not used.
1452
   Return 1 if a warning is printed; 0 otherwise.  LOCUS is the
1453
   (potential) location of the expression.  */
1454
 
1455
int
1456
warn_if_unused_value (const_tree exp, location_t locus)
1457
{
1458
 restart:
1459
  if (TREE_USED (exp) || TREE_NO_WARNING (exp))
1460
    return 0;
1461
 
1462
  /* Don't warn about void constructs.  This includes casting to void,
1463
     void function calls, and statement expressions with a final cast
1464
     to void.  */
1465
  if (VOID_TYPE_P (TREE_TYPE (exp)))
1466
    return 0;
1467
 
1468
  if (EXPR_HAS_LOCATION (exp))
1469
    locus = EXPR_LOCATION (exp);
1470
 
1471
  switch (TREE_CODE (exp))
1472
    {
1473
    case PREINCREMENT_EXPR:
1474
    case POSTINCREMENT_EXPR:
1475
    case PREDECREMENT_EXPR:
1476
    case POSTDECREMENT_EXPR:
1477
    case MODIFY_EXPR:
1478
    case INIT_EXPR:
1479
    case TARGET_EXPR:
1480
    case CALL_EXPR:
1481
    case TRY_CATCH_EXPR:
1482
    case WITH_CLEANUP_EXPR:
1483
    case EXIT_EXPR:
1484
    case VA_ARG_EXPR:
1485
      return 0;
1486
 
1487
    case BIND_EXPR:
1488
      /* For a binding, warn if no side effect within it.  */
1489
      exp = BIND_EXPR_BODY (exp);
1490
      goto restart;
1491
 
1492
    case SAVE_EXPR:
1493
    case NON_LVALUE_EXPR:
1494
      exp = TREE_OPERAND (exp, 0);
1495
      goto restart;
1496
 
1497
    case TRUTH_ORIF_EXPR:
1498
    case TRUTH_ANDIF_EXPR:
1499
      /* In && or ||, warn if 2nd operand has no side effect.  */
1500
      exp = TREE_OPERAND (exp, 1);
1501
      goto restart;
1502
 
1503
    case COMPOUND_EXPR:
1504
      if (warn_if_unused_value (TREE_OPERAND (exp, 0), locus))
1505
        return 1;
1506
      /* Let people do `(foo (), 0)' without a warning.  */
1507
      if (TREE_CONSTANT (TREE_OPERAND (exp, 1)))
1508
        return 0;
1509
      exp = TREE_OPERAND (exp, 1);
1510
      goto restart;
1511
 
1512
    case COND_EXPR:
1513
      /* If this is an expression with side effects, don't warn; this
1514
         case commonly appears in macro expansions.  */
1515
      if (TREE_SIDE_EFFECTS (exp))
1516
        return 0;
1517
      goto warn;
1518
 
1519
    case INDIRECT_REF:
1520
      /* Don't warn about automatic dereferencing of references, since
1521
         the user cannot control it.  */
1522
      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == REFERENCE_TYPE)
1523
        {
1524
          exp = TREE_OPERAND (exp, 0);
1525
          goto restart;
1526
        }
1527
      /* Fall through.  */
1528
 
1529
    default:
1530
      /* Referencing a volatile value is a side effect, so don't warn.  */
1531
      if ((DECL_P (exp) || REFERENCE_CLASS_P (exp))
1532
          && TREE_THIS_VOLATILE (exp))
1533
        return 0;
1534
 
1535
      /* If this is an expression which has no operands, there is no value
1536
         to be unused.  There are no such language-independent codes,
1537
         but front ends may define such.  */
1538
      if (EXPRESSION_CLASS_P (exp) && TREE_OPERAND_LENGTH (exp) == 0)
1539
        return 0;
1540
 
1541
    warn:
1542
      warning_at (locus, OPT_Wunused_value, "value computed is not used");
1543
      return 1;
1544
    }
1545
}
1546
 
1547
 
1548
/* Generate RTL to return from the current function, with no value.
1549
   (That is, we do not do anything about returning any value.)  */
1550
 
1551
void
1552
expand_null_return (void)
1553
{
1554
  /* If this function was declared to return a value, but we
1555
     didn't, clobber the return registers so that they are not
1556
     propagated live to the rest of the function.  */
1557
  clobber_return_register ();
1558
 
1559
  expand_null_return_1 ();
1560
}
1561
 
1562
/* Generate RTL to return directly from the current function.
1563
   (That is, we bypass any return value.)  */
1564
 
1565
void
1566
expand_naked_return (void)
1567
{
1568
  rtx end_label;
1569
 
1570
  clear_pending_stack_adjust ();
1571
  do_pending_stack_adjust ();
1572
 
1573
  end_label = naked_return_label;
1574
  if (end_label == 0)
1575
    end_label = naked_return_label = gen_label_rtx ();
1576
 
1577
  emit_jump (end_label);
1578
}
1579
 
1580
/* Generate RTL to return from the current function, with value VAL.  */
1581
 
1582
static void
1583
expand_value_return (rtx val)
1584
{
1585
  /* Copy the value to the return location unless it's already there.  */
1586
 
1587
  tree decl = DECL_RESULT (current_function_decl);
1588
  rtx return_reg = DECL_RTL (decl);
1589
  if (return_reg != val)
1590
    {
1591
      tree funtype = TREE_TYPE (current_function_decl);
1592
      tree type = TREE_TYPE (decl);
1593
      int unsignedp = TYPE_UNSIGNED (type);
1594
      enum machine_mode old_mode = DECL_MODE (decl);
1595
      enum machine_mode mode = promote_function_mode (type, old_mode,
1596
                                                      &unsignedp, funtype, 1);
1597
 
1598
      if (mode != old_mode)
1599
        val = convert_modes (mode, old_mode, val, unsignedp);
1600
 
1601
      if (GET_CODE (return_reg) == PARALLEL)
1602
        emit_group_load (return_reg, val, type, int_size_in_bytes (type));
1603
      else
1604
        emit_move_insn (return_reg, val);
1605
    }
1606
 
1607
  expand_null_return_1 ();
1608
}
1609
 
1610
/* Output a return with no value.  */
1611
 
1612
static void
1613
expand_null_return_1 (void)
1614
{
1615
  clear_pending_stack_adjust ();
1616
  do_pending_stack_adjust ();
1617
  emit_jump (return_label);
1618
}
1619
 
1620
/* Generate RTL to evaluate the expression RETVAL and return it
1621
   from the current function.  */
1622
 
1623
void
1624
expand_return (tree retval)
1625
{
1626
  rtx result_rtl;
1627
  rtx val = 0;
1628
  tree retval_rhs;
1629
 
1630
  /* If function wants no value, give it none.  */
1631
  if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
1632
    {
1633
      expand_normal (retval);
1634
      expand_null_return ();
1635
      return;
1636
    }
1637
 
1638
  if (retval == error_mark_node)
1639
    {
1640
      /* Treat this like a return of no value from a function that
1641
         returns a value.  */
1642
      expand_null_return ();
1643
      return;
1644
    }
1645
  else if ((TREE_CODE (retval) == MODIFY_EXPR
1646
            || TREE_CODE (retval) == INIT_EXPR)
1647
           && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
1648
    retval_rhs = TREE_OPERAND (retval, 1);
1649
  else
1650
    retval_rhs = retval;
1651
 
1652
  result_rtl = DECL_RTL (DECL_RESULT (current_function_decl));
1653
 
1654
  /* If we are returning the RESULT_DECL, then the value has already
1655
     been stored into it, so we don't have to do anything special.  */
1656
  if (TREE_CODE (retval_rhs) == RESULT_DECL)
1657
    expand_value_return (result_rtl);
1658
 
1659
  /* If the result is an aggregate that is being returned in one (or more)
1660
     registers, load the registers here.  The compiler currently can't handle
1661
     copying a BLKmode value into registers.  We could put this code in a
1662
     more general area (for use by everyone instead of just function
1663
     call/return), but until this feature is generally usable it is kept here
1664
     (and in expand_call).  */
1665
 
1666
  else if (retval_rhs != 0
1667
           && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
1668
           && REG_P (result_rtl))
1669
    {
1670
      int i;
1671
      unsigned HOST_WIDE_INT bitpos, xbitpos;
1672
      unsigned HOST_WIDE_INT padding_correction = 0;
1673
      unsigned HOST_WIDE_INT bytes
1674
        = int_size_in_bytes (TREE_TYPE (retval_rhs));
1675
      int n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
1676
      unsigned int bitsize
1677
        = MIN (TYPE_ALIGN (TREE_TYPE (retval_rhs)), BITS_PER_WORD);
1678
      rtx *result_pseudos = XALLOCAVEC (rtx, n_regs);
1679
      rtx result_reg, src = NULL_RTX, dst = NULL_RTX;
1680
      rtx result_val = expand_normal (retval_rhs);
1681
      enum machine_mode tmpmode, result_reg_mode;
1682
 
1683
      if (bytes == 0)
1684
        {
1685
          expand_null_return ();
1686
          return;
1687
        }
1688
 
1689
      /* If the structure doesn't take up a whole number of words, see
1690
         whether the register value should be padded on the left or on
1691
         the right.  Set PADDING_CORRECTION to the number of padding
1692
         bits needed on the left side.
1693
 
1694
         In most ABIs, the structure will be returned at the least significant end of
1695
         the register, which translates to right padding on little-endian
1696
         targets and left padding on big-endian targets.  The opposite
1697
         holds if the structure is returned at the most significant
1698
         end of the register.  */
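      /* A worked example (illustrative, not from the original sources):
         on a 32-bit big-endian target with UNITS_PER_WORD == 4 and
         BITS_PER_UNIT == 8, a 6-byte structure leaves
         bytes % UNITS_PER_WORD == 2, so PADDING_CORRECTION becomes
         32 - 2 * 8 = 16 bits of padding on the left, assuming the ABI
         returns the value at the least significant end of the register.  */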
1699
      if (bytes % UNITS_PER_WORD != 0
1700
          && (targetm.calls.return_in_msb (TREE_TYPE (retval_rhs))
1701
              ? !BYTES_BIG_ENDIAN
1702
              : BYTES_BIG_ENDIAN))
1703
        padding_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
1704
                                               * BITS_PER_UNIT));
1705
 
1706
      /* Copy the structure BITSIZE bits at a time.  */
1707
      for (bitpos = 0, xbitpos = padding_correction;
1708
           bitpos < bytes * BITS_PER_UNIT;
1709
           bitpos += bitsize, xbitpos += bitsize)
1710
        {
1711
          /* We need a new destination pseudo each time xbitpos is
1712
             on a word boundary and when xbitpos == padding_correction
1713
             (the first time through).  */
1714
          if (xbitpos % BITS_PER_WORD == 0
1715
              || xbitpos == padding_correction)
1716
            {
1717
              /* Generate an appropriate register.  */
1718
              dst = gen_reg_rtx (word_mode);
1719
              result_pseudos[xbitpos / BITS_PER_WORD] = dst;
1720
 
1721
              /* Clear the destination before we move anything into it.  */
1722
              emit_move_insn (dst, CONST0_RTX (GET_MODE (dst)));
1723
            }
1724
 
1725
          /* We need a new source operand each time bitpos is on a word
1726
             boundary.  */
1727
          if (bitpos % BITS_PER_WORD == 0)
1728
            src = operand_subword_force (result_val,
1729
                                         bitpos / BITS_PER_WORD,
1730
                                         BLKmode);
1731
 
1732
          /* Use bitpos for the source extraction (left justified) and
1733
             xbitpos for the destination store (right justified).  */
1734
          store_bit_field (dst, bitsize, xbitpos % BITS_PER_WORD, word_mode,
1735
                           extract_bit_field (src, bitsize,
1736
                                              bitpos % BITS_PER_WORD, 1,
1737
                                              NULL_RTX, word_mode, word_mode));
1738
        }
1739
 
1740
      tmpmode = GET_MODE (result_rtl);
1741
      if (tmpmode == BLKmode)
1742
        {
1743
          /* Find the smallest integer mode large enough to hold the
1744
             entire structure and use that mode instead of BLKmode
1745
             on the USE insn for the return register.  */
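          /* For instance (illustrative only): on a typical target where
             QImode, HImode and SImode are 1, 2 and 4 bytes wide, a 3-byte
             structure would be given SImode here, the narrowest integer
             mode wide enough to hold all 3 bytes.  */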
1746
          for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1747
               tmpmode != VOIDmode;
1748
               tmpmode = GET_MODE_WIDER_MODE (tmpmode))
1749
            /* Have we found a large enough mode?  */
1750
            if (GET_MODE_SIZE (tmpmode) >= bytes)
1751
              break;
1752
 
1753
          /* A suitable mode should have been found.  */
1754
          gcc_assert (tmpmode != VOIDmode);
1755
 
1756
          PUT_MODE (result_rtl, tmpmode);
1757
        }
1758
 
1759
      if (GET_MODE_SIZE (tmpmode) < GET_MODE_SIZE (word_mode))
1760
        result_reg_mode = word_mode;
1761
      else
1762
        result_reg_mode = tmpmode;
1763
      result_reg = gen_reg_rtx (result_reg_mode);
1764
 
1765
      for (i = 0; i < n_regs; i++)
1766
        emit_move_insn (operand_subword (result_reg, i, 0, result_reg_mode),
1767
                        result_pseudos[i]);
1768
 
1769
      if (tmpmode != result_reg_mode)
1770
        result_reg = gen_lowpart (tmpmode, result_reg);
1771
 
1772
      expand_value_return (result_reg);
1773
    }
1774
  else if (retval_rhs != 0
1775
           && !VOID_TYPE_P (TREE_TYPE (retval_rhs))
1776
           && (REG_P (result_rtl)
1777
               || (GET_CODE (result_rtl) == PARALLEL)))
1778
    {
1779
      /* Calculate the return value into a temporary (usually a pseudo
1780
         reg).  */
1781
      tree ot = TREE_TYPE (DECL_RESULT (current_function_decl));
1782
      tree nt = build_qualified_type (ot, TYPE_QUALS (ot) | TYPE_QUAL_CONST);
1783
 
1784
      val = assign_temp (nt, 0, 0, 1);
1785
      val = expand_expr (retval_rhs, val, GET_MODE (val), EXPAND_NORMAL);
1786
      val = force_not_mem (val);
1787
      /* Return the calculated value.  */
1788
      expand_value_return (val);
1789
    }
1790
  else
1791
    {
1792
      /* No hard reg used; calculate value into hard return reg.  */
1793
      expand_expr (retval, const0_rtx, VOIDmode, EXPAND_NORMAL);
1794
      expand_value_return (result_rtl);
1795
    }
1796
}
1797
 
1798
/* Emit code to restore vital registers at the beginning of a nonlocal goto
1799
   handler.  */
1800
static void
1801
expand_nl_goto_receiver (void)
1802
{
1803
  rtx chain;
1804
 
1805
  /* The FP is clobbered when we get here, so we have to make sure it's
1806
     marked as used by this function.  */
1807
  emit_use (hard_frame_pointer_rtx);
1808
 
1809
  /* Mark the static chain as clobbered here so life information
1810
     doesn't get messed up for it.  */
1811
  chain = targetm.calls.static_chain (current_function_decl, true);
1812
  if (chain && REG_P (chain))
1813
    emit_clobber (chain);
1814
 
1815
#ifdef HAVE_nonlocal_goto
1816
  if (! HAVE_nonlocal_goto)
1817
#endif
1818
    /* First adjust our frame pointer to its actual value.  It was
1819
       previously set to the start of the virtual area corresponding to
1820
       the stacked variables when we branched here and now needs to be
1821
       adjusted to the actual hardware fp value.
1822
 
1823
       Assignments to virtual registers are converted by
1824
       instantiate_virtual_regs into the corresponding assignment
1825
       to the underlying register (fp in this case) that makes
1826
       the original assignment true.
1827
       So the following insn will actually be
1828
       decrementing fp by STARTING_FRAME_OFFSET.  */
1829
    emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
1830
 
1831
#if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
1832
  if (fixed_regs[ARG_POINTER_REGNUM])
1833
    {
1834
#ifdef ELIMINABLE_REGS
1835
      /* If the argument pointer can be eliminated in favor of the
1836
         frame pointer, we don't need to restore it.  We assume here
1837
         that if such an elimination is present, it can always be used.
1838
         This is the case on all known machines; if we don't make this
1839
         assumption, we do unnecessary saving on many machines.  */
1840
      static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
1841
      size_t i;
1842
 
1843
      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
1844
        if (elim_regs[i].from == ARG_POINTER_REGNUM
1845
            && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
1846
          break;
1847
 
1848
      if (i == ARRAY_SIZE (elim_regs))
1849
#endif
1850
        {
1851
          /* Now restore our arg pointer from the address at which it
1852
             was saved in our stack frame.  */
1853
          emit_move_insn (crtl->args.internal_arg_pointer,
1854
                          copy_to_reg (get_arg_pointer_save_area ()));
1855
        }
1856
    }
1857
#endif
1858
 
1859
#ifdef HAVE_nonlocal_goto_receiver
1860
  if (HAVE_nonlocal_goto_receiver)
1861
    emit_insn (gen_nonlocal_goto_receiver ());
1862
#endif
1863
 
1864
  /* We must not allow the code we just generated to be reordered by
1865
     scheduling.  Specifically, the update of the frame pointer must
1866
     happen immediately, not later.  */
1867
  emit_insn (gen_blockage ());
1868
}
1869
 
1870
/* Generate RTL for the automatic variable declaration DECL.
1871
   (Other kinds of declarations are simply ignored if seen here.)  */
1872
 
1873
void
1874
expand_decl (tree decl)
1875
{
1876
  tree type;
1877
 
1878
  type = TREE_TYPE (decl);
1879
 
1880
  /* For a CONST_DECL, set mode, alignment, and sizes from those of the
1881
     type in case this node is used in a reference.  */
1882
  if (TREE_CODE (decl) == CONST_DECL)
1883
    {
1884
      DECL_MODE (decl) = TYPE_MODE (type);
1885
      DECL_ALIGN (decl) = TYPE_ALIGN (type);
1886
      DECL_SIZE (decl) = TYPE_SIZE (type);
1887
      DECL_SIZE_UNIT (decl) = TYPE_SIZE_UNIT (type);
1888
      return;
1889
    }
1890
 
1891
  /* Otherwise, only automatic variables need any expansion done.  Static and
1892
     external variables, and external functions, will be handled by
1893
     `assemble_variable' (called from finish_decl).  TYPE_DECL requires
1894
     nothing.  PARM_DECLs are handled in `assign_parms'.  */
1895
  if (TREE_CODE (decl) != VAR_DECL)
1896
    return;
1897
 
1898
  if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
1899
    return;
1900
 
1901
  /* Create the RTL representation for the variable.  */
1902
 
1903
  if (type == error_mark_node)
1904
    SET_DECL_RTL (decl, gen_rtx_MEM (BLKmode, const0_rtx));
1905
 
1906
  else if (DECL_SIZE (decl) == 0)
1907
    {
1908
      /* Variable with incomplete type.  */
1909
      rtx x;
1910
      if (DECL_INITIAL (decl) == 0)
1911
        /* Error message was already done; now avoid a crash.  */
1912
        x = gen_rtx_MEM (BLKmode, const0_rtx);
1913
      else
1914
        /* An initializer is going to decide the size of this array.
1915
           Until we know the size, represent its address with a reg.  */
1916
        x = gen_rtx_MEM (BLKmode, gen_reg_rtx (Pmode));
1917
 
1918
      set_mem_attributes (x, decl, 1);
1919
      SET_DECL_RTL (decl, x);
1920
    }
1921
  else if (use_register_for_decl (decl))
1922
    {
1923
      /* Automatic variable that can go in a register.  */
1924
      enum machine_mode reg_mode = promote_decl_mode (decl, NULL);
1925
 
1926
      SET_DECL_RTL (decl, gen_reg_rtx (reg_mode));
1927
 
1928
      /* Note if the object is a user variable.  */
1929
      if (!DECL_ARTIFICIAL (decl))
1930
          mark_user_reg (DECL_RTL (decl));
1931
 
1932
      if (POINTER_TYPE_P (type))
1933
        mark_reg_pointer (DECL_RTL (decl),
1934
                          TYPE_ALIGN (TREE_TYPE (TREE_TYPE (decl))));
1935
    }
1936
 
1937
  else
1938
    {
1939
      rtx oldaddr = 0;
1940
      rtx addr;
1941
      rtx x;
1942
 
1943
      /* Variable-sized decls are dealt with in the gimplifier.  */
1944
      gcc_assert (TREE_CODE (DECL_SIZE_UNIT (decl)) == INTEGER_CST);
1945
 
1946
      /* If we previously made RTL for this decl, it must be an array
1947
         whose size was determined by the initializer.
1948
         The old address was a register; set that register now
1949
         to the proper address.  */
1950
      if (DECL_RTL_SET_P (decl))
1951
        {
1952
          gcc_assert (MEM_P (DECL_RTL (decl)));
1953
          gcc_assert (REG_P (XEXP (DECL_RTL (decl), 0)));
1954
          oldaddr = XEXP (DECL_RTL (decl), 0);
1955
        }
1956
 
1957
      /* Set alignment we actually gave this decl.  */
1958
      DECL_ALIGN (decl) = (DECL_MODE (decl) == BLKmode ? BIGGEST_ALIGNMENT
1959
                           : GET_MODE_BITSIZE (DECL_MODE (decl)));
1960
      DECL_USER_ALIGN (decl) = 0;
1961
 
1962
      x = assign_temp (decl, 1, 1, 1);
1963
      set_mem_attributes (x, decl, 1);
1964
      SET_DECL_RTL (decl, x);
1965
 
1966
      if (oldaddr)
1967
        {
1968
          addr = force_operand (XEXP (DECL_RTL (decl), 0), oldaddr);
1969
          if (addr != oldaddr)
1970
            emit_move_insn (oldaddr, addr);
1971
        }
1972
    }
1973
}
1974
 
1975
/* Emit code to save the current value of stack.  */
1976
rtx
1977
expand_stack_save (void)
1978
{
1979
  rtx ret = NULL_RTX;
1980
 
1981
  do_pending_stack_adjust ();
1982
  emit_stack_save (SAVE_BLOCK, &ret, NULL_RTX);
1983
  return ret;
1984
}
1985
 
1986
/* Emit code to restore the current value of stack.  */
1987
void
1988
expand_stack_restore (tree var)
1989
{
1990
  rtx sa = expand_normal (var);
1991
 
1992
  sa = convert_memory_address (Pmode, sa);
1993
  emit_stack_restore (SAVE_BLOCK, sa, NULL_RTX);
1994
}
1995
 
1996
/* Do the insertion of a case label into case_list.  The labels are
1997
   fed to us in descending order from the sorted vector of case labels used
1998
   in the tree part of the middle end.  So the list we construct is
1999
   sorted in ascending order.  The bounds on the case range, LOW and HIGH,
2000
   are converted to the case's index type TYPE.  */
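/* For example (an illustrative sketch, not part of the original sources):
   labels with values 9, 5 and 1, fed in that descending order, are each
   prepended at the head of the chain, so the finished list reads
   1 -> 5 -> 9 in ascending order of case values.  */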
2001
 
2002
static struct case_node *
2003
add_case_node (struct case_node *head, tree type, tree low, tree high,
2004
               tree label, alloc_pool case_node_pool)
2005
{
2006
  tree min_value, max_value;
2007
  struct case_node *r;
2008
 
2009
  gcc_assert (TREE_CODE (low) == INTEGER_CST);
2010
  gcc_assert (!high || TREE_CODE (high) == INTEGER_CST);
2011
 
2012
  min_value = TYPE_MIN_VALUE (type);
2013
  max_value = TYPE_MAX_VALUE (type);
2014
 
2015
  /* If there's no HIGH value, then this is not a case range; it's
2016
     just a simple case label.  But that's just a degenerate case
2017
     range.
2018
     If the bounds are equal, turn this into the one-value case.  */
2019
  if (!high || tree_int_cst_equal (low, high))
2020
    {
2021
      /* If the simple case value is unreachable, ignore it.  */
2022
      if ((TREE_CODE (min_value) == INTEGER_CST
2023
            && tree_int_cst_compare (low, min_value) < 0)
2024
          || (TREE_CODE (max_value) == INTEGER_CST
2025
              && tree_int_cst_compare (low, max_value) > 0))
2026
        return head;
2027
      low = fold_convert (type, low);
2028
      high = low;
2029
    }
2030
  else
2031
    {
2032
      /* If the entire case range is unreachable, ignore it.  */
2033
      if ((TREE_CODE (min_value) == INTEGER_CST
2034
            && tree_int_cst_compare (high, min_value) < 0)
2035
          || (TREE_CODE (max_value) == INTEGER_CST
2036
              && tree_int_cst_compare (low, max_value) > 0))
2037
        return head;
2038
 
2039
      /* If the lower bound is less than the index type's minimum
2040
         value, truncate the range bounds.  */
2041
      if (TREE_CODE (min_value) == INTEGER_CST
2042
            && tree_int_cst_compare (low, min_value) < 0)
2043
        low = min_value;
2044
      low = fold_convert (type, low);
2045
 
2046
      /* If the upper bound is greater than the index type's maximum
2047
         value, truncate the range bounds.  */
2048
      if (TREE_CODE (max_value) == INTEGER_CST
2049
          && tree_int_cst_compare (high, max_value) > 0)
2050
        high = max_value;
2051
      high = fold_convert (type, high);
2052
    }
2053
 
2054
 
2055
  /* Add this label to the chain.  Make sure to drop overflow flags.  */
2056
  r = (struct case_node *) pool_alloc (case_node_pool);
2057
  r->low = build_int_cst_wide (TREE_TYPE (low), TREE_INT_CST_LOW (low),
2058
                               TREE_INT_CST_HIGH (low));
2059
  r->high = build_int_cst_wide (TREE_TYPE (high), TREE_INT_CST_LOW (high),
2060
                                TREE_INT_CST_HIGH (high));
2061
  r->code_label = label;
2062
  r->parent = r->left = NULL;
2063
  r->right = head;
2064
  return r;
2065
}
2066
 
2067
/* Maximum number of case bit tests.  */
2068
#define MAX_CASE_BIT_TESTS  3
2069
 
2070
/* By default, enable case bit tests on targets with ashlsi3.  */
2071
#ifndef CASE_USE_BIT_TESTS
2072
#define CASE_USE_BIT_TESTS  (optab_handler (ashl_optab, word_mode)->insn_code \
2073
                             != CODE_FOR_nothing)
2074
#endif
2075
 
2076
 
2077
/* A case_bit_test represents a set of case nodes that may be
2078
   selected from using a bit-wise comparison.  HI and LO hold
2079
   the integer to be tested against, LABEL contains the label
2080
   to jump to upon success and BITS counts the number of case
2081
   nodes handled by this test, typically the number of bits
2082
   set in HI:LO.  */
2083
 
2084
struct case_bit_test
2085
{
2086
  HOST_WIDE_INT hi;
2087
  HOST_WIDE_INT lo;
2088
  rtx label;
2089
  int bits;
2090
};
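/* As an illustration (not from the original sources) of the struct above:
   with MINVAL == 1, the case values 1, 3 and 5 sharing one target label
   produce a single case_bit_test whose LO is 0x15 (bits 0, 2 and 4 set),
   whose HI is 0 and whose BITS count is 3.  */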
2091
 
2092
/* Determine whether "1 << x" is relatively cheap in word_mode.  */
2093
 
2094
static bool
lshift_cheap_p (void)
2096
{
2097
  static bool init = false;
2098
  static bool cheap = true;
2099
 
2100
  if (!init)
2101
    {
2102
      rtx reg = gen_rtx_REG (word_mode, 10000);
2103
      int cost = rtx_cost (gen_rtx_ASHIFT (word_mode, const1_rtx, reg), SET,
2104
                           optimize_insn_for_speed_p ());
2105
      cheap = cost < COSTS_N_INSNS (3);
2106
      init = true;
2107
    }
2108
 
2109
  return cheap;
2110
}
2111
 
2112
/* Comparison function for qsort to order bit tests by decreasing
2113
   number of case nodes, i.e. the node with the most cases gets
2114
   tested first.  */
2115
 
2116
static int
2117
case_bit_test_cmp (const void *p1, const void *p2)
2118
{
2119
  const struct case_bit_test *const d1 = (const struct case_bit_test *) p1;
2120
  const struct case_bit_test *const d2 = (const struct case_bit_test *) p2;
2121
 
2122
  if (d2->bits != d1->bits)
2123
    return d2->bits - d1->bits;
2124
 
2125
  /* Stabilize the sort.  */
2126
  return CODE_LABEL_NUMBER (d2->label) - CODE_LABEL_NUMBER (d1->label);
2127
}
2128
 
2129
/*  Expand a switch statement by a short sequence of bit-wise
2130
    comparisons.  "switch(x)" is effectively converted into
2131
    "if ((1 << (x-MINVAL)) & CST)" where CST and MINVAL are
2132
    integer constants.
2133
 
2134
    INDEX_EXPR is the value being switched on, which is of
2135
    type INDEX_TYPE.  MINVAL is the lowest case value in
2136
    the case nodes, of INDEX_TYPE type, and RANGE is the highest
2137
    value minus MINVAL, also of type INDEX_TYPE.  NODES is
2138
    the set of case nodes, and DEFAULT_LABEL is the label to
2139
    branch to should none of the cases match.
2140
 
2141
    There *MUST* be MAX_CASE_BIT_TESTS or fewer unique case
2142
    node targets.  */
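/* A sketch of the transformation (illustrative only, not from the
   original sources):

       switch (c) { case 0: case 2: case 5: goto L; default: goto D; }

   with MINVAL == 0 and RANGE == 5 expands to roughly

       if ((unsigned) c > 5) goto D;
       if (((HOST_WIDE_INT) 1 << c) & 0x25) goto L;
       goto D;

   where the constant 0x25 has bits 0, 2 and 5 set.  */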
2143
 
2144
static void
2145
emit_case_bit_tests (tree index_type, tree index_expr, tree minval,
2146
                     tree range, case_node_ptr nodes, rtx default_label)
2147
{
2148
  struct case_bit_test test[MAX_CASE_BIT_TESTS];
2149
  enum machine_mode mode;
2150
  rtx expr, index, label;
2151
  unsigned int i,j,lo,hi;
2152
  struct case_node *n;
2153
  unsigned int count;
2154
 
2155
  count = 0;
2156
  for (n = nodes; n; n = n->right)
2157
    {
2158
      label = label_rtx (n->code_label);
2159
      for (i = 0; i < count; i++)
2160
        if (label == test[i].label)
2161
          break;
2162
 
2163
      if (i == count)
2164
        {
2165
          gcc_assert (count < MAX_CASE_BIT_TESTS);
2166
          test[i].hi = 0;
2167
          test[i].lo = 0;
2168
          test[i].label = label;
2169
          test[i].bits = 1;
2170
          count++;
2171
        }
2172
      else
2173
        test[i].bits++;
2174
 
2175
      lo = tree_low_cst (fold_build2 (MINUS_EXPR, index_type,
2176
                                      n->low, minval), 1);
2177
      hi = tree_low_cst (fold_build2 (MINUS_EXPR, index_type,
2178
                                      n->high, minval), 1);
2179
      for (j = lo; j <= hi; j++)
2180
        if (j >= HOST_BITS_PER_WIDE_INT)
2181
          test[i].hi |= (HOST_WIDE_INT) 1 << (j - HOST_BITS_PER_WIDE_INT);
2182
        else
2183
          test[i].lo |= (HOST_WIDE_INT) 1 << j;
2184
    }
2185
 
2186
  qsort (test, count, sizeof(*test), case_bit_test_cmp);
2187
 
2188
  index_expr = fold_build2 (MINUS_EXPR, index_type,
2189
                            fold_convert (index_type, index_expr),
2190
                            fold_convert (index_type, minval));
2191
  index = expand_normal (index_expr);
2192
  do_pending_stack_adjust ();
2193
 
2194
  mode = TYPE_MODE (index_type);
2195
  expr = expand_normal (range);
2196
  if (default_label)
2197
    emit_cmp_and_jump_insns (index, expr, GTU, NULL_RTX, mode, 1,
2198
                             default_label);
2199
 
2200
  index = convert_to_mode (word_mode, index, 0);
2201
  index = expand_binop (word_mode, ashl_optab, const1_rtx,
2202
                        index, NULL_RTX, 1, OPTAB_WIDEN);
2203
 
2204
  for (i = 0; i < count; i++)
2205
    {
2206
      expr = immed_double_const (test[i].lo, test[i].hi, word_mode);
2207
      expr = expand_binop (word_mode, and_optab, index, expr,
2208
                           NULL_RTX, 1, OPTAB_WIDEN);
2209
      emit_cmp_and_jump_insns (expr, const0_rtx, NE, NULL_RTX,
2210
                               word_mode, 1, test[i].label);
2211
    }
2212
 
2213
  if (default_label)
2214
    emit_jump (default_label);
2215
}
2216
 
2217
#ifndef HAVE_casesi
2218
#define HAVE_casesi 0
2219
#endif
2220
 
2221
#ifndef HAVE_tablejump
2222
#define HAVE_tablejump 0
2223
#endif
2224
 
2225
/* Terminate a case (Pascal/Ada) or switch (C) statement, in which the
   expression to be tested is the index of the GIMPLE_SWITCH statement
   STMT.  Generate the code to test it and jump to the right place.  */
2230
 
2231
void
2232
expand_case (gimple stmt)
2233
{
2234
  tree minval = NULL_TREE, maxval = NULL_TREE, range = NULL_TREE;
2235
  rtx default_label = 0;
2236
  struct case_node *n;
2237
  unsigned int count, uniq;
2238
  rtx index;
2239
  rtx table_label;
2240
  int ncases;
2241
  rtx *labelvec;
2242
  int i;
2243
  rtx before_case, end, lab;
2244
 
2245
  tree index_expr = gimple_switch_index (stmt);
2246
  tree index_type = TREE_TYPE (index_expr);
2247
  int unsignedp = TYPE_UNSIGNED (index_type);
2248
 
2249
  /* The insn after which the case dispatch should finally
2250
     be emitted.  Zero for a dummy.  */
2251
  rtx start;
2252
 
2253
  /* A list of case labels; it is first built as a list and it may then
2254
     be rearranged into a nearly balanced binary tree.  */
2255
  struct case_node *case_list = 0;
2256
 
2257
  /* Label to jump to if no case matches.  */
2258
  tree default_label_decl = NULL_TREE;
2259
 
2260
  alloc_pool case_node_pool = create_alloc_pool ("struct case_node pool",
2261
                                                 sizeof (struct case_node),
2262
                                                 100);
2263
 
2264
  do_pending_stack_adjust ();
2265
 
2266
  /* An ERROR_MARK occurs for various reasons including invalid data type.  */
2267
  if (index_type != error_mark_node)
2268
    {
2269
      tree elt;
2270
      bitmap label_bitmap;
2271
      int stopi = 0;
2272
 
2273
      /* cleanup_tree_cfg removes any SWITCH_EXPR whose index
         expression is an INTEGER_CST.  */
2275
      gcc_assert (TREE_CODE (index_expr) != INTEGER_CST);
2276
 
2277
      /* The default case, if ever taken, is the first element.  */
2278
      elt = gimple_switch_label (stmt, 0);
2279
      if (!CASE_LOW (elt) && !CASE_HIGH (elt))
2280
        {
2281
          default_label_decl = CASE_LABEL (elt);
2282
          stopi = 1;
2283
        }
2284
 
2285
      for (i = gimple_switch_num_labels (stmt) - 1; i >= stopi; --i)
2286
        {
2287
          tree low, high;
2288
          elt = gimple_switch_label (stmt, i);
2289
 
2290
          low = CASE_LOW (elt);
2291
          gcc_assert (low);
2292
          high = CASE_HIGH (elt);
2293
 
2294
          /* Discard empty ranges.  */
2295
          if (high && tree_int_cst_lt (high, low))
2296
            continue;
2297
 
2298
          case_list = add_case_node (case_list, index_type, low, high,
2299
                                     CASE_LABEL (elt), case_node_pool);
2300
        }
2301
 
2302
 
2303
      before_case = start = get_last_insn ();
2304
      if (default_label_decl)
2305
        default_label = label_rtx (default_label_decl);
2306
 
2307
      /* Get upper and lower bounds of case values.  */
2308
 
2309
      uniq = 0;
2310
      count = 0;
2311
      label_bitmap = BITMAP_ALLOC (NULL);
2312
      for (n = case_list; n; n = n->right)
2313
        {
2314
          /* Count the elements and track the largest and smallest
2315
             of them (treating them as signed even if they are not).  */
2316
          if (count++ == 0)
2317
            {
2318
              minval = n->low;
2319
              maxval = n->high;
2320
            }
2321
          else
2322
            {
2323
              if (tree_int_cst_lt (n->low, minval))
2324
                minval = n->low;
2325
              if (tree_int_cst_lt (maxval, n->high))
2326
                maxval = n->high;
2327
            }
2328
          /* A range counts double, since it requires two compares.  */
2329
          if (! tree_int_cst_equal (n->low, n->high))
2330
            count++;
2331
 
2332
          /* If we have not seen this label yet, then increase the
2333
             number of unique case node targets seen.  */
2334
          lab = label_rtx (n->code_label);
2335
          if (!bitmap_bit_p (label_bitmap, CODE_LABEL_NUMBER (lab)))
2336
            {
2337
              bitmap_set_bit (label_bitmap, CODE_LABEL_NUMBER (lab));
2338
              uniq++;
2339
            }
2340
        }
2341
 
2342
      BITMAP_FREE (label_bitmap);
2343
 
2344
      /* cleanup_tree_cfg removes all SWITCH_EXPR with a single
2345
         destination, such as one with a default case only.  However,
2346
         it doesn't remove cases that are out of range for the switch
2347
         type, so we may still get a zero here.  */
2348
      if (count == 0)
2349
        {
2350
          if (default_label)
2351
            emit_jump (default_label);
2352
          free_alloc_pool (case_node_pool);
2353
          return;
2354
        }
2355
 
2356
      /* Compute span of values.  */
2357
      range = fold_build2 (MINUS_EXPR, index_type, maxval, minval);
2358
 
2359
      /* Try implementing this switch statement by a short sequence of
2360
         bit-wise comparisons.  However, we let the binary-tree case
2361
         below handle constant index expressions.  */
2362
      if (CASE_USE_BIT_TESTS
2363
          && ! TREE_CONSTANT (index_expr)
2364
          && compare_tree_int (range, GET_MODE_BITSIZE (word_mode)) < 0
2365
          && compare_tree_int (range, 0) > 0
2366
          && lshift_cheap_p ()
2367
          && ((uniq == 1 && count >= 3)
2368
              || (uniq == 2 && count >= 5)
2369
              || (uniq == 3 && count >= 6)))
2370
        {
2371
          /* Optimize the case where all the case values fit in a
2372
             word without having to subtract MINVAL.  In this case,
2373
             we can optimize away the subtraction.  */
2374
          if (compare_tree_int (minval, 0) > 0
2375
              && compare_tree_int (maxval, GET_MODE_BITSIZE (word_mode)) < 0)
2376
            {
2377
              minval = build_int_cst (index_type, 0);
2378
              range = maxval;
2379
            }
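          /* Illustrative example (not from the original sources): for
             case values 3, 5 and 7 that share one label on a 32-bit word
             target, MINVAL is 3 and MAXVAL is 7 < 32, so MINVAL is reset
             to 0 and the test constant is built from the raw values
             (bits 3, 5 and 7, i.e. 0xa8), skipping the subtraction that
             would otherwise precede the shift.  */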
2380
          emit_case_bit_tests (index_type, index_expr, minval, range,
2381
                               case_list, default_label);
2382
        }
2383
 
2384
      /* If range of values is much bigger than number of values,
2385
         make a sequence of conditional branches instead of a dispatch.
2386
         If the switch-index is a constant, do it this way
2387
         because we can optimize it.  */
2388
 
2389
      else if (count < targetm.case_values_threshold ()
2390
               || compare_tree_int (range,
2391
                                    (optimize_insn_for_size_p () ? 3 : 10) * count) > 0
2392
               /* RANGE may be signed, and really large ranges will show up
2393
                  as negative numbers.  */
2394
               || compare_tree_int (range, 0) < 0
2395
#ifndef ASM_OUTPUT_ADDR_DIFF_ELT
2396
               || flag_pic
2397
#endif
2398
               || !flag_jump_tables
2399
               || TREE_CONSTANT (index_expr)
2400
               /* If neither casesi nor tablejump is available, we can
2401
                  only go this way.  */
2402
               || (!HAVE_casesi && !HAVE_tablejump))
2403
        {
2404
          index = expand_normal (index_expr);
2405
 
2406
          /* If the index is a short or char for which we do not have
2407
             an insn to handle comparisons directly, convert it to
2408
             a full integer now, rather than letting each comparison
2409
             generate the conversion.  */
2410
 
2411
          if (GET_MODE_CLASS (GET_MODE (index)) == MODE_INT
2412
              && ! have_insn_for (COMPARE, GET_MODE (index)))
2413
            {
2414
              enum machine_mode wider_mode;
2415
              for (wider_mode = GET_MODE (index); wider_mode != VOIDmode;
2416
                   wider_mode = GET_MODE_WIDER_MODE (wider_mode))
2417
                if (have_insn_for (COMPARE, wider_mode))
2418
                  {
2419
                    index = convert_to_mode (wider_mode, index, unsignedp);
2420
                    break;
2421
                  }
2422
            }
2423
 
2424
          do_pending_stack_adjust ();
2425
 
2426
          if (MEM_P (index))
2427
            index = copy_to_reg (index);
2428
 
2429
          /* We generate a binary decision tree to select the
2430
             appropriate target code.  This is done as follows:
2431
 
2432
             The list of cases is rearranged into a binary tree,
2433
             nearly optimal assuming equal probability for each case.
2434
 
2435
             The tree is transformed into RTL, eliminating
2436
             redundant test conditions at the same time.
2437
 
2438
             If program flow could reach the end of the
2439
             decision tree an unconditional jump to the
2440
             default code is emitted.  */
2441
 
2442
          use_cost_table = estimate_case_costs (case_list);
2443
          balance_case_nodes (&case_list, NULL);
2444
          emit_case_nodes (index, case_list, default_label, index_type);
2445
          if (default_label)
2446
            emit_jump (default_label);
2447
        }
2448
      else
2449
        {
2450
          rtx fallback_label = label_rtx (case_list->code_label);
2451
          table_label = gen_label_rtx ();
2452
          if (! try_casesi (index_type, index_expr, minval, range,
2453
                            table_label, default_label, fallback_label))
2454
            {
2455
              bool ok;
2456
 
2457
              /* Index jumptables from zero for suitable values of
2458
                 minval to avoid a subtraction.  */
2459
              if (optimize_insn_for_speed_p ()
2460
                  && compare_tree_int (minval, 0) > 0
2461
                  && compare_tree_int (minval, 3) < 0)
2462
                {
2463
                  minval = build_int_cst (index_type, 0);
2464
                  range = maxval;
2465
                }
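              /* Illustrative example (not from the original sources):
                 for case values 1, 2 and 3, MINVAL is 1 and RANGE is 2;
                 resetting MINVAL to 0 and RANGE to MAXVAL (3) wastes one
                 table slot, which is later filled with the default label,
                 but avoids emitting a subtraction before the table jump.  */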
2466
 
2467
              ok = try_tablejump (index_type, index_expr, minval, range,
2468
                                  table_label, default_label);
2469
              gcc_assert (ok);
2470
            }
2471
 
2472
          /* Get table of labels to jump to, in order of case index.  */
2473
 
2474
          ncases = tree_low_cst (range, 0) + 1;
2475
          labelvec = XALLOCAVEC (rtx, ncases);
2476
          memset (labelvec, 0, ncases * sizeof (rtx));
2477
 
2478
          for (n = case_list; n; n = n->right)
2479
            {
2480
              /* Compute the low and high bounds relative to the minimum
2481
                 value since that should fit in a HOST_WIDE_INT while the
2482
                 actual values may not.  */
2483
              HOST_WIDE_INT i_low
2484
                = tree_low_cst (fold_build2 (MINUS_EXPR, index_type,
2485
                                             n->low, minval), 1);
2486
              HOST_WIDE_INT i_high
2487
                = tree_low_cst (fold_build2 (MINUS_EXPR, index_type,
2488
                                             n->high, minval), 1);
2489
              HOST_WIDE_INT i;
2490
 
2491
              for (i = i_low; i <= i_high; i ++)
2492
                labelvec[i]
2493
                  = gen_rtx_LABEL_REF (Pmode, label_rtx (n->code_label));
2494
            }
2495
 
2496
          /* Fill in the gaps with the default.  We may have gaps at
2497
             the beginning if we tried to avoid the minval subtraction,
2498
             so substitute some label even if the default label was
2499
             deemed unreachable.  */
2500
          if (!default_label)
2501
            default_label = fallback_label;
2502
          for (i = 0; i < ncases; i++)
2503
            if (labelvec[i] == 0)
2504
              labelvec[i] = gen_rtx_LABEL_REF (Pmode, default_label);
2505
 
2506
          /* Output the table.  */
2507
          emit_label (table_label);
2508
 
2509
          if (CASE_VECTOR_PC_RELATIVE || flag_pic)
2510
            emit_jump_insn (gen_rtx_ADDR_DIFF_VEC (CASE_VECTOR_MODE,
2511
                                                   gen_rtx_LABEL_REF (Pmode, table_label),
2512
                                                   gen_rtvec_v (ncases, labelvec),
2513
                                                   const0_rtx, const0_rtx));
2514
          else
2515
            emit_jump_insn (gen_rtx_ADDR_VEC (CASE_VECTOR_MODE,
2516
                                              gen_rtvec_v (ncases, labelvec)));
2517
 
2518
          /* Record no drop-through after the table.  */
2519
          emit_barrier ();
2520
        }
2521
 
2522
      before_case = NEXT_INSN (before_case);
2523
      end = get_last_insn ();
2524
      reorder_insns (before_case, end, start);
2525
    }
2526
 
2527
  free_temp_slots ();
2528
  free_alloc_pool (case_node_pool);
2529
}
2530
 
2531
/* Generate code to jump to LABEL if OP0 and OP1 are equal in mode MODE.  */
2532
 
2533
static void
2534
do_jump_if_equal (enum machine_mode mode, rtx op0, rtx op1, rtx label,
2535
                  int unsignedp)
2536
{
2537
  do_compare_rtx_and_jump (op0, op1, EQ, unsignedp, mode,
2538
                           NULL_RTX, NULL_RTX, label, -1);
2539
}
2540
 
2541
/* Not all case values are encountered equally.  This function
2542
   uses a heuristic to weight case labels, in cases where that
2543
   looks like a reasonable thing to do.
2544
 
2545
   Right now, all we try to guess is text, and we establish the
2546
   following weights:
2547
 
2548
        chars above space:      16
2549
        digits:                 16
2550
        default:                12
2551
        space, punct:           8
2552
        tab:                    4
2553
        newline:                2
2554
        other "\" chars:        1
2555
        remaining chars:        0
2556
 
2557
   If we find any cases in the switch that are not either -1 or in the range
2558
   of valid ASCII characters, or are control characters other than those
2559
   commonly used with "\", don't treat this switch as scanning text.
2560
 
2561
   Return 1 if these nodes are suitable for cost estimation, otherwise
2562
   return 0.  */
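/* A small example (illustrative only): a switch whose case values are the
   characters 'a', ' ' and '\n' looks like text, with weights 16, 8 and 2
   respectively, so balance_case_nodes below will tend to place 'a' nearer
   the root of the decision tree than '\n'.  */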
2563
 
2564
static int
2565
estimate_case_costs (case_node_ptr node)
2566
{
2567
  tree min_ascii = integer_minus_one_node;
2568
  tree max_ascii = build_int_cst (TREE_TYPE (node->high), 127);
2569
  case_node_ptr n;
2570
  int i;
2571
 
2572
  /* If we haven't already made the cost table, make it now.  Note that the
2573
     lower bound of the table is -1, not zero.  */
2574
 
2575
  if (! cost_table_initialized)
2576
    {
2577
      cost_table_initialized = 1;
2578
 
2579
      for (i = 0; i < 128; i++)
2580
        {
2581
          if (ISALNUM (i))
2582
            COST_TABLE (i) = 16;
2583
          else if (ISPUNCT (i))
2584
            COST_TABLE (i) = 8;
2585
          else if (ISCNTRL (i))
2586
            COST_TABLE (i) = -1;
2587
        }
2588
 
2589
      COST_TABLE (' ') = 8;
2590
      COST_TABLE ('\t') = 4;
2591
      COST_TABLE ('\0') = 4;
2592
      COST_TABLE ('\n') = 2;
2593
      COST_TABLE ('\f') = 1;
2594
      COST_TABLE ('\v') = 1;
2595
      COST_TABLE ('\b') = 1;
2596
    }
2597
 
2598
  /* See if all the case expressions look like text.  It is text if the
2599
     constant is >= -1 and the highest constant is <= 127.  Do all comparisons
2600
     as signed arithmetic since we don't want to ever access cost_table with a
2601
     value less than -1.  Also check that none of the constants in a range
2602
     are strange control characters.  */
2603
 
2604
  for (n = node; n; n = n->right)
2605
    {
2606
      if (tree_int_cst_lt (n->low, min_ascii)
2607
          || tree_int_cst_lt (max_ascii, n->high))
2608
        return 0;
2609
 
2610
      for (i = (HOST_WIDE_INT) TREE_INT_CST_LOW (n->low);
2611
           i <= (HOST_WIDE_INT) TREE_INT_CST_LOW (n->high); i++)
2612
        if (COST_TABLE (i) < 0)
2613
          return 0;
2614
    }
2615
 
2616
  /* All interesting values are within the range of interesting
2617
     ASCII characters.  */
2618
  return 1;
2619
}
2620
 
2621
/* Take an ordered list of case nodes
2622
   and transform them into a near optimal binary tree,
2623
   on the assumption that any target code selection value is as
2624
   likely as any other.
2625
 
2626
   The transformation is performed by splitting the ordered
2627
   list into two equal sections plus a pivot.  The parts are
2628
   then attached to the pivot as left and right branches.  Each
2629
   branch is then transformed recursively.  */
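/* For example (illustrative only): seven equally weighted single-valued
   cases 1 .. 7 are split at 4, giving left = {1, 2, 3} and
   right = {5, 6, 7}; each half is then split at its middle element, so no
   path through the resulting tree tests more than three nodes.  */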
2630
 
2631
static void
2632
balance_case_nodes (case_node_ptr *head, case_node_ptr parent)
2633
{
2634
  case_node_ptr np;
2635
 
2636
  np = *head;
2637
  if (np)
2638
    {
2639
      int cost = 0;
2640
      int i = 0;
2641
      int ranges = 0;
2642
      case_node_ptr *npp;
2643
      case_node_ptr left;
2644
 
2645
      /* Count the number of entries on branch.  Also count the ranges.  */
2646
 
2647
      while (np)
2648
        {
2649
          if (!tree_int_cst_equal (np->low, np->high))
2650
            {
2651
              ranges++;
2652
              if (use_cost_table)
2653
                cost += COST_TABLE (TREE_INT_CST_LOW (np->high));
2654
            }
2655
 
2656
          if (use_cost_table)
2657
            cost += COST_TABLE (TREE_INT_CST_LOW (np->low));
2658
 
2659
          i++;
2660
          np = np->right;
2661
        }
2662
 
2663
      if (i > 2)
2664
        {
2665
          /* Split this list if it is long enough for that to help.  */
2666
          npp = head;
2667
          left = *npp;
2668
          if (use_cost_table)
2669
            {
2670
              /* Find the place in the list that bisects the list's total cost.
2671
                 Here I gets half the total cost.  */
2672
              int n_moved = 0;
2673
              i = (cost + 1) / 2;
2674
              while (1)
2675
                {
2676
                  /* Skip nodes while their cost does not reach that amount.  */
2677
                  if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
2678
                    i -= COST_TABLE (TREE_INT_CST_LOW ((*npp)->high));
2679
                  i -= COST_TABLE (TREE_INT_CST_LOW ((*npp)->low));
2680
                  if (i <= 0)
2681
                    break;
2682
                  npp = &(*npp)->right;
2683
                  n_moved += 1;
2684
                }
2685
              if (n_moved == 0)
2686
                {
2687
                  /* Leave this branch lopsided, but optimize left-hand
2688
                     side and fill in `parent' fields for right-hand side.  */
2689
                  np = *head;
2690
                  np->parent = parent;
2691
                  balance_case_nodes (&np->left, np);
2692
                  for (; np->right; np = np->right)
2693
                    np->right->parent = np;
2694
                  return;
2695
                }
2696
            }
2697
          /* If there are just three nodes, split at the middle one.  */
2698
          else if (i == 3)
2699
            npp = &(*npp)->right;
2700
          else
2701
            {
2702
              /* Find the place in the list that bisects the list's total cost,
2703
                 where ranges count as 2.
2704
                 Here I gets half the total cost.  */
2705
              i = (i + ranges + 1) / 2;
2706
              while (1)
2707
                {
2708
                  /* Skip nodes while their cost does not reach that amount.  */
2709
                  if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
2710
                    i--;
2711
                  i--;
2712
                  if (i <= 0)
2713
                    break;
2714
                  npp = &(*npp)->right;
2715
                }
2716
            }
2717
          *head = np = *npp;
2718
          *npp = 0;
2719
          np->parent = parent;
2720
          np->left = left;
2721
 
2722
          /* Optimize each of the two split parts.  */
2723
          balance_case_nodes (&np->left, np);
2724
          balance_case_nodes (&np->right, np);
2725
        }
2726
      else
2727
        {
2728
          /* Else leave this branch as one level,
2729
             but fill in `parent' fields.  */
2730
          np = *head;
2731
          np->parent = parent;
2732
          for (; np->right; np = np->right)
2733
            np->right->parent = np;
2734
        }
2735
    }
2736
}
2737
 
2738
/* Search the parent sections of the case node tree
2739
   to see if a test for the lower bound of NODE would be redundant.
2740
   INDEX_TYPE is the type of the index expression.
2741
 
2742
   The instructions to generate the case decision tree are
2743
   output in the same order as nodes are processed so it is
2744
   known that if a parent node checks the range of the current
2745
   node minus one that the current node is bounded at its lower
2746
   span.  Thus the test would be redundant.  */
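/* Example (illustrative only): if this node's LOW is 51 and some parent
   already tested against a HIGH of 50, any index value reaching this node
   is known to exceed 50, so the explicit comparison against the lower
   bound 51 can be omitted.  */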
2747
 
2748
static int
2749
node_has_low_bound (case_node_ptr node, tree index_type)
2750
{
2751
  tree low_minus_one;
2752
  case_node_ptr pnode;
2753
 
2754
  /* If the lower bound of this node is the lowest value in the index type,
2755
     we need not test it.  */
2756
 
2757
  if (tree_int_cst_equal (node->low, TYPE_MIN_VALUE (index_type)))
2758
    return 1;
2759
 
2760
  /* If this node has a left branch, the value at the left must be less
2761
     than that at this node, so it cannot be bounded at the bottom and
2762
     we need not bother testing any further.  */
2763
 
2764
  if (node->left)
2765
    return 0;
2766
 
2767
  low_minus_one = fold_build2 (MINUS_EXPR, TREE_TYPE (node->low),
2768
                               node->low,
2769
                               build_int_cst (TREE_TYPE (node->low), 1));
2770
 
2771
  /* If the subtraction above overflowed, we can't verify anything.
2772
     Otherwise, look for a parent that tests our value - 1.  */
2773
 
2774
  if (! tree_int_cst_lt (low_minus_one, node->low))
2775
    return 0;
2776
 
2777
  for (pnode = node->parent; pnode; pnode = pnode->parent)
2778
    if (tree_int_cst_equal (low_minus_one, pnode->high))
2779
      return 1;
2780
 
2781
  return 0;
2782
}
2783
 
2784
/* Search the parent sections of the case node tree
2785
   to see if a test for the upper bound of NODE would be redundant.
2786
   INDEX_TYPE is the type of the index expression.
2787
 
2788
   The instructions to generate the case decision tree are
2789
   output in the same order as nodes are processed so it is
2790
   known that if a parent node checks the range of the current
2791
   node plus one that the current node is bounded at its upper
2792
   span.  Thus the test would be redundant.  */
2793
 
2794
static int
2795
node_has_high_bound (case_node_ptr node, tree index_type)
2796
{
2797
  tree high_plus_one;
2798
  case_node_ptr pnode;
2799
 
2800
  /* If there is no upper bound, obviously no test is needed.  */
2801
 
2802
  if (TYPE_MAX_VALUE (index_type) == NULL)
2803
    return 1;
2804
 
2805
  /* If the upper bound of this node is the highest value in the type
2806
     of the index expression, we need not test against it.  */
2807
 
2808
  if (tree_int_cst_equal (node->high, TYPE_MAX_VALUE (index_type)))
2809
    return 1;
2810
 
2811
  /* If this node has a right branch, the value at the right must be greater
2812
     than that at this node, so it cannot be bounded at the top and
2813
     we need not bother testing any further.  */
2814
 
2815
  if (node->right)
2816
    return 0;
2817
 
2818
  high_plus_one = fold_build2 (PLUS_EXPR, TREE_TYPE (node->high),
2819
                               node->high,
2820
                               build_int_cst (TREE_TYPE (node->high), 1));
2821
 
2822
  /* If the addition above overflowed, we can't verify anything.
2823
     Otherwise, look for a parent that tests our value + 1.  */
2824
 
2825
  if (! tree_int_cst_lt (node->high, high_plus_one))
2826
    return 0;
2827
 
2828
  for (pnode = node->parent; pnode; pnode = pnode->parent)
2829
    if (tree_int_cst_equal (high_plus_one, pnode->low))
2830
      return 1;
2831
 
2832
  return 0;
2833
}
2834
 
2835
/* Search the parent sections of the
2836
   case node tree to see if both tests for the upper and lower
2837
   bounds of NODE would be redundant.  */
2838
 
2839
static int
2840
node_is_bounded (case_node_ptr node, tree index_type)
2841
{
2842
  return (node_has_low_bound (node, index_type)
2843
          && node_has_high_bound (node, index_type));
2844
}
2845
 
2846
/* Emit step-by-step code to select a case for the value of INDEX.
2847
   The decision tree generated this way follows the form of the
2848
   case-node binary tree NODE, whose nodes represent test conditions.
2849
   INDEX_TYPE is the type of the index of the switch.
2850
 
2851
   Care is taken to prune redundant tests from the decision tree
2852
   by detecting any boundary conditions already checked by
2853
   emitted rtx.  (See node_has_high_bound, node_has_low_bound
2854
   and node_is_bounded, above.)
2855
 
2856
   Where the test conditions can be shown to be redundant we emit
2857
   an unconditional jump to the target code.  As a further
2858
   optimization, the subordinates of a tree node are examined to
2859
   check for bounded nodes.  In this case conditional and/or
2860
   unconditional jumps as a result of the boundary check for the
2861
   current node are arranged to target the subordinates' associated
2862
   code for out of bound conditions on the current node.
2863
 
2864
   We can assume that when control reaches the code generated here,
2865
   the index value has already been compared with the parents
2866
   of this node, and determined to be on the same side of each parent
2867
   as this node is.  Thus, if this node tests for the value 51,
2868
   and a parent tested for 52, we don't need to consider
2869
   the possibility of a value greater than 51.  If another parent
2870
   tests for the value 50, then this node need not test anything.  */
2871
 
2872
static void
2873
emit_case_nodes (rtx index, case_node_ptr node, rtx default_label,
2874
                 tree index_type)
2875
{
2876
  /* If INDEX has an unsigned type, we must make unsigned branches.  */
2877
  int unsignedp = TYPE_UNSIGNED (index_type);
2878
  enum machine_mode mode = GET_MODE (index);
2879
  enum machine_mode imode = TYPE_MODE (index_type);
2880
 
2881
  /* Handle indices detected as constant during RTL expansion.  */
2882
  if (mode == VOIDmode)
2883
    mode = imode;
2884
 
2885
  /* See if our parents have already tested everything for us.
2886
     If they have, emit an unconditional jump for this node.  */
2887
  if (node_is_bounded (node, index_type))
2888
    emit_jump (label_rtx (node->code_label));
2889
 
2890
  else if (tree_int_cst_equal (node->low, node->high))
2891
    {
2892
      /* Node is single valued.  First see if the index expression matches
2893
         this node and then check our children, if any.  */
2894
 
2895
      do_jump_if_equal (mode, index,
2896
                        convert_modes (mode, imode,
2897
                                       expand_normal (node->low),
2898
                                       unsignedp),
2899
                        label_rtx (node->code_label), unsignedp);
2900
 
2901
      if (node->right != 0 && node->left != 0)
2902
        {
2903
          /* This node has children on both sides.
2904
             Dispatch to one side or the other
2905
             by comparing the index value with this node's value.
2906
             If one subtree is bounded, check that one first,
2907
             so we can avoid real branches in the tree.  */
2908
 
2909
          if (node_is_bounded (node->right, index_type))
2910
            {
2911
              emit_cmp_and_jump_insns (index,
2912
                                       convert_modes
2913
                                       (mode, imode,
2914
                                        expand_normal (node->high),
2915
                                        unsignedp),
2916
                                       GT, NULL_RTX, mode, unsignedp,
2917
                                       label_rtx (node->right->code_label));
2918
              emit_case_nodes (index, node->left, default_label, index_type);
2919
            }
2920
 
2921
          else if (node_is_bounded (node->left, index_type))
2922
            {
2923
              emit_cmp_and_jump_insns (index,
2924
                                       convert_modes
2925
                                       (mode, imode,
2926
                                        expand_normal (node->high),
2927
                                        unsignedp),
2928
                                       LT, NULL_RTX, mode, unsignedp,
2929
                                       label_rtx (node->left->code_label));
2930
              emit_case_nodes (index, node->right, default_label, index_type);
2931
            }
2932
 
2933
          /* If both children are single-valued cases with no
2934
             children, finish up all the work.  This way, we can save
2935
             one ordered comparison.  */
          else if (tree_int_cst_equal (node->right->low, node->right->high)
                   && node->right->left == 0
                   && node->right->right == 0
                   && tree_int_cst_equal (node->left->low, node->left->high)
                   && node->left->left == 0
                   && node->left->right == 0)
            {
              /* Neither node is bounded.  First distinguish the two sides;
                 then emit the code for one side at a time.  */

              /* See if the value matches what the right hand side
                 wants.  */
              do_jump_if_equal (mode, index,
                                convert_modes (mode, imode,
                                               expand_normal (node->right->low),
                                               unsignedp),
                                label_rtx (node->right->code_label),
                                unsignedp);

              /* See if the value matches what the left hand side
                 wants.  */
              do_jump_if_equal (mode, index,
                                convert_modes (mode, imode,
                                               expand_normal (node->left->low),
                                               unsignedp),
                                label_rtx (node->left->code_label),
                                unsignedp);
            }

          else
            {
              /* Neither node is bounded.  First distinguish the two sides;
                 then emit the code for one side at a time.  */

              tree test_label
                = build_decl (CURR_INSN_LOCATION,
                              LABEL_DECL, NULL_TREE, NULL_TREE);

              /* See if the value is on the right.  */
              emit_cmp_and_jump_insns (index,
                                       convert_modes
                                       (mode, imode,
                                        expand_normal (node->high),
                                        unsignedp),
                                       GT, NULL_RTX, mode, unsignedp,
                                       label_rtx (test_label));

              /* Value must be on the left.
                 Handle the left-hand subtree.  */
              emit_case_nodes (index, node->left, default_label, index_type);
              /* If left-hand subtree does nothing,
                 go to default.  */
              if (default_label)
                emit_jump (default_label);

              /* Code branches here for the right-hand subtree.  */
              expand_label (test_label);
              emit_case_nodes (index, node->right, default_label, index_type);
            }
        }

      else if (node->right != 0 && node->left == 0)
        {
          /* Here we have a right child but no left so we issue a conditional
             branch to default and process the right child.

             Omit the conditional branch to default if the right child
             does not have any children and is single valued; it would
             cost too much space to save so little time.  */
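          /* For instance (hypothetical values): if this node is the single
             value 5 and its only descendant is the childless single-value
             node 7, the else branch below emits just one equality jump for
             7 rather than a range check against the default label followed
             by a recursive call.  */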

          if (node->right->right || node->right->left
              || !tree_int_cst_equal (node->right->low, node->right->high))
            {
              if (!node_has_low_bound (node, index_type))
                {
                  emit_cmp_and_jump_insns (index,
                                           convert_modes
                                           (mode, imode,
                                            expand_normal (node->high),
                                            unsignedp),
                                           LT, NULL_RTX, mode, unsignedp,
                                           default_label);
                }

              emit_case_nodes (index, node->right, default_label, index_type);
            }
          else
            /* We cannot process node->right normally
               since we haven't ruled out the numbers less than
               this node's value.  So handle node->right explicitly.  */
            do_jump_if_equal (mode, index,
                              convert_modes
                              (mode, imode,
                               expand_normal (node->right->low),
                               unsignedp),
                              label_rtx (node->right->code_label), unsignedp);
        }

      else if (node->right == 0 && node->left != 0)
        {
          /* Just one subtree, on the left.  */
          if (node->left->left || node->left->right
              || !tree_int_cst_equal (node->left->low, node->left->high))
            {
              if (!node_has_high_bound (node, index_type))
                {
                  emit_cmp_and_jump_insns (index,
                                           convert_modes
                                           (mode, imode,
                                            expand_normal (node->high),
                                            unsignedp),
                                           GT, NULL_RTX, mode, unsignedp,
                                           default_label);
                }

              emit_case_nodes (index, node->left, default_label, index_type);
            }
          else
            /* We cannot process node->left normally
               since we haven't ruled out the numbers greater than
               this node's value.  So handle node->left explicitly.  */
            do_jump_if_equal (mode, index,
                              convert_modes
                              (mode, imode,
                               expand_normal (node->left->low),
                               unsignedp),
                              label_rtx (node->left->code_label), unsignedp);
        }
    }
  else
    {
      /* Node is a range.  These cases are very similar to those for a single
         value, except that we do not start by testing whether this node
         is the one to branch to.  */
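      /* Put differently: for a range label such as "case 5 ... 10" there is
         no single equality test that can select this node up front, so the
         ordered comparisons against node->low and node->high emitted below
         double as the membership test for the range.  */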

      if (node->right != 0 && node->left != 0)
        {
          /* Node has subtrees on both sides.
             If the right-hand subtree is bounded,
             test for it first, since we can go straight there.
             Otherwise, we need to make a branch in the control structure,
             then handle the two subtrees.  */
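          /* Concretely (hypothetical shape): with this node covering
             "case 5 ... 10", values above 10 are dispatched first (either
             straight to the right-hand subtree or to test_label), values
             from 5 through 10 then jump to this node's label, and the
             remaining values fall to the recursive call for the left-hand
             subtree.  */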
          tree test_label = 0;

          if (node_is_bounded (node->right, index_type))
            /* Right hand node is fully bounded so we can eliminate any
               testing and branch directly to the target code.  */
            emit_cmp_and_jump_insns (index,
                                     convert_modes
                                     (mode, imode,
                                      expand_normal (node->high),
                                      unsignedp),
                                     GT, NULL_RTX, mode, unsignedp,
                                     label_rtx (node->right->code_label));
          else
            {
              /* Right hand node requires testing.
                 Branch to a label where we will handle it later.  */

              test_label = build_decl (CURR_INSN_LOCATION,
                                       LABEL_DECL, NULL_TREE, NULL_TREE);
              emit_cmp_and_jump_insns (index,
                                       convert_modes
                                       (mode, imode,
                                        expand_normal (node->high),
                                        unsignedp),
                                       GT, NULL_RTX, mode, unsignedp,
                                       label_rtx (test_label));
            }

          /* Value belongs to this node or to the left-hand subtree.  */

          emit_cmp_and_jump_insns (index,
                                   convert_modes
                                   (mode, imode,
                                    expand_normal (node->low),
                                    unsignedp),
                                   GE, NULL_RTX, mode, unsignedp,
                                   label_rtx (node->code_label));

          /* Handle the left-hand subtree.  */
          emit_case_nodes (index, node->left, default_label, index_type);

          /* If right node had to be handled later, do that now.  */

          if (test_label)
            {
              /* If the left-hand subtree fell through,
                 don't let it fall into the right-hand subtree.  */
              if (default_label)
                emit_jump (default_label);

              expand_label (test_label);
              emit_case_nodes (index, node->right, default_label, index_type);
            }
        }
 
      else if (node->right != 0 && node->left == 0)
        {
          /* Deal with values to the left of this node,
             if they are possible.  */
          if (!node_has_low_bound (node, index_type))
            {
              emit_cmp_and_jump_insns (index,
                                       convert_modes
                                       (mode, imode,
                                        expand_normal (node->low),
                                        unsignedp),
                                       LT, NULL_RTX, mode, unsignedp,
                                       default_label);
            }

          /* Value belongs to this node or to the right-hand subtree.  */

          emit_cmp_and_jump_insns (index,
                                   convert_modes
                                   (mode, imode,
                                    expand_normal (node->high),
                                    unsignedp),
                                   LE, NULL_RTX, mode, unsignedp,
                                   label_rtx (node->code_label));

          emit_case_nodes (index, node->right, default_label, index_type);
        }

      else if (node->right == 0 && node->left != 0)
        {
          /* Deal with values to the right of this node,
             if they are possible.  */
          if (!node_has_high_bound (node, index_type))
            {
              emit_cmp_and_jump_insns (index,
                                       convert_modes
                                       (mode, imode,
                                        expand_normal (node->high),
                                        unsignedp),
                                       GT, NULL_RTX, mode, unsignedp,
                                       default_label);
            }

          /* Value belongs to this node or to the left-hand subtree.  */

          emit_cmp_and_jump_insns (index,
                                   convert_modes
                                   (mode, imode,
                                    expand_normal (node->low),
                                    unsignedp),
                                   GE, NULL_RTX, mode, unsignedp,
                                   label_rtx (node->code_label));

          emit_case_nodes (index, node->left, default_label, index_type);
        }

      else
        {
          /* Node has no children so we check low and high bounds to remove
             redundant tests.  Only one of the bounds can exist,
             since otherwise this node is bounded--a case tested already.  */
          int high_bound = node_has_high_bound (node, index_type);
          int low_bound = node_has_low_bound (node, index_type);

          if (!high_bound && low_bound)
            {
              emit_cmp_and_jump_insns (index,
                                       convert_modes
                                       (mode, imode,
                                        expand_normal (node->high),
                                        unsignedp),
                                       GT, NULL_RTX, mode, unsignedp,
                                       default_label);
            }

          else if (!low_bound && high_bound)
            {
              emit_cmp_and_jump_insns (index,
                                       convert_modes
                                       (mode, imode,
                                        expand_normal (node->low),
                                        unsignedp),
                                       LT, NULL_RTX, mode, unsignedp,
                                       default_label);
            }
          else if (!low_bound && !high_bound)
            {
              /* Widen LOW and HIGH to the same width as INDEX.  */
              tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
              tree low = build1 (CONVERT_EXPR, type, node->low);
              tree high = build1 (CONVERT_EXPR, type, node->high);
              rtx low_rtx, new_index, new_bound;

              /* Instead of doing two branches, emit one unsigned branch for
                 (index-low) > (high-low).  */
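              /* For example (hypothetical bounds): for "case 5 ... 10" this
                 emits the single unsigned test (index - 5) > 5 jumping to
                 the default label, instead of separate tests for index < 5
                 and index > 10.  */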
              low_rtx = expand_expr (low, NULL_RTX, mode, EXPAND_NORMAL);
              new_index = expand_simple_binop (mode, MINUS, index, low_rtx,
                                               NULL_RTX, unsignedp,
                                               OPTAB_WIDEN);
              new_bound = expand_expr (fold_build2 (MINUS_EXPR, type,
                                                    high, low),
                                       NULL_RTX, mode, EXPAND_NORMAL);

              emit_cmp_and_jump_insns (new_index, new_bound, GT, NULL_RTX,
                                       mode, 1, default_label);
            }

          emit_jump (label_rtx (node->code_label));
        }
    }
}
