OpenCores
URL https://opencores.org/ocsvn/openrisc_me/openrisc_me/trunk

Subversion Repositories openrisc_me

[/] [openrisc/] [trunk/] [gnu-src/] [gcc-4.5.1/] [gcc/] [config/] [rx/] [rx.c] - Blame information for rev 433


Line No. Rev Author Line
1 282 jeremybenn
/* Subroutines used for code generation on Renesas RX processors.
2
   Copyright (C) 2008, 2009, 2010 Free Software Foundation, Inc.
3
   Contributed by Red Hat.
4
 
5
   This file is part of GCC.
6
 
7
   GCC is free software; you can redistribute it and/or modify
8
   it under the terms of the GNU General Public License as published by
9
   the Free Software Foundation; either version 3, or (at your option)
10
   any later version.
11
 
12
   GCC is distributed in the hope that it will be useful,
13
   but WITHOUT ANY WARRANTY; without even the implied warranty of
14
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
15
   GNU General Public License for more details.
16
 
17
   You should have received a copy of the GNU General Public License
18
   along with GCC; see the file COPYING3.  If not see
19
   <http://www.gnu.org/licenses/>.  */
20
 
21
/* To Do:
22
 
23
 * Re-enable memory-to-memory copies and fix up reload.  */
24
 
25
#include "config.h"
26
#include "system.h"
27
#include "coretypes.h"
28
#include "tm.h"
29
#include "tree.h"
30
#include "rtl.h"
31
#include "regs.h"
32
#include "hard-reg-set.h"
33
#include "real.h"
34
#include "insn-config.h"
35
#include "conditions.h"
36
#include "output.h"
37
#include "insn-attr.h"
38
#include "flags.h"
39
#include "function.h"
40
#include "expr.h"
41
#include "optabs.h"
42
#include "libfuncs.h"
43
#include "recog.h"
44
#include "toplev.h"
45
#include "reload.h"
46
#include "df.h"
47
#include "ggc.h"
48
#include "tm_p.h"
49
#include "debug.h"
50
#include "target.h"
51
#include "target-def.h"
52
#include "langhooks.h"
53
 
54
enum rx_cpu_types  rx_cpu_type = RX600;
55
 
56
/* Return true if OP is a reference to an object in a small data area.  */
57
 
58
static bool
59
rx_small_data_operand (rtx op)
60
{
61
  if (rx_small_data_limit == 0)
62
    return false;
63
 
64
  if (GET_CODE (op) == SYMBOL_REF)
65
    return SYMBOL_REF_SMALL_P (op);
66
 
67
  return false;
68
}
69
 
70
static bool
71
rx_is_legitimate_address (Mmode mode, rtx x, bool strict ATTRIBUTE_UNUSED)
72
{
73
  if (RTX_OK_FOR_BASE (x, strict))
74
    /* Register Indirect.  */
75
    return true;
76
 
77
  if (GET_MODE_SIZE (mode) == 4
78
      && (GET_CODE (x) == PRE_DEC || GET_CODE (x) == POST_INC))
79
    /* Pre-decrement Register Indirect or
80
       Post-increment Register Indirect.  */
81
    return RTX_OK_FOR_BASE (XEXP (x, 0), strict);
82
 
83
  if (GET_CODE (x) == PLUS)
84
    {
85
      rtx arg1 = XEXP (x, 0);
86
      rtx arg2 = XEXP (x, 1);
87
      rtx index = NULL_RTX;
88
 
89
      if (REG_P (arg1) && RTX_OK_FOR_BASE (arg1, strict))
90
        index = arg2;
91
      else if (REG_P (arg2) && RTX_OK_FOR_BASE (arg2, strict))
92
        index = arg1;
93
      else
94
        return false;
95
 
96
      switch (GET_CODE (index))
97
        {
98
        case CONST_INT:
99
          {
100
            /* Register Relative: REG + INT.
101
               Only positive, mode-aligned, mode-sized
102
               displacements are allowed.  */
103
            HOST_WIDE_INT val = INTVAL (index);
104
            int factor;
105
 
106
            if (val < 0)
107
              return false;
108
 
109
            switch (GET_MODE_SIZE (mode))
110
              {
111
              default:
112
              case 4: factor = 4; break;
113
              case 2: factor = 2; break;
114
              case 1: factor = 1; break;
115
              }
116
 
117
            if (val > (65535 * factor))
118
              return false;
119
            return (val % factor) == 0;
120
          }
121
 
122
        case REG:
123
          /* Unscaled Indexed Register Indirect: REG + REG
124
             Size has to be "QI", REG has to be valid.  */
125
          return GET_MODE_SIZE (mode) == 1 && RTX_OK_FOR_BASE (index, strict);
126
 
127
        case MULT:
128
          {
129
            /* Scaled Indexed Register Indirect: REG + (REG * FACTOR)
130
               Factor has to equal the mode size, REG has to be valid.  */
131
            rtx factor;
132
 
133
            factor = XEXP (index, 1);
134
            index = XEXP (index, 0);
135
 
136
            return REG_P (index)
137
              && RTX_OK_FOR_BASE (index, strict)
138
              && CONST_INT_P (factor)
139
              && GET_MODE_SIZE (mode) == INTVAL (factor);
140
          }
141
 
142
        default:
143
          return false;
144
        }
145
    }
146
 
147
  /* Small data area accesses turn into register relative offsets.  */
148
  return rx_small_data_operand (x);
149
}
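
/* Put another way: for an SImode (4 byte) access rx_is_legitimate_address
   accepts a plain register, the [-REG] and [REG+] auto-modified forms,
   REG plus a non-negative displacement that is a multiple of 4 and at most
   65535 * 4, and REG + (REG * 4) scaled indexing.  The unscaled REG + REG
   form is only legitimate for QImode accesses, and a SYMBOL_REF into the
   small data area is accepted as well.  */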
150
 
151
/* Returns TRUE for simple memory addresses, i.e. ones
152
   that do not involve register indirect addressing
153
   or pre/post increment/decrement.  */
154
 
155
bool
156
rx_is_restricted_memory_address (rtx mem, enum machine_mode mode)
157
{
158
  rtx base, index;
159
 
160
  if (! rx_is_legitimate_address
161
      (mode, mem, reload_in_progress || reload_completed))
162
    return false;
163
 
164
  switch (GET_CODE (mem))
165
    {
166
    case REG:
167
      /* Simple memory addresses are OK.  */
168
      return true;
169
 
170
    case PRE_DEC:
171
    case POST_INC:
172
      return false;
173
 
174
    case PLUS:
175
      /* Only allow REG+INT addressing.  */
176
      base = XEXP (mem, 0);
177
      index = XEXP (mem, 1);
178
 
179
      return RX_REG_P (base) && CONST_INT_P (index);
180
 
181
    case SYMBOL_REF:
182
      /* Can happen when small data is being supported.
183
         Assume that it will be resolved into GP+INT.  */
184
      return true;
185
 
186
    default:
187
      gcc_unreachable ();
188
    }
189
}
190
 
191
bool
192
rx_is_mode_dependent_addr (rtx addr)
193
{
194
  if (GET_CODE (addr) == CONST)
195
    addr = XEXP (addr, 0);
196
 
197
  switch (GET_CODE (addr))
198
    {
199
      /* --REG and REG++ only work in SImode.  */
200
    case PRE_DEC:
201
    case POST_INC:
202
      return true;
203
 
204
    case MINUS:
205
    case PLUS:
206
      if (! REG_P (XEXP (addr, 0)))
207
        return true;
208
 
209
      addr = XEXP (addr, 1);
210
 
211
      switch (GET_CODE (addr))
212
        {
213
        case REG:
214
          /* REG+REG only works in SImode.  */
215
          return true;
216
 
217
        case CONST_INT:
218
          /* REG+INT is only mode independent if INT is a
219
             multiple of 4, positive and will fit into 8-bits.  */
220
          if (((INTVAL (addr) & 3) == 0)
221
              && IN_RANGE (INTVAL (addr), 4, 252))
222
            return false;
223
          return true;
224
 
225
        case SYMBOL_REF:
226
        case LABEL_REF:
227
          return true;
228
 
229
        case MULT:
230
          gcc_assert (REG_P (XEXP (addr, 0)));
231
          gcc_assert (CONST_INT_P (XEXP (addr, 1)));
232
          /* REG+REG*SCALE is always mode dependent.  */
233
          return true;
234
 
235
        default:
236
          /* Not recognized, so treat as mode dependent.  */
237
          return true;
238
        }
239
 
240
    case CONST_INT:
241
    case SYMBOL_REF:
242
    case LABEL_REF:
243
    case REG:
244
      /* These are all mode independent.  */
245
      return false;
246
 
247
    default:
248
      /* Everything else is unrecognized,
249
         so treat as mode dependent.  */
250
      return true;
251
    }
252
}
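
/* For example, (plus (reg) (const_int 8)) is mode independent - the
   displacement is a positive multiple of 4 inside the 4..252 range -
   whereas (plus (reg) (const_int 2)) and all auto-increment addresses
   are mode dependent.  */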
253
 
254
/* A C compound statement to output to stdio stream FILE the
255
   assembler syntax for an instruction operand that is a memory
256
   reference whose address is ADDR.  */
257
 
258
void
259
rx_print_operand_address (FILE * file, rtx addr)
260
{
261
  switch (GET_CODE (addr))
262
    {
263
    case REG:
264
      fprintf (file, "[");
265
      rx_print_operand (file, addr, 0);
266
      fprintf (file, "]");
267
      break;
268
 
269
    case PRE_DEC:
270
      fprintf (file, "[-");
271
      rx_print_operand (file, XEXP (addr, 0), 0);
272
      fprintf (file, "]");
273
      break;
274
 
275
    case POST_INC:
276
      fprintf (file, "[");
277
      rx_print_operand (file, XEXP (addr, 0), 0);
278
      fprintf (file, "+]");
279
      break;
280
 
281
    case PLUS:
282
      {
283
        rtx arg1 = XEXP (addr, 0);
284
        rtx arg2 = XEXP (addr, 1);
285
        rtx base, index;
286
 
287
        if (REG_P (arg1) && RTX_OK_FOR_BASE (arg1, true))
288
          base = arg1, index = arg2;
289
        else if (REG_P (arg2) && RTX_OK_FOR_BASE (arg2, true))
290
          base = arg2, index = arg1;
291
        else
292
          {
293
            rx_print_operand (file, arg1, 0);
294
            fprintf (file, " + ");
295
            rx_print_operand (file, arg2, 0);
296
            break;
297
          }
298
 
299
        if (REG_P (index) || GET_CODE (index) == MULT)
300
          {
301
            fprintf (file, "[");
302
            rx_print_operand (file, index, 'A');
303
            fprintf (file, ",");
304
          }
305
        else /* GET_CODE (index) == CONST_INT  */
306
          {
307
            rx_print_operand (file, index, 'A');
308
            fprintf (file, "[");
309
          }
310
        rx_print_operand (file, base, 0);
311
        fprintf (file, "]");
312
        break;
313
      }
314
 
315
    case LABEL_REF:
316
    case SYMBOL_REF:
317
    case CONST:
318
      fprintf (file, "#");
319
    default:
320
      output_addr_const (file, addr);
321
      break;
322
    }
323
}
324
 
325
static void
326
rx_print_integer (FILE * file, HOST_WIDE_INT val)
327
{
328
  if (IN_RANGE (val, -64, 64))
329
    fprintf (file, HOST_WIDE_INT_PRINT_DEC, val);
330
  else
331
    fprintf (file,
332
             TARGET_AS100_SYNTAX
333
             ? "0%" HOST_WIDE_INT_PRINT "xH" : HOST_WIDE_INT_PRINT_HEX,
334
             val);
335
}
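
/* For example, 33 is printed as "33" (small values stay in decimal),
   while 100 is printed as "0x64", or as "064H" when TARGET_AS100_SYNTAX
   is in effect.  */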
336
 
337
static bool
338
rx_assemble_integer (rtx x, unsigned int size, int is_aligned)
339
{
340
  const char *  op = integer_asm_op (size, is_aligned);
341
 
342
  if (! CONST_INT_P (x))
343
    return default_assemble_integer (x, size, is_aligned);
344
 
345
  if (op == NULL)
346
    return false;
347
  fputs (op, asm_out_file);
348
 
349
  rx_print_integer (asm_out_file, INTVAL (x));
350
  fputc ('\n', asm_out_file);
351
  return true;
352
}
353
 
354
 
355
int rx_float_compare_mode;
356
 
357
/* Handles the insertion of a single operand into the assembler output.
358
   The %<letter> directives supported are:
359
 
360
     %A  Print an operand without a leading # character.
361
     %B  Print an integer comparison name.
362
     %C  Print a control register name.
363
     %F  Print a condition code flag name.
364
     %H  Print high part of a DImode register, integer or address.
365
     %L  Print low part of a DImode register, integer or address.
366
     %N  Print the negation of the immediate value.
367
     %Q  If the operand is a MEM, then correctly generate
368
         register indirect or register relative addressing.  */
369
 
370
void
371
rx_print_operand (FILE * file, rtx op, int letter)
372
{
373
  switch (letter)
374
    {
375
    case 'A':
376
      /* Print an operand without a leading #.  */
377
      if (MEM_P (op))
378
        op = XEXP (op, 0);
379
 
380
      switch (GET_CODE (op))
381
        {
382
        case LABEL_REF:
383
        case SYMBOL_REF:
384
          output_addr_const (file, op);
385
          break;
386
        case CONST_INT:
387
          fprintf (file, "%ld", (long) INTVAL (op));
388
          break;
389
        default:
390
          rx_print_operand (file, op, 0);
391
          break;
392
        }
393
      break;
394
 
395
    case 'B':
396
      switch (GET_CODE (op))
397
        {
398
        case LT:  fprintf (file, "lt"); break;
399
        case GE:  fprintf (file, "ge"); break;
400
        case GT:  fprintf (file, "gt"); break;
401
        case LE:  fprintf (file, "le"); break;
402
        case GEU: fprintf (file, "geu"); break;
403
        case LTU: fprintf (file, "ltu"); break;
404
        case GTU: fprintf (file, "gtu"); break;
405
        case LEU: fprintf (file, "leu"); break;
406
        case EQ:  fprintf (file, "eq"); break;
407
        case NE:  fprintf (file, "ne"); break;
408
        default:  debug_rtx (op); gcc_unreachable ();
409
        }
410
      break;
411
 
412
    case 'C':
413
      gcc_assert (CONST_INT_P (op));
414
      switch (INTVAL (op))
415
        {
416
        case 0:   fprintf (file, "psw"); break;
417
        case 2:   fprintf (file, "usp"); break;
418
        case 3:   fprintf (file, "fpsw"); break;
419
        case 4:   fprintf (file, "cpen"); break;
420
        case 8:   fprintf (file, "bpsw"); break;
421
        case 9:   fprintf (file, "bpc"); break;
422
        case 0xa: fprintf (file, "isp"); break;
423
        case 0xb: fprintf (file, "fintv"); break;
424
        case 0xc: fprintf (file, "intb"); break;
425
        default:
426
          warning (0, "unreocgnized control register number: %d - using 'psw'",
427
                   (int) INTVAL (op));
428
          fprintf (file, "psw");
429
          break;
430
        }
431
      break;
432
 
433
    case 'F':
434
      gcc_assert (CONST_INT_P (op));
435
      switch (INTVAL (op))
436
        {
437
        case 0: case 'c': case 'C': fprintf (file, "C"); break;
438
        case 1: case 'z': case 'Z': fprintf (file, "Z"); break;
439
        case 2: case 's': case 'S': fprintf (file, "S"); break;
440
        case 3: case 'o': case 'O': fprintf (file, "O"); break;
441
        case 8: case 'i': case 'I': fprintf (file, "I"); break;
442
        case 9: case 'u': case 'U': fprintf (file, "U"); break;
443
        default:
444
          gcc_unreachable ();
445
        }
446
      break;
447
 
448
    case 'H':
449
      switch (GET_CODE (op))
450
        {
451
        case REG:
452
          fprintf (file, "%s", reg_names [REGNO (op) + (WORDS_BIG_ENDIAN ? 0 : 1)]);
453
          break;
454
        case CONST_INT:
455
          {
456
            HOST_WIDE_INT v = INTVAL (op);
457
 
458
            fprintf (file, "#");
459
            /* Trickery to avoid problems with shifting 32 bits at a time.  */
460
            v = v >> 16;
461
            v = v >> 16;
462
            rx_print_integer (file, v);
463
            break;
464
          }
465
        case CONST_DOUBLE:
466
          fprintf (file, "#");
467
          rx_print_integer (file, CONST_DOUBLE_HIGH (op));
468
          break;
469
        case MEM:
470
          if (! WORDS_BIG_ENDIAN)
471
            op = adjust_address (op, SImode, 4);
472
          output_address (XEXP (op, 0));
473
          break;
474
        default:
475
          gcc_unreachable ();
476
        }
477
      break;
478
 
479
    case 'L':
480
      switch (GET_CODE (op))
481
        {
482
        case REG:
483
          fprintf (file, "%s", reg_names [REGNO (op) + (WORDS_BIG_ENDIAN ? 1 : 0)]);
484
          break;
485
        case CONST_INT:
486
          fprintf (file, "#");
487
          rx_print_integer (file, INTVAL (op) & 0xffffffff);
488
          break;
489
        case CONST_DOUBLE:
490
          fprintf (file, "#");
491
          rx_print_integer (file, CONST_DOUBLE_LOW (op));
492
          break;
493
        case MEM:
494
          if (WORDS_BIG_ENDIAN)
495
            op = adjust_address (op, SImode, 4);
496
          output_address (XEXP (op, 0));
497
          break;
498
        default:
499
          gcc_unreachable ();
500
        }
501
      break;
502
 
503
    case 'N':
504
      gcc_assert (CONST_INT_P (op));
505
      fprintf (file, "#");
506
      rx_print_integer (file, - INTVAL (op));
507
      break;
508
 
509
    case 'Q':
510
      if (MEM_P (op))
511
        {
512
          HOST_WIDE_INT offset;
513
 
514
          op = XEXP (op, 0);
515
 
516
          if (REG_P (op))
517
            offset = 0;
518
          else if (GET_CODE (op) == PLUS)
519
            {
520
              rtx displacement;
521
 
522
              if (REG_P (XEXP (op, 0)))
523
                {
524
                  displacement = XEXP (op, 1);
525
                  op = XEXP (op, 0);
526
                }
527
              else
528
                {
529
                  displacement = XEXP (op, 0);
530
                  op = XEXP (op, 1);
531
                  gcc_assert (REG_P (op));
532
                }
533
 
534
              gcc_assert (CONST_INT_P (displacement));
535
              offset = INTVAL (displacement);
536
              gcc_assert (offset >= 0);
537
 
538
              fprintf (file, "%ld", offset);
539
            }
540
          else
541
            gcc_unreachable ();
542
 
543
          fprintf (file, "[");
544
          rx_print_operand (file, op, 0);
545
          fprintf (file, "].");
546
 
547
          switch (GET_MODE_SIZE (GET_MODE (op)))
548
            {
549
            case 1:
550
              gcc_assert (offset < 65535 * 1);
551
              fprintf (file, "B");
552
              break;
553
            case 2:
554
              gcc_assert (offset % 2 == 0);
555
              gcc_assert (offset < 65535 * 2);
556
              fprintf (file, "W");
557
              break;
558
            default:
559
              gcc_assert (offset % 4 == 0);
560
              gcc_assert (offset < 65535 * 4);
561
              fprintf (file, "L");
562
              break;
563
            }
564
          break;
565
        }
566
 
567
      /* Fall through.  */
568
 
569
    default:
570
      switch (GET_CODE (op))
571
        {
572
        case MULT:
573
          /* Should be the scaled part of an
574
             indexed register indirect address.  */
575
          {
576
            rtx base = XEXP (op, 0);
577
            rtx index = XEXP (op, 1);
578
 
579
            /* Check for a swapped index register and scaling factor.
580
               Not sure if this can happen, but be prepared to handle it.  */
581
            if (CONST_INT_P (base) && REG_P (index))
582
              {
583
                rtx tmp = base;
584
                base = index;
585
                index = tmp;
586
              }
587
 
588
            gcc_assert (REG_P (base));
589
            gcc_assert (REGNO (base) < FIRST_PSEUDO_REGISTER);
590
            gcc_assert (CONST_INT_P (index));
591
            /* Do not try to verify the value of the scalar as it is based
592
               on the mode of the MEM not the mode of the MULT.  (Which
593
               will always be SImode).  */
594
            fprintf (file, "%s", reg_names [REGNO (base)]);
595
            break;
596
          }
597
 
598
        case MEM:
599
          output_address (XEXP (op, 0));
600
          break;
601
 
602
        case PLUS:
603
          output_address (op);
604
          break;
605
 
606
        case REG:
607
          gcc_assert (REGNO (op) < FIRST_PSEUDO_REGISTER);
608
          fprintf (file, "%s", reg_names [REGNO (op)]);
609
          break;
610
 
611
        case SUBREG:
612
          gcc_assert (subreg_regno (op) < FIRST_PSEUDO_REGISTER);
613
          fprintf (file, "%s", reg_names [subreg_regno (op)]);
614
          break;
615
 
616
          /* This will only be single precision....  */
617
        case CONST_DOUBLE:
618
          {
619
            unsigned long val;
620
            REAL_VALUE_TYPE rv;
621
 
622
            REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
623
            REAL_VALUE_TO_TARGET_SINGLE (rv, val);
624
            fprintf (file, TARGET_AS100_SYNTAX ? "#0%lxH" : "#0x%lx", val);
625
            break;
626
          }
627
 
628
        case CONST_INT:
629
          fprintf (file, "#");
630
          rx_print_integer (file, INTVAL (op));
631
          break;
632
 
633
        case SYMBOL_REF:
634
        case CONST:
635
        case LABEL_REF:
636
        case CODE_LABEL:
637
        case UNSPEC:
638
          rx_print_operand_address (file, op);
639
          break;
640
 
641
        default:
642
          gcc_unreachable ();
643
        }
644
      break;
645
    }
646
}
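
/* For example, "%B0" applied to an (eq ...) comparison prints "eq",
   "%C0" applied to (const_int 0) prints "psw", and "%N0" applied to
   (const_int 5) prints "#-5".  */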
647
 
648
/* Returns an assembler template for a move instruction.  */
649
 
650
char *
651
rx_gen_move_template (rtx * operands, bool is_movu)
652
{
653
  static char  out_template [64];
654
  const char * extension = TARGET_AS100_SYNTAX ? ".L" : "";
655
  const char * src_template;
656
  const char * dst_template;
657
  rtx          dest = operands[0];
658
  rtx          src  = operands[1];
659
 
660
  /* Decide which extension, if any, should be given to the move instruction.  */
661
  switch (CONST_INT_P (src) ? GET_MODE (dest) : GET_MODE (src))
662
    {
663
    case QImode:
664
      /* The .B extension is not valid when
665
         loading an immediate into a register.  */
666
      if (! REG_P (dest) || ! CONST_INT_P (src))
667
        extension = ".B";
668
      break;
669
    case HImode:
670
      if (! REG_P (dest) || ! CONST_INT_P (src))
671
        /* The .W extension is not valid when
672
           loading an immediate into a register.  */
673
        extension = ".W";
674
      break;
675
    case SFmode:
676
    case SImode:
677
      extension = ".L";
678
      break;
679
    case VOIDmode:
680
      /* This mode is used by constants.  */
681
      break;
682
    default:
683
      debug_rtx (src);
684
      gcc_unreachable ();
685
    }
686
 
687
  if (MEM_P (src) && rx_small_data_operand (XEXP (src, 0)))
688
    src_template = "%%gp(%A1)[r13]";
689
  else
690
    src_template = "%1";
691
 
692
  if (MEM_P (dest) && rx_small_data_operand (XEXP (dest, 0)))
693
    dst_template = "%%gp(%A0)[r13]";
694
  else
695
    dst_template = "%0";
696
 
697
  sprintf (out_template, "%s%s\t%s, %s", is_movu ? "movu" : "mov",
698
           extension, src_template, dst_template);
699
  return out_template;
700
}
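
/* For example, an SImode register-to-register move yields the template
   "mov.L\t%1, %0"; when the source lies in the small data area the
   operand is instead addressed relative to r13 via the %gp() operator.  */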
701
 
702
/* Returns an assembler template for a conditional branch instruction.  */
703
 
704
const char *
705
rx_gen_cond_branch_template (rtx condition, bool reversed)
706
{
707
  enum rtx_code code = GET_CODE (condition);
708
 
709
  if (reversed)
710
    {
711
      if (rx_float_compare_mode)
712
        code = reverse_condition_maybe_unordered (code);
713
      else
714
        code = reverse_condition (code);
715
    }
716
 
717
  /* We do not worry about encoding the branch length here as GAS knows
718
     how to choose the smallest version, and how to expand a branch that
719
     is to a destination that is out of range.  */
720
 
721
  switch (code)
722
    {
723
    case UNEQ:      return "bo\t1f\n\tbeq\t%0\n1:";
724
    case LTGT:      return "bo\t1f\n\tbne\t%0\n1:";
725
    case UNLT:      return "bo\t1f\n\tbn\t%0\n1:";
726
    case UNGE:      return "bo\t1f\n\tbpz\t%0\n1:";
727
    case UNLE:      return "bo\t1f\n\tbgt\t1f\n\tbra\t%0\n1:";
728
    case UNGT:      return "bo\t1f\n\tble\t1f\n\tbra\t%0\n1:";
729
    case UNORDERED: return "bo\t%0";
730
    case ORDERED:   return "bno\t%0";
731
 
732
    case LT:        return rx_float_compare_mode ? "bn\t%0" : "blt\t%0";
733
    case GE:        return rx_float_compare_mode ? "bpz\t%0" : "bge\t%0";
734
    case GT:        return "bgt\t%0";
735
    case LE:        return "ble\t%0";
736
    case GEU:       return "bgeu\t%0";
737
    case LTU:       return "bltu\t%0";
738
    case GTU:       return "bgtu\t%0";
739
    case LEU:       return "bleu\t%0";
740
    case EQ:        return "beq\t%0";
741
    case NE:        return "bne\t%0";
742
    default:
743
      gcc_unreachable ();
744
    }
745
}
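
/* For example, an integer EQ test yields "beq\t%0" and, when reversed,
   "bne\t%0"; with rx_float_compare_mode set an LT test yields "bn\t%0".  */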
746
 
747
/* Return VALUE rounded up to the next ALIGNMENT boundary.  */
748
 
749
static inline unsigned int
750
rx_round_up (unsigned int value, unsigned int alignment)
751
{
752
  alignment -= 1;
753
  return (value + alignment) & (~ alignment);
754
}
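
/* ALIGNMENT must be a power of two; for example rx_round_up (6, 4) == 8
   and rx_round_up (8, 4) == 8.  */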
755
 
756
/* Return the number of bytes in the argument registers
757
   occupied by an argument of type TYPE and mode MODE.  */
758
 
759
unsigned int
760
rx_function_arg_size (Mmode mode, const_tree type)
761
{
762
  unsigned int num_bytes;
763
 
764
  num_bytes = (mode == BLKmode)
765
    ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
766
  return rx_round_up (num_bytes, UNITS_PER_WORD);
767
}
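
/* For example, with 4 byte words a 6 byte structure passed by value
   occupies 8 bytes, i.e. two argument registers or stack slots.  */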
768
 
769
#define NUM_ARG_REGS            4
770
#define MAX_NUM_ARG_BYTES       (NUM_ARG_REGS * UNITS_PER_WORD)
771
 
772
/* Return an RTL expression describing the register holding a function
773
   parameter of mode MODE and type TYPE or NULL_RTX if the parameter should
774
   be passed on the stack.  CUM describes the previous parameters to the
775
   function and NAMED is false if the parameter is part of a variable
776
   parameter list, or the last named parameter before the start of a
777
   variable parameter list.  */
778
 
779
rtx
780
rx_function_arg (Fargs * cum, Mmode mode, const_tree type, bool named)
781
{
782
  unsigned int next_reg;
783
  unsigned int bytes_so_far = *cum;
784
  unsigned int size;
785
  unsigned int rounded_size;
786
 
787
  /* An exploded version of rx_function_arg_size.  */
788
  size = (mode == BLKmode) ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
789
  /* If the size is not known it cannot be passed in registers.  */
790
  if (size < 1)
791
    return NULL_RTX;
792
 
793
  rounded_size = rx_round_up (size, UNITS_PER_WORD);
794
 
795
  /* Don't pass this arg via registers if there
796
     are insufficient registers to hold all of it.  */
797
  if (rounded_size + bytes_so_far > MAX_NUM_ARG_BYTES)
798
    return NULL_RTX;
799
 
800
  /* Unnamed arguments and the last named argument in a
801
     variadic function are always passed on the stack.  */
802
  if (!named)
803
    return NULL_RTX;
804
 
805
  /* Structures must occupy an exact number of registers,
806
     otherwise they are passed on the stack.  */
807
  if ((type == NULL || AGGREGATE_TYPE_P (type))
808
      && (size % UNITS_PER_WORD) != 0)
809
    return NULL_RTX;
810
 
811
  next_reg = (bytes_so_far / UNITS_PER_WORD) + 1;
812
 
813
  return gen_rtx_REG (mode, next_reg);
814
}
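
/* For example, the first named SImode argument of an ordinary function
   is passed in register 1 (r1 on the RX); arguments continue to be
   passed in registers until the MAX_NUM_ARG_BYTES (16 byte) limit is
   reached, after which they are passed on the stack.  */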
815
 
816
/* Return an RTL describing where a function return value of type RET_TYPE
817
   is held.  */
818
 
819
static rtx
820
rx_function_value (const_tree ret_type,
821
                   const_tree fn_decl_or_type ATTRIBUTE_UNUSED,
822
                   bool       outgoing ATTRIBUTE_UNUSED)
823
{
824
  return gen_rtx_REG (TYPE_MODE (ret_type), FUNC_RETURN_REGNUM);
825
}
826
 
827
static bool
828
rx_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
829
{
830
  HOST_WIDE_INT size;
831
 
832
  if (TYPE_MODE (type) != BLKmode
833
      && ! AGGREGATE_TYPE_P (type))
834
    return false;
835
 
836
  size = int_size_in_bytes (type);
837
  /* Large structs and those whose size is not an
838
     exact multiple of 4 are returned in memory.  */
839
  return size < 1
840
    || size > 16
841
    || (size % UNITS_PER_WORD) != 0;
842
}
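
/* For example, an 8 byte structure is returned in registers, whereas a
   6 byte or a 20 byte structure is returned in memory.  */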
843
 
844
static rtx
845
rx_struct_value_rtx (tree fndecl ATTRIBUTE_UNUSED,
846
                     int incoming ATTRIBUTE_UNUSED)
847
{
848
  return gen_rtx_REG (Pmode, STRUCT_VAL_REGNUM);
849
}
850
 
851
static bool
852
rx_return_in_msb (const_tree valtype)
853
{
854
  return TARGET_BIG_ENDIAN_DATA
855
    && (AGGREGATE_TYPE_P (valtype) || TREE_CODE (valtype) == COMPLEX_TYPE);
856
}
857
 
858
/* Returns true if the provided function has the specified attribute.  */
859
 
860
static inline bool
861
has_func_attr (const_tree decl, const char * func_attr)
862
{
863
  if (decl == NULL_TREE)
864
    decl = current_function_decl;
865
 
866
  return lookup_attribute (func_attr, DECL_ATTRIBUTES (decl)) != NULL_TREE;
867
}
868
 
869
/* Returns true if the provided function has the "fast_interrupt" attribute.  */
870
 
871
static inline bool
872
is_fast_interrupt_func (const_tree decl)
873
{
874
  return has_func_attr (decl, "fast_interrupt");
875
}
876
 
877
/* Returns true if the provided function has the "interrupt" attribute.  */
878
 
879
static inline bool
880
is_interrupt_func (const_tree decl)
881
{
882
  return has_func_attr (decl, "interrupt");
883
}
884
 
885
/* Returns true if the provided function has the "naked" attribute.  */
886
 
887
static inline bool
888
is_naked_func (const_tree decl)
889
{
890
  return has_func_attr (decl, "naked");
891
}
892
 
893
static bool use_fixed_regs = false;
894
 
895
void
896
rx_conditional_register_usage (void)
897
{
898
  static bool using_fixed_regs = false;
899
 
900
  if (rx_small_data_limit > 0)
901
    fixed_regs[GP_BASE_REGNUM] = call_used_regs [GP_BASE_REGNUM] = 1;
902
 
903
  if (use_fixed_regs != using_fixed_regs)
904
    {
905
      static char saved_fixed_regs[FIRST_PSEUDO_REGISTER];
906
      static char saved_call_used_regs[FIRST_PSEUDO_REGISTER];
907
 
908
      if (use_fixed_regs)
909
        {
910
          unsigned int r;
911
 
912
          memcpy (saved_fixed_regs, fixed_regs, sizeof fixed_regs);
913
          memcpy (saved_call_used_regs, call_used_regs, sizeof call_used_regs);
914
 
915
          /* This is for fast interrupt handlers.  Any register in
916
             the range r10 to r13 (inclusive) that is currently
917
             marked as fixed is now a viable, call-used register.  */
918
          for (r = 10; r <= 13; r++)
919
            if (fixed_regs[r])
920
              {
921
                fixed_regs[r] = 0;
922
                call_used_regs[r] = 1;
923
              }
924
 
925
          /* Mark r7 as fixed.  This is just a hack to avoid
926
             altering the reg_alloc_order array so that the newly
927
             freed r10-r13 registers are the preferred registers.  */
928
          fixed_regs[7] = call_used_regs[7] = 1;
929
        }
930
      else
931
        {
932
          /* Restore the normal register masks.  */
933
          memcpy (fixed_regs, saved_fixed_regs, sizeof fixed_regs);
934
          memcpy (call_used_regs, saved_call_used_regs, sizeof call_used_regs);
935
        }
936
 
937
      using_fixed_regs = use_fixed_regs;
938
    }
939
}
940
 
941
/* Perform any actions necessary before starting to compile FNDECL.
942
   For the RX we use this to make sure that we have the correct
943
   set of register masks selected.  If FNDECL is NULL then we are
944
   compiling top level things.  */
945
 
946
static void
947
rx_set_current_function (tree fndecl)
948
{
949
  /* Remember the last target of rx_set_current_function.  */
950
  static tree rx_previous_fndecl;
951
  bool prev_was_fast_interrupt;
952
  bool current_is_fast_interrupt;
953
 
954
  /* Only change the context if the function changes.  This hook is called
955
     several times in the course of compiling a function, and we don't want
956
     to slow things down too much or call target_reinit when it isn't safe.  */
957
  if (fndecl == rx_previous_fndecl)
958
    return;
959
 
960
  prev_was_fast_interrupt
961
    = rx_previous_fndecl
962
    ? is_fast_interrupt_func (rx_previous_fndecl) : false;
963
 
964
  current_is_fast_interrupt
965
    = fndecl ? is_fast_interrupt_func (fndecl) : false;
966
 
967
  if (prev_was_fast_interrupt != current_is_fast_interrupt)
968
    {
969
      use_fixed_regs = current_is_fast_interrupt;
970
      target_reinit ();
971
    }
972
 
973
  rx_previous_fndecl = fndecl;
974
}
975
 
976
/* Typical stack layout should look like this after the function's prologue:
977
 
978
                            |    |
979
                              --                       ^
980
                            |    | \                   |
981
                            |    |   arguments saved   | Increasing
982
                            |    |   on the stack      |  addresses
983
    PARENT   arg pointer -> |    | /
984
  -------------------------- ---- -------------------
985
    CHILD                   |ret |   return address
986
                              --
987
                            |    | \
988
                            |    |   call saved
989
                            |    |   registers
990
                            |    | /
991
                              --
992
                            |    | \
993
                            |    |   local
994
                            |    |   variables
995
        frame pointer ->    |    | /
996
                              --
997
                            |    | \
998
                            |    |   outgoing          | Decreasing
999
                            |    |   arguments         |  addresses
1000
   current stack pointer -> |    | /                   |
1001
  -------------------------- ---- ------------------   V
1002
                            |    |                 */
1003
 
1004
static unsigned int
1005
bit_count (unsigned int x)
1006
{
1007
  const unsigned int m1 = 0x55555555;
1008
  const unsigned int m2 = 0x33333333;
1009
  const unsigned int m4 = 0x0f0f0f0f;
1010
 
1011
  x -= (x >> 1) & m1;
1012
  x = (x & m2) + ((x >> 2) & m2);
1013
  x = (x + (x >> 4)) & m4;
1014
  x += x >>  8;
1015
 
1016
  return (x + (x >> 16)) & 0x3f;
1017
}
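
/* A straightforward parallel population count; for example
   bit_count (0xf0) == 4 and bit_count (0x80000001) == 2.  */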
1018
 
1019
#define MUST_SAVE_ACC_REGISTER                  \
1020
  (TARGET_SAVE_ACC_REGISTER                     \
1021
   && (is_interrupt_func (NULL_TREE)            \
1022
       || is_fast_interrupt_func (NULL_TREE)))
1023
 
1024
/* Returns either the lowest numbered and highest numbered registers that
1025
   occupy the call-saved area of the stack frame, if the registers are
1026
   stored as a contiguous block, or else a bitmask of the individual
1027
   registers if they are stored piecemeal.
1028
 
1029
   Also computes the size of the frame and the size of the outgoing
1030
   arguments block (in bytes).  */
1031
 
1032
static void
1033
rx_get_stack_layout (unsigned int * lowest,
1034
                     unsigned int * highest,
1035
                     unsigned int * register_mask,
1036
                     unsigned int * frame_size,
1037
                     unsigned int * stack_size)
1038
{
1039
  unsigned int reg;
1040
  unsigned int low;
1041
  unsigned int high;
1042
  unsigned int fixed_reg = 0;
1043
  unsigned int save_mask;
1044
  unsigned int pushed_mask;
1045
  unsigned int unneeded_pushes;
1046
 
1047
  if (is_naked_func (NULL_TREE))
1048
    {
1049
      /* Naked functions do not create their own stack frame.
1050
         Instead the programmer must do that for us.  */
1051
      * lowest = 0;
1052
      * highest = 0;
1053
      * register_mask = 0;
1054
      * frame_size = 0;
1055
      * stack_size = 0;
1056
      return;
1057
    }
1058
 
1059
  for (save_mask = high = low = 0, reg = 1; reg < CC_REGNUM; reg++)
1060
    {
1061
      if (df_regs_ever_live_p (reg)
1062
          && (! call_used_regs[reg]
1063
              /* Even call clobbered registers must
1064
                 be pushed inside interrupt handlers.  */
1065
              || is_interrupt_func (NULL_TREE)
1066
              /* Likewise for fast interrupt handlers, except registers r10 -
1067
                 r13.  These are normally call-saved, but may have been set
1068
                 to call-used by rx_conditional_register_usage.  If so then
1069
                 they can be used in the fast interrupt handler without
1070
                 saving them on the stack.  */
1071
              || (is_fast_interrupt_func (NULL_TREE)
1072
                  && ! IN_RANGE (reg, 10, 13))))
1073
        {
1074
          if (low == 0)
1075
            low = reg;
1076
          high = reg;
1077
 
1078
          save_mask |= 1 << reg;
1079
        }
1080
 
1081
      /* Remember if we see a fixed register
1082
         after having found the low register.  */
1083
      if (low != 0 && fixed_reg == 0 && fixed_regs [reg])
1084
        fixed_reg = reg;
1085
    }
1086
 
1087
  /* If we have to save the accumulator register, make sure
1088
     that at least two registers are pushed into the frame.  */
1089
  if (MUST_SAVE_ACC_REGISTER
1090
      && bit_count (save_mask) < 2)
1091
    {
1092
      save_mask |= (1 << 13) | (1 << 14);
1093
      if (low == 0)
1094
        low = 13;
1095
      if (high == 0 || low == high)
1096
        high = low + 1;
1097
    }
1098
 
1099
  /* Decide if it would be faster to fill in the call-saved area of the stack
1100
     frame using multiple PUSH instructions instead of a single PUSHM
1101
     instruction.
1102
 
1103
     SAVE_MASK is a bitmask of the registers that must be stored in the
1104
     call-save area.  PUSHED_MASK is a bitmask of the registers that would
1105
     be pushed into the area if we used a PUSHM instruction.  UNNEEDED_PUSHES
1106
     is a bitmask of those registers in pushed_mask that are not in
1107
     save_mask.
1108
 
1109
     We use a simple heuristic that says that it is better to use
1110
     multiple PUSH instructions if the number of unnecessary pushes is
1111
     greater than the number of necessary pushes.
1112
 
1113
     We also use multiple PUSH instructions if there are any fixed registers
1114
     between LOW and HIGH.  The only way that this can happen is if the user
1115
     has specified --fixed-<reg-name> on the command line and in such
1116
     circumstances we do not want to touch the fixed registers at all.
1117
 
1118
     FIXME: Is it worth improving this heuristic ?  */
1119
  pushed_mask = (-1 << low) & ~(-1 << (high + 1));
1120
  unneeded_pushes = (pushed_mask & (~ save_mask)) & pushed_mask;
1121
 
1122
  if ((fixed_reg && fixed_reg <= high)
1123
      || (optimize_function_for_speed_p (cfun)
1124
          && bit_count (save_mask) < bit_count (unneeded_pushes)))
1125
    {
1126
      /* Use multiple pushes.  */
1127
      * lowest = 0;
1128
      * highest = 0;
1129
      * register_mask = save_mask;
1130
    }
1131
  else
1132
    {
1133
      /* Use one push multiple instruction.  */
1134
      * lowest = low;
1135
      * highest = high;
1136
      * register_mask = 0;
1137
    }
1138
 
1139
  * frame_size = rx_round_up
1140
    (get_frame_size (), STACK_BOUNDARY / BITS_PER_UNIT);
1141
 
1142
  if (crtl->args.size > 0)
1143
    * frame_size += rx_round_up
1144
      (crtl->args.size, STACK_BOUNDARY / BITS_PER_UNIT);
1145
 
1146
  * stack_size = rx_round_up
1147
    (crtl->outgoing_args_size, STACK_BOUNDARY / BITS_PER_UNIT);
1148
}
1149
 
1150
/* Generate a PUSHM instruction that matches the given operands.  */
1151
 
1152
void
1153
rx_emit_stack_pushm (rtx * operands)
1154
{
1155
  HOST_WIDE_INT last_reg;
1156
  rtx first_push;
1157
 
1158
  gcc_assert (CONST_INT_P (operands[0]));
1159
  last_reg = (INTVAL (operands[0]) / UNITS_PER_WORD) - 1;
1160
 
1161
  gcc_assert (GET_CODE (operands[1]) == PARALLEL);
1162
  first_push = XVECEXP (operands[1], 0, 1);
1163
  gcc_assert (SET_P (first_push));
1164
  first_push = SET_SRC (first_push);
1165
  gcc_assert (REG_P (first_push));
1166
 
1167
  asm_fprintf (asm_out_file, "\tpushm\t%s-%s\n",
1168
               reg_names [REGNO (first_push) - last_reg],
1169
               reg_names [REGNO (first_push)]);
1170
}
1171
 
1172
/* Generate a PARALLEL that will pass the rx_store_multiple_vector predicate.  */
1173
 
1174
static rtx
1175
gen_rx_store_vector (unsigned int low, unsigned int high)
1176
{
1177
  unsigned int i;
1178
  unsigned int count = (high - low) + 2;
1179
  rtx vector;
1180
 
1181
  vector = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));
1182
 
1183
  XVECEXP (vector, 0, 0) =
1184
    gen_rtx_SET (SImode, stack_pointer_rtx,
1185
                 gen_rtx_MINUS (SImode, stack_pointer_rtx,
1186
                                GEN_INT ((count - 1) * UNITS_PER_WORD)));
1187
 
1188
  for (i = 0; i < count - 1; i++)
1189
    XVECEXP (vector, 0, i + 1) =
1190
      gen_rtx_SET (SImode,
1191
                   gen_rtx_MEM (SImode,
1192
                                gen_rtx_MINUS (SImode, stack_pointer_rtx,
1193
                                               GEN_INT ((i + 1) * UNITS_PER_WORD))),
1194
                   gen_rtx_REG (SImode, high - i));
1195
  return vector;
1196
}
1197
 
1198
/* Mark INSN as being frame related.  If it is a PARALLEL
1199
   then mark each element as being frame related as well.  */
1200
 
1201
static void
1202
mark_frame_related (rtx insn)
1203
{
1204
  RTX_FRAME_RELATED_P (insn) = 1;
1205
  insn = PATTERN (insn);
1206
 
1207
  if (GET_CODE (insn) == PARALLEL)
1208
    {
1209
      unsigned int i;
1210
 
1211
      for (i = 0; i < (unsigned) XVECLEN (insn, 0); i++)
1212
        RTX_FRAME_RELATED_P (XVECEXP (insn, 0, i)) = 1;
1213
    }
1214
}
1215
 
1216
void
1217
rx_expand_prologue (void)
1218
{
1219
  unsigned int stack_size;
1220
  unsigned int frame_size;
1221
  unsigned int mask;
1222
  unsigned int low;
1223
  unsigned int high;
1224
  unsigned int reg;
1225
  rtx insn;
1226
 
1227
  /* Naked functions use their own, programmer provided prologues.  */
1228
  if (is_naked_func (NULL_TREE))
1229
    return;
1230
 
1231
  rx_get_stack_layout (& low, & high, & mask, & frame_size, & stack_size);
1232
 
1233
  /* If we use any of the callee-saved registers, save them now.  */
1234
  if (mask)
1235
    {
1236
      /* Push registers in reverse order.  */
1237
      for (reg = CC_REGNUM; reg --;)
1238
        if (mask & (1 << reg))
1239
          {
1240
            insn = emit_insn (gen_stack_push (gen_rtx_REG (SImode, reg)));
1241
            mark_frame_related (insn);
1242
          }
1243
    }
1244
  else if (low)
1245
    {
1246
      if (high == low)
1247
        insn = emit_insn (gen_stack_push (gen_rtx_REG (SImode, low)));
1248
      else
1249
        insn = emit_insn (gen_stack_pushm (GEN_INT (((high - low) + 1)
1250
                                                    * UNITS_PER_WORD),
1251
                                           gen_rx_store_vector (low, high)));
1252
      mark_frame_related (insn);
1253
    }
1254
 
1255
  if (MUST_SAVE_ACC_REGISTER)
1256
    {
1257
      unsigned int acc_high, acc_low;
1258
 
1259
      /* Interrupt handlers have to preserve the accumulator
1260
         register if so requested by the user.  Use the first
1261
         two pushed registers as intermediaries.  */
1262
      if (mask)
1263
        {
1264
          acc_low = acc_high = 0;
1265
 
1266
          for (reg = 1; reg < CC_REGNUM; reg ++)
1267
            if (mask & (1 << reg))
1268
              {
1269
                if (acc_low == 0)
1270
                  acc_low = reg;
1271
                else
1272
                  {
1273
                    acc_high = reg;
1274
                    break;
1275
                  }
1276
              }
1277
 
1278
          /* We have assumed that there are at least two registers pushed... */
1279
          gcc_assert (acc_high != 0);
1280
 
1281
          /* Note - the bottom 16 bits of the accumulator are inaccessible.
1282
             We just assume that they are zero.  */
1283
          emit_insn (gen_mvfacmi (gen_rtx_REG (SImode, acc_low)));
1284
          emit_insn (gen_mvfachi (gen_rtx_REG (SImode, acc_high)));
1285
          emit_insn (gen_stack_push (gen_rtx_REG (SImode, acc_low)));
1286
          emit_insn (gen_stack_push (gen_rtx_REG (SImode, acc_high)));
1287
        }
1288
      else
1289
        {
1290
          acc_low = low;
1291
          acc_high = low + 1;
1292
 
1293
          /* We have assumed that there are at least two registers pushed... */
1294
          gcc_assert (acc_high <= high);
1295
 
1296
          emit_insn (gen_mvfacmi (gen_rtx_REG (SImode, acc_low)));
1297
          emit_insn (gen_mvfachi (gen_rtx_REG (SImode, acc_high)));
1298
          emit_insn (gen_stack_pushm (GEN_INT (2 * UNITS_PER_WORD),
1299
                                      gen_rx_store_vector (acc_low, acc_high)));
1300
        }
1301
 
1302
      frame_size += 2 * UNITS_PER_WORD;
1303
    }
1304
 
1305
  /* If needed, set up the frame pointer.  */
1306
  if (frame_pointer_needed)
1307
    {
1308
      if (frame_size)
1309
        insn = emit_insn (gen_addsi3 (frame_pointer_rtx, stack_pointer_rtx,
1310
                                      GEN_INT (- (HOST_WIDE_INT) frame_size)));
1311
      else
1312
        insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
1313
 
1314
      RTX_FRAME_RELATED_P (insn) = 1;
1315
    }
1316
 
1317
  insn = NULL_RTX;
1318
 
1319
  /* Allocate space for the outgoing args.
1320
     If the stack frame has not already been set up then handle this as well.  */
1321
  if (stack_size)
1322
    {
1323
      if (frame_size)
1324
        {
1325
          if (frame_pointer_needed)
1326
            insn = emit_insn (gen_addsi3 (stack_pointer_rtx, frame_pointer_rtx,
1327
                                          GEN_INT (- (HOST_WIDE_INT)
1328
                                                   stack_size)));
1329
          else
1330
            insn = emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
1331
                                          GEN_INT (- (HOST_WIDE_INT)
1332
                                                   (frame_size + stack_size))));
1333
        }
1334
      else
1335
        insn = emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
1336
                                      GEN_INT (- (HOST_WIDE_INT) stack_size)));
1337
    }
1338
  else if (frame_size)
1339
    {
1340
      if (! frame_pointer_needed)
1341
        insn = emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
1342
                                      GEN_INT (- (HOST_WIDE_INT) frame_size)));
1343
      else
1344
        insn = emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
1345
    }
1346
 
1347
  if (insn != NULL_RTX)
1348
    RTX_FRAME_RELATED_P (insn) = 1;
1349
}
1350
 
1351
static void
1352
rx_output_function_prologue (FILE * file,
1353
                             HOST_WIDE_INT frame_size ATTRIBUTE_UNUSED)
1354
{
1355
  if (is_fast_interrupt_func (NULL_TREE))
1356
    asm_fprintf (file, "\t; Note: Fast Interrupt Handler\n");
1357
 
1358
  if (is_interrupt_func (NULL_TREE))
1359
    asm_fprintf (file, "\t; Note: Interrupt Handler\n");
1360
 
1361
  if (is_naked_func (NULL_TREE))
1362
    asm_fprintf (file, "\t; Note: Naked Function\n");
1363
 
1364
  if (cfun->static_chain_decl != NULL)
1365
    asm_fprintf (file, "\t; Note: Nested function declared "
1366
                 "inside another function.\n");
1367
 
1368
  if (crtl->calls_eh_return)
1369
    asm_fprintf (file, "\t; Note: Calls __builtin_eh_return.\n");
1370
}
1371
 
1372
/* Generate a POPM or RTSD instruction that matches the given operands.  */
1373
 
1374
void
1375
rx_emit_stack_popm (rtx * operands, bool is_popm)
1376
{
1377
  HOST_WIDE_INT stack_adjust;
1378
  HOST_WIDE_INT last_reg;
1379
  rtx first_push;
1380
 
1381
  gcc_assert (CONST_INT_P (operands[0]));
1382
  stack_adjust = INTVAL (operands[0]);
1383
 
1384
  gcc_assert (GET_CODE (operands[1]) == PARALLEL);
1385
  last_reg = XVECLEN (operands[1], 0) - (is_popm ? 2 : 3);
1386
 
1387
  first_push = XVECEXP (operands[1], 0, 1);
1388
  gcc_assert (SET_P (first_push));
1389
  first_push = SET_DEST (first_push);
1390
  gcc_assert (REG_P (first_push));
1391
 
1392
  if (is_popm)
1393
    asm_fprintf (asm_out_file, "\tpopm\t%s-%s\n",
1394
                 reg_names [REGNO (first_push)],
1395
                 reg_names [REGNO (first_push) + last_reg]);
1396
  else
1397
    asm_fprintf (asm_out_file, "\trtsd\t#%d, %s-%s\n",
1398
                 (int) stack_adjust,
1399
                 reg_names [REGNO (first_push)],
1400
                 reg_names [REGNO (first_push) + last_reg]);
1401
}
1402
 
1403
/* Generate a PARALLEL which will satisfy the rx_rtsd_vector predicate.  */
1404
 
1405
static rtx
1406
gen_rx_rtsd_vector (unsigned int adjust, unsigned int low, unsigned int high)
1407
{
1408
  unsigned int i;
1409
  unsigned int bias = 3;
1410
  unsigned int count = (high - low) + bias;
1411
  rtx vector;
1412
 
1413
  vector = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));
1414
 
1415
  XVECEXP (vector, 0, 0) =
1416
    gen_rtx_SET (SImode, stack_pointer_rtx,
1417
                 plus_constant (stack_pointer_rtx, adjust));
1418
 
1419
  for (i = 0; i < count - 2; i++)
1420
    XVECEXP (vector, 0, i + 1) =
1421
      gen_rtx_SET (SImode,
1422
                   gen_rtx_REG (SImode, low + i),
1423
                   gen_rtx_MEM (SImode,
1424
                                i == 0 ? stack_pointer_rtx
1425
                                : plus_constant (stack_pointer_rtx,
1426
                                                 i * UNITS_PER_WORD)));
1427
 
1428
  XVECEXP (vector, 0, count - 1) = gen_rtx_RETURN (VOIDmode);
1429
 
1430
  return vector;
1431
}
1432
 
1433
/* Generate a PARALLEL which will satisfy the rx_load_multiple_vector predicate.  */
1434
 
1435
static rtx
1436
gen_rx_popm_vector (unsigned int low, unsigned int high)
1437
{
1438
  unsigned int i;
1439
  unsigned int count = (high - low) + 2;
1440
  rtx vector;
1441
 
1442
  vector = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));
1443
 
1444
  XVECEXP (vector, 0, 0) =
1445
    gen_rtx_SET (SImode, stack_pointer_rtx,
1446
                 plus_constant (stack_pointer_rtx,
1447
                                (count - 1) * UNITS_PER_WORD));
1448
 
1449
  for (i = 0; i < count - 1; i++)
1450
    XVECEXP (vector, 0, i + 1) =
1451
      gen_rtx_SET (SImode,
1452
                   gen_rtx_REG (SImode, low + i),
1453
                   gen_rtx_MEM (SImode,
1454
                                i == 0 ? stack_pointer_rtx
1455
                                : plus_constant (stack_pointer_rtx,
1456
                                                 i * UNITS_PER_WORD)));
1457
 
1458
  return vector;
1459
}
1460
 
1461
void
1462
rx_expand_epilogue (bool is_sibcall)
1463
{
1464
  unsigned int low;
1465
  unsigned int high;
1466
  unsigned int frame_size;
1467
  unsigned int stack_size;
1468
  unsigned int register_mask;
1469
  unsigned int regs_size;
1470
  unsigned int reg;
1471
  unsigned HOST_WIDE_INT total_size;
1472
 
1473
  /* FIXME: We do not support indirect sibcalls at the moment because we
1474
     cannot guarantee that the register holding the function address is a
1475
     call-used register.  If it is a call-saved register then the stack
1476
     pop instructions generated in the epilogue will corrupt the address
1477
     before it is used.
1478
 
1479
     Creating a new call-used-only register class works but then the
1480
     reload pass gets stuck because it cannot always find a call-used
1481
     register for spilling sibcalls.
1482
 
1483
     The other possible solution is for this pass to scan forward for the
1484
     sibcall instruction (if it has been generated) and work out if it
1485
     is an indirect sibcall using a call-saved register.  If it is then
1486
     the address can be copied into a call-used register in this epilogue
1487
     code and the sibcall instruction modified to use that register.  */
1488
 
1489
  if (is_naked_func (NULL_TREE))
1490
    {
1491
      gcc_assert (! is_sibcall);
1492
 
1493
      /* Naked functions use their own, programmer provided epilogues.
1494
         But, in order to keep gcc happy we have to generate some kind of
1495
         epilogue RTL.  */
1496
      emit_jump_insn (gen_naked_return ());
1497
      return;
1498
    }
1499
 
1500
  rx_get_stack_layout (& low, & high, & register_mask,
1501
                       & frame_size, & stack_size);
1502
 
1503
  total_size = frame_size + stack_size;
1504
  regs_size = ((high - low) + 1) * UNITS_PER_WORD;
1505
 
1506
  /* See if we are unable to use the special stack frame deconstruct and
1507
     return instructions.  In most cases we can use them, but the exceptions
1508
     are:
1509
 
1510
     - Sibling calling functions deconstruct the frame but do not return to
1511
       their caller.  Instead they branch to their sibling and allow their
1512
       return instruction to return to this function's parent.
1513
 
1514
     - Fast and normal interrupt handling functions have to use special
1515
       return instructions.
1516
 
1517
     - Functions where we have pushed a fragmented set of registers into the
1518
       call-save area must have the same set of registers popped.  */
1519
  if (is_sibcall
1520
      || is_fast_interrupt_func (NULL_TREE)
1521
      || is_interrupt_func (NULL_TREE)
1522
      || register_mask)
1523
    {
1524
      /* Cannot use the special instructions - deconstruct by hand.  */
1525
      if (total_size)
1526
        emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
1527
                               GEN_INT (total_size)));
1528
 
1529
      if (MUST_SAVE_ACC_REGISTER)
1530
        {
1531
          unsigned int acc_low, acc_high;
1532
 
1533
          /* Reverse the saving of the accumulator register onto the stack.
1534
             Note we must adjust the saved "low" accumulator value as it
1535
             is really the middle 32-bits of the accumulator.  */
1536
          if (register_mask)
1537
            {
1538
              acc_low = acc_high = 0;
1539
              for (reg = 1; reg < CC_REGNUM; reg ++)
1540
                if (register_mask & (1 << reg))
1541
                  {
1542
                    if (acc_low == 0)
1543
                      acc_low = reg;
1544
                    else
1545
                      {
1546
                        acc_high = reg;
1547
                        break;
1548
                      }
1549
                  }
1550
              emit_insn (gen_stack_pop (gen_rtx_REG (SImode, acc_high)));
1551
              emit_insn (gen_stack_pop (gen_rtx_REG (SImode, acc_low)));
1552
            }
1553
          else
1554
            {
1555
              acc_low = low;
1556
              acc_high = low + 1;
1557
              emit_insn (gen_stack_popm (GEN_INT (2 * UNITS_PER_WORD),
1558
                                         gen_rx_popm_vector (acc_low, acc_high)));
1559
            }
1560
 
1561
          emit_insn (gen_ashlsi3 (gen_rtx_REG (SImode, acc_low),
1562
                                  gen_rtx_REG (SImode, acc_low),
1563
                                  GEN_INT (16)));
1564
          emit_insn (gen_mvtaclo (gen_rtx_REG (SImode, acc_low)));
1565
          emit_insn (gen_mvtachi (gen_rtx_REG (SImode, acc_high)));
1566
        }
1567
 
1568
      if (register_mask)
1569
        {
1570
          for (reg = 0; reg < CC_REGNUM; reg ++)
1571
            if (register_mask & (1 << reg))
1572
              emit_insn (gen_stack_pop (gen_rtx_REG (SImode, reg)));
1573
        }
1574
      else if (low)
1575
        {
1576
          if (high == low)
1577
            emit_insn (gen_stack_pop (gen_rtx_REG (SImode, low)));
1578
          else
1579
            emit_insn (gen_stack_popm (GEN_INT (regs_size),
1580
                                       gen_rx_popm_vector (low, high)));
1581
        }
1582
 
1583
      if (is_fast_interrupt_func (NULL_TREE))
1584
        {
1585
          gcc_assert (! is_sibcall);
1586
          emit_jump_insn (gen_fast_interrupt_return ());
1587
        }
1588
      else if (is_interrupt_func (NULL_TREE))
1589
        {
1590
          gcc_assert (! is_sibcall);
1591
          emit_jump_insn (gen_exception_return ());
1592
        }
1593
      else if (! is_sibcall)
1594
        emit_jump_insn (gen_simple_return ());
1595
 
1596
      return;
1597
    }
1598
 
1599
  /* If we allocated space on the stack, free it now.  */
1600
  if (total_size)
1601
    {
1602
      unsigned HOST_WIDE_INT rtsd_size;
1603
 
1604
      /* See if we can use the RTSD instruction.  */
1605
      rtsd_size = total_size + regs_size;
1606
      if (rtsd_size < 1024 && (rtsd_size % 4) == 0)
1607
        {
1608
          if (low)
1609
            emit_jump_insn (gen_pop_and_return
1610
                            (GEN_INT (rtsd_size),
1611
                             gen_rx_rtsd_vector (rtsd_size, low, high)));
1612
          else
1613
            emit_jump_insn (gen_deallocate_and_return (GEN_INT (total_size)));
1614
 
1615
          return;
1616
        }
1617
 
1618
      emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
1619
                             GEN_INT (total_size)));
1620
    }
1621
 
1622
  if (low)
1623
    emit_jump_insn (gen_pop_and_return (GEN_INT (regs_size),
1624
                                        gen_rx_rtsd_vector (regs_size,
1625
                                                            low, high)));
1626
  else
1627
    emit_jump_insn (gen_simple_return ());
1628
}
1629
 
1630
 
1631
/* Compute the offset (in bytes) between FROM (arg pointer
1632
   or frame pointer) and TO (frame pointer or stack pointer).
1633
   See ASCII art comment at the start of rx_expand_prologue
1634
   for more information.  */
1635
 
1636
int
1637
rx_initial_elimination_offset (int from, int to)
1638
{
1639
  unsigned int low;
1640
  unsigned int high;
1641
  unsigned int frame_size;
1642
  unsigned int stack_size;
1643
  unsigned int mask;
1644
 
1645
  rx_get_stack_layout (& low, & high, & mask, & frame_size, & stack_size);
1646
 
1647
  if (from == ARG_POINTER_REGNUM)
1648
    {
1649
      /* Extend the computed size of the stack frame to
1650
         include the registers pushed in the prologue.  */
1651
      if (low)
1652
        frame_size += ((high - low) + 1) * UNITS_PER_WORD;
1653
      else
1654
        frame_size += bit_count (mask) * UNITS_PER_WORD;
1655
 
1656
      /* Remember to include the return address.  */
1657
      frame_size += 1 * UNITS_PER_WORD;
1658
 
1659
      if (to == FRAME_POINTER_REGNUM)
1660
        return frame_size;
1661
 
1662
      gcc_assert (to == STACK_POINTER_REGNUM);
1663
      return frame_size + stack_size;
1664
    }
1665
 
1666
  gcc_assert (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM);
1667
  return stack_size;
1668
}
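
/* Worked example (editor's note, not part of the original source; the
   numbers are hypothetical): suppose rx_get_stack_layout reports low = 6,
   high = 8 (r6..r8 saved), 12 bytes of locals (frame_size) and 16 bytes of
   outgoing arguments (stack_size).  Then, with UNITS_PER_WORD == 4:

     ARG_POINTER   -> FRAME_POINTER : 12 + 3 * 4 + 4 = 28 bytes
     ARG_POINTER   -> STACK_POINTER : 28 + 16        = 44 bytes
     FRAME_POINTER -> STACK_POINTER : 16 bytes (stack_size alone).  */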
1669
 
1670
/* Decide if a variable should go into one of the small data sections.  */
1671
 
1672
static bool
1673
rx_in_small_data (const_tree decl)
1674
{
1675
  int size;
1676
  const_tree section;
1677
 
1678
  if (rx_small_data_limit == 0)
1679
    return false;
1680
 
1681
  if (TREE_CODE (decl) != VAR_DECL)
1682
    return false;
1683
 
1684
  /* We do not put read-only variables into a small data area because
1685
     they would be placed with the other read-only sections, far away
1686
     from the read-write data sections, and we only have one small
1687
     data area pointer.
1688
     Similarly commons are placed in the .bss section which might be
1689
     far away from (and out of alignment with respect to) the .data section.  */
1690
  if (TREE_READONLY (decl) || DECL_COMMON (decl))
1691
    return false;
1692
 
1693
  section = DECL_SECTION_NAME (decl);
1694
  if (section)
1695
    {
1696
      const char * const name = TREE_STRING_POINTER (section);
1697
 
1698
      return (strcmp (name, "D_2") == 0) || (strcmp (name, "B_2") == 0);
1699
    }
1700
 
1701
  size = int_size_in_bytes (TREE_TYPE (decl));
1702
 
1703
  return (size > 0) && (size <= rx_small_data_limit);
1704
}
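
/* Illustrative example (editor's sketch, not part of the original file):
   how the rules above classify a few user-level declarations when the
   small data limit is non-zero (e.g. -msmall-data-limit=8).  The variable
   names are hypothetical.  */
#if 0
int       speed = 3;      /* 4 bytes, writable, not common: small data.  */
const int limit = 10;     /* Read-only: rejected.                        */
char      buffer[256];    /* Bigger than the limit: rejected.            */
int dma_flag __attribute__((section ("D_2"))) = 1;  /* Named D_2: accepted.  */
#endif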
1705
 
1706
/* Return a section for X.
1707
   The only special thing we do here is to honor small data.  */
1708
 
1709
static section *
1710
rx_select_rtx_section (enum machine_mode mode,
1711
                       rtx x,
1712
                       unsigned HOST_WIDE_INT align)
1713
{
1714
  if (rx_small_data_limit > 0
1715
      && GET_MODE_SIZE (mode) <= rx_small_data_limit
1716
      && align <= (unsigned HOST_WIDE_INT) rx_small_data_limit * BITS_PER_UNIT)
1717
    return sdata_section;
1718
 
1719
  return default_elf_select_rtx_section (mode, x, align);
1720
}
1721
 
1722
static section *
1723
rx_select_section (tree decl,
1724
                   int reloc,
1725
                   unsigned HOST_WIDE_INT align)
1726
{
1727
  if (rx_small_data_limit > 0)
1728
    {
1729
      switch (categorize_decl_for_section (decl, reloc))
1730
        {
1731
        case SECCAT_SDATA:      return sdata_section;
1732
        case SECCAT_SBSS:       return sbss_section;
1733
        case SECCAT_SRODATA:
1734
          /* Fall through.  We do not put small, read only
1735
             data into the C_2 section because we are not
1736
             using the C_2 section.  We do not use the C_2
1737
             section because it is located with the other
1738
             read-only data sections, far away from the read-write
1739
             data sections and we only have one small data
1740
             pointer (r13).  */
1741
        default:
1742
          break;
1743
        }
1744
    }
1745
 
1746
  /* If we are supporting the Renesas assembler
1747
     we cannot use mergeable sections.  */
1748
  if (TARGET_AS100_SYNTAX)
1749
    switch (categorize_decl_for_section (decl, reloc))
1750
      {
1751
      case SECCAT_RODATA_MERGE_CONST:
1752
      case SECCAT_RODATA_MERGE_STR_INIT:
1753
      case SECCAT_RODATA_MERGE_STR:
1754
        return readonly_data_section;
1755
 
1756
      default:
1757
        break;
1758
      }
1759
 
1760
  return default_elf_select_section (decl, reloc, align);
1761
}
1762
 
1763
enum rx_builtin
1764
{
1765
  RX_BUILTIN_BRK,
1766
  RX_BUILTIN_CLRPSW,
1767
  RX_BUILTIN_INT,
1768
  RX_BUILTIN_MACHI,
1769
  RX_BUILTIN_MACLO,
1770
  RX_BUILTIN_MULHI,
1771
  RX_BUILTIN_MULLO,
1772
  RX_BUILTIN_MVFACHI,
1773
  RX_BUILTIN_MVFACMI,
1774
  RX_BUILTIN_MVFC,
1775
  RX_BUILTIN_MVTACHI,
1776
  RX_BUILTIN_MVTACLO,
1777
  RX_BUILTIN_MVTC,
1778
  RX_BUILTIN_MVTIPL,
1779
  RX_BUILTIN_RACW,
1780
  RX_BUILTIN_REVW,
1781
  RX_BUILTIN_RMPA,
1782
  RX_BUILTIN_ROUND,
1783
  RX_BUILTIN_SAT,
1784
  RX_BUILTIN_SETPSW,
1785
  RX_BUILTIN_WAIT,
1786
  RX_BUILTIN_max
1787
};
1788
 
1789
static void
1790
rx_init_builtins (void)
1791
{
1792
#define ADD_RX_BUILTIN1(UC_NAME, LC_NAME, RET_TYPE, ARG_TYPE)           \
1793
  add_builtin_function ("__builtin_rx_" LC_NAME,                        \
1794
                        build_function_type_list (RET_TYPE##_type_node, \
1795
                                                  ARG_TYPE##_type_node, \
1796
                                                  NULL_TREE),           \
1797
                        RX_BUILTIN_##UC_NAME,                           \
1798
                        BUILT_IN_MD, NULL, NULL_TREE)
1799
 
1800
#define ADD_RX_BUILTIN2(UC_NAME, LC_NAME, RET_TYPE, ARG_TYPE1, ARG_TYPE2) \
1801
  add_builtin_function ("__builtin_rx_" LC_NAME,                        \
1802
                        build_function_type_list (RET_TYPE##_type_node, \
1803
                                                  ARG_TYPE1##_type_node,\
1804
                                                  ARG_TYPE2##_type_node,\
1805
                                                  NULL_TREE),           \
1806
                        RX_BUILTIN_##UC_NAME,                           \
1807
                        BUILT_IN_MD, NULL, NULL_TREE)
1808
 
1809
#define ADD_RX_BUILTIN3(UC_NAME,LC_NAME,RET_TYPE,ARG_TYPE1,ARG_TYPE2,ARG_TYPE3) \
1810
  add_builtin_function ("__builtin_rx_" LC_NAME,                        \
1811
                        build_function_type_list (RET_TYPE##_type_node, \
1812
                                                  ARG_TYPE1##_type_node,\
1813
                                                  ARG_TYPE2##_type_node,\
1814
                                                  ARG_TYPE3##_type_node,\
1815
                                                  NULL_TREE),           \
1816
                        RX_BUILTIN_##UC_NAME,                           \
1817
                        BUILT_IN_MD, NULL, NULL_TREE)
1818
 
1819
  ADD_RX_BUILTIN1 (BRK,     "brk",     void,  void);
1820
  ADD_RX_BUILTIN1 (CLRPSW,  "clrpsw",  void,  integer);
1821
  ADD_RX_BUILTIN1 (SETPSW,  "setpsw",  void,  integer);
1822
  ADD_RX_BUILTIN1 (INT,     "int",     void,  integer);
1823
  ADD_RX_BUILTIN2 (MACHI,   "machi",   void,  intSI, intSI);
1824
  ADD_RX_BUILTIN2 (MACLO,   "maclo",   void,  intSI, intSI);
1825
  ADD_RX_BUILTIN2 (MULHI,   "mulhi",   void,  intSI, intSI);
1826
  ADD_RX_BUILTIN2 (MULLO,   "mullo",   void,  intSI, intSI);
1827
  ADD_RX_BUILTIN1 (MVFACHI, "mvfachi", intSI, void);
1828
  ADD_RX_BUILTIN1 (MVFACMI, "mvfacmi", intSI, void);
1829
  ADD_RX_BUILTIN1 (MVTACHI, "mvtachi", void,  intSI);
1830
  ADD_RX_BUILTIN1 (MVTACLO, "mvtaclo", void,  intSI);
1831
  ADD_RX_BUILTIN1 (RMPA,    "rmpa",    void,  void);
1832
  ADD_RX_BUILTIN1 (MVFC,    "mvfc",    intSI, integer);
1833
  ADD_RX_BUILTIN2 (MVTC,    "mvtc",    void,  integer, integer);
1834
  ADD_RX_BUILTIN1 (MVTIPL,  "mvtipl",  void,  integer);
1835
  ADD_RX_BUILTIN1 (RACW,    "racw",    void,  integer);
1836
  ADD_RX_BUILTIN1 (ROUND,   "round",   intSI, float);
1837
  ADD_RX_BUILTIN1 (REVW,    "revw",    intSI, intSI);
1838
  ADD_RX_BUILTIN1 (SAT,     "sat",     intSI, intSI);
1839
  ADD_RX_BUILTIN1 (WAIT,    "wait",    void,  void);
1840
}
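
/* Illustrative example (editor's sketch, not part of the original file):
   user-level code calling some of the builtins registered above.  Only
   meaningful when compiling for RX; the helper's name and arguments are
   hypothetical and the accumulator semantics are described only loosely.  */
#if 0
static int
mac_low_halves (const short * a, const short * b, int n)
{
  int i;

  __builtin_rx_mvtachi (0);           /* Clear accumulator, high word.     */
  __builtin_rx_mvtaclo (0);           /* Clear accumulator, low word.      */
  for (i = 0; i < n; i++)
    __builtin_rx_maclo (a[i], b[i]);  /* Multiply-accumulate, low halves.  */
  return __builtin_rx_mvfachi ();     /* Read back accumulator, high.      */
}
#endif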
1841
 
1842
static rtx
1843
rx_expand_void_builtin_1_arg (rtx arg, rtx (* gen_func)(rtx), bool reg)
1844
{
1845
  if (reg && ! REG_P (arg))
1846
    arg = force_reg (SImode, arg);
1847
 
1848
  emit_insn (gen_func (arg));
1849
 
1850
  return NULL_RTX;
1851
}
1852
 
1853
static rtx
1854
rx_expand_builtin_mvtc (tree exp)
1855
{
1856
  rtx arg1 = expand_normal (CALL_EXPR_ARG (exp, 0));
1857
  rtx arg2 = expand_normal (CALL_EXPR_ARG (exp, 1));
1858
 
1859
  if (! CONST_INT_P (arg1))
1860
    return NULL_RTX;
1861
 
1862
  if (! REG_P (arg2))
1863
    arg2 = force_reg (SImode, arg2);
1864
 
1865
  emit_insn (gen_mvtc (arg1, arg2));
1866
 
1867
  return NULL_RTX;
1868
}
1869
 
1870
static rtx
1871
rx_expand_builtin_mvfc (tree t_arg, rtx target)
1872
{
1873
  rtx arg = expand_normal (t_arg);
1874
 
1875
  if (! CONST_INT_P (arg))
1876
    return NULL_RTX;
1877
 
1878
  if (target == NULL_RTX)
1879
    return NULL_RTX;
1880
 
1881
  if (! REG_P (target))
1882
    target = force_reg (SImode, target);
1883
 
1884
  emit_insn (gen_mvfc (target, arg));
1885
 
1886
  return target;
1887
}
1888
 
1889
static rtx
1890
rx_expand_builtin_mvtipl (rtx arg)
1891
{
1892
  /* The RX610 does not support the MVTIPL instruction.  */
1893
  if (rx_cpu_type == RX610)
1894
    return NULL_RTX;
1895
 
1896
  if (! CONST_INT_P (arg) || ! IN_RANGE (arg, 0, (1 << 4) - 1))
1897
    return NULL_RTX;
1898
 
1899
  emit_insn (gen_mvtipl (arg));
1900
 
1901
  return NULL_RTX;
1902
}
1903
 
1904
static rtx
1905
rx_expand_builtin_mac (tree exp, rtx (* gen_func)(rtx, rtx))
1906
{
1907
  rtx arg1 = expand_normal (CALL_EXPR_ARG (exp, 0));
1908
  rtx arg2 = expand_normal (CALL_EXPR_ARG (exp, 1));
1909
 
1910
  if (! REG_P (arg1))
1911
    arg1 = force_reg (SImode, arg1);
1912
 
1913
  if (! REG_P (arg2))
1914
    arg2 = force_reg (SImode, arg2);
1915
 
1916
  emit_insn (gen_func (arg1, arg2));
1917
 
1918
  return NULL_RTX;
1919
}
1920
 
1921
static rtx
1922
rx_expand_int_builtin_1_arg (rtx arg,
1923
                             rtx target,
1924
                             rtx (* gen_func)(rtx, rtx),
1925
                             bool mem_ok)
1926
{
1927
  if (! REG_P (arg))
1928
    if (!mem_ok || ! MEM_P (arg))
1929
      arg = force_reg (SImode, arg);
1930
 
1931
  if (target == NULL_RTX || ! REG_P (target))
1932
    target = gen_reg_rtx (SImode);
1933
 
1934
  emit_insn (gen_func (target, arg));
1935
 
1936
  return target;
1937
}
1938
 
1939
static rtx
1940
rx_expand_int_builtin_0_arg (rtx target, rtx (* gen_func)(rtx))
1941
{
1942
  if (target == NULL_RTX || ! REG_P (target))
1943
    target = gen_reg_rtx (SImode);
1944
 
1945
  emit_insn (gen_func (target));
1946
 
1947
  return target;
1948
}
1949
 
1950
static rtx
1951
rx_expand_builtin_round (rtx arg, rtx target)
1952
{
1953
  if ((! REG_P (arg) && ! MEM_P (arg))
1954
      || GET_MODE (arg) != SFmode)
1955
    arg = force_reg (SFmode, arg);
1956
 
1957
  if (target == NULL_RTX || ! REG_P (target))
1958
    target = gen_reg_rtx (SImode);
1959
 
1960
  emit_insn (gen_lrintsf2 (target, arg));
1961
 
1962
  return target;
1963
}
1964
 
1965
static rtx
1966
rx_expand_builtin (tree exp,
1967
                   rtx target,
1968
                   rtx subtarget ATTRIBUTE_UNUSED,
1969
                   enum machine_mode mode ATTRIBUTE_UNUSED,
1970
                   int ignore ATTRIBUTE_UNUSED)
1971
{
1972
  tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
1973
  tree arg    = CALL_EXPR_ARGS (exp) ? CALL_EXPR_ARG (exp, 0) : NULL_TREE;
1974
  rtx  op     = arg ? expand_normal (arg) : NULL_RTX;
1975
  unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
1976
 
1977
  switch (fcode)
1978
    {
1979
    case RX_BUILTIN_BRK:     emit_insn (gen_brk ()); return NULL_RTX;
1980
    case RX_BUILTIN_CLRPSW:  return rx_expand_void_builtin_1_arg
1981
        (op, gen_clrpsw, false);
1982
    case RX_BUILTIN_SETPSW:  return rx_expand_void_builtin_1_arg
1983
        (op, gen_setpsw, false);
1984
    case RX_BUILTIN_INT:     return rx_expand_void_builtin_1_arg
1985
        (op, gen_int, false);
1986
    case RX_BUILTIN_MACHI:   return rx_expand_builtin_mac (exp, gen_machi);
1987
    case RX_BUILTIN_MACLO:   return rx_expand_builtin_mac (exp, gen_maclo);
1988
    case RX_BUILTIN_MULHI:   return rx_expand_builtin_mac (exp, gen_mulhi);
1989
    case RX_BUILTIN_MULLO:   return rx_expand_builtin_mac (exp, gen_mullo);
1990
    case RX_BUILTIN_MVFACHI: return rx_expand_int_builtin_0_arg
1991
        (target, gen_mvfachi);
1992
    case RX_BUILTIN_MVFACMI: return rx_expand_int_builtin_0_arg
1993
        (target, gen_mvfacmi);
1994
    case RX_BUILTIN_MVTACHI: return rx_expand_void_builtin_1_arg
1995
        (op, gen_mvtachi, true);
1996
    case RX_BUILTIN_MVTACLO: return rx_expand_void_builtin_1_arg
1997
        (op, gen_mvtaclo, true);
1998
    case RX_BUILTIN_RMPA:    emit_insn (gen_rmpa ()); return NULL_RTX;
1999
    case RX_BUILTIN_MVFC:    return rx_expand_builtin_mvfc (arg, target);
2000
    case RX_BUILTIN_MVTC:    return rx_expand_builtin_mvtc (exp);
2001
    case RX_BUILTIN_MVTIPL:  return rx_expand_builtin_mvtipl (op);
2002
    case RX_BUILTIN_RACW:    return rx_expand_void_builtin_1_arg
2003
        (op, gen_racw, false);
2004
    case RX_BUILTIN_ROUND:   return rx_expand_builtin_round (op, target);
2005
    case RX_BUILTIN_REVW:    return rx_expand_int_builtin_1_arg
2006
        (op, target, gen_revw, false);
2007
    case RX_BUILTIN_SAT:     return rx_expand_int_builtin_1_arg
2008
        (op, target, gen_sat, false);
2009
    case RX_BUILTIN_WAIT:    emit_insn (gen_wait ()); return NULL_RTX;
2010
 
2011
    default:
2012
      internal_error ("bad builtin code");
2013
      break;
2014
    }
2015
 
2016
  return NULL_RTX;
2017
}
2018
 
2019
/* Place an element into a constructor or destructor section.
2020
   Like default_ctor_section_asm_out_constructor in varasm.c
2021
   except that it uses .init_array (or .fini_array) and it
2022
   handles constructor priorities.  */
2023
 
2024
static void
2025
rx_elf_asm_cdtor (rtx symbol, int priority, bool is_ctor)
2026
{
2027
  section * s;
2028
 
2029
  if (priority != DEFAULT_INIT_PRIORITY)
2030
    {
2031
      char buf[18];
2032
 
2033
      sprintf (buf, "%s.%.5u",
2034
               is_ctor ? ".init_array" : ".fini_array",
2035
               priority);
2036
      s = get_section (buf, SECTION_WRITE, NULL_TREE);
2037
    }
2038
  else if (is_ctor)
2039
    s = ctors_section;
2040
  else
2041
    s = dtors_section;
2042
 
2043
  switch_to_section (s);
2044
  assemble_align (POINTER_SIZE);
2045
  assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
2046
}
2047
 
2048
static void
2049
rx_elf_asm_constructor (rtx symbol, int priority)
2050
{
2051
  rx_elf_asm_cdtor (symbol, priority, /* is_ctor= */true);
2052
}
2053
 
2054
static void
2055
rx_elf_asm_destructor (rtx symbol, int priority)
2056
{
2057
  rx_elf_asm_cdtor (symbol, priority, /* is_ctor= */false);
2058
}
2059
 
2060
/* Check "fast_interrupt", "interrupt" and "naked" attributes.  */
2061
 
2062
static tree
2063
rx_handle_func_attribute (tree * node,
2064
                          tree   name,
2065
                          tree   args,
2066
                          int    flags ATTRIBUTE_UNUSED,
2067
                          bool * no_add_attrs)
2068
{
2069
  gcc_assert (DECL_P (* node));
2070
  gcc_assert (args == NULL_TREE);
2071
 
2072
  if (TREE_CODE (* node) != FUNCTION_DECL)
2073
    {
2074
      warning (OPT_Wattributes, "%qE attribute only applies to functions",
2075
               name);
2076
      * no_add_attrs = true;
2077
    }
2078
 
2079
  /* FIXME: We ought to check for conflicting attributes.  */
2080
 
2081
  /* FIXME: We ought to check that the interrupt and exception
2082
     handler attributes have been applied to void functions.  */
2083
  return NULL_TREE;
2084
}
2085
 
2086
/* Table of RX specific attributes.  */
2087
const struct attribute_spec rx_attribute_table[] =
2088
{
2089
  /* Name, min_len, max_len, decl_req, type_req, fn_type_req, handler.  */
2090
  { "fast_interrupt", 0, 0, true, false, false, rx_handle_func_attribute },
2091
  { "interrupt",      0, 0, true, false, false, rx_handle_func_attribute },
2092
  { "naked",          0, 0, true, false, false, rx_handle_func_attribute },
2093
  { NULL,             0, 0, false, false, false, NULL }
2094
};
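
/* Illustrative example (editor's sketch, not part of the original file):
   how the attributes listed above are attached to user functions.  The
   handler names are hypothetical.  */
#if 0
void rtc_tick (void)  __attribute__((interrupt));
void dma_done (void)  __attribute__((fast_interrupt));
void boot_stub (void) __attribute__((naked));
#endif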
2095
 
2096
/* Extra processing for target specific command line options.  */
2097
 
2098
static bool
2099
rx_handle_option (size_t code, const char *  arg ATTRIBUTE_UNUSED, int value)
2100
{
2101
  switch (code)
2102
    {
2103
    case OPT_mint_register_:
2104
      switch (value)
2105
        {
2106
        case 4:
2107
          fixed_regs[10] = call_used_regs [10] = 1;
2108
          /* Fall through.  */
2109
        case 3:
2110
          fixed_regs[11] = call_used_regs [11] = 1;
2111
          /* Fall through.  */
2112
        case 2:
2113
          fixed_regs[12] = call_used_regs [12] = 1;
2114
          /* Fall through.  */
2115
        case 1:
2116
          fixed_regs[13] = call_used_regs [13] = 1;
2117
          /* Fall through.  */
2118
        case 0:
2119
          return true;
2120
        default:
2121
          return false;
2122
        }
2123
      break;
2124
 
2125
    case OPT_mmax_constant_size_:
2126
      /* Make sure that the -mmax-constant-size option is in range.  */
2127
      return value >= 0 && value <= 4;
2128
 
2129
    case OPT_mcpu_:
2130
    case OPT_patch_:
2131
      if (strcasecmp (arg, "RX610") == 0)
2132
        rx_cpu_type = RX610;
2133
      else if (strcasecmp (arg, "RX200") == 0)
2134
        {
2135
          target_flags |= MASK_NO_USE_FPU;
2136
          rx_cpu_type = RX200;
2137
        }
2138
      else if (strcasecmp (arg, "RX600") != 0)
2139
        warning (0, "unrecognized argument '%s' to -mcpu= option", arg);
2140
      break;
2141
 
2142
    case OPT_fpu:
2143
      if (rx_cpu_type == RX200)
2144
        error ("The RX200 cpu does not have FPU hardware");
2145
      break;
2146
 
2147
    default:
2148
      break;
2149
    }
2150
 
2151
  return true;
2152
}
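
/* For example (editor's note, not part of the original source): because of
   the fall-through cases above, -mint-register=2 marks r12 and r13 as fixed
   and call-used, while -mint-register=4 reserves r10 through r13.  */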
2153
 
2154
void
2155
rx_set_optimization_options (void)
2156
{
2157
  static bool first_time = TRUE;
2158
  static bool saved_allow_rx_fpu = TRUE;
2159
 
2160
  if (first_time)
2161
    {
2162
      /* If this is the first time through and the user has not disabled
2163
         the use of RX FPU hardware then enable unsafe math optimizations,
2164
         since the FPU instructions themselves are unsafe.  */
2165
      if (TARGET_USE_FPU)
2166
        set_fast_math_flags (true);
2167
 
2168
      /* FIXME: For some unknown reason LTO compression is not working,
2169
         at least on my local system.  So set the default compression
2170
         level to none, for now.  */
2171
      if (flag_lto_compression_level == -1)
2172
        flag_lto_compression_level = 0;
2173
 
2174
      saved_allow_rx_fpu = ALLOW_RX_FPU_INSNS;
2175
      first_time = FALSE;
2176
    }
2177
  else
2178
    {
2179
      /* Alert the user if they are changing the optimization options
2180
         to use IEEE compliant floating point arithmetic with RX FPU insns.  */
2181
      if (TARGET_USE_FPU
2182
          && ! fast_math_flags_set_p ())
2183
        warning (0, "RX FPU instructions are not IEEE compliant");
2184
 
2185
      if (saved_allow_rx_fpu != ALLOW_RX_FPU_INSNS)
2186
        error ("Changing the FPU insns/math optimizations pairing is not supported");
2187
    }
2188
}
2189
 
2190
 
2191
static bool
2192
rx_allocate_stack_slots_for_args (void)
2193
{
2194
  /* Naked functions should not allocate stack slots for arguments.  */
2195
  return ! is_naked_func (NULL_TREE);
2196
}
2197
 
2198
static bool
2199
rx_func_attr_inlinable (const_tree decl)
2200
{
2201
  return ! is_fast_interrupt_func (decl)
2202
    &&   ! is_interrupt_func (decl)
2203
    &&   ! is_naked_func (decl);
2204
}
2205
 
2206
/* Return nonzero if it is ok to make a tail-call to DECL,
2207
   which is a FUNCTION_DECL, or NULL for an indirect call, using EXP.  */
2208
 
2209
static bool
2210
rx_function_ok_for_sibcall (tree decl, tree exp ATTRIBUTE_UNUSED)
2211
{
2212
  /* Do not allow indirect tailcalls.  The
2213
     sibcall patterns do not support them.  */
2214
  if (decl == NULL)
2215
    return false;
2216
 
2217
  /* Never tailcall from inside interrupt handlers or naked functions.  */
2218
  if (is_fast_interrupt_func (NULL_TREE)
2219
      || is_interrupt_func (NULL_TREE)
2220
      || is_naked_func (NULL_TREE))
2221
    return false;
2222
 
2223
  return true;
2224
}
2225
 
2226
static void
2227
rx_file_start (void)
2228
{
2229
  if (! TARGET_AS100_SYNTAX)
2230
    default_file_start ();
2231
}
2232
 
2233
static bool
2234
rx_is_ms_bitfield_layout (const_tree record_type ATTRIBUTE_UNUSED)
2235
{
2236
  return TRUE;
2237
}
2238
 
2239
/* Try to generate code for the "insv" pattern which inserts bits
2240
   into a word.
2241
     operands[0] => Location to be altered.
2242
     operands[1] => Number of bits to change.
2243
     operands[2] => Starting bit.
2244
     operands[3] => Value to insert.
2245
   Returns TRUE if successful, FALSE otherwise.  */
2246
 
2247
bool
2248
rx_expand_insv (rtx * operands)
2249
{
2250
  if (INTVAL (operands[1]) != 1
2251
      || ! CONST_INT_P (operands[3]))
2252
    return false;
2253
 
2254
  if (MEM_P (operands[0])
2255
      && INTVAL (operands[2]) > 7)
2256
    return false;
2257
 
2258
  switch (INTVAL (operands[3]))
2259
    {
2260
    case 0:
2261
      if (MEM_P (operands[0]))
2262
        emit_insn (gen_bitclr_in_memory (operands[0], operands[0],
2263
                                         operands[2]));
2264
      else
2265
        emit_insn (gen_bitclr (operands[0], operands[0], operands[2]));
2266
      break;
2267
    case 1:
2268
    case -1:
2269
      if (MEM_P (operands[0]))
2270
        emit_insn (gen_bitset_in_memory (operands[0], operands[0],
2271
                                         operands[2]));
2272
      else
2273
        emit_insn (gen_bitset (operands[0], operands[0], operands[2]));
2274
      break;
2275
    default:
2276
      return false;
2277
    }
2278
  return true;
2279
}
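
/* Illustrative example (editor's sketch, not part of the original file):
   single-bit updates of the kind that can be expressed as a one-bit
   insertion and handled by the expansion above (constant bit number, and
   below bit 8 when the operand is in memory).  Names are hypothetical.  */
#if 0
volatile unsigned char status;

void set_ready (void)   { status |=  (1 << 3); }   /* Bit 3 set.    */
void clear_ready (void) { status &= ~(1 << 3); }   /* Bit 3 clear.  */
#endif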
2280
 
2281
/* Returns true if X is a legitimate constant for an immediate
2282
   operand on the RX.  X is already known to satisfy CONSTANT_P.  */
2283
 
2284
bool
2285
rx_is_legitimate_constant (rtx x)
2286
{
2287
  HOST_WIDE_INT val;
2288
 
2289
  switch (GET_CODE (x))
2290
    {
2291
    case CONST:
2292
      x = XEXP (x, 0);
2293
 
2294
      if (GET_CODE (x) == PLUS)
2295
        {
2296
          if (! CONST_INT_P (XEXP (x, 1)))
2297
            return false;
2298
 
2299
          /* GCC would not pass us CONST_INT + CONST_INT so we
2300
             know that we have {SYMBOL|LABEL} + CONST_INT.  */
2301
          x = XEXP (x, 0);
2302
          gcc_assert (! CONST_INT_P (x));
2303
        }
2304
 
2305
      switch (GET_CODE (x))
2306
        {
2307
        case LABEL_REF:
2308
        case SYMBOL_REF:
2309
          return true;
2310
 
2311
          /* One day we may have to handle UNSPEC constants here.  */
2312
        default:
2313
          /* FIXME: Can this ever happen ?  */
2314
          abort ();
2315
          return false;
2316
        }
2317
      break;
2318
 
2319
    case LABEL_REF:
2320
    case SYMBOL_REF:
2321
      return true;
2322
    case CONST_DOUBLE:
2323
      return (rx_max_constant_size == 0 || rx_max_constant_size == 4);
2324
    case CONST_VECTOR:
2325
      return false;
2326
    default:
2327
      gcc_assert (CONST_INT_P (x));
2328
      break;
2329
    }
2330
 
2331
  if (rx_max_constant_size == 0  || rx_max_constant_size == 4)
2332
    /* If there is no constraint on the size of constants
2333
       used as operands, then any value is legitimate.  */
2334
    return true;
2335
 
2336
  val = INTVAL (x);
2337
 
2338
  /* rx_max_constant_size specifies the maximum number
2339
     of bytes that can be used to hold a signed value.  */
2340
  return IN_RANGE (val, (-1 << (rx_max_constant_size * 8)),
2341
                        ( 1 << (rx_max_constant_size * 8)));
2342
}
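
/* Worked example (editor's note, not part of the original source): with
   -mmax-constant-size=1 the IN_RANGE test above accepts values from
   -(1 << 8) to (1 << 8), i.e. -256..256; with a setting of 2 the window
   widens to -65536..65536.  */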
2343
 
2344
static int
2345
rx_address_cost (rtx addr, bool speed)
2346
{
2347
  rtx a, b;
2348
 
2349
  if (GET_CODE (addr) != PLUS)
2350
    return COSTS_N_INSNS (1);
2351
 
2352
  a = XEXP (addr, 0);
2353
  b = XEXP (addr, 1);
2354
 
2355
  if (REG_P (a) && REG_P (b))
2356
    /* Try to discourage REG+REG addressing as it keeps two registers live.  */
2357
    return COSTS_N_INSNS (4);
2358
 
2359
  if (speed)
2360
    /* [REG+OFF] is just as fast as [REG].  */
2361
    return COSTS_N_INSNS (1);
2362
 
2363
  if (CONST_INT_P (b)
2364
      && ((INTVAL (b) > 128) || INTVAL (b) < -127))
2365
    /* Try to discourage REG + <large OFF> when optimizing for size.  */
2366
    return COSTS_N_INSNS (2);
2367
 
2368
  return COSTS_N_INSNS (1);
2369
}
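
/* For example (editor's note, not part of the original source): when
   optimizing for size, [r1] and [r1 + 16] each cost one insn, [r1 + 512]
   costs two (large displacement), and [r1 + r2] always costs four so that
   register+register addressing is avoided where possible.  */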
2370
 
2371
static bool
2372
rx_can_eliminate (const int from ATTRIBUTE_UNUSED, const int to)
2373
{
2374
  /* We can always eliminate to the frame pointer.
2375
     We can eliminate to the stack pointer unless a frame
2376
     pointer is needed.  */
2377
 
2378
  return to == FRAME_POINTER_REGNUM
2379
    || ( to == STACK_POINTER_REGNUM && ! frame_pointer_needed);
2380
}
2381
 
2382
 
2383
static void
2384
rx_trampoline_template (FILE * file)
2385
{
2386
  /* Output assembler code for a block containing the constant
2387
     part of a trampoline, leaving space for the variable parts.
2388
 
2389
     On the RX, (where r8 is the static chain regnum) the trampoline
2390
     looks like:
2391
 
2392
           mov          #<static chain value>, r8
2393
           mov          #<function's address>, r9
2394
           jmp          r9
2395
 
2396
     In big-endian-data-mode however instructions are read into the CPU
2397
     4 bytes at a time.  These bytes are then swapped around before being
2398
     passed to the decoder.  So...we must partition our trampoline into
2399
     4 byte packets and swap these packets around so that the instruction
2400
     reader will reverse the process.  But, in order to avoid splitting
2401
     the 32-bit constants across these packet boundaries, (making inserting
2402
     them into the constructed trampoline very difficult) we have to pad the
2403
     instruction sequence with NOP insns.  ie:
2404
 
2405
           nop
2406
           nop
2407
           mov.l        #<...>, r8
2408
           nop
2409
           nop
2410
           mov.l        #<...>, r9
2411
           jmp          r9
2412
           nop
2413
           nop             */
2414
 
2415
  if (! TARGET_BIG_ENDIAN_DATA)
2416
    {
2417
      asm_fprintf (file, "\tmov.L\t#0deadbeefH, r%d\n", STATIC_CHAIN_REGNUM);
2418
      asm_fprintf (file, "\tmov.L\t#0deadbeefH, r%d\n", TRAMPOLINE_TEMP_REGNUM);
2419
      asm_fprintf (file, "\tjmp\tr%d\n",                TRAMPOLINE_TEMP_REGNUM);
2420
    }
2421
  else
2422
    {
2423
      char r8 = '0' + STATIC_CHAIN_REGNUM;
2424
      char r9 = '0' + TRAMPOLINE_TEMP_REGNUM;
2425
 
2426
      if (TARGET_AS100_SYNTAX)
2427
        {
2428
          asm_fprintf (file, "\t.BYTE 0%c2H, 0fbH, 003H,  003H\n", r8);
2429
          asm_fprintf (file, "\t.BYTE 0deH,  0adH, 0beH,  0efH\n");
2430
          asm_fprintf (file, "\t.BYTE 0%c2H, 0fbH, 003H,  003H\n", r9);
2431
          asm_fprintf (file, "\t.BYTE 0deH,  0adH, 0beH,  0efH\n");
2432
          asm_fprintf (file, "\t.BYTE 003H,  003H, 00%cH, 07fH\n", r9);
2433
        }
2434
      else
2435
        {
2436
          asm_fprintf (file, "\t.byte 0x%c2, 0xfb, 0x03,  0x03\n", r8);
2437
          asm_fprintf (file, "\t.byte 0xde,  0xad, 0xbe,  0xef\n");
2438
          asm_fprintf (file, "\t.byte 0x%c2, 0xfb, 0x03,  0x03\n", r9);
2439
          asm_fprintf (file, "\t.byte 0xde,  0xad, 0xbe,  0xef\n");
2440
          asm_fprintf (file, "\t.byte 0x03,  0x03, 0x0%c, 0x7f\n", r9);
2441
        }
2442
    }
2443
}
2444
 
2445
static void
2446
rx_trampoline_init (rtx tramp, tree fndecl, rtx chain)
2447
{
2448
  rtx fnaddr = XEXP (DECL_RTL (fndecl), 0);
2449
 
2450
  emit_block_move (tramp, assemble_trampoline_template (),
2451
                   GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);
2452
 
2453
  if (TARGET_BIG_ENDIAN_DATA)
2454
    {
2455
      emit_move_insn (adjust_address (tramp, SImode, 4), chain);
2456
      emit_move_insn (adjust_address (tramp, SImode, 12), fnaddr);
2457
    }
2458
  else
2459
    {
2460
      emit_move_insn (adjust_address (tramp, SImode, 2), chain);
2461
      emit_move_insn (adjust_address (tramp, SImode, 6 + 2), fnaddr);
2462
    }
2463
}
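
/* Illustrative example (editor's sketch, not part of the original file):
   a GNU C nested function whose address escapes, which is what makes the
   compiler build one of these trampolines on the stack at run time.  The
   names are hypothetical.  */
#if 0
extern void for_each (int n, void (*fn) (int));

void
count_matches (int n, int key)
{
  int hits = 0;
  void note (int v) { if (v == key) hits++; }   /* Needs the static chain (r8).  */
  for_each (n, note);                           /* Address taken -> trampoline.  */
}
#endif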
2464
 
2465
static enum machine_mode
2466
rx_cc_modes_compatible (enum machine_mode m1, enum machine_mode m2)
2467
{
2468
  if (m1 == CCmode)
2469
    return m2;
2470
  if (m2 == CCmode)
2471
    return m1;
2472
  if (m1 == m2)
2473
    return m1;
2474
  if (m1 == CC_ZSmode)
2475
    return m1;
2476
  if (m2 == CC_ZSmode)
2477
    return m2;
2478
  return VOIDmode;
2479
}
2480
 
2481
#define CC_FLAG_S (1 << 0)
2482
#define CC_FLAG_Z (1 << 1)
2483
#define CC_FLAG_O (1 << 2)
2484
#define CC_FLAG_C (1 << 3)
2485
 
2486
static unsigned int
2487
flags_needed_for_conditional (rtx conditional)
2488
{
2489
  switch (GET_CODE (conditional))
2490
    {
2491
    case LE:
2492
    case GT:    return CC_FLAG_S | CC_FLAG_Z | CC_FLAG_O;
2493
 
2494
    case LEU:
2495
    case GTU:   return CC_FLAG_Z | CC_FLAG_C;
2496
 
2497
    case LT:
2498
    case GE:    return CC_FLAG_S | CC_FLAG_O;
2499
 
2500
    case LTU:
2501
    case GEU:   return CC_FLAG_C;
2502
 
2503
    case EQ:
2504
    case NE:    return CC_FLAG_Z;
2505
 
2506
    default:    gcc_unreachable ();
2507
    }
2508
}
2509
 
2510
static unsigned int
2511
flags_from_mode (enum machine_mode mode)
2512
{
2513
  switch (mode)
2514
    {
2515
    case CCmode:     return CC_FLAG_S | CC_FLAG_Z | CC_FLAG_O | CC_FLAG_C;
2516
    case CC_ZSmode:  return CC_FLAG_S | CC_FLAG_Z;
2517
    case CC_ZSOmode: return CC_FLAG_S | CC_FLAG_Z | CC_FLAG_O;
2518
    case CC_ZSCmode: return CC_FLAG_S | CC_FLAG_Z | CC_FLAG_C;
2519
    default:         gcc_unreachable ();
2520
    }
2521
}
2522
 
2523
/* Returns true if a compare insn is redundant because it
2524
   would only set flags that are already set correctly.  */
2525
 
2526
bool
2527
rx_compare_redundant (rtx cmp)
2528
{
2529
  unsigned int flags_needed;
2530
  unsigned int flags_set;
2531
  rtx next;
2532
  rtx prev;
2533
  rtx source;
2534
  rtx dest;
2535
  static rtx cc_reg = NULL_RTX;
2536
 
2537
  if (cc_reg == NULL_RTX)
2538
    cc_reg = gen_rtx_REG (CCmode, CC_REGNUM);
2539
 
2540
  /* We can only eliminate compares against 0.  */
2541
  if (GET_CODE (XEXP (SET_SRC (PATTERN (cmp)), 1)) != CONST_INT
2542
      || INTVAL (XEXP (SET_SRC (PATTERN (cmp)), 1)) != 0)
2543
    return false;
2544
 
2545
  /* Locate the branch insn that follows the
2546
     compare and which tests the bits in the PSW.  */
2547
  next = cmp;
2548
  do
2549
    {
2550
      /* If we have found an insn that sets or clobbers the CC
2551
         register and it was not the IF_THEN_ELSE insn that we
2552
         are looking for, then the comparison is redundant.  */
2553
      if (next != cmp && reg_mentioned_p (cc_reg, PATTERN (next)))
2554
        return true;
2555
 
2556
      next = next_nonnote_insn (next);
2557
 
2558
      /* If we run out of insns without finding the
2559
         user then the comparison is unnecessary.  */
2560
      if (next == NULL_RTX)
2561
        return true;
2562
 
2563
      /* If we have found another comparison
2564
         insn then the first one is redundant.  */
2565
      if (INSN_P (next)
2566
          && GET_CODE (PATTERN (next)) == SET
2567
          && REG_P (SET_DEST (PATTERN (next)))
2568
          && REGNO (SET_DEST (PATTERN (next))) == CC_REGNUM)
2569
        return true;
2570
 
2571
      /* If we have found another arithmetic/logic insn that
2572
         sets the PSW flags then the comparison is redundant.  */
2573
      if (INSN_P (next)
2574
          && GET_CODE (PATTERN (next)) == PARALLEL
2575
          && GET_CODE (XVECEXP (PATTERN (next), 0, 1)) == SET
2576
          && REG_P (SET_DEST (XVECEXP (PATTERN (next), 0, 1)))
2577
          && REGNO (SET_DEST (XVECEXP (PATTERN (next), 0, 1))) == CC_REGNUM)
2578
        return true;
2579
 
2580
      /* If we have found an unconditional branch then the
2581
         PSW flags might be carried along with the jump, so
2582
         the comparison is necessary.  */
2583
      if (INSN_P (next) && JUMP_P (next))
2584
        {
2585
          if (GET_CODE (PATTERN (next)) != SET)
2586
            /* If the jump does not involve setting the PC
2587
               then it is a return of some kind, and we know
2588
               that the comparison is not used.  */
2589
            return true;
2590
 
2591
          if (GET_CODE (SET_SRC (PATTERN (next))) != IF_THEN_ELSE)
2592
            return false;
2593
        }
2594
    }
2595
  while (! INSN_P (next)
2596
         || DEBUG_INSN_P (next)
2597
         || GET_CODE (PATTERN (next)) != SET
2598
         || GET_CODE (SET_SRC (PATTERN (next))) != IF_THEN_ELSE);
2599
 
2600
  flags_needed = flags_needed_for_conditional (XEXP (SET_SRC (PATTERN (next)), 0));
2601
 
2602
  /* Now look to see if there was a previous
2603
     instruction which set the PSW bits.  */
2604
  source = XEXP (SET_SRC (PATTERN (cmp)), 0);
2605
  prev = cmp;
2606
  do
2607
    {
2608
      /* If this insn uses/sets/clobbers the CC register
2609
         and it is not the insn that we are looking for
2610
         below, then we must need the comparison.  */
2611
      if (prev != cmp && reg_mentioned_p (cc_reg, PATTERN (prev)))
2612
        return false;
2613
 
2614
      prev = prev_nonnote_insn (prev);
2615
 
2616
      if (prev == NULL_RTX)
2617
        return false;
2618
 
2619
      /* If we encounter an insn which changes the contents of
2620
         the register which is the source of the comparison then
2621
         we will definitely need the comparison.  */
2622
      if (INSN_P (prev)
2623
          && GET_CODE (PATTERN (prev)) == SET
2624
          && rtx_equal_p (SET_DEST (PATTERN (prev)), source))
2625
        {
2626
          /* Unless this instruction is a simple register move
2627
             instruction.  In which case we can continue our
2628
             scan backwards, but now using the *source* of this
2629
             set instruction.  */
2630
          if (REG_P (SET_SRC (PATTERN (prev))))
2631
            source = SET_SRC (PATTERN (prev));
2632
          /* We can also survive a sign-extension if the test is
2633
             for EQ/NE.  Note the same does not apply to zero-
2634
             extension as this can turn a non-zero bit-pattern
2635
             into zero.  */
2636
          else if (flags_needed == CC_FLAG_Z
2637
                   && GET_CODE (SET_SRC (PATTERN (prev))) == SIGN_EXTEND)
2638
            source = XEXP (SET_SRC (PATTERN (prev)), 0);
2639
          else
2640
            return false;
2641
        }
2642
 
2643
      /* A label means a possible branch into the
2644
         code here, so we have to stop scanning.  */
2645
      if (LABEL_P (prev))
2646
        return false;
2647
    }
2648
  while (! INSN_P (prev)
2649
         || DEBUG_INSN_P (prev)
2650
         || GET_CODE (PATTERN (prev)) != PARALLEL
2651
         || GET_CODE (XVECEXP (PATTERN (prev), 0, 1)) != SET
2652
         || ! REG_P (SET_DEST (XVECEXP (PATTERN (prev), 0, 1)))
2653
         || REGNO (SET_DEST (XVECEXP (PATTERN (prev), 0, 1))) != CC_REGNUM);
2654
 
2655
  flags_set = flags_from_mode (GET_MODE (SET_DEST (XVECEXP (PATTERN (prev), 0, 1))));
2656
 
2657
  dest = SET_DEST (XVECEXP (PATTERN (prev), 0, 0));
2658
  /* The destination of the previous arithmetic/logic instruction
2659
     must match the source in the comparison operation.  For registers
2660
     we ignore the mode as there may have been a sign-extension involved.  */
2661
  if (! rtx_equal_p (source, dest))
2662
    {
2663
      if (REG_P (source) && REG_P (dest) && REGNO (dest) == REGNO (source))
2664
        ;
2665
      else
2666
        return false;
2667
    }
2668
 
2669
  return ((flags_set & flags_needed) == flags_needed);
2670
}
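
/* Illustrative example (editor's note, not part of the original source):
   the kind of sequence the scan above is looking for, in RX terms:

       add   #1, r1        ; arithmetic insn already sets Z/S (and O/C)
       cmp   #0, r1        ; compares against zero only
       beq   .Ldone        ; conditional branch needs just the Z flag

   Every flag the BEQ needs was already set by the ADD, so the CMP is
   redundant and can be removed.  */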
2671
 
2672
#undef  TARGET_FUNCTION_VALUE
2673
#define TARGET_FUNCTION_VALUE           rx_function_value
2674
 
2675
#undef  TARGET_RETURN_IN_MSB
2676
#define TARGET_RETURN_IN_MSB            rx_return_in_msb
2677
 
2678
#undef  TARGET_IN_SMALL_DATA_P
2679
#define TARGET_IN_SMALL_DATA_P          rx_in_small_data
2680
 
2681
#undef  TARGET_RETURN_IN_MEMORY
2682
#define TARGET_RETURN_IN_MEMORY         rx_return_in_memory
2683
 
2684
#undef  TARGET_HAVE_SRODATA_SECTION
2685
#define TARGET_HAVE_SRODATA_SECTION     true
2686
 
2687
#undef  TARGET_ASM_SELECT_RTX_SECTION
2688
#define TARGET_ASM_SELECT_RTX_SECTION   rx_select_rtx_section
2689
 
2690
#undef  TARGET_ASM_SELECT_SECTION
2691
#define TARGET_ASM_SELECT_SECTION       rx_select_section
2692
 
2693
#undef  TARGET_INIT_BUILTINS
2694
#define TARGET_INIT_BUILTINS            rx_init_builtins
2695
 
2696
#undef  TARGET_EXPAND_BUILTIN
2697
#define TARGET_EXPAND_BUILTIN           rx_expand_builtin
2698
 
2699
#undef  TARGET_ASM_CONSTRUCTOR
2700
#define TARGET_ASM_CONSTRUCTOR          rx_elf_asm_constructor
2701
 
2702
#undef  TARGET_ASM_DESTRUCTOR
2703
#define TARGET_ASM_DESTRUCTOR           rx_elf_asm_destructor
2704
 
2705
#undef  TARGET_STRUCT_VALUE_RTX
2706
#define TARGET_STRUCT_VALUE_RTX         rx_struct_value_rtx
2707
 
2708
#undef  TARGET_ATTRIBUTE_TABLE
2709
#define TARGET_ATTRIBUTE_TABLE          rx_attribute_table
2710
 
2711
#undef  TARGET_ASM_FILE_START
2712
#define TARGET_ASM_FILE_START                   rx_file_start
2713
 
2714
#undef  TARGET_MS_BITFIELD_LAYOUT_P
2715
#define TARGET_MS_BITFIELD_LAYOUT_P             rx_is_ms_bitfield_layout
2716
 
2717
#undef  TARGET_LEGITIMATE_ADDRESS_P
2718
#define TARGET_LEGITIMATE_ADDRESS_P             rx_is_legitimate_address
2719
 
2720
#undef  TARGET_ALLOCATE_STACK_SLOTS_FOR_ARGS
2721
#define TARGET_ALLOCATE_STACK_SLOTS_FOR_ARGS    rx_allocate_stack_slots_for_args
2722
 
2723
#undef  TARGET_ASM_FUNCTION_PROLOGUE
2724
#define TARGET_ASM_FUNCTION_PROLOGUE            rx_output_function_prologue
2725
 
2726
#undef  TARGET_FUNCTION_ATTRIBUTE_INLINABLE_P
2727
#define TARGET_FUNCTION_ATTRIBUTE_INLINABLE_P   rx_func_attr_inlinable
2728
 
2729
#undef  TARGET_FUNCTION_OK_FOR_SIBCALL
2730
#define TARGET_FUNCTION_OK_FOR_SIBCALL          rx_function_ok_for_sibcall
2731
 
2732
#undef  TARGET_SET_CURRENT_FUNCTION
2733
#define TARGET_SET_CURRENT_FUNCTION             rx_set_current_function
2734
 
2735
#undef  TARGET_HANDLE_OPTION
2736
#define TARGET_HANDLE_OPTION                    rx_handle_option
2737
 
2738
#undef  TARGET_ASM_INTEGER
2739
#define TARGET_ASM_INTEGER                      rx_assemble_integer
2740
 
2741
#undef  TARGET_USE_BLOCKS_FOR_CONSTANT_P
2742
#define TARGET_USE_BLOCKS_FOR_CONSTANT_P        hook_bool_mode_const_rtx_true
2743
 
2744
#undef  TARGET_MAX_ANCHOR_OFFSET
2745
#define TARGET_MAX_ANCHOR_OFFSET                32
2746
 
2747
#undef  TARGET_ADDRESS_COST
2748
#define TARGET_ADDRESS_COST                     rx_address_cost
2749
 
2750
#undef  TARGET_CAN_ELIMINATE
2751
#define TARGET_CAN_ELIMINATE                    rx_can_eliminate
2752
 
2753
#undef  TARGET_ASM_TRAMPOLINE_TEMPLATE
2754
#define TARGET_ASM_TRAMPOLINE_TEMPLATE          rx_trampoline_template
2755
 
2756
#undef  TARGET_TRAMPOLINE_INIT
2757
#define TARGET_TRAMPOLINE_INIT                  rx_trampoline_init
2758
 
2759
#undef  TARGET_CC_MODES_COMPATIBLE
2760
#define TARGET_CC_MODES_COMPATIBLE              rx_cc_modes_compatible
2761
 
2762
struct gcc_target targetm = TARGET_INITIALIZER;
2763
 
2764
/* #include "gt-rx.h" */
