OpenCores Subversion repository: scarts
URL: https://opencores.org/ocsvn/scarts/scarts/trunk
File: scarts/trunk/toolchain/scarts-gcc/gcc-4.1.1/gcc/config/mt/mt.c (rev 12, author jlechner)


/* Target definitions for the MorphoRISC1
   Copyright (C) 2005 Free Software Foundation, Inc.
   Contributed by Red Hat, Inc.

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it
   under the terms of the GNU General Public License as published
   by the Free Software Foundation; either version 2, or (at your
   option) any later version.

   GCC is distributed in the hope that it will be useful, but WITHOUT
   ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
   or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public
   License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING.  If not, write to the Free
   Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
   02110-1301, USA.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "real.h"
#include "insn-config.h"
#include "conditions.h"
#include "insn-attr.h"
#include "recog.h"
#include "toplev.h"
#include "output.h"
#include "integrate.h"
#include "tree.h"
#include "function.h"
#include "expr.h"
#include "optabs.h"
#include "libfuncs.h"
#include "flags.h"
#include "tm_p.h"
#include "ggc.h"
#include "insn-flags.h"
#include "obstack.h"
#include "except.h"
#include "target.h"
#include "target-def.h"

/* Frame pointer register mask.  */
#define FP_MASK                  (1 << (GPR_FP))

/* Link register mask.  */
#define LINK_MASK                (1 << (GPR_LINK))

/* Given a SIZE in bytes, advance to the next word.  */
#define ROUND_ADVANCE(SIZE) (((SIZE) + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
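/* For example, with 4-byte words ROUND_ADVANCE (1) through
   ROUND_ADVANCE (4) all yield 1, while ROUND_ADVANCE (5) yields 2.  */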

/* A C structure for machine-specific, per-function data.
   This is added to the cfun structure.  */
struct machine_function GTY(())
{
  /* Flags if __builtin_return_address (n) with n >= 1 was used.  */
  int ra_needs_full_frame;
  struct rtx_def * eh_stack_adjust;
  int interrupt_handler;
};

/* Define the information needed to generate branch and scc insns.
   This is stored from the compare operation.  */
struct rtx_def * mt_compare_op0;
struct rtx_def * mt_compare_op1;

/* Current frame information calculated by compute_frame_size.  */
struct mt_frame_info current_frame_info;

/* Zero structure to initialize current_frame_info.  */
struct mt_frame_info zero_frame_info;

/* mt doesn't have unsigned compares; we need a library call for this.  */
struct rtx_def * mt_ucmpsi3_libcall;

static int mt_flag_delayed_branch;


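/* Presumably used for the TARGET_STRUCT_VALUE_RTX hook (an assumption
   from the signature): the address for returning a large aggregate is
   passed in RETVAL_REGNUM.  */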
static rtx
mt_struct_value_rtx (tree fndecl ATTRIBUTE_UNUSED,
                         int incoming ATTRIBUTE_UNUSED)
{
  return gen_rtx_REG (Pmode, RETVAL_REGNUM);
}

/* Implement RETURN_ADDR_RTX.  */
rtx
mt_return_addr_rtx (int count)
{
  if (count != 0)
    return NULL_RTX;

  return get_hard_reg_initial_val (Pmode, GPR_LINK);
}

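/* Only the innermost frame is supported: __builtin_return_address (0)
   reads the value saved from GPR_LINK, while any deeper COUNT makes
   the hook above return NULL_RTX.  */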
/* The following variable value indicates the number of nops required
   between the current instruction and the next instruction to avoid
   any pipeline hazards.  */
static int mt_nops_required = 0;
static const char * mt_nop_reasons = "";

/* Implement ASM_OUTPUT_OPCODE.  */
const char *
mt_asm_output_opcode (FILE *f ATTRIBUTE_UNUSED, const char *ptr)
{
  if (mt_nops_required)
    fprintf (f, ";# need %d nops because of %s\n\t",
             mt_nops_required, mt_nop_reasons);

  while (mt_nops_required)
    {
      fprintf (f, "nop\n\t");
      -- mt_nops_required;
    }

  return ptr;
}

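/* When nops are pending, the assembly output for the next opcode is
   prefixed, for example, with:

       ;# need 1 nops because of load->arith dependency delay
       nop                                                        */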
/* Given an insn, return its attribute type: TYPE_BRANCH for branches
   and calls, TYPE_LOAD or TYPE_STORE for memory operations, and
   TYPE_ARITH otherwise.  */
static enum attr_type
mt_get_attr_type (rtx complete_insn)
{
  rtx insn = PATTERN (complete_insn);

  if (JUMP_P (complete_insn))
    return TYPE_BRANCH;
  if (CALL_P (complete_insn))
    return TYPE_BRANCH;

  if (GET_CODE (insn) != SET)
    return TYPE_ARITH;

  if (SET_DEST (insn) == pc_rtx)
    return TYPE_BRANCH;

  if (GET_CODE (SET_DEST (insn)) == MEM)
    return TYPE_STORE;

  if (GET_CODE (SET_SRC (insn)) == MEM)
    return TYPE_LOAD;

  return TYPE_ARITH;
}

/* A helper routine for insn_dependent_p called through note_stores.  */

static void
insn_dependent_p_1 (rtx x, rtx pat ATTRIBUTE_UNUSED, void *data)
{
  rtx * pinsn = (rtx *) data;

  if (*pinsn && reg_mentioned_p (x, *pinsn))
    *pinsn = NULL_RTX;
}

/* Return true if anything in insn X is (anti,output,true)
   dependent on anything in insn Y.  */

static bool
insn_dependent_p (rtx x, rtx y)
{
  rtx tmp;

  if (! INSN_P (x) || ! INSN_P (y))
    return 0;

  tmp = PATTERN (y);
  note_stores (PATTERN (x), insn_dependent_p_1, &tmp);
  if (tmp == NULL_RTX)
    return true;

  tmp = PATTERN (x);
  note_stores (PATTERN (y), insn_dependent_p_1, &tmp);
  return (tmp == NULL_RTX);
}


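/* The note_stores idiom used above: TMP initially points at the other
   insn's pattern, and insn_dependent_p_1 is invoked for each location
   stored by the first insn.  If a stored register is mentioned in the
   pattern TMP points at, TMP is zapped to NULL_RTX, which signals a
   dependency between the two insns.  */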
/* Return true if anything in insn X is true dependent on anything in
   insn Y.  */
static bool
insn_true_dependent_p (rtx x, rtx y)
{
  rtx tmp;

  if (! INSN_P (x) || ! INSN_P (y))
    return 0;

  tmp = PATTERN (y);
  note_stores (PATTERN (x), insn_dependent_p_1, &tmp);
  return (tmp == NULL_RTX);
}

/* The following determines the number of nops that need to be
   inserted between the previous instruction and the current
   instruction to avoid pipeline hazards on the mt processor.
   Remember that the function is not called for asm insns.  */

void
mt_final_prescan_insn (rtx   insn,
                        rtx * opvec ATTRIBUTE_UNUSED,
                        int   noperands ATTRIBUTE_UNUSED)
{
  rtx prev_i;
  enum attr_type prev_attr;

  mt_nops_required = 0;
  mt_nop_reasons = "";

  /* ms2 constraints are dealt with in reorg.  */
  if (TARGET_MS2)
    return;

  /* Only worry about real instructions.  */
  if (! INSN_P (insn))
    return;

  /* Find the previous real instruction.  */
  for (prev_i = PREV_INSN (insn);
       prev_i != NULL
         && (! INSN_P (prev_i)
             || GET_CODE (PATTERN (prev_i)) == USE
             || GET_CODE (PATTERN (prev_i)) == CLOBBER);
       prev_i = PREV_INSN (prev_i))
    {
      /* If we meet a barrier, there is no flow through here.  */
      if (BARRIER_P (prev_i))
        return;
    }

  /* If there isn't one then there is nothing that we need do.  */
  if (prev_i == NULL || ! INSN_P (prev_i))
    return;

  prev_attr = mt_get_attr_type (prev_i);

  /* Delayed branch slots are already taken care of by delayed branch
     scheduling.  */
  if (prev_attr == TYPE_BRANCH)
    return;

  switch (mt_get_attr_type (insn))
    {
    case TYPE_LOAD:
    case TYPE_STORE:
      /* Avoid consecutive memory operations.  */
      if  ((prev_attr == TYPE_LOAD || prev_attr == TYPE_STORE)
           && TARGET_MS1_64_001)
        {
          mt_nops_required = 1;
          mt_nop_reasons = "consecutive mem ops";
        }
      /* Drop through.  */

    case TYPE_ARITH:
    case TYPE_COMPLEX:
      /* One cycle of delay is required between load
         and the dependent arithmetic instruction.  */
      if (prev_attr == TYPE_LOAD
          && insn_true_dependent_p (prev_i, insn))
        {
          mt_nops_required = 1;
          mt_nop_reasons = "load->arith dependency delay";
        }
      break;

    case TYPE_BRANCH:
      if (insn_dependent_p (prev_i, insn))
        {
          if (prev_attr == TYPE_ARITH && TARGET_MS1_64_001)
            {
              /* One cycle of delay between arith
                 instructions and branch dependent on arith.  */
              mt_nops_required = 1;
              mt_nop_reasons = "arith->branch dependency delay";
            }
          else if (prev_attr == TYPE_LOAD)
            {
              /* Two cycles of delay are required
                 between load and dependent branch.  */
              if (TARGET_MS1_64_001)
                mt_nops_required = 2;
              else
                mt_nops_required = 1;
              mt_nop_reasons = "load->branch dependency delay";
            }
        }
      break;

    default:
      fatal_insn ("mt_final_prescan_insn, invalid insn #1", insn);
      break;
    }
}

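/* For example, on ms1-64-001 a load immediately followed by an
   arithmetic insn that reads the loaded register hits the load->arith
   case above, so one nop is printed between the two instructions.  */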
/* Print debugging information for a frame.  */
static void
mt_debug_stack (struct mt_frame_info * info)
{
  int regno;

  if (!info)
    {
      error ("info pointer NULL");
      gcc_unreachable ();
    }

  fprintf (stderr, "\nStack information for function %s:\n",
           ((current_function_decl && DECL_NAME (current_function_decl))
            ? IDENTIFIER_POINTER (DECL_NAME (current_function_decl))
            : "<unknown>"));

  fprintf (stderr, "\ttotal_size       = %d\n", info->total_size);
  fprintf (stderr, "\tpretend_size     = %d\n", info->pretend_size);
  fprintf (stderr, "\targs_size        = %d\n", info->args_size);
  fprintf (stderr, "\textra_size       = %d\n", info->extra_size);
  fprintf (stderr, "\treg_size         = %d\n", info->reg_size);
  fprintf (stderr, "\tvar_size         = %d\n", info->var_size);
  fprintf (stderr, "\tframe_size       = %d\n", info->frame_size);
  fprintf (stderr, "\treg_mask         = 0x%x\n", info->reg_mask);
  fprintf (stderr, "\tsave_fp          = %d\n", info->save_fp);
  fprintf (stderr, "\tsave_lr          = %d\n", info->save_lr);
  fprintf (stderr, "\tinitialized      = %d\n", info->initialized);
  fprintf (stderr, "\tsaved registers =");

  /* Print out reg_mask in a more readable format.  */
  for (regno = GPR_R0; regno <= GPR_LAST; regno++)
    if ( (1 << regno) & info->reg_mask)
      fprintf (stderr, " %s", reg_names[regno]);

  putc ('\n', stderr);
  fflush (stderr);
}

/* Print a memory address as an operand to reference that memory location.  */

static void
mt_print_operand_simple_address (FILE * file, rtx addr)
{
  if (!addr)
    error ("PRINT_OPERAND_ADDRESS, null pointer");

  else
    switch (GET_CODE (addr))
      {
      case REG:
        fprintf (file, "%s, #0", reg_names [REGNO (addr)]);
        break;

      case PLUS:
        {
          rtx reg = 0;
          rtx offset = 0;
          rtx arg0 = XEXP (addr, 0);
          rtx arg1 = XEXP (addr, 1);

          if (GET_CODE (arg0) == REG)
            {
              reg = arg0;
              offset = arg1;
              if (GET_CODE (offset) == REG)
                fatal_insn ("PRINT_OPERAND_ADDRESS, 2 regs", addr);
            }

          else if (GET_CODE (arg1) == REG)
              reg = arg1, offset = arg0;
          else if (CONSTANT_P (arg0) && CONSTANT_P (arg1))
            {
              fprintf (file, "%s, #", reg_names [GPR_R0]);
              output_addr_const (file, addr);
              break;
            }
          fprintf (file, "%s, #", reg_names [REGNO (reg)]);
          output_addr_const (file, offset);
          break;
        }

      case LABEL_REF:
      case SYMBOL_REF:
      case CONST_INT:
      case CONST:
        output_addr_const (file, addr);
        break;

      default:
        fatal_insn ("PRINT_OPERAND_ADDRESS, invalid insn #1", addr);
        break;
      }
}

/* Implement PRINT_OPERAND_ADDRESS.  */
void
mt_print_operand_address (FILE * file, rtx addr)
{
  if (GET_CODE (addr) == AND
      && GET_CODE (XEXP (addr, 1)) == CONST_INT
      && INTVAL (XEXP (addr, 1)) == -3)
    mt_print_operand_simple_address (file, XEXP (addr, 0));
  else
    mt_print_operand_simple_address (file, addr);
}

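/* Addresses wrapped in (and ADDR (const_int -3)) force address bit 1
   clear; mt_legitimate_address_p below accepts this form for SImode,
   and it is simply stripped before printing, as above.  */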
/* Implement PRINT_OPERAND.  */
void
mt_print_operand (FILE * file, rtx x, int code)
{
  switch (code)
    {
    case '#':
      /* Output a nop if there's nothing for the delay slot.  */
      if (dbr_sequence_length () == 0)
        fputs ("\n\tnop", file);
      return;

    case 'H':
      fprintf(file, "#%%hi16(");
      output_addr_const (file, x);
      fprintf(file, ")");
      return;

    case 'L':
      fprintf(file, "#%%lo16(");
      output_addr_const (file, x);
      fprintf(file, ")");
      return;

    case 'N':
      fprintf(file, "#%ld", ~INTVAL (x));
      return;

    case 'z':
      if (GET_CODE (x) == CONST_INT && INTVAL (x) == 0)
        {
          fputs (reg_names[GPR_R0], file);
          return;
        }
      /* Fall through.  */

    case 0:
      /* Handled below.  */
      break;

    default:
      /* output_operand_lossage ("mt_print_operand: unknown code"); */
      fprintf (file, "unknown code");
      return;
    }

  switch (GET_CODE (x))
    {
    case REG:
      fputs (reg_names [REGNO (x)], file);
      break;

    case CONST:
    case CONST_INT:
      fprintf(file, "#%ld", INTVAL (x));
      break;

    case MEM:
      mt_print_operand_address(file, XEXP (x,0));
      break;

    case LABEL_REF:
    case SYMBOL_REF:
      output_addr_const (file, x);
      break;

    default:
      fprintf(file, "Unknown code: %d", GET_CODE (x));
      break;
    }

  return;
}

/* Implement INIT_CUMULATIVE_ARGS.  */
void
mt_init_cumulative_args (CUMULATIVE_ARGS * cum, tree fntype, rtx libname,
                         tree fndecl ATTRIBUTE_UNUSED, int incoming)
{
  *cum = 0;

  if (TARGET_DEBUG_ARG)
    {
      fprintf (stderr, "\nmt_init_cumulative_args:");

      if (incoming)
        fputs (" incoming", stderr);

      if (fntype)
        {
          tree ret_type = TREE_TYPE (fntype);
          fprintf (stderr, " return = %s,",
                   tree_code_name[ (int)TREE_CODE (ret_type) ]);
        }

      if (libname && GET_CODE (libname) == SYMBOL_REF)
        fprintf (stderr, " libname = %s", XSTR (libname, 0));

      if (cfun->returns_struct)
        fprintf (stderr, " return-struct");

      putc ('\n', stderr);
    }
}

/* Compute the slot number to pass an argument in.
   Returns the slot number or -1 if passing on the stack.

   CUM is a variable of type CUMULATIVE_ARGS which gives info about
    the preceding args and about the function being called.
   MODE is the argument's machine mode.
   TYPE is the data type of the argument (as a tree).
    This is null for libcalls where that information may
    not be available.
   NAMED is nonzero if this argument is a named parameter
    (otherwise it is an extra parameter matching an ellipsis).
   INCOMING_P is zero for FUNCTION_ARG, nonzero for FUNCTION_INCOMING_ARG.
   *PREGNO records the register number to use if scalar type.  */

static int
mt_function_arg_slotno (const CUMULATIVE_ARGS * cum,
                        enum machine_mode mode,
                        tree type,
                        int named ATTRIBUTE_UNUSED,
                        int incoming_p ATTRIBUTE_UNUSED,
                        int * pregno)
{
  int regbase = FIRST_ARG_REGNUM;
  int slotno  = * cum;

  if (mode == VOIDmode || targetm.calls.must_pass_in_stack (mode, type))
    return -1;

  if (slotno >= MT_NUM_ARG_REGS)
    return -1;

  * pregno = regbase + slotno;

  return slotno;
}

/* Implement FUNCTION_ARG.  */
rtx
mt_function_arg (const CUMULATIVE_ARGS * cum,
                 enum machine_mode mode,
                 tree type,
                 int named,
                 int incoming_p)
{
  int slotno, regno;
  rtx reg;

  slotno = mt_function_arg_slotno (cum, mode, type, named, incoming_p, &regno);

  if (slotno == -1)
    reg = NULL_RTX;
  else
    reg = gen_rtx_REG (mode, regno);

  return reg;
}

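/* So, for instance, the second word-sized argument of a call lands in
   register FIRST_ARG_REGNUM + 1, while an argument whose slot number
   reaches MT_NUM_ARG_REGS (or that must_pass_in_stack) gets NULL_RTX
   here and is passed on the stack instead.  */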
/* Implement FUNCTION_ARG_ADVANCE.  */
void
mt_function_arg_advance (CUMULATIVE_ARGS * cum,
                         enum machine_mode mode,
                         tree type ATTRIBUTE_UNUSED,
                         int named)
{
  int slotno, regno;

  /* We pass 0 for incoming_p here, it doesn't matter.  */
  slotno = mt_function_arg_slotno (cum, mode, type, named, 0, &regno);

  * cum += (mode != BLKmode
            ? ROUND_ADVANCE (GET_MODE_SIZE (mode))
            : ROUND_ADVANCE (int_size_in_bytes (type)));

  if (TARGET_DEBUG_ARG)
    fprintf (stderr,
             "mt_function_arg_advance: words = %2d, mode = %4s, named = %d, size = %3d\n",
             *cum, GET_MODE_NAME (mode), named,
             (*cum) * UNITS_PER_WORD);
}

/* Implement hook TARGET_ARG_PARTIAL_BYTES.

   Returns the number of bytes at the beginning of an argument that
   must be put in registers.  The value must be zero for arguments
   that are passed entirely in registers or that are entirely pushed
   on the stack.  */
static int
mt_arg_partial_bytes (CUMULATIVE_ARGS * pcum,
                       enum machine_mode mode,
                       tree type,
                       bool named ATTRIBUTE_UNUSED)
{
  int cum = * pcum;
  int words;

  if (mode == BLKmode)
    words = ((int_size_in_bytes (type) + UNITS_PER_WORD - 1)
             / UNITS_PER_WORD);
  else
    words = (GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;

  if (! targetm.calls.pass_by_reference (&cum, mode, type, named)
      && cum < MT_NUM_ARG_REGS
      && (cum + words) > MT_NUM_ARG_REGS)
    {
      int bytes = (MT_NUM_ARG_REGS - cum) * UNITS_PER_WORD;

      if (TARGET_DEBUG)
        fprintf (stderr, "function_arg_partial_nregs = %d\n", bytes);
      return bytes;
    }

  return 0;
}


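/* Example: if MT_NUM_ARG_REGS were 4 and a three-word argument
   started at slot 2, its first two words (2 * UNITS_PER_WORD bytes)
   would go in registers and the remaining word on the stack.  */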
/* Implement TARGET_PASS_BY_REFERENCE hook.  */
static bool
mt_pass_by_reference (CUMULATIVE_ARGS * cum ATTRIBUTE_UNUSED,
                       enum machine_mode mode ATTRIBUTE_UNUSED,
                       tree type,
                       bool named ATTRIBUTE_UNUSED)
{
  return (type && int_size_in_bytes (type) > 4 * UNITS_PER_WORD);
}

/* Implement FUNCTION_ARG_BOUNDARY.  */
int
mt_function_arg_boundary (enum machine_mode mode ATTRIBUTE_UNUSED,
                           tree type ATTRIBUTE_UNUSED)
{
  return BITS_PER_WORD;
}

/* Implement REG_OK_FOR_BASE_P.  */
int
mt_reg_ok_for_base_p (rtx x, int strict)
{
  if (strict)
    return  (((unsigned) REGNO (x)) < FIRST_PSEUDO_REGISTER);
  return 1;
}

/* Helper function of mt_legitimate_address_p.  Return true if XINSN
   is a simple address, otherwise false.  */
static bool
mt_legitimate_simple_address_p (enum machine_mode mode ATTRIBUTE_UNUSED,
                                rtx xinsn, int strict)
{
  if (TARGET_DEBUG)
    {
      fprintf (stderr, "\n========== GO_IF_LEGITIMATE_ADDRESS, %sstrict\n",
               strict ? "" : "not ");
      debug_rtx (xinsn);
    }

  if (GET_CODE (xinsn) == REG && mt_reg_ok_for_base_p (xinsn, strict))
    return true;

  if (GET_CODE (xinsn) == PLUS
      && GET_CODE (XEXP (xinsn, 0)) == REG
      && mt_reg_ok_for_base_p (XEXP (xinsn, 0), strict)
      && GET_CODE (XEXP (xinsn, 1)) == CONST_INT
      && SMALL_INT (XEXP (xinsn, 1)))
    return true;

  return false;
}


/* Helper function of GO_IF_LEGITIMATE_ADDRESS.  Return non-zero if
   XINSN is a legitimate address on MT.  */
int
mt_legitimate_address_p (enum machine_mode mode, rtx xinsn, int strict)
{
  if (mt_legitimate_simple_address_p (mode, xinsn, strict))
    return 1;

  if ((mode) == SImode
      && GET_CODE (xinsn) == AND
      && GET_CODE (XEXP (xinsn, 1)) == CONST_INT
      && INTVAL (XEXP (xinsn, 1)) == -3)
    return mt_legitimate_simple_address_p (mode, XEXP (xinsn, 0), strict);
  else
    return 0;
}

/* Return truth value of whether OP can be used as an operand where a
   register or 16 bit unsigned integer is needed.  */

int
uns_arith_operand (rtx op, enum machine_mode mode)
{
  if (GET_CODE (op) == CONST_INT && SMALL_INT_UNSIGNED (op))
    return 1;

  return register_operand (op, mode);
}

/* Return truth value of whether OP can be used as an operand where a
   16 bit integer is needed.  */

int
arith_operand (rtx op, enum machine_mode mode)
{
  if (GET_CODE (op) == CONST_INT && SMALL_INT (op))
    return 1;

  return register_operand (op, mode);
}

/* Return truth value of whether OP is a register or the constant 0.  */

int
reg_or_0_operand (rtx op, enum machine_mode mode)
{
  switch (GET_CODE (op))
    {
    case CONST_INT:
      return INTVAL (op) == 0;

    case REG:
    case SUBREG:
      return register_operand (op, mode);

    default:
      break;
    }

  return 0;
}

/* Return truth value of whether OP is a constant that requires two
   loads to put in a register.  */

int
big_const_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  if (GET_CODE (op) == CONST_INT && CONST_OK_FOR_LETTER_P (INTVAL (op), 'M'))
    return 1;

  return 0;
}

/* Return truth value of whether OP is a constant that requires only
   one load to put in a register.  */

int
single_const_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  if (big_const_operand (op, mode)
      || GET_CODE (op) == CONST
      || GET_CODE (op) == LABEL_REF
      || GET_CODE (op) == SYMBOL_REF)
    return 0;

  return 1;
}

/* True if the current function is an interrupt handler
   (either via #pragma or an attribute specification).  */
int interrupt_handler;
enum processor_type mt_cpu;

static struct machine_function *
mt_init_machine_status (void)
{
  struct machine_function *f;

  f = ggc_alloc_cleared (sizeof (struct machine_function));

  return f;
}

/* Implement OVERRIDE_OPTIONS.  */
void
mt_override_options (void)
{
  if (mt_cpu_string != NULL)
    {
      if (!strcmp (mt_cpu_string, "ms1-64-001"))
        mt_cpu = PROCESSOR_MS1_64_001;
      else if (!strcmp (mt_cpu_string, "ms1-16-002"))
        mt_cpu = PROCESSOR_MS1_16_002;
      else if  (!strcmp (mt_cpu_string, "ms1-16-003"))
        mt_cpu = PROCESSOR_MS1_16_003;
      else if (!strcmp (mt_cpu_string, "ms2"))
        mt_cpu = PROCESSOR_MS2;
      else
        error ("bad value (%s) for -march= switch", mt_cpu_string);
    }
  else
    mt_cpu = PROCESSOR_MS1_64_001;

  if (flag_exceptions)
    {
      flag_omit_frame_pointer = 0;
      flag_gcse = 0;
    }

  /* We do delayed branch filling in machine dependent reorg.  */
  mt_flag_delayed_branch = flag_delayed_branch;
  flag_delayed_branch = 0;

  init_machine_status = mt_init_machine_status;
}

/* Do what is necessary for `va_start'.  We look at the current function
   to determine if stdarg or varargs is used and return the address of the
   first unnamed parameter.  */

static void
mt_setup_incoming_varargs (CUMULATIVE_ARGS *cum,
                           enum machine_mode mode ATTRIBUTE_UNUSED,
                           tree type ATTRIBUTE_UNUSED,
                           int *pretend_size, int no_rtl)
{
  int regno;
  int regs = MT_NUM_ARG_REGS - *cum;

  *pretend_size = regs < 0 ? 0 : GET_MODE_SIZE (SImode) * regs;

  if (no_rtl)
    return;

  for (regno = *cum; regno < MT_NUM_ARG_REGS; regno++)
    {
      rtx reg = gen_rtx_REG (SImode, FIRST_ARG_REGNUM + regno);
      rtx slot = gen_rtx_PLUS (Pmode,
                               gen_rtx_REG (SImode, ARG_POINTER_REGNUM),
                               GEN_INT (UNITS_PER_WORD * regno));

      emit_move_insn (gen_rtx_MEM (SImode, slot), reg);
    }
}

/* Returns the number of bytes offset between the frame pointer and the stack
   pointer for the current function.  SIZE is the number of bytes of space
   needed for local variables.  */

unsigned int
mt_compute_frame_size (int size)
{
  int           regno;
  unsigned int  total_size;
  unsigned int  var_size;
  unsigned int  args_size;
  unsigned int  pretend_size;
  unsigned int  extra_size;
  unsigned int  reg_size;
  unsigned int  frame_size;
  unsigned int  reg_mask;

  var_size      = size;
  args_size     = current_function_outgoing_args_size;
  pretend_size  = current_function_pretend_args_size;
  extra_size    = FIRST_PARM_OFFSET (0);
  total_size    = extra_size + pretend_size + args_size + var_size;
  reg_size      = 0;
  reg_mask      = 0;

  /* Calculate space needed for registers.  */
  for (regno = GPR_R0; regno <= GPR_LAST; regno++)
    {
      if (MUST_SAVE_REGISTER (regno))
        {
          reg_size += UNITS_PER_WORD;
          reg_mask |= 1 << regno;
        }
    }

  current_frame_info.save_fp = (regs_ever_live [GPR_FP]
                                || frame_pointer_needed
                                || interrupt_handler);
  current_frame_info.save_lr = (regs_ever_live [GPR_LINK]
                                || profile_flag
                                || interrupt_handler);

  reg_size += (current_frame_info.save_fp + current_frame_info.save_lr)
               * UNITS_PER_WORD;
  total_size += reg_size;
  total_size = ((total_size + 3) & ~3);

  frame_size = total_size;

  /* Save computed information.  */
  current_frame_info.pretend_size = pretend_size;
  current_frame_info.var_size     = var_size;
  current_frame_info.args_size    = args_size;
  current_frame_info.reg_size     = reg_size;
  current_frame_info.frame_size   = args_size + var_size;
  current_frame_info.total_size   = total_size;
  current_frame_info.extra_size   = extra_size;
  current_frame_info.reg_mask     = reg_mask;
  current_frame_info.initialized  = reload_completed;

  return total_size;
}

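/* Register saves are placed at the top of the allocated frame:
   mt_emit_save_fp and mt_emit_save_regs below walk downward from
   total_size, storing FP first, then LR, then the call-saved
   registers recorded in reg_mask.  */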
/* Emit code to save REG in stack offset pointed to by MEM.
   STACK_OFFSET is the offset from the SP where the save will happen.
   This function sets the REG_FRAME_RELATED_EXPR note accordingly.  */
static void
mt_emit_save_restore (enum save_direction direction,
                      rtx reg, rtx mem, int stack_offset)
{
  if (direction == FROM_PROCESSOR_TO_MEM)
    {
      rtx insn;

      insn = emit_move_insn (mem, reg);
      RTX_FRAME_RELATED_P (insn) = 1;
      REG_NOTES (insn)
        = gen_rtx_EXPR_LIST
        (REG_FRAME_RELATED_EXPR,
         gen_rtx_SET (VOIDmode,
                      gen_rtx_MEM (SImode,
                                   gen_rtx_PLUS (SImode,
                                                 stack_pointer_rtx,
                                                 GEN_INT (stack_offset))),
                      reg),
         REG_NOTES (insn));
    }
  else
    emit_move_insn (reg, mem);
}


/* Emit code to save the frame pointer in the prologue and restore
   the frame pointer in the epilogue.  */

static void
mt_emit_save_fp (enum save_direction direction,
                  struct mt_frame_info info)
{
  rtx base_reg;
  int reg_mask = info.reg_mask  & ~(FP_MASK | LINK_MASK);
  int offset = info.total_size;
  int stack_offset = info.total_size;

  /* If there is nothing to save, get out now.  */
  if (! info.save_fp && ! info.save_lr && ! reg_mask)
    return;

  /* If the offset doesn't fit in a 15-bit signed integer,
     use a scratch register to get a smaller offset.  */
  if (CONST_OK_FOR_LETTER_P(offset, 'O'))
    base_reg = stack_pointer_rtx;
  else
    {
      /* Use the scratch register R9 that holds old stack pointer.  */
      base_reg = gen_rtx_REG (SImode, GPR_R9);
      offset = 0;
    }

  if (info.save_fp)
    {
      offset -= UNITS_PER_WORD;
      stack_offset -= UNITS_PER_WORD;
      mt_emit_save_restore
        (direction, gen_rtx_REG (SImode, GPR_FP),
         gen_rtx_MEM (SImode,
                      gen_rtx_PLUS (SImode, base_reg, GEN_INT (offset))),
         stack_offset);
    }
}

/* Emit code to save registers in the prologue and restore registers
   in the epilogue.  */

static void
mt_emit_save_regs (enum save_direction direction,
                    struct mt_frame_info info)
{
  rtx base_reg;
  int regno;
  int reg_mask = info.reg_mask  & ~(FP_MASK | LINK_MASK);
  int offset = info.total_size;
  int stack_offset = info.total_size;

  /* If there is nothing to save, get out now.  */
  if (! info.save_fp && ! info.save_lr && ! reg_mask)
    return;

  /* If the offset doesn't fit in a 15-bit signed integer,
     use a scratch register to get a smaller offset.  */
  if (CONST_OK_FOR_LETTER_P(offset, 'O'))
    base_reg = stack_pointer_rtx;
  else
    {
      /* Use the scratch register R9 that holds old stack pointer.  */
      base_reg = gen_rtx_REG (SImode, GPR_R9);
      offset = 0;
    }

  if (info.save_fp)
    {
      /* This just records the space for it; the actual move is
         generated in mt_emit_save_fp ().  */
      offset -= UNITS_PER_WORD;
      stack_offset -= UNITS_PER_WORD;
    }

  if (info.save_lr)
    {
      offset -= UNITS_PER_WORD;
      stack_offset -= UNITS_PER_WORD;
      mt_emit_save_restore
        (direction, gen_rtx_REG (SImode, GPR_LINK),
         gen_rtx_MEM (SImode,
                      gen_rtx_PLUS (SImode, base_reg, GEN_INT (offset))),
         stack_offset);
    }

  /* Save any needed call-saved regs.  */
  for (regno = GPR_R0; regno <= GPR_LAST; regno++)
    {
      if ((reg_mask & (1 << regno)) != 0)
        {
          offset -= UNITS_PER_WORD;
          stack_offset -= UNITS_PER_WORD;
          mt_emit_save_restore
            (direction, gen_rtx_REG (SImode, regno),
             gen_rtx_MEM (SImode,
                          gen_rtx_PLUS (SImode, base_reg, GEN_INT (offset))),
             stack_offset);
        }
    }
}

/* Return true if FUNC is a function with the 'interrupt' attribute.  */
static bool
mt_interrupt_function_p (tree func)
{
  tree a;

  if (TREE_CODE (func) != FUNCTION_DECL)
    return false;

  a = lookup_attribute ("interrupt", DECL_ATTRIBUTES (func));
  return a != NULL_TREE;
}

/* Generate prologue code.  */
void
mt_expand_prologue (void)
{
  rtx size_rtx, insn;
  unsigned int frame_size;

  if (mt_interrupt_function_p (current_function_decl))
    {
      interrupt_handler = 1;
      if (cfun->machine)
        cfun->machine->interrupt_handler = 1;
    }

  mt_compute_frame_size (get_frame_size ());

  if (TARGET_DEBUG_STACK)
    mt_debug_stack (&current_frame_info);

  /* Compute size of stack adjustment.  */
  frame_size = current_frame_info.total_size;

  /* If the offset doesn't fit in a 15-bit signed integer,
     use a scratch register to get a smaller offset.  */
  if (CONST_OK_FOR_LETTER_P(frame_size, 'O'))
    size_rtx = GEN_INT (frame_size);
  else
    {
      /* We do not have any scratch registers.  */
      gcc_assert (!interrupt_handler);

      size_rtx = gen_rtx_REG (SImode, GPR_R9);
      insn = emit_move_insn (size_rtx, GEN_INT (frame_size & 0xffff0000));
      insn = emit_insn (gen_iorsi3 (size_rtx, size_rtx,
                                    GEN_INT (frame_size & 0x0000ffff)));
    }

  /* Allocate stack for this frame.  */
  /* Make stack adjustment and use scratch register if constant too
     large to fit as immediate.  */
  if (frame_size)
    {
      insn = emit_insn (gen_subsi3 (stack_pointer_rtx,
                                 stack_pointer_rtx,
                                 size_rtx));
      RTX_FRAME_RELATED_P (insn) = 1;
      REG_NOTES (insn)
        = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
                             gen_rtx_SET (VOIDmode,
                                          stack_pointer_rtx,
                                          gen_rtx_MINUS (SImode,
                                                        stack_pointer_rtx,
                                                        GEN_INT (frame_size))),
                             REG_NOTES (insn));
    }

  /* Set R9 to point to old sp if required for access to register save
     area.  */
  if ( current_frame_info.reg_size != 0
       && !CONST_OK_FOR_LETTER_P (frame_size, 'O'))
      emit_insn (gen_addsi3 (size_rtx, size_rtx, stack_pointer_rtx));

  /* Save the frame pointer.  */
  mt_emit_save_fp (FROM_PROCESSOR_TO_MEM, current_frame_info);

  /* Now put the frame pointer into the frame pointer register.  */
  if (frame_pointer_needed)
    {
      insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
      RTX_FRAME_RELATED_P (insn) = 1;
    }

  /* Save the registers.  */
  mt_emit_save_regs (FROM_PROCESSOR_TO_MEM, current_frame_info);

  /* If we are profiling, make sure no instructions are scheduled before
     the call to mcount.  */
  if (profile_flag)
    emit_insn (gen_blockage ());
}

/* Implement EPILOGUE_USES.  */
int
mt_epilogue_uses (int regno)
{
  if (cfun->machine && cfun->machine->interrupt_handler && reload_completed)
    return 1;
  return regno == GPR_LINK;
}

/* Generate epilogue.  EH_MODE is NORMAL_EPILOGUE when generating a
   function epilogue, or EH_EPILOGUE when generating an EH
   epilogue.  */
void
mt_expand_epilogue (enum epilogue_type eh_mode)
{
  rtx size_rtx, insn;
  unsigned frame_size;

  mt_compute_frame_size (get_frame_size ());

  if (TARGET_DEBUG_STACK)
    mt_debug_stack (& current_frame_info);

  /* Compute size of stack adjustment.  */
  frame_size = current_frame_info.total_size;

  /* If the offset doesn't fit in a 15-bit signed integer,
     use a scratch register to get a smaller offset.  */
  if (CONST_OK_FOR_LETTER_P(frame_size, 'O'))
    size_rtx = GEN_INT (frame_size);
  else
    {
      /* We do not have any scratch registers.  */
      gcc_assert (!interrupt_handler);

      size_rtx = gen_rtx_REG (SImode, GPR_R9);
      insn = emit_move_insn (size_rtx, GEN_INT (frame_size & 0xffff0000));
      insn = emit_insn (gen_iorsi3 (size_rtx, size_rtx,
                                    GEN_INT (frame_size & 0x0000ffff)));
      /* Set R9 to point to old sp if required for access to register
         save area.  */
      emit_insn (gen_addsi3 (size_rtx, size_rtx, stack_pointer_rtx));
    }

  /* Restore sp if there was some possible change to it.  */
  if (frame_pointer_needed)
    insn = emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);

  /* Restore the registers.  */
  mt_emit_save_fp (FROM_MEM_TO_PROCESSOR, current_frame_info);
  mt_emit_save_regs (FROM_MEM_TO_PROCESSOR, current_frame_info);

  /* Make stack adjustment and use scratch register if constant too
     large to fit as immediate.  */
  if (frame_size)
    {
      if (CONST_OK_FOR_LETTER_P(frame_size, 'O'))
        /* Can handle this with simple add.  */
        insn = emit_insn (gen_addsi3 (stack_pointer_rtx,
                                      stack_pointer_rtx,
                                      size_rtx));
      else
        /* Scratch reg R9 has the old sp value.  */
        insn = emit_move_insn (stack_pointer_rtx,
                               gen_rtx_REG (SImode, GPR_R9));

      REG_NOTES (insn)
        = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
                             gen_rtx_SET (VOIDmode,
                                          stack_pointer_rtx,
                                          gen_rtx_PLUS (SImode,
                                                        stack_pointer_rtx,
                                                        GEN_INT (frame_size))),
                             REG_NOTES (insn));
    }

  if (cfun->machine && cfun->machine->eh_stack_adjust != NULL_RTX)
    /* Perform the additional bump for __throw.  */
    emit_insn (gen_addsi3 (stack_pointer_rtx,
                           stack_pointer_rtx,
                           cfun->machine->eh_stack_adjust));

  /* Generate the appropriate return.  */
  if (eh_mode == EH_EPILOGUE)
    {
      emit_jump_insn (gen_eh_return_internal ());
      emit_barrier ();
    }
  else if (interrupt_handler)
    emit_jump_insn (gen_return_interrupt_internal ());
  else
    emit_jump_insn (gen_return_internal ());

  /* Reset state info for each function.  */
  interrupt_handler = 0;
  current_frame_info = zero_frame_info;
  if (cfun->machine)
    cfun->machine->eh_stack_adjust = NULL_RTX;
}


/* Generate code for the "eh_return" pattern.  */
void
mt_expand_eh_return (rtx * operands)
{
  if (GET_CODE (operands[0]) != REG
      || REGNO (operands[0]) != EH_RETURN_STACKADJ_REGNO)
    {
      rtx sp = EH_RETURN_STACKADJ_RTX;

      emit_move_insn (sp, operands[0]);
      operands[0] = sp;
    }

  emit_insn (gen_eh_epilogue (operands[0]));
}

/* Generate code for the "eh_epilogue" pattern.  */
void
mt_emit_eh_epilogue (rtx * operands ATTRIBUTE_UNUSED)
{
  cfun->machine->eh_stack_adjust = EH_RETURN_STACKADJ_RTX; /* operands[0]; */
  mt_expand_epilogue (EH_EPILOGUE);
}

/* Handle an "interrupt" attribute.  */
static tree
mt_handle_interrupt_attribute (tree * node,
                          tree   name,
                          tree   args  ATTRIBUTE_UNUSED,
                          int    flags ATTRIBUTE_UNUSED,
                          bool * no_add_attrs)
{
  if (TREE_CODE (*node) != FUNCTION_DECL)
    {
      warning (OPT_Wattributes,
               "%qs attribute only applies to functions",
               IDENTIFIER_POINTER (name));
      *no_add_attrs = true;
    }

  return NULL_TREE;
}

/* Table of machine attributes.  */
const struct attribute_spec mt_attribute_table[] =
{
  /* name,        min, max, decl?, type?, func?, handler  */
  { "interrupt",  0,   0,   false, false, false, mt_handle_interrupt_attribute },
  { NULL,         0,   0,   false, false, false, NULL }
};

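/* A handler is then declared in user code as, for example:

       void my_isr (void) __attribute__ ((interrupt));

   which makes mt_interrupt_function_p return true for it.  */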
/* Implement INITIAL_ELIMINATION_OFFSET.  */
int
mt_initial_elimination_offset (int from, int to)
{
  mt_compute_frame_size (get_frame_size ());

  if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
    return 0;

  else if (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
    return current_frame_info.total_size;

  else if (from == ARG_POINTER_REGNUM && to == FRAME_POINTER_REGNUM)
    return current_frame_info.total_size;

  else
    gcc_unreachable ();
}

/* Generate a compare for CODE.  Return a brand-new rtx that
   represents the result of the compare.  */

static rtx
mt_generate_compare (enum rtx_code code, rtx op0, rtx op1)
{
  rtx scratch0, scratch1, const_scratch;

  switch (code)
    {
    case GTU:
    case LTU:
    case GEU:
    case LEU:
      /* Need to adjust ranges for faking unsigned compares.  */
      scratch0 = gen_reg_rtx (SImode);
      scratch1 = gen_reg_rtx (SImode);
      const_scratch = force_reg (SImode, GEN_INT(MT_MIN_INT));
      emit_insn (gen_addsi3 (scratch0, const_scratch, op0));
      emit_insn (gen_addsi3 (scratch1, const_scratch, op1));
      break;
    default:
      scratch0 = op0;
      scratch1 = op1;
      break;
    }

  /* Adjust compare operator to fake unsigned compares.  */
  switch (code)
    {
    case GTU:
      code = GT; break;
    case LTU:
      code = LT; break;
    case GEU:
      code = GE; break;
    case LEU:
      code = LE; break;
    default:
      /* do nothing */
      break;
    }

  /* Generate the actual compare.  */
  return gen_rtx_fmt_ee (code, VOIDmode, scratch0, scratch1);
}

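/* The bias trick above works because adding MT_MIN_INT (presumably
   0x80000000) flips the sign bit of both operands, mapping unsigned
   order onto signed order: 1 LTU 0xffffffff becomes
   0x80000001 LT 0x7fffffff, which is true as a signed compare.  */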
/* Emit a branch of kind CODE to location LOC.  */

void
mt_emit_cbranch (enum rtx_code code, rtx loc, rtx op0, rtx op1)
{
  rtx condition_rtx, loc_ref;

  if (! reg_or_0_operand (op0, SImode))
    op0 = copy_to_mode_reg (SImode, op0);

  if (! reg_or_0_operand (op1, SImode))
    op1 = copy_to_mode_reg (SImode, op1);

  condition_rtx = mt_generate_compare (code, op0, op1);
  loc_ref = gen_rtx_LABEL_REF (VOIDmode, loc);
  emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx,
                               gen_rtx_IF_THEN_ELSE (VOIDmode, condition_rtx,
                                                     loc_ref, pc_rtx)));
}

/* Subfunction of the following function.  Update the flags of any MEM
   found in part of X.  */

static void
mt_set_memflags_1 (rtx x, int in_struct_p, int volatile_p)
{
  int i;

  switch (GET_CODE (x))
    {
    case SEQUENCE:
    case PARALLEL:
      for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
        mt_set_memflags_1 (XVECEXP (x, 0, i), in_struct_p, volatile_p);
      break;

    case INSN:
      mt_set_memflags_1 (PATTERN (x), in_struct_p, volatile_p);
      break;

    case SET:
      mt_set_memflags_1 (SET_DEST (x), in_struct_p, volatile_p);
      mt_set_memflags_1 (SET_SRC (x), in_struct_p, volatile_p);
      break;

    case MEM:
      MEM_IN_STRUCT_P (x) = in_struct_p;
      MEM_VOLATILE_P (x) = volatile_p;
      /* Sadly, we cannot use alias sets because the extra aliasing
         produced by the AND interferes.  Given that two-byte quantities
         are the only thing we would be able to differentiate anyway,
         there does not seem to be any point in convoluting the early
         out of the alias check.  */
      /* set_mem_alias_set (x, alias_set); */
      break;

    default:
      break;
    }
}

/* Look for any MEMs in the current sequence of insns and set the
   in-struct, unchanging, and volatile flags from the flags in REF.
   If REF is not a MEM, don't do anything.  */

void
mt_set_memflags (rtx ref)
{
  rtx insn;
  int in_struct_p, volatile_p;

  if (GET_CODE (ref) != MEM)
    return;

  in_struct_p = MEM_IN_STRUCT_P (ref);
  volatile_p = MEM_VOLATILE_P (ref);

  /* This is only called from mt.md, after having had something
     generated from one of the insn patterns.  So if everything is
     zero, the pattern is already up-to-date.  */
  if (! in_struct_p && ! volatile_p)
    return;

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    mt_set_memflags_1 (insn, in_struct_p, volatile_p);
}

/* Implement SECONDARY_RELOAD_CLASS.  */
enum reg_class
mt_secondary_reload_class (enum reg_class class ATTRIBUTE_UNUSED,
                            enum machine_mode mode,
                            rtx x)
{
  if ((mode == QImode && (!TARGET_BYTE_ACCESS)) || mode == HImode)
    {
      if (GET_CODE (x) == MEM
          || (GET_CODE (x) == REG && true_regnum (x) == -1)
          || (GET_CODE (x) == SUBREG
              && (GET_CODE (SUBREG_REG (x)) == MEM
                  || (GET_CODE (SUBREG_REG (x)) == REG
                      && true_regnum (SUBREG_REG (x)) == -1))))
        return GENERAL_REGS;
    }

  return NO_REGS;
}

/* Handle FUNCTION_VALUE, FUNCTION_OUTGOING_VALUE, and LIBCALL_VALUE
   macros.  */
rtx
mt_function_value (tree valtype, enum machine_mode mode, tree func_decl ATTRIBUTE_UNUSED)
{
  if ((mode) == DImode || (mode) == DFmode)
    return gen_rtx_MEM (mode, gen_rtx_REG (mode, RETURN_VALUE_REGNUM));

  if (valtype)
    mode = TYPE_MODE (valtype);

  return gen_rtx_REG (mode, RETURN_VALUE_REGNUM);
}

/* Split a move into two smaller pieces.
   MODE indicates the reduced mode.  OPERANDS[0] is the original destination
   OPERANDS[1] is the original src.  The new destinations are
   OPERANDS[2] and OPERANDS[4], while the new sources are OPERANDS[3]
   and OPERANDS[5].  */

void
mt_split_words (enum machine_mode nmode,
                 enum machine_mode omode,
                 rtx *operands)
{
  rtx dl,dh;    /* src/dest pieces.  */
  rtx sl,sh;
  int   move_high_first = 0;     /* Assume no overlap.  */

  switch (GET_CODE (operands[0])) /* Dest.  */
    {
    case SUBREG:
    case REG:
      if ((GET_CODE (operands[1]) == REG
           || GET_CODE (operands[1]) == SUBREG)
          && true_regnum (operands[0]) <= true_regnum (operands[1]))
        move_high_first = 1;

      if (GET_CODE (operands[0]) == SUBREG)
        {
          dl = gen_rtx_SUBREG (nmode, SUBREG_REG (operands[0]),
                               SUBREG_BYTE (operands[0]) + GET_MODE_SIZE (nmode));
          dh = gen_rtx_SUBREG (nmode, SUBREG_REG (operands[0]), SUBREG_BYTE (operands[0]));
        }
      else if (GET_CODE (operands[0]) == REG && ! IS_PSEUDO_P (operands[0]))
        {
          int   r = REGNO (operands[0]);
          dh = gen_rtx_REG (nmode, r);
          dl = gen_rtx_REG (nmode, r + HARD_REGNO_NREGS (r, nmode));
        }
      else
        {
          dh = gen_rtx_SUBREG (nmode, operands[0], 0);
          dl = gen_rtx_SUBREG (nmode, operands[0], GET_MODE_SIZE (nmode));
        }
      break;

    case MEM:
      switch (GET_CODE (XEXP (operands[0], 0)))
        {
        case POST_INC:
        case POST_DEC:
          gcc_unreachable ();
        default:
          dl = operand_subword (operands[0],
                                GET_MODE_SIZE (nmode)/UNITS_PER_WORD,
                                0, omode);
          dh = operand_subword (operands[0], 0, 0, omode);
        }
      break;
    default:
      gcc_unreachable ();
    }

  switch (GET_CODE (operands[1]))
    {
    case REG:
      if (! IS_PSEUDO_P (operands[1]))
        {
          int r = REGNO (operands[1]);

          sh = gen_rtx_REG (nmode, r);
          sl = gen_rtx_REG (nmode, r + HARD_REGNO_NREGS (r, nmode));
        }
      else
        {
          sh = gen_rtx_SUBREG (nmode, operands[1], 0);
          sl = gen_rtx_SUBREG (nmode, operands[1], GET_MODE_SIZE (nmode));
        }
      break;

    case CONST_DOUBLE:
      if (operands[1] == const0_rtx)
        sh = sl = const0_rtx;
      else
        split_double (operands[1], & sh, & sl);
      break;

    case CONST_INT:
      if (operands[1] == const0_rtx)
        sh = sl = const0_rtx;
      else
        {
          int vl, vh;

          switch (nmode)
            {
            default:
              gcc_unreachable ();
            }

          sl = GEN_INT (vl);
          sh = GEN_INT (vh);
        }
      break;

    case SUBREG:
      sl = gen_rtx_SUBREG (nmode,
                           SUBREG_REG (operands[1]),
                           SUBREG_BYTE (operands[1]) + GET_MODE_SIZE (nmode));
      sh = gen_rtx_SUBREG (nmode,
                           SUBREG_REG (operands[1]),
                           SUBREG_BYTE (operands[1]));
      break;

    case MEM:
      switch (GET_CODE (XEXP (operands[1], 0)))
        {
        case POST_DEC:
        case POST_INC:
          gcc_unreachable ();
          break;
        default:
          sl = operand_subword (operands[1],
                                GET_MODE_SIZE (nmode)/UNITS_PER_WORD,
                                0, omode);
          sh = operand_subword (operands[1], 0, 0, omode);

          /* Check if the DF load is going to clobber the register
             used for the address, and if so make sure that is going
             to be the second move.  */
          if (GET_CODE (dl) == REG
              && true_regnum (dl)
              == true_regnum (XEXP (XEXP (sl, 0 ), 0)))
            move_high_first = 1;
        }
      break;
    default:
      gcc_unreachable ();
    }

  if (move_high_first)
    {
      operands[2] = dh;
      operands[3] = sh;
      operands[4] = dl;
      operands[5] = sl;
    }
  else
    {
      operands[2] = dl;
      operands[3] = sl;
      operands[4] = dh;
      operands[5] = sh;
    }
  return;
}

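/* For example, a DImode register-to-register move is rewritten as two
   SImode moves.  When source and destination overlap (true_regnum of
   the destination is <= that of the source), the high word is moved
   first so that the second move does not read an already-clobbered
   register.  */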
/* Implement TARGET_MUST_PASS_IN_STACK hook.  */
static bool
mt_pass_in_stack (enum machine_mode mode ATTRIBUTE_UNUSED, tree type)
{
  return (((type) != 0
           && (TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
               || TREE_ADDRESSABLE (type))));
}


/* Structures to hold branch information during reorg.  */
typedef struct branch_info
{
  rtx insn;  /* The branch insn.  */

  struct branch_info *next;
} branch_info;

typedef struct label_info
{
  rtx label;  /* The label.  */
  branch_info *branches;  /* branches to this label.  */
  struct label_info *next;
} label_info;

/* Chain of labels found in current function, used during reorg.  */
static label_info *mt_labels;

/* If *X is a label, add INSN to the list of branches for that
   label.  */

static int
mt_add_branches (rtx *x, void *insn)
{
  if (GET_CODE (*x) == LABEL_REF)
    {
      branch_info *branch = xmalloc (sizeof (*branch));
      rtx label = XEXP (*x, 0);
      label_info *info;

      for (info = mt_labels; info; info = info->next)
        if (info->label == label)
          break;

      if (!info)
        {
          info = xmalloc (sizeof (*info));
          info->next = mt_labels;
          mt_labels = info;

          info->label = label;
          info->branches = NULL;
        }

      branch->next = info->branches;
      info->branches = branch;
      branch->insn = insn;
    }
  return 0;
}

/* If BRANCH has a filled delay slot, check if INSN is dependent upon
   it.  If so, undo the delay slot fill.  Returns the next insn, if
   we patch out the branch.  Returns the branch insn, if we cannot
   patch out the branch (due to anti-dependency in the delay slot).
   In that case, the caller must insert nops at the branch target.  */

static rtx
mt_check_delay_slot (rtx branch, rtx insn)
{
  rtx slot;
  rtx tmp;
  rtx p;
  rtx jmp;

  gcc_assert (GET_CODE (PATTERN (branch)) == SEQUENCE);
  if (INSN_DELETED_P (branch))
    return NULL_RTX;
  slot = XVECEXP (PATTERN (branch), 0, 1);

  tmp = PATTERN (insn);
  note_stores (PATTERN (slot), insn_dependent_p_1, &tmp);
  if (tmp)
    /* Not dependent.  */
    return NULL_RTX;

  /* Undo the delay slot.  */
  jmp = XVECEXP (PATTERN (branch), 0, 0);

  tmp = PATTERN (jmp);
  note_stores (PATTERN (slot), insn_dependent_p_1, &tmp);
  if (!tmp)
    /* Anti-dependent.  */
    return branch;

  p = PREV_INSN (branch);
  NEXT_INSN (p) = slot;
  PREV_INSN (slot) = p;
  NEXT_INSN (slot) = jmp;
  PREV_INSN (jmp) = slot;
  NEXT_INSN (jmp) = branch;
  PREV_INSN (branch) = jmp;
  XVECEXP (PATTERN (branch), 0, 0) = NULL_RTX;
  XVECEXP (PATTERN (branch), 0, 1) = NULL_RTX;
  delete_insn (branch);
  return jmp;
}

/* Insert nops to satisfy pipeline constraints.  We only deal with ms2
1740
   constraints here.  Earlier CPUs are dealt with by inserting nops with
1741
   final_prescan (but that can lead to inferior code, and is
1742
   impractical with ms2's JAL hazard).
1743
 
1744
   ms2 dynamic constraints
1745
   1) a load and a following use must be separated by one insn
1746
   2) an insn and a following dependent call must be separated by two insns
1747
 
1748
   only arith insns are placed in delay slots so #1 cannot happen with
1749
   a load in a delay slot.  #2 can happen with an arith insn in the
1750
   delay slot.  */
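
/* For instance (illustrative pseudo-assembly; the mnemonics are not
   taken from the real instruction set):

     load   r1, [r2]      ; a load ...
     nop                  ; #1: one insn between load and use
     add    r3, r1, r1    ; ... and a use of its result

     move   r9, r4        ; an insn the call depends on ...
     nop
     nop                  ; #2: two insns before the dependent call
     jal    r9            ; ... and the call itself  */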

static void
mt_reorg_hazard (void)
{
  rtx insn, next;

  /* Find all the branches.  */
  for (insn = get_insns ();
       insn;
       insn = NEXT_INSN (insn))
    {
      rtx jmp;

      if (!INSN_P (insn))
        continue;

      jmp = PATTERN (insn);

      if (GET_CODE (jmp) != SEQUENCE)
        /* If it doesn't have a filled delay slot, it can't
           conflict.  */
        continue;

      jmp = XVECEXP (jmp, 0, 0);

      if (recog_memoized (jmp) == CODE_FOR_tablejump)
        for (jmp = XEXP (XEXP (XVECEXP (PATTERN (jmp), 0, 1), 0), 0);
             !JUMP_TABLE_DATA_P (jmp);
             jmp = NEXT_INSN (jmp))
          continue;

      for_each_rtx (&PATTERN (jmp), mt_add_branches, insn);
    }

  /* Now scan for dependencies.  */
  for (insn = get_insns ();
       insn && !INSN_P (insn);
       insn = NEXT_INSN (insn))
    continue;

  for (;
       insn;
       insn = next)
    {
      rtx jmp, tmp;
      enum attr_type attr;

      gcc_assert (INSN_P (insn) && !INSN_DELETED_P (insn));
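      /* Find the following real insn, skipping notes and bare
         USEs.  */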
      for (next = NEXT_INSN (insn);
           next;
           next = NEXT_INSN (next))
        {
          if (!INSN_P (next))
            continue;
          if (GET_CODE (PATTERN (next)) != USE)
            break;
        }

      jmp = insn;
      if (GET_CODE (PATTERN (insn)) == SEQUENCE)
        jmp = XVECEXP (PATTERN (insn), 0, 0);

      attr = recog_memoized (jmp) >= 0 ? get_attr_type (jmp) : TYPE_UNKNOWN;

      if (next && attr == TYPE_LOAD)
        {
          /* A load.  See if NEXT is dependent, and if so insert a
             nop.  */

          tmp = PATTERN (next);
          if (GET_CODE (tmp) == SEQUENCE)
            tmp = PATTERN (XVECEXP (tmp, 0, 0));
          note_stores (PATTERN (insn), insn_dependent_p_1, &tmp);
          if (!tmp)
            emit_insn_after (gen_nop (), insn);
        }

      if (attr == TYPE_CALL)
        {
          /* A call.  Make sure we're not dependent on either of the
             previous two dynamic instructions.  */
          int nops = 0;
          int count;
          rtx prev = insn;
          rtx rescan = NULL_RTX;
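
          /* Walk backwards over the previous two dynamic insns
             (COUNT), deciding how many nops (NOPS) must be emitted
             before the call.  RESCAN is set if undoing a delay-slot
             fill replaced the insn NEXT points at.  */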
          for (count = 2; count && !nops;)
            {
              int type;

              prev = PREV_INSN (prev);
              if (!prev)
                {
                  /* If we reach the start of the function, we must
                     presume the caller set the address in the delay
                     slot of the call instruction.  */
                  nops = count;
                  break;
                }

              if (BARRIER_P (prev))
                break;
              if (LABEL_P (prev))
                {
                  /* Look at branches to this label.  */
                  label_info *label;
                  branch_info *branch;

                  for (label = mt_labels;
                       label;
                       label = label->next)
                    if (label->label == prev)
                      {
                        for (branch = label->branches;
                             branch;
                             branch = branch->next)
                          {
                            tmp = mt_check_delay_slot (branch->insn, jmp);

                            if (tmp == branch->insn)
                              {
                                nops = count;
                                break;
                              }

                            if (tmp && branch->insn == next)
                              rescan = tmp;
                          }
                        break;
                      }
                  continue;
                }
              if (!INSN_P (prev) || GET_CODE (PATTERN (prev)) == USE)
                continue;

              if (GET_CODE (PATTERN (prev)) == SEQUENCE)
                {
                  /* Look at the delay slot.  */
                  tmp = mt_check_delay_slot (prev, jmp);
                  if (tmp == prev)
                    nops = count;
                  break;
                }

              type = (INSN_CODE (prev) >= 0 ? get_attr_type (prev)
                      : TYPE_COMPLEX);
              if (type == TYPE_CALL || type == TYPE_BRANCH)
                break;

              if (type == TYPE_LOAD
                  || type == TYPE_ARITH
                  || type == TYPE_COMPLEX)
                {
                  tmp = PATTERN (jmp);
                  note_stores (PATTERN (prev), insn_dependent_p_1, &tmp);
                  if (!tmp)
                    {
                      nops = count;
                      break;
                    }
                }

              if (INSN_CODE (prev) >= 0)
                count--;
            }
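
          /* If undoing a delay-slot fill rewrote the insn NEXT was
             pointing at, recompute NEXT starting from the re-emitted
             jump.  */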
          if (rescan)
            for (next = NEXT_INSN (rescan);
                 next && !INSN_P (next);
                 next = NEXT_INSN (next))
              continue;
          while (nops--)
            emit_insn_before (gen_nop (), insn);
        }
    }

  /* Free the data structures.  */
  while (mt_labels)
    {
      label_info *label = mt_labels;
      branch_info *branch, *next;

      mt_labels = label->next;
      for (branch = label->branches; branch; branch = next)
        {
          next = branch->next;
          free (branch);
        }
      free (label);
    }
}
1942
 
1943
/* Fixup the looping instructions, do delayed branch scheduling, fixup
1944
   scheduling hazards.  */
1945
 
1946
static void
1947
mt_machine_reorg (void)
1948
{
1949
  if (mt_flag_delayed_branch)
1950
    dbr_schedule (get_insns (), dump_file);
1951
 
1952
  if (TARGET_MS2)
1953
    {
1954
      /* Force all instructions to be split into their final form.  */
1955
      split_all_insns_noflow ();
1956
      mt_reorg_hazard ();
1957
    }
1958
}
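
/* mt_machine_reorg is installed as the machine-dependent reorg pass
   via TARGET_MACHINE_DEPENDENT_REORG below.  */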

/* Initialize the GCC target structure.  */
const struct attribute_spec mt_attribute_table[];

#undef  TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE          mt_attribute_table
#undef  TARGET_STRUCT_VALUE_RTX
#define TARGET_STRUCT_VALUE_RTX         mt_struct_value_rtx
#undef  TARGET_PROMOTE_PROTOTYPES
#define TARGET_PROMOTE_PROTOTYPES       hook_bool_tree_true
#undef  TARGET_PASS_BY_REFERENCE
#define TARGET_PASS_BY_REFERENCE        mt_pass_by_reference
#undef  TARGET_MUST_PASS_IN_STACK
#define TARGET_MUST_PASS_IN_STACK       mt_pass_in_stack
#undef  TARGET_ARG_PARTIAL_BYTES
#define TARGET_ARG_PARTIAL_BYTES        mt_arg_partial_bytes
#undef  TARGET_SETUP_INCOMING_VARARGS
#define TARGET_SETUP_INCOMING_VARARGS   mt_setup_incoming_varargs
#undef  TARGET_MACHINE_DEPENDENT_REORG
#define TARGET_MACHINE_DEPENDENT_REORG  mt_machine_reorg

struct gcc_target targetm = TARGET_INITIALIZER;

#include "gt-mt.h"
