OpenCores Subversion repository "openrisc", rev 820: openrisc/trunk/gnu-old/gcc-4.2.2/gcc/config/mt/mt.c
URL: https://opencores.org/ocsvn/openrisc/openrisc/trunk
/* Target definitions for the MorphoRISC1
   Copyright (C) 2005, 2007 Free Software Foundation, Inc.
   Contributed by Red Hat, Inc.

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it
   under the terms of the GNU General Public License as published
   by the Free Software Foundation; either version 3, or (at your
   option) any later version.

   GCC is distributed in the hope that it will be useful, but WITHOUT
   ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
   or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public
   License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "real.h"
#include "insn-config.h"
#include "conditions.h"
#include "insn-attr.h"
#include "recog.h"
#include "toplev.h"
#include "output.h"
#include "integrate.h"
#include "tree.h"
#include "function.h"
#include "expr.h"
#include "optabs.h"
#include "libfuncs.h"
#include "flags.h"
#include "tm_p.h"
#include "ggc.h"
#include "insn-flags.h"
#include "obstack.h"
#include "except.h"
#include "target.h"
#include "target-def.h"
#include "basic-block.h"

/* Frame pointer register mask.  */
#define FP_MASK                  (1 << (GPR_FP))

/* Link register mask.  */
#define LINK_MASK                (1 << (GPR_LINK))

/* Given a SIZE in bytes, advance to the next word.  */
#define ROUND_ADVANCE(SIZE) (((SIZE) + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
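
/* Illustration (added comment, not from the original source; assumes
   this 32-bit port's UNITS_PER_WORD of 4): ROUND_ADVANCE (1) through
   ROUND_ADVANCE (4) all evaluate to 1 word, ROUND_ADVANCE (5) to 2.  */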

/* A C structure for machine-specific, per-function data.
   This is added to the cfun structure.  */
struct machine_function GTY(())
{
  /* Flags if __builtin_return_address (n) with n >= 1 was used.  */
  int ra_needs_full_frame;
  struct rtx_def * eh_stack_adjust;
  int interrupt_handler;
  int has_loops;
};

/* Define the information needed to generate branch and scc insns.
   This is stored from the compare operation.  */
struct rtx_def * mt_compare_op0;
struct rtx_def * mt_compare_op1;

/* Current frame information calculated by compute_frame_size.  */
struct mt_frame_info current_frame_info;

/* Zero structure to initialize current_frame_info.  */
struct mt_frame_info zero_frame_info;

/* mt doesn't have unsigned compares; we need a library call for this.  */
struct rtx_def * mt_ucmpsi3_libcall;

static int mt_flag_delayed_branch;


/* Implement the TARGET_STRUCT_VALUE_RTX target hook.  */
static rtx
mt_struct_value_rtx (tree fndecl ATTRIBUTE_UNUSED,
                     int incoming ATTRIBUTE_UNUSED)
{
  return gen_rtx_REG (Pmode, RETVAL_REGNUM);
}

/* Implement RETURN_ADDR_RTX.  */
rtx
mt_return_addr_rtx (int count)
{
  if (count != 0)
    return NULL_RTX;

  return get_hard_reg_initial_val (Pmode, GPR_LINK);
}

/* The following variables record the number of nops required between
   the current instruction and the next one to avoid pipeline hazards,
   and the reason for them.  */
static int mt_nops_required = 0;
static const char * mt_nop_reasons = "";

/* Implement ASM_OUTPUT_OPCODE.  */
const char *
mt_asm_output_opcode (FILE *f, const char *ptr)
{
  if (mt_nops_required)
    fprintf (f, ";# need %d nops because of %s\n\t",
             mt_nops_required, mt_nop_reasons);

  while (mt_nops_required)
    {
      fprintf (f, "nop\n\t");
      --mt_nops_required;
    }

  return ptr;
}
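
/* Illustration (added comment): if mt_final_prescan_insn decided that
   two nops are required, the assembly output just before the next
   opcode looks like

       ;# need 2 nops because of load->branch dependency delay
       nop
       nop
       <opcode ...>

   after which mt_nops_required is back to zero.  */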

/* Classify an insn: return TYPE_LOAD, TYPE_STORE, or TYPE_BRANCH for
   memory and branch operations, and TYPE_ARITH otherwise.  */
static enum attr_type
mt_get_attr_type (rtx complete_insn)
{
  rtx insn = PATTERN (complete_insn);

  if (JUMP_P (complete_insn))
    return TYPE_BRANCH;
  if (CALL_P (complete_insn))
    return TYPE_BRANCH;

  if (GET_CODE (insn) != SET)
    return TYPE_ARITH;

  if (SET_DEST (insn) == pc_rtx)
    return TYPE_BRANCH;

  if (GET_CODE (SET_DEST (insn)) == MEM)
    return TYPE_STORE;

  if (GET_CODE (SET_SRC (insn)) == MEM)
    return TYPE_LOAD;

  return TYPE_ARITH;
}

/* A helper routine for insn_dependent_p called through note_stores.  */

static void
insn_dependent_p_1 (rtx x, rtx pat ATTRIBUTE_UNUSED, void *data)
{
  rtx * pinsn = (rtx *) data;

  if (*pinsn && reg_mentioned_p (x, *pinsn))
    *pinsn = NULL_RTX;
}

/* Return true if anything in insn X is (anti, output, or true)
   dependent on anything in insn Y.  */

static bool
insn_dependent_p (rtx x, rtx y)
{
  rtx tmp;

  if (! INSN_P (x) || ! INSN_P (y))
    return false;

  tmp = PATTERN (y);
  note_stores (PATTERN (x), insn_dependent_p_1, &tmp);
  if (tmp == NULL_RTX)
    return true;

  tmp = PATTERN (x);
  note_stores (PATTERN (y), insn_dependent_p_1, &tmp);
  return (tmp == NULL_RTX);
}


/* Return true if anything in insn Y is true dependent on anything set
   by insn X.  */
static bool
insn_true_dependent_p (rtx x, rtx y)
{
  rtx tmp;

  if (! INSN_P (x) || ! INSN_P (y))
    return false;

  tmp = PATTERN (y);
  note_stores (PATTERN (x), insn_dependent_p_1, &tmp);
  return (tmp == NULL_RTX);
}
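
/* Illustration (added comment): given
       X: (set (reg 1) (reg 2))    Y: (set (reg 3) (reg 1))
   note_stores over X's pattern visits the store to (reg 1); since
   (reg 1) is mentioned in Y's pattern, the callback clears TMP and a
   dependence is reported.  */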

/* The following determines the number of nops that need to be
   inserted between the previous instruction and the current one
   to avoid pipeline hazards on the mt processor.  Remember that
   the function is not called for asm insns.  */

void
mt_final_prescan_insn (rtx   insn,
                       rtx * opvec ATTRIBUTE_UNUSED,
                       int   noperands ATTRIBUTE_UNUSED)
{
  rtx prev_i;
  enum attr_type prev_attr;

  mt_nops_required = 0;
  mt_nop_reasons = "";

  /* ms2 constraints are dealt with in reorg.  */
  if (TARGET_MS2)
    return;

  /* Only worry about real instructions.  */
  if (! INSN_P (insn))
    return;

  /* Find the previous real instruction.  */
  for (prev_i = PREV_INSN (insn);
       prev_i != NULL
         && (! INSN_P (prev_i)
             || GET_CODE (PATTERN (prev_i)) == USE
             || GET_CODE (PATTERN (prev_i)) == CLOBBER);
       prev_i = PREV_INSN (prev_i))
    {
      /* If we meet a barrier, there is no flow through here.  */
      if (BARRIER_P (prev_i))
        return;
    }

  /* If there isn't one, then there is nothing we need to do.  */
  if (prev_i == NULL || ! INSN_P (prev_i))
    return;

  prev_attr = mt_get_attr_type (prev_i);

  /* Delayed branch slots are already taken care of by delayed branch
     scheduling.  */
  if (prev_attr == TYPE_BRANCH)
    return;

  switch (mt_get_attr_type (insn))
    {
    case TYPE_LOAD:
    case TYPE_STORE:
      /* Avoid consecutive memory operations.  */
      if ((prev_attr == TYPE_LOAD || prev_attr == TYPE_STORE)
          && TARGET_MS1_64_001)
        {
          mt_nops_required = 1;
          mt_nop_reasons = "consecutive mem ops";
        }
      /* Drop through.  */

    case TYPE_ARITH:
    case TYPE_COMPLEX:
      /* One cycle of delay is required between load
         and the dependent arithmetic instruction.  */
      if (prev_attr == TYPE_LOAD
          && insn_true_dependent_p (prev_i, insn))
        {
          mt_nops_required = 1;
          mt_nop_reasons = "load->arith dependency delay";
        }
      break;

    case TYPE_BRANCH:
      if (insn_dependent_p (prev_i, insn))
        {
          if (prev_attr == TYPE_ARITH && TARGET_MS1_64_001)
            {
              /* One cycle of delay between arith
                 instructions and branch dependent on arith.  */
              mt_nops_required = 1;
              mt_nop_reasons = "arith->branch dependency delay";
            }
          else if (prev_attr == TYPE_LOAD)
            {
              /* Two cycles of delay are required
                 between load and dependent branch.  */
              if (TARGET_MS1_64_001)
                mt_nops_required = 2;
              else
                mt_nops_required = 1;
              mt_nop_reasons = "load->branch dependency delay";
            }
        }
      break;

    default:
      fatal_insn ("mt_final_prescan_insn, invalid insn #1", insn);
      break;
    }
}

/* Print debugging information for a frame.  */
static void
mt_debug_stack (struct mt_frame_info * info)
{
  int regno;

  if (!info)
    {
      error ("info pointer NULL");
      gcc_unreachable ();
    }

  fprintf (stderr, "\nStack information for function %s:\n",
           ((current_function_decl && DECL_NAME (current_function_decl))
            ? IDENTIFIER_POINTER (DECL_NAME (current_function_decl))
            : "<unknown>"));

  fprintf (stderr, "\ttotal_size       = %d\n", info->total_size);
  fprintf (stderr, "\tpretend_size     = %d\n", info->pretend_size);
  fprintf (stderr, "\targs_size        = %d\n", info->args_size);
  fprintf (stderr, "\textra_size       = %d\n", info->extra_size);
  fprintf (stderr, "\treg_size         = %d\n", info->reg_size);
  fprintf (stderr, "\tvar_size         = %d\n", info->var_size);
  fprintf (stderr, "\tframe_size       = %d\n", info->frame_size);
  fprintf (stderr, "\treg_mask         = 0x%x\n", info->reg_mask);
  fprintf (stderr, "\tsave_fp          = %d\n", info->save_fp);
  fprintf (stderr, "\tsave_lr          = %d\n", info->save_lr);
  fprintf (stderr, "\tinitialized      = %d\n", info->initialized);
  fprintf (stderr, "\tsaved registers =");

  /* Print out reg_mask in a more readable format.  */
  for (regno = GPR_R0; regno <= GPR_LAST; regno++)
    if ((1 << regno) & info->reg_mask)
      fprintf (stderr, " %s", reg_names[regno]);

  putc ('\n', stderr);
  fflush (stderr);
}

/* Print a memory address as an operand to reference that memory location.  */

static void
mt_print_operand_simple_address (FILE * file, rtx addr)
{
  if (!addr)
    error ("PRINT_OPERAND_ADDRESS, null pointer");

  else
    switch (GET_CODE (addr))
      {
      case REG:
        fprintf (file, "%s, #0", reg_names [REGNO (addr)]);
        break;

      case PLUS:
        {
          rtx reg = 0;
          rtx offset = 0;
          rtx arg0 = XEXP (addr, 0);
          rtx arg1 = XEXP (addr, 1);

          if (GET_CODE (arg0) == REG)
            {
              reg = arg0;
              offset = arg1;
              if (GET_CODE (offset) == REG)
                fatal_insn ("PRINT_OPERAND_ADDRESS, 2 regs", addr);
            }

          else if (GET_CODE (arg1) == REG)
            reg = arg1, offset = arg0;
          else if (CONSTANT_P (arg0) && CONSTANT_P (arg1))
            {
              fprintf (file, "%s, #", reg_names [GPR_R0]);
              output_addr_const (file, addr);
              break;
            }
          fprintf (file, "%s, #", reg_names [REGNO (reg)]);
          output_addr_const (file, offset);
          break;
        }

      case LABEL_REF:
      case SYMBOL_REF:
      case CONST_INT:
      case CONST:
        output_addr_const (file, addr);
        break;

      default:
        fatal_insn ("PRINT_OPERAND_ADDRESS, invalid insn #1", addr);
        break;
      }
}

/* Implement PRINT_OPERAND_ADDRESS.  */
void
mt_print_operand_address (FILE * file, rtx addr)
{
  if (GET_CODE (addr) == AND
      && GET_CODE (XEXP (addr, 1)) == CONST_INT
      && INTVAL (XEXP (addr, 1)) == -3)
    mt_print_operand_simple_address (file, XEXP (addr, 0));
  else
    mt_print_operand_simple_address (file, addr);
}
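
/* Illustration (added comment): for SImode the address may arrive
   wrapped as (and (plus (reg) (const_int 8)) (const_int -3)); the
   AND (which masks bit 1 of the address) is stripped and the inner
   "(reg), #8" form is printed.  */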

/* Implement PRINT_OPERAND.  */
void
mt_print_operand (FILE * file, rtx x, int code)
{
  switch (code)
    {
    case '#':
      /* Output a nop if there's nothing for the delay slot.  */
      if (dbr_sequence_length () == 0)
        fputs ("\n\tnop", file);
      return;

    case 'H':
      fprintf (file, "#%%hi16(");
      output_addr_const (file, x);
      fprintf (file, ")");
      return;

    case 'L':
      fprintf (file, "#%%lo16(");
      output_addr_const (file, x);
      fprintf (file, ")");
      return;

    case 'N':
      fprintf (file, "#%ld", ~INTVAL (x));
      return;

    case 'z':
      if (GET_CODE (x) == CONST_INT && INTVAL (x) == 0)
        {
          fputs (reg_names[GPR_R0], file);
          return;
        }
      /* Fall through.  */

    case 0:
      /* Handled below.  */
      break;

    default:
      /* output_operand_lossage ("mt_print_operand: unknown code"); */
      fprintf (file, "unknown code");
      return;
    }

  switch (GET_CODE (x))
    {
    case REG:
      fputs (reg_names [REGNO (x)], file);
      break;

    case CONST:
    case CONST_INT:
      fprintf (file, "#%ld", INTVAL (x));
      break;

    case MEM:
      mt_print_operand_address (file, XEXP (x, 0));
      break;

    case LABEL_REF:
    case SYMBOL_REF:
      output_addr_const (file, x);
      break;

    default:
      fprintf (file, "Unknown code: %d", GET_CODE (x));
      break;
    }

  return;
}

/* Implement INIT_CUMULATIVE_ARGS.  */
void
mt_init_cumulative_args (CUMULATIVE_ARGS * cum, tree fntype, rtx libname,
                         tree fndecl ATTRIBUTE_UNUSED, int incoming)
{
  *cum = 0;

  if (TARGET_DEBUG_ARG)
    {
      fprintf (stderr, "\nmt_init_cumulative_args:");

      if (incoming)
        fputs (" incoming", stderr);

      if (fntype)
        {
          tree ret_type = TREE_TYPE (fntype);
          fprintf (stderr, " return = %s,",
                   tree_code_name[(int) TREE_CODE (ret_type)]);
        }

      if (libname && GET_CODE (libname) == SYMBOL_REF)
        fprintf (stderr, " libname = %s", XSTR (libname, 0));

      if (cfun->returns_struct)
        fprintf (stderr, " return-struct");

      putc ('\n', stderr);
    }
}

/* Compute the slot number to pass an argument in.
   Returns the slot number or -1 if passing on the stack.

   CUM is a variable of type CUMULATIVE_ARGS which gives info about
    the preceding args and about the function being called.
   MODE is the argument's machine mode.
   TYPE is the data type of the argument (as a tree).
    This is null for libcalls where that information may
    not be available.
   NAMED is nonzero if this argument is a named parameter
    (otherwise it is an extra parameter matching an ellipsis).
   INCOMING_P is zero for FUNCTION_ARG, nonzero for FUNCTION_INCOMING_ARG.
   *PREGNO records the register number to use if scalar type.  */

static int
mt_function_arg_slotno (const CUMULATIVE_ARGS * cum,
                        enum machine_mode mode,
                        tree type,
                        int named ATTRIBUTE_UNUSED,
                        int incoming_p ATTRIBUTE_UNUSED,
                        int * pregno)
{
  int regbase = FIRST_ARG_REGNUM;
  int slotno  = * cum;

  if (mode == VOIDmode || targetm.calls.must_pass_in_stack (mode, type))
    return -1;

  if (slotno >= MT_NUM_ARG_REGS)
    return -1;

  * pregno = regbase + slotno;

  return slotno;
}

/* Implement FUNCTION_ARG.  */
rtx
mt_function_arg (const CUMULATIVE_ARGS * cum,
                 enum machine_mode mode,
                 tree type,
                 int named,
                 int incoming_p)
{
  int slotno, regno;
  rtx reg;

  slotno = mt_function_arg_slotno (cum, mode, type, named, incoming_p, &regno);

  if (slotno == -1)
    reg = NULL_RTX;
  else
    reg = gen_rtx_REG (mode, regno);

  return reg;
}

/* Implement FUNCTION_ARG_ADVANCE.  */
void
mt_function_arg_advance (CUMULATIVE_ARGS * cum,
                         enum machine_mode mode,
                         tree type,
                         int named)
{
  int slotno, regno;

  /* We pass 0 for incoming_p here; it doesn't matter.  */
  slotno = mt_function_arg_slotno (cum, mode, type, named, 0, &regno);

  * cum += (mode != BLKmode
            ? ROUND_ADVANCE (GET_MODE_SIZE (mode))
            : ROUND_ADVANCE (int_size_in_bytes (type)));

  if (TARGET_DEBUG_ARG)
    fprintf (stderr,
             "mt_function_arg_advance: words = %2d, mode = %4s, named = %d, size = %3d\n",
             *cum, GET_MODE_NAME (mode), named,
             (*cum) * UNITS_PER_WORD);
}

/* Implement hook TARGET_ARG_PARTIAL_BYTES.

   Returns the number of bytes at the beginning of an argument that
   must be put in registers.  The value must be zero for arguments
   that are passed entirely in registers or that are entirely pushed
   on the stack.  */
static int
mt_arg_partial_bytes (CUMULATIVE_ARGS * pcum,
                      enum machine_mode mode,
                      tree type,
                      bool named ATTRIBUTE_UNUSED)
{
  int cum = * pcum;
  int words;

  if (mode == BLKmode)
    words = ((int_size_in_bytes (type) + UNITS_PER_WORD - 1)
             / UNITS_PER_WORD);
  else
    words = (GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;

  if (! targetm.calls.pass_by_reference (&cum, mode, type, named)
      && cum < MT_NUM_ARG_REGS
      && (cum + words) > MT_NUM_ARG_REGS)
    {
      int bytes = (MT_NUM_ARG_REGS - cum) * UNITS_PER_WORD;

      if (TARGET_DEBUG)
        fprintf (stderr, "function_arg_partial_nregs = %d\n", bytes);
      return bytes;
    }

  return 0;
}
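
/* Illustration (added comment, register count assumed): if
   MT_NUM_ARG_REGS is 4 and *pcum is 3, a two-word (8-byte) argument
   gets its first word in the last argument register and the rest on
   the stack, so this returns (4 - 3) * UNITS_PER_WORD = 4 bytes.  */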


/* Implement TARGET_PASS_BY_REFERENCE hook.  */
static bool
mt_pass_by_reference (CUMULATIVE_ARGS * cum ATTRIBUTE_UNUSED,
                      enum machine_mode mode ATTRIBUTE_UNUSED,
                      tree type,
                      bool named ATTRIBUTE_UNUSED)
{
  return (type && int_size_in_bytes (type) > 4 * UNITS_PER_WORD);
}

/* Implement FUNCTION_ARG_BOUNDARY.  */
int
mt_function_arg_boundary (enum machine_mode mode ATTRIBUTE_UNUSED,
                          tree type ATTRIBUTE_UNUSED)
{
  return BITS_PER_WORD;
}

/* Implement REG_OK_FOR_BASE_P.  */
int
mt_reg_ok_for_base_p (rtx x, int strict)
{
  if (strict)
    return (((unsigned) REGNO (x)) < FIRST_PSEUDO_REGISTER);
  return 1;
}

/* Helper function of mt_legitimate_address_p.  Return true if XINSN
   is a simple address, otherwise false.  */
static bool
mt_legitimate_simple_address_p (enum machine_mode mode ATTRIBUTE_UNUSED,
                                rtx xinsn, int strict)
{
  if (TARGET_DEBUG)
    {
      fprintf (stderr, "\n========== GO_IF_LEGITIMATE_ADDRESS, %sstrict\n",
               strict ? "" : "not ");
      debug_rtx (xinsn);
    }

  if (GET_CODE (xinsn) == REG && mt_reg_ok_for_base_p (xinsn, strict))
    return true;

  if (GET_CODE (xinsn) == PLUS
      && GET_CODE (XEXP (xinsn, 0)) == REG
      && mt_reg_ok_for_base_p (XEXP (xinsn, 0), strict)
      && GET_CODE (XEXP (xinsn, 1)) == CONST_INT
      && SMALL_INT (XEXP (xinsn, 1)))
    return true;

  return false;
}


/* Helper function of GO_IF_LEGITIMATE_ADDRESS.  Return nonzero if
   XINSN is a legitimate address on MT.  */
int
mt_legitimate_address_p (enum machine_mode mode, rtx xinsn, int strict)
{
  if (mt_legitimate_simple_address_p (mode, xinsn, strict))
    return 1;

  if (mode == SImode
      && GET_CODE (xinsn) == AND
      && GET_CODE (XEXP (xinsn, 1)) == CONST_INT
      && INTVAL (XEXP (xinsn, 1)) == -3)
    return mt_legitimate_simple_address_p (mode, XEXP (xinsn, 0), strict);
  else
    return 0;
}
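
/* Illustration (added comment): the legitimate address forms are thus

       (reg)                          register indirect
       (plus (reg) (const_int N))     base plus SMALL_INT offset

   plus, for SImode only, either form wrapped in an
   (and ... (const_int -3)).  */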

/* Return truth value of whether OP can be used as an operand where a
   register or a 16-bit unsigned integer is needed.  */

int
uns_arith_operand (rtx op, enum machine_mode mode)
{
  if (GET_CODE (op) == CONST_INT && SMALL_INT_UNSIGNED (op))
    return 1;

  return register_operand (op, mode);
}

/* Return truth value of whether OP can be used as an operand where a
   16-bit integer is needed.  */

int
arith_operand (rtx op, enum machine_mode mode)
{
  if (GET_CODE (op) == CONST_INT && SMALL_INT (op))
    return 1;

  return register_operand (op, mode);
}

/* Return truth value of whether OP is a register or the constant 0.  */

int
reg_or_0_operand (rtx op, enum machine_mode mode)
{
  switch (GET_CODE (op))
    {
    case CONST_INT:
      return INTVAL (op) == 0;

    case REG:
    case SUBREG:
      return register_operand (op, mode);

    default:
      break;
    }

  return 0;
}

/* Return truth value of whether OP is a constant that requires two
   loads to put in a register.  */

int
big_const_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  if (GET_CODE (op) == CONST_INT && CONST_OK_FOR_LETTER_P (INTVAL (op), 'M'))
    return 1;

  return 0;
}

/* Return truth value of whether OP is a constant that requires only
   one load to put in a register.  */

int
single_const_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  if (big_const_operand (op, mode)
      || GET_CODE (op) == CONST
      || GET_CODE (op) == LABEL_REF
      || GET_CODE (op) == SYMBOL_REF)
    return 0;

  return 1;
}

/* True if the current function is an interrupt handler
   (either via #pragma or an attribute specification).  */
int interrupt_handler;
enum processor_type mt_cpu;

static struct machine_function *
mt_init_machine_status (void)
{
  struct machine_function *f;

  f = ggc_alloc_cleared (sizeof (struct machine_function));

  return f;
}

/* Implement OVERRIDE_OPTIONS.  */
void
mt_override_options (void)
{
  if (mt_cpu_string != NULL)
    {
      if (!strcmp (mt_cpu_string, "ms1-64-001"))
        mt_cpu = PROCESSOR_MS1_64_001;
      else if (!strcmp (mt_cpu_string, "ms1-16-002"))
        mt_cpu = PROCESSOR_MS1_16_002;
      else if (!strcmp (mt_cpu_string, "ms1-16-003"))
        mt_cpu = PROCESSOR_MS1_16_003;
      else if (!strcmp (mt_cpu_string, "ms2"))
        mt_cpu = PROCESSOR_MS2;
      else
        error ("bad value (%s) for -march= switch", mt_cpu_string);
    }
  else
    mt_cpu = PROCESSOR_MS1_16_002;

  if (flag_exceptions)
    {
      flag_omit_frame_pointer = 0;
      flag_gcse = 0;
    }

  /* We do delayed branch filling in machine dependent reorg.  */
  mt_flag_delayed_branch = flag_delayed_branch;
  flag_delayed_branch = 0;

  init_machine_status = mt_init_machine_status;
}

/* Do what is necessary for `va_start': store any remaining argument
   registers to the stack so that the unnamed parameters can be found
   in memory.  */

static void
mt_setup_incoming_varargs (CUMULATIVE_ARGS *cum,
                           enum machine_mode mode ATTRIBUTE_UNUSED,
                           tree type ATTRIBUTE_UNUSED,
                           int *pretend_size, int no_rtl)
{
  int regno;
  int regs = MT_NUM_ARG_REGS - *cum;

  *pretend_size = regs < 0 ? 0 : GET_MODE_SIZE (SImode) * regs;

  if (no_rtl)
    return;

  for (regno = *cum; regno < MT_NUM_ARG_REGS; regno++)
    {
      rtx reg = gen_rtx_REG (SImode, FIRST_ARG_REGNUM + regno);
      rtx slot = gen_rtx_PLUS (Pmode,
                               gen_rtx_REG (SImode, ARG_POINTER_REGNUM),
                               GEN_INT (UNITS_PER_WORD * regno));

      emit_move_insn (gen_rtx_MEM (SImode, slot), reg);
    }
}
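
/* Illustration (added comment): for  int f (int fmt, ...)  the named
   argument consumes one slot, so the loop above stores the remaining
   argument registers to consecutive words at the incoming argument
   pointer; va_arg can then walk all arguments as ordinary memory.  */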

/* Return the byte offset between the frame pointer and the stack
   pointer for the current function.  SIZE is the number of bytes of
   space needed for local variables.  */

unsigned int
mt_compute_frame_size (int size)
{
  int           regno;
  unsigned int  total_size;
  unsigned int  var_size;
  unsigned int  args_size;
  unsigned int  pretend_size;
  unsigned int  extra_size;
  unsigned int  reg_size;
  unsigned int  frame_size;
  unsigned int  reg_mask;

  var_size      = size;
  args_size     = current_function_outgoing_args_size;
  pretend_size  = current_function_pretend_args_size;
  extra_size    = FIRST_PARM_OFFSET (0);
  total_size    = extra_size + pretend_size + args_size + var_size;
  reg_size      = 0;
  reg_mask      = 0;

  /* Calculate space needed for registers.  */
  for (regno = GPR_R0; regno <= GPR_LAST; regno++)
    {
      if (MUST_SAVE_REGISTER (regno))
        {
          reg_size += UNITS_PER_WORD;
          reg_mask |= 1 << regno;
        }
    }

  current_frame_info.save_fp = (regs_ever_live [GPR_FP]
                                || frame_pointer_needed
                                || interrupt_handler);
  current_frame_info.save_lr = (regs_ever_live [GPR_LINK]
                                || profile_flag
                                || interrupt_handler);

  reg_size += (current_frame_info.save_fp + current_frame_info.save_lr)
               * UNITS_PER_WORD;
  total_size += reg_size;
  total_size = ((total_size + 3) & ~3);

  frame_size = total_size;

  /* Save computed information.  */
  current_frame_info.pretend_size = pretend_size;
  current_frame_info.var_size     = var_size;
  current_frame_info.args_size    = args_size;
  current_frame_info.reg_size     = reg_size;
  current_frame_info.frame_size   = args_size + var_size;
  current_frame_info.total_size   = total_size;
  current_frame_info.extra_size   = extra_size;
  current_frame_info.reg_mask     = reg_mask;
  current_frame_info.initialized  = reload_completed;

  return total_size;
}
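
/* Illustration (added comment, approximate layout): after the
   prologue the frame looks roughly like

       old SP ->  +---------------------------------+
                  | saved FP / LR / call-saved regs |  reg_size bytes
                  | pretend args / local variables  |
                  | outgoing argument area          |  args_size bytes
       new SP ->  +---------------------------------+

   where old SP = new SP + total_size, total_size rounded up to a
   multiple of 4; the save routines below work downward from
   total_size.  */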

/* Emit code to save REG to, or restore it from, the stack slot MEM,
   depending on DIRECTION.  STACK_OFFSET is the offset from the SP
   where the save happens.  For a save, this function sets the
   REG_FRAME_RELATED_EXPR note accordingly.  */
static void
mt_emit_save_restore (enum save_direction direction,
                      rtx reg, rtx mem, int stack_offset)
{
  if (direction == FROM_PROCESSOR_TO_MEM)
    {
      rtx insn;

      insn = emit_move_insn (mem, reg);
      RTX_FRAME_RELATED_P (insn) = 1;
      REG_NOTES (insn)
        = gen_rtx_EXPR_LIST
        (REG_FRAME_RELATED_EXPR,
         gen_rtx_SET (VOIDmode,
                      gen_rtx_MEM (SImode,
                                   gen_rtx_PLUS (SImode,
                                                 stack_pointer_rtx,
                                                 GEN_INT (stack_offset))),
                      reg),
         REG_NOTES (insn));
    }
  else
    emit_move_insn (reg, mem);
}


/* Emit code to save the frame pointer in the prologue and restore
   the frame pointer in the epilogue.  */

static void
mt_emit_save_fp (enum save_direction direction,
                 struct mt_frame_info info)
{
  rtx base_reg;
  int reg_mask = info.reg_mask & ~(FP_MASK | LINK_MASK);
  int offset = info.total_size;
  int stack_offset = info.total_size;

  /* If there is nothing to save, get out now.  */
  if (! info.save_fp && ! info.save_lr && ! reg_mask)
    return;

  /* If the offset doesn't fit in a 15-bit signed integer,
     use a scratch register to get a smaller offset.  */
  if (CONST_OK_FOR_LETTER_P (offset, 'O'))
    base_reg = stack_pointer_rtx;
  else
    {
      /* Use the scratch register R9 that holds the old stack pointer.  */
      base_reg = gen_rtx_REG (SImode, GPR_R9);
      offset = 0;
    }

  if (info.save_fp)
    {
      offset -= UNITS_PER_WORD;
      stack_offset -= UNITS_PER_WORD;
      mt_emit_save_restore
        (direction, gen_rtx_REG (SImode, GPR_FP),
         gen_rtx_MEM (SImode,
                      gen_rtx_PLUS (SImode, base_reg, GEN_INT (offset))),
         stack_offset);
    }
}

/* Emit code to save registers in the prologue and restore registers
   in the epilogue.  */

static void
mt_emit_save_regs (enum save_direction direction,
                   struct mt_frame_info info)
{
  rtx base_reg;
  int regno;
  int reg_mask = info.reg_mask & ~(FP_MASK | LINK_MASK);
  int offset = info.total_size;
  int stack_offset = info.total_size;

  /* If there is nothing to save, get out now.  */
  if (! info.save_fp && ! info.save_lr && ! reg_mask)
    return;

  /* If the offset doesn't fit in a 15-bit signed integer,
     use a scratch register to get a smaller offset.  */
  if (CONST_OK_FOR_LETTER_P (offset, 'O'))
    base_reg = stack_pointer_rtx;
  else
    {
      /* Use the scratch register R9 that holds the old stack pointer.  */
      base_reg = gen_rtx_REG (SImode, GPR_R9);
      offset = 0;
    }

  if (info.save_fp)
    {
      /* This just records the space for it; the actual move is
         generated in mt_emit_save_fp ().  */
      offset -= UNITS_PER_WORD;
      stack_offset -= UNITS_PER_WORD;
    }

  if (info.save_lr)
    {
      offset -= UNITS_PER_WORD;
      stack_offset -= UNITS_PER_WORD;
      mt_emit_save_restore
        (direction, gen_rtx_REG (SImode, GPR_LINK),
         gen_rtx_MEM (SImode,
                      gen_rtx_PLUS (SImode, base_reg, GEN_INT (offset))),
         stack_offset);
    }

  /* Save any needed call-saved regs.  */
  for (regno = GPR_R0; regno <= GPR_LAST; regno++)
    {
      if ((reg_mask & (1 << regno)) != 0)
        {
          offset -= UNITS_PER_WORD;
          stack_offset -= UNITS_PER_WORD;
          mt_emit_save_restore
            (direction, gen_rtx_REG (SImode, regno),
             gen_rtx_MEM (SImode,
                          gen_rtx_PLUS (SImode, base_reg, GEN_INT (offset))),
             stack_offset);
        }
    }
}

/* Return true if FUNC is a function with the 'interrupt' attribute.  */
static bool
mt_interrupt_function_p (tree func)
{
  tree a;

  if (TREE_CODE (func) != FUNCTION_DECL)
    return false;

  a = lookup_attribute ("interrupt", DECL_ATTRIBUTES (func));
  return a != NULL_TREE;
}

/* Generate prologue code.  */
void
mt_expand_prologue (void)
{
  rtx size_rtx, insn;
  unsigned int frame_size;

  if (mt_interrupt_function_p (current_function_decl))
    {
      interrupt_handler = 1;
      if (cfun->machine)
        cfun->machine->interrupt_handler = 1;
    }

  mt_compute_frame_size (get_frame_size ());

  if (TARGET_DEBUG_STACK)
    mt_debug_stack (&current_frame_info);

  /* Compute size of stack adjustment.  */
  frame_size = current_frame_info.total_size;

  /* If the offset doesn't fit in a 15-bit signed integer,
     use a scratch register to build a smaller offset.  */
  if (CONST_OK_FOR_LETTER_P (frame_size, 'O'))
    size_rtx = GEN_INT (frame_size);
  else
    {
      /* We do not have any scratch registers in an interrupt handler.  */
      gcc_assert (!interrupt_handler);

      size_rtx = gen_rtx_REG (SImode, GPR_R9);
      insn = emit_move_insn (size_rtx, GEN_INT (frame_size & 0xffff0000));
      insn = emit_insn (gen_iorsi3 (size_rtx, size_rtx,
                                    GEN_INT (frame_size & 0x0000ffff)));
    }

  /* Allocate stack for this frame: make the stack adjustment, using
     the scratch register if the constant is too large to fit as an
     immediate.  */
  if (frame_size)
    {
      insn = emit_insn (gen_subsi3 (stack_pointer_rtx,
                                    stack_pointer_rtx,
                                    size_rtx));
      RTX_FRAME_RELATED_P (insn) = 1;
      REG_NOTES (insn)
        = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
                             gen_rtx_SET (VOIDmode,
                                          stack_pointer_rtx,
                                          gen_rtx_MINUS (SImode,
                                                         stack_pointer_rtx,
                                                         GEN_INT (frame_size))),
                             REG_NOTES (insn));
    }

  /* Set R9 to point to the old sp if required for access to the
     register save area.  */
  if (current_frame_info.reg_size != 0
      && !CONST_OK_FOR_LETTER_P (frame_size, 'O'))
    emit_insn (gen_addsi3 (size_rtx, size_rtx, stack_pointer_rtx));

  /* Save the frame pointer.  */
  mt_emit_save_fp (FROM_PROCESSOR_TO_MEM, current_frame_info);

  /* Now put the frame pointer into the frame pointer register.  */
  if (frame_pointer_needed)
    {
      insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
      RTX_FRAME_RELATED_P (insn) = 1;
    }

  /* Save the registers.  */
  mt_emit_save_regs (FROM_PROCESSOR_TO_MEM, current_frame_info);

  /* If we are profiling, make sure no instructions are scheduled before
     the call to mcount.  */
  if (profile_flag)
    emit_insn (gen_blockage ());
}
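
/* Illustration (added comment): at the RTL level, the prologue of a
   small-frame function that only saves the link register is roughly

       (set sp (minus sp (const_int total_size)))
       (set (mem (plus sp (const_int total_size - 4))) (reg link))

   with the insns marked RTX_FRAME_RELATED_P so the unwinder can
   describe the frame.  */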

/* Implement EPILOGUE_USES.  */
int
mt_epilogue_uses (int regno)
{
  if (cfun->machine && cfun->machine->interrupt_handler && reload_completed)
    return 1;
  return regno == GPR_LINK;
}

/* Generate epilogue.  EH_MODE is NORMAL_EPILOGUE when generating a
   function epilogue, or EH_EPILOGUE when generating an EH
   epilogue.  */
void
mt_expand_epilogue (enum epilogue_type eh_mode)
{
  rtx size_rtx, insn;
  unsigned frame_size;

  mt_compute_frame_size (get_frame_size ());

  if (TARGET_DEBUG_STACK)
    mt_debug_stack (& current_frame_info);

  /* Compute size of stack adjustment.  */
  frame_size = current_frame_info.total_size;

  /* If the offset doesn't fit in a 15-bit signed integer,
     use a scratch register to build a smaller offset.  */
  if (CONST_OK_FOR_LETTER_P (frame_size, 'O'))
    size_rtx = GEN_INT (frame_size);
  else
    {
      /* We do not have any scratch registers in an interrupt handler.  */
      gcc_assert (!interrupt_handler);

      size_rtx = gen_rtx_REG (SImode, GPR_R9);
      insn = emit_move_insn (size_rtx, GEN_INT (frame_size & 0xffff0000));
      insn = emit_insn (gen_iorsi3 (size_rtx, size_rtx,
                                    GEN_INT (frame_size & 0x0000ffff)));
      /* Set R9 to point to the old sp if required for access to the
         register save area.  */
      emit_insn (gen_addsi3 (size_rtx, size_rtx, stack_pointer_rtx));
    }

  /* Restore sp if there was some possible change to it.  */
  if (frame_pointer_needed)
    insn = emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);

  /* Restore the registers.  */
  mt_emit_save_fp (FROM_MEM_TO_PROCESSOR, current_frame_info);
  mt_emit_save_regs (FROM_MEM_TO_PROCESSOR, current_frame_info);

  /* Make the stack adjustment, using the scratch register if the
     constant is too large to fit as an immediate.  */
  if (frame_size)
    {
      if (CONST_OK_FOR_LETTER_P (frame_size, 'O'))
        /* Can handle this with a simple add.  */
        insn = emit_insn (gen_addsi3 (stack_pointer_rtx,
                                      stack_pointer_rtx,
                                      size_rtx));
      else
        /* Scratch reg R9 has the old sp value.  */
        insn = emit_move_insn (stack_pointer_rtx,
                               gen_rtx_REG (SImode, GPR_R9));

      REG_NOTES (insn)
        = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
                             gen_rtx_SET (VOIDmode,
                                          stack_pointer_rtx,
                                          gen_rtx_PLUS (SImode,
                                                        stack_pointer_rtx,
                                                        GEN_INT (frame_size))),
                             REG_NOTES (insn));
    }

  if (cfun->machine && cfun->machine->eh_stack_adjust != NULL_RTX)
    /* Perform the additional bump for __throw.  */
    emit_insn (gen_addsi3 (stack_pointer_rtx,
                           stack_pointer_rtx,
                           cfun->machine->eh_stack_adjust));

  /* Generate the appropriate return.  */
  if (eh_mode == EH_EPILOGUE)
    {
      emit_jump_insn (gen_eh_return_internal ());
      emit_barrier ();
    }
  else if (interrupt_handler)
    emit_jump_insn (gen_return_interrupt_internal ());
  else
    emit_jump_insn (gen_return_internal ());

  /* Reset state info for each function.  */
  interrupt_handler = 0;
  current_frame_info = zero_frame_info;
  if (cfun->machine)
    cfun->machine->eh_stack_adjust = NULL_RTX;
}


/* Generate code for the "eh_return" pattern.  */
void
mt_expand_eh_return (rtx * operands)
{
  if (GET_CODE (operands[0]) != REG
      || REGNO (operands[0]) != EH_RETURN_STACKADJ_REGNO)
    {
      rtx sp = EH_RETURN_STACKADJ_RTX;

      emit_move_insn (sp, operands[0]);
      operands[0] = sp;
    }

  emit_insn (gen_eh_epilogue (operands[0]));
}

/* Generate code for the "eh_epilogue" pattern.  */
void
mt_emit_eh_epilogue (rtx * operands ATTRIBUTE_UNUSED)
{
  cfun->machine->eh_stack_adjust = EH_RETURN_STACKADJ_RTX; /* operands[0]; */
  mt_expand_epilogue (EH_EPILOGUE);
}

/* Handle an "interrupt" attribute.  */
static tree
mt_handle_interrupt_attribute (tree * node,
                               tree   name,
                               tree   args  ATTRIBUTE_UNUSED,
                               int    flags ATTRIBUTE_UNUSED,
                               bool * no_add_attrs)
{
  if (TREE_CODE (*node) != FUNCTION_DECL)
    {
      warning (OPT_Wattributes,
               "%qs attribute only applies to functions",
               IDENTIFIER_POINTER (name));
      *no_add_attrs = true;
    }

  return NULL_TREE;
}

/* Table of machine attributes.  */
const struct attribute_spec mt_attribute_table[] =
{
  /* name,        min, max, decl?, type?, func?, handler  */
  { "interrupt",  0,   0,   false, false, false, mt_handle_interrupt_attribute },
  { NULL,         0,   0,   false, false, false, NULL }
};
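
/* Illustration (added comment): user code requests interrupt
   treatment with, e.g.,

       void handler (void) __attribute__ ((interrupt));

   which makes mt_interrupt_function_p return true for the decl, so
   the prologue/epilogue code above treats the function as an
   interrupt handler and emits an interrupt return.  */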

/* Implement INITIAL_ELIMINATION_OFFSET.  */
int
mt_initial_elimination_offset (int from, int to)
{
  mt_compute_frame_size (get_frame_size ());

  if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
    return 0;

  else if (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
    return current_frame_info.total_size;

  else if (from == ARG_POINTER_REGNUM && to == FRAME_POINTER_REGNUM)
    return current_frame_info.total_size;

  else
    gcc_unreachable ();
}

/* Generate a compare for CODE.  Return a brand-new rtx that
   represents the result of the compare.  */

static rtx
mt_generate_compare (enum rtx_code code, rtx op0, rtx op1)
{
  rtx scratch0, scratch1, const_scratch;

  switch (code)
    {
    case GTU:
    case LTU:
    case GEU:
    case LEU:
      /* Need to adjust ranges for faking unsigned compares.  */
      scratch0 = gen_reg_rtx (SImode);
      scratch1 = gen_reg_rtx (SImode);
      const_scratch = force_reg (SImode, GEN_INT (MT_MIN_INT));
      emit_insn (gen_addsi3 (scratch0, const_scratch, op0));
      emit_insn (gen_addsi3 (scratch1, const_scratch, op1));
      break;
    default:
      scratch0 = op0;
      scratch1 = op1;
      break;
    }

  /* Adjust compare operator to fake unsigned compares.  */
  switch (code)
    {
    case GTU:
      code = GT; break;
    case LTU:
      code = LT; break;
    case GEU:
      code = GE; break;
    case LEU:
      code = LE; break;
    default:
      /* do nothing */
      break;
    }

  /* Generate the actual compare.  */
  return gen_rtx_fmt_ee (code, VOIDmode, scratch0, scratch1);
}
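
/* Illustration (added comment): the biasing above uses the identity
   x <u y  <==>  (x + 0x80000000) <s (y + 0x80000000)  for 32-bit
   values (assuming MT_MIN_INT is 0x80000000).  For example,
   0xffffffff >u 1 becomes 0x7fffffff >s 0x80000001, which also holds
   as a signed comparison.  */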

/* Emit a branch of kind CODE to location LOC.  */

void
mt_emit_cbranch (enum rtx_code code, rtx loc, rtx op0, rtx op1)
{
  rtx condition_rtx, loc_ref;

  if (! reg_or_0_operand (op0, SImode))
    op0 = copy_to_mode_reg (SImode, op0);

  if (! reg_or_0_operand (op1, SImode))
    op1 = copy_to_mode_reg (SImode, op1);

  condition_rtx = mt_generate_compare (code, op0, op1);
  loc_ref = gen_rtx_LABEL_REF (VOIDmode, loc);
  emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx,
                               gen_rtx_IF_THEN_ELSE (VOIDmode, condition_rtx,
                                                     loc_ref, pc_rtx)));
}

/* Subfunction of the following function.  Update the flags of any MEM
   found in part of X.  */

static void
mt_set_memflags_1 (rtx x, int in_struct_p, int volatile_p)
{
  int i;

  switch (GET_CODE (x))
    {
    case SEQUENCE:
    case PARALLEL:
      for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
        mt_set_memflags_1 (XVECEXP (x, 0, i), in_struct_p, volatile_p);
      break;

    case INSN:
      mt_set_memflags_1 (PATTERN (x), in_struct_p, volatile_p);
      break;

    case SET:
      mt_set_memflags_1 (SET_DEST (x), in_struct_p, volatile_p);
      mt_set_memflags_1 (SET_SRC (x), in_struct_p, volatile_p);
      break;

    case MEM:
      MEM_IN_STRUCT_P (x) = in_struct_p;
      MEM_VOLATILE_P (x) = volatile_p;
      /* Sadly, we cannot use alias sets because the extra aliasing
         produced by the AND interferes.  Given that two-byte quantities
         are the only thing we would be able to differentiate anyway,
         there does not seem to be any point in convoluting the early
         out of the alias check.  */
      /* set_mem_alias_set (x, alias_set); */
      break;

    default:
      break;
    }
}

/* Look for any MEMs in the current sequence of insns and set the
   in-struct and volatile flags from the flags in REF.
   If REF is not a MEM, don't do anything.  */

void
mt_set_memflags (rtx ref)
{
  rtx insn;
  int in_struct_p, volatile_p;

  if (GET_CODE (ref) != MEM)
    return;

  in_struct_p = MEM_IN_STRUCT_P (ref);
  volatile_p = MEM_VOLATILE_P (ref);

  /* This is only called from mt.md, after having had something
     generated from one of the insn patterns.  So if everything is
     zero, the pattern is already up-to-date.  */
  if (! in_struct_p && ! volatile_p)
    return;

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    mt_set_memflags_1 (insn, in_struct_p, volatile_p);
}

/* Implement SECONDARY_RELOAD_CLASS.  */
enum reg_class
mt_secondary_reload_class (enum reg_class class ATTRIBUTE_UNUSED,
                           enum machine_mode mode,
                           rtx x)
{
  if ((mode == QImode && !TARGET_BYTE_ACCESS) || mode == HImode)
    {
      if (GET_CODE (x) == MEM
          || (GET_CODE (x) == REG && true_regnum (x) == -1)
          || (GET_CODE (x) == SUBREG
              && (GET_CODE (SUBREG_REG (x)) == MEM
                  || (GET_CODE (SUBREG_REG (x)) == REG
                      && true_regnum (SUBREG_REG (x)) == -1))))
        return GENERAL_REGS;
    }

  return NO_REGS;
}

/* Handle FUNCTION_VALUE, FUNCTION_OUTGOING_VALUE, and LIBCALL_VALUE
   macros.  */
rtx
mt_function_value (tree valtype, enum machine_mode mode, tree func_decl ATTRIBUTE_UNUSED)
{
  if (mode == DImode || mode == DFmode)
    return gen_rtx_MEM (mode, gen_rtx_REG (mode, RETURN_VALUE_REGNUM));

  if (valtype)
    mode = TYPE_MODE (valtype);

  return gen_rtx_REG (mode, RETURN_VALUE_REGNUM);
}

/* Split a move into two smaller pieces.
   NMODE indicates the reduced (new) mode and OMODE the original mode.
   OPERANDS[0] is the original destination, OPERANDS[1] the original
   source.  The new destinations are OPERANDS[2] and OPERANDS[4],
   while the new sources are OPERANDS[3] and OPERANDS[5].  */

void
mt_split_words (enum machine_mode nmode,
                enum machine_mode omode,
                rtx *operands)
{
  rtx dl, dh;   /* Dest pieces.  */
  rtx sl, sh;   /* Src pieces.  */
  int move_high_first = 0;      /* Assume no overlap.  */

  switch (GET_CODE (operands[0])) /* Dest.  */
    {
    case SUBREG:
    case REG:
      if ((GET_CODE (operands[1]) == REG
           || GET_CODE (operands[1]) == SUBREG)
          && true_regnum (operands[0]) <= true_regnum (operands[1]))
        move_high_first = 1;

      if (GET_CODE (operands[0]) == SUBREG)
        {
          dl = gen_rtx_SUBREG (nmode, SUBREG_REG (operands[0]),
                               SUBREG_BYTE (operands[0]) + GET_MODE_SIZE (nmode));
          dh = gen_rtx_SUBREG (nmode, SUBREG_REG (operands[0]), SUBREG_BYTE (operands[0]));
        }
      else if (GET_CODE (operands[0]) == REG && ! IS_PSEUDO_P (operands[0]))
        {
          int r = REGNO (operands[0]);
          dh = gen_rtx_REG (nmode, r);
          dl = gen_rtx_REG (nmode, r + HARD_REGNO_NREGS (r, nmode));
        }
      else
        {
          dh = gen_rtx_SUBREG (nmode, operands[0], 0);
          dl = gen_rtx_SUBREG (nmode, operands[0], GET_MODE_SIZE (nmode));
        }
      break;

    case MEM:
      switch (GET_CODE (XEXP (operands[0], 0)))
        {
        case POST_INC:
        case POST_DEC:
          gcc_unreachable ();
        default:
          dl = operand_subword (operands[0],
                                GET_MODE_SIZE (nmode) / UNITS_PER_WORD,
                                0, omode);
          dh = operand_subword (operands[0], 0, 0, omode);
        }
      break;
    default:
      gcc_unreachable ();
    }

  switch (GET_CODE (operands[1]))
    {
    case REG:
      if (! IS_PSEUDO_P (operands[1]))
        {
          int r = REGNO (operands[1]);

          sh = gen_rtx_REG (nmode, r);
          sl = gen_rtx_REG (nmode, r + HARD_REGNO_NREGS (r, nmode));
        }
      else
        {
          sh = gen_rtx_SUBREG (nmode, operands[1], 0);
          sl = gen_rtx_SUBREG (nmode, operands[1], GET_MODE_SIZE (nmode));
        }
      break;

    case CONST_DOUBLE:
      if (operands[1] == const0_rtx)
        sh = sl = const0_rtx;
      else
        split_double (operands[1], & sh, & sl);
      break;

    case CONST_INT:
      if (operands[1] == const0_rtx)
        sh = sl = const0_rtx;
      else
        {
          int vl, vh;

          switch (nmode)
            {
            default:
              gcc_unreachable ();
            }

          sl = GEN_INT (vl);
          sh = GEN_INT (vh);
        }
      break;

    case SUBREG:
      sl = gen_rtx_SUBREG (nmode,
                           SUBREG_REG (operands[1]),
                           SUBREG_BYTE (operands[1]) + GET_MODE_SIZE (nmode));
      sh = gen_rtx_SUBREG (nmode,
                           SUBREG_REG (operands[1]),
                           SUBREG_BYTE (operands[1]));
      break;

    case MEM:
      switch (GET_CODE (XEXP (operands[1], 0)))
        {
        case POST_DEC:
        case POST_INC:
          gcc_unreachable ();
          break;
        default:
          sl = operand_subword (operands[1],
                                GET_MODE_SIZE (nmode) / UNITS_PER_WORD,
                                0, omode);
          sh = operand_subword (operands[1], 0, 0, omode);

          /* Check if the DF load is going to clobber the register
             used for the address, and if so make sure that is going
             to be the second move.  */
          if (GET_CODE (dl) == REG
              && true_regnum (dl)
              == true_regnum (XEXP (XEXP (sl, 0), 0)))
            move_high_first = 1;
        }
      break;
    default:
      gcc_unreachable ();
    }

  if (move_high_first)
    {
      operands[2] = dh;
      operands[3] = sh;
      operands[4] = dl;
      operands[5] = sl;
    }
  else
    {
      operands[2] = dl;
      operands[3] = sl;
      operands[4] = dh;
      operands[5] = sh;
    }
  return;
}
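
/* Illustration (added comment): splitting the DImode register move
   (set (reg:DI 4) (reg:DI 6)) with nmode == SImode produces the word
   moves r4 := r6 and r5 := r7.  When source and destination overlap,
   as in (set (reg:DI 5) (reg:DI 4)), move_high_first orders the two
   moves so no source word is clobbered before it is read.  */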

/* Implement TARGET_MUST_PASS_IN_STACK hook.  */
static bool
mt_pass_in_stack (enum machine_mode mode ATTRIBUTE_UNUSED, tree type)
{
  return (type != 0
          && (TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
              || TREE_ADDRESSABLE (type)));
}

/* Increment the counter for the number of loop instructions in the
   current function.  */

void
mt_add_loop (void)
{
  cfun->machine->has_loops++;
}


/* Maximum loop nesting depth.  */
#define MAX_LOOP_DEPTH 4
/* Maximum size of a loop (allows some headroom for delayed branch slot
   filling).  */
#define MAX_LOOP_LENGTH (200 * 4)

/* We need to keep a vector of loops.  */
typedef struct loop_info *loop_info;
DEF_VEC_P (loop_info);
DEF_VEC_ALLOC_P (loop_info,heap);

/* Information about a loop we have found (or are in the process of
   finding).  */
struct loop_info GTY (())
{
  /* Loop number, for dumps.  */
  int loop_no;

  /* Predecessor block of the loop.  This is the one that falls into
     the loop and contains the initialization instruction.  */
  basic_block predecessor;

  /* First block in the loop.  This is the one branched to by the dbnz
     insn.  */
  basic_block head;

  /* Last block in the loop (the one with the dbnz insn).  */
  basic_block tail;

  /* The successor block of the loop.  This is the one the dbnz insn
     falls into.  */
  basic_block successor;

  /* The dbnz insn.  */
  rtx dbnz;

  /* The initialization insn.  */
  rtx init;

  /* The new initialization instruction.  */
  rtx loop_init;

  /* The new ending instruction.  */
  rtx loop_end;

  /* The new label placed at the end of the loop.  */
  rtx end_label;

  /* The nesting depth of the loop.  Set to -1 for a bad loop.  */
  int depth;

  /* The length of the loop.  */
  int length;

  /* Next loop in the graph.  */
  struct loop_info *next;

  /* Vector of blocks only within the loop (excluding those within
     inner loops).  */
  VEC (basic_block,heap) *blocks;

  /* Vector of inner loops within this loop.  */
  VEC (loop_info,heap) *loops;
};

/* Information used during loop detection.  */
typedef struct loop_work GTY(())
{
  /* Basic block to be scanned.  */
  basic_block block;

  /* Loop it will be within.  */
  loop_info loop;
} loop_work;

/* Work list.  */
DEF_VEC_O (loop_work);
DEF_VEC_ALLOC_O (loop_work,heap);

1729
/* Determine the nesting and length of LOOP.  Return false if the loop
1730
   is bad.  */
1731
 
1732
static bool
1733
mt_loop_nesting (loop_info loop)
1734
{
1735
  loop_info inner;
1736
  unsigned ix;
1737
  int inner_depth = 0;
1738
 
1739
  if (!loop->depth)
1740
    {
1741
      /* Make sure we only have one entry point.  */
1742
      if (EDGE_COUNT (loop->head->preds) == 2)
1743
        {
1744
          loop->predecessor = EDGE_PRED (loop->head, 0)->src;
1745
          if (loop->predecessor == loop->tail)
1746
            /* We wanted the other predecessor.  */
1747
            loop->predecessor = EDGE_PRED (loop->head, 1)->src;
1748
 
1749
          /* We can only place a loop insn on a fall through edge of a
1750
             single exit block.  */
1751
          if (EDGE_COUNT (loop->predecessor->succs) != 1
1752
              || !(EDGE_SUCC (loop->predecessor, 0)->flags & EDGE_FALLTHRU))
1753
            loop->predecessor = NULL;
1754
        }
1755
 
1756
      /* Mark this loop as bad for now.  */
1757
      loop->depth = -1;
1758
      if (loop->predecessor)
1759
        {
1760
          for (ix = 0; VEC_iterate (loop_info, loop->loops, ix++, inner);)
1761
            {
1762
              if (!inner->depth)
1763
                mt_loop_nesting (inner);
1764
 
1765
              if (inner->depth < 0)
1766
                {
1767
                  inner_depth = -1;
1768
                  break;
1769
                }
1770
 
1771
              if (inner_depth < inner->depth)
1772
                inner_depth = inner->depth;
1773
              loop->length += inner->length;
1774
            }
1775
 
1776
          /* Set the proper loop depth, if it was good. */
1777
          if (inner_depth >= 0)
1778
            loop->depth = inner_depth + 1;
1779
        }
1780
    }
1781
  return (loop->depth > 0
1782
          && loop->predecessor
1783
          && loop->depth < MAX_LOOP_DEPTH
1784
          && loop->length < MAX_LOOP_LENGTH);
1785
}
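
/* For example, a well-formed innermost loop is assigned depth 1 and
   each enclosing good loop one more, so a doubly nested pair has
   depths 1 and 2; mt_reorg_loops below maps depth N to the hardware
   loop register LOOP_FIRST + N - 1.  */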

/* Determine the length in bytes of block BB.  */

static int
mt_block_length (basic_block bb)
{
  int length = 0;
  rtx insn;

  for (insn = BB_HEAD (bb);
       insn != NEXT_INSN (BB_END (bb));
       insn = NEXT_INSN (insn))
    {
      if (!INSN_P (insn))
        continue;
      if (CALL_P (insn))
        {
          /* Calls are not allowed in loops.  */
          length = MAX_LOOP_LENGTH + 1;
          break;
        }

      length += get_attr_length (insn);
    }
  return length;
}
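
/* Note that a block containing a call is reported as
   MAX_LOOP_LENGTH + 1 bytes, which unconditionally fails the
   length < MAX_LOOP_LENGTH test in mt_loop_nesting and so
   disqualifies any loop containing the block.  */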

/* Scan the blocks of LOOP (and its inferiors) looking for uses of
   REG.  Return true if we find any.  Don't count the loop's dbnz
   insn if it matches DBNZ.  */

static bool
mt_scan_loop (loop_info loop, rtx reg, rtx dbnz)
{
  unsigned ix;
  loop_info inner;
  basic_block bb;

  for (ix = 0; VEC_iterate (basic_block, loop->blocks, ix, bb); ix++)
    {
      rtx insn;

      for (insn = BB_HEAD (bb);
           insn != NEXT_INSN (BB_END (bb));
           insn = NEXT_INSN (insn))
        {
          if (!INSN_P (insn))
            continue;
          if (insn == dbnz)
            continue;
          if (reg_mentioned_p (reg, PATTERN (insn)))
            return true;
        }
    }
  for (ix = 0; VEC_iterate (loop_info, loop->loops, ix, inner); ix++)
    if (mt_scan_loop (inner, reg, NULL_RTX))
      return true;

  return false;
}
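
/* mt_reorg_loops below uses this to verify that the iteration
   register is mentioned nowhere inside the loop except in the dbnz
   insn itself; any other use, in this loop or a nested one, makes the
   loop unsuitable for the hardware loop instruction.  */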

/* MS2 has a loop instruction which needs to be placed just before the
   loop.  It indicates the end of the loop and specifies the number of
   loop iterations.  It can be nested with an automatically maintained
   stack of counter and end address registers.  It's an ideal
   candidate for doloop.  Unfortunately, gcc presumes that loops
   always end with an explicit instruction, and the doloop_begin
   instruction is not a flow control instruction so it can be
   scheduled earlier than just before the start of the loop.  To make
   matters worse, the optimization pipeline can duplicate loop exit
   and entrance blocks and fails to track abnormally exiting loops.
   Thus we cannot simply use doloop.

   What we do is emit a dbnz pattern for the doloop optimization, and
   let that be optimized as normal.  Then in machine dependent reorg
   we have to repeat the loop searching algorithm.  We use the
   flow graph to find closed loops ending in a dbnz insn.  We then try
   to convert each one to use the loop instruction.  The conditions are,

   * the loop has no abnormal exits, duplicated end conditions or
   duplicated entrance blocks

   * the loop counter register is only used in the dbnz instruction
   within the loop

   * we can find the instruction setting the initial value of the loop
   counter

   * the loop is not executed more than 65535 times.  (This might be
   changed to 2^32-1, and would therefore allow variable initializers.)

   * the loop is not nested more than 4 deep

   * there are no subroutine calls in the loop.  */
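
/* As an illustration (a sketch only; the mnemonics below are invented
   for exposition, not taken from an MS2 manual), a counted loop such
   as

     for (i = 0; i < 10; i++)
       sum += a[i];

   is first emitted through the doloop pattern with a dbnz at the
   bottom:

         mov   iter, 9           ; the init insn, in the predecessor
     top:
         ...                     ; loop body
         dbnz  iter, top         ; decrement, branch while non-zero

   and this pass then rewrites it to use the hardware loop facility:

         loop  lc, 10, end       ; iteration count and end address
     top:
         ...                     ; loop body
     end:                        ; the new end_label; dbnz deleted

   The count becomes 10 rather than 9 because the loop instruction is
   1-based while dbnz is 0-based.  */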

static void
mt_reorg_loops (FILE *dump_file)
{
  basic_block bb;
  loop_info loops = NULL;
  loop_info loop;
  int nloops = 0;
  unsigned dwork = 0;
  VEC (loop_work,heap) *works = VEC_alloc (loop_work,heap,20);
  loop_work *work;
  edge e;
  edge_iterator ei;
  bool replaced = false;

  /* Find all the possible loop tails.  This means searching for every
     dbnz instruction.  For each one found, create a loop_info
     structure and add the head block to the work list.  */
  FOR_EACH_BB (bb)
    {
      rtx tail = BB_END (bb);

      while (GET_CODE (tail) == NOTE)
        tail = PREV_INSN (tail);

      bb->aux = NULL;
      if (recog_memoized (tail) == CODE_FOR_decrement_and_branch_until_zero)
        {
          /* A possible loop end.  */

          loop = XNEW (struct loop_info);
          loop->next = loops;
          loops = loop;
          loop->tail = bb;
          loop->head = BRANCH_EDGE (bb)->dest;
          loop->successor = FALLTHRU_EDGE (bb)->dest;
          loop->predecessor = NULL;
          loop->dbnz = tail;
          loop->depth = 0;
          loop->length = mt_block_length (bb);
          loop->blocks = VEC_alloc (basic_block, heap, 20);
          VEC_quick_push (basic_block, loop->blocks, bb);
          loop->loops = NULL;
          loop->loop_no = nloops++;

          loop->init = loop->end_label = NULL_RTX;
          loop->loop_init = loop->loop_end = NULL_RTX;

          work = VEC_safe_push (loop_work, heap, works, NULL);
          work->block = loop->head;
          work->loop = loop;

          bb->aux = loop;

          if (dump_file)
            {
              fprintf (dump_file, ";; potential loop %d ending at\n",
                       loop->loop_no);
              print_rtl_single (dump_file, tail);
            }
        }
    }

  /* Now find all the closed loops.
      until work list empty,
       if block's auxptr is set
         if != loop slot
           if block's loop's start != block
             mark loop as bad
           else
             append block's loop's fallthrough block to worklist
             increment this loop's depth
       else if block is exit block
         mark loop as bad
       else
          set auxptr
          for each target of block
            add to worklist */
  while (VEC_iterate (loop_work, works, dwork++, work))
    {
      loop = work->loop;
      bb = work->block;
      if (bb == EXIT_BLOCK_PTR)
        /* We've reached the exit block.  The loop must be bad.  */
        loop->depth = -1;
      else if (!bb->aux)
        {
          /* We've not seen this block before.  Add it to the loop's
             list and then add each successor to the work list.  */
          bb->aux = loop;
          loop->length += mt_block_length (bb);
          VEC_safe_push (basic_block, heap, loop->blocks, bb);
          FOR_EACH_EDGE (e, ei, bb->succs)
            {
              if (!VEC_space (loop_work, works, 1))
                {
                  if (dwork)
                    {
                      VEC_block_remove (loop_work, works, 0, dwork);
                      dwork = 0;
                    }
                  else
                    VEC_reserve (loop_work, heap, works, 1);
                }
              work = VEC_quick_push (loop_work, works, NULL);
              work->block = EDGE_SUCC (bb, ei.index)->dest;
              work->loop = loop;
            }
        }
      else if (bb->aux != loop)
        {
          /* We've seen this block in a different loop.  If it's not
             the other loop's head, then this loop must be bad.
             Otherwise, the other loop might be a nested loop, so
             continue from that loop's successor.  */
          loop_info other = bb->aux;

          if (other->head != bb)
            loop->depth = -1;
          else
            {
              VEC_safe_push (loop_info, heap, loop->loops, other);
              work = VEC_safe_push (loop_work, heap, works, NULL);
              work->loop = loop;
              work->block = other->successor;
            }
        }
    }
  VEC_free (loop_work, heap, works);

  /* Now optimize the loops.  */
  for (loop = loops; loop; loop = loop->next)
    {
      rtx iter_reg, insn, init_insn;
      rtx init_val, loop_end, loop_init, end_label, head_label;

      if (!mt_loop_nesting (loop))
        {
          if (dump_file)
            fprintf (dump_file, ";; loop %d is bad\n", loop->loop_no);
          continue;
        }

      /* Get the loop iteration register.  */
      iter_reg = SET_DEST (XVECEXP (PATTERN (loop->dbnz), 0, 1));

      if (!REG_P (iter_reg))
        {
          /* Spilled.  */
          if (dump_file)
            fprintf (dump_file, ";; loop %d has spilled iteration count\n",
                     loop->loop_no);
          continue;
        }

      /* Look for the initializing insn.  */
      init_insn = NULL_RTX;
      for (insn = BB_END (loop->predecessor);
           insn != PREV_INSN (BB_HEAD (loop->predecessor));
           insn = PREV_INSN (insn))
        {
          if (!INSN_P (insn))
            continue;
          if (reg_mentioned_p (iter_reg, PATTERN (insn)))
            {
              rtx set = single_set (insn);

              if (set && rtx_equal_p (iter_reg, SET_DEST (set)))
                init_insn = insn;
              break;
            }
        }

      if (!init_insn)
        {
          if (dump_file)
            fprintf (dump_file, ";; loop %d has no initializer\n",
                     loop->loop_no);
          continue;
        }
      if (dump_file)
        {
          fprintf (dump_file, ";; loop %d initialized by\n",
                   loop->loop_no);
          print_rtl_single (dump_file, init_insn);
        }

      init_val = PATTERN (init_insn);
      if (GET_CODE (init_val) == SET)
        init_val = SET_SRC (init_val);
      if (GET_CODE (init_val) != CONST_INT || INTVAL (init_val) >= 65535)
        {
          if (dump_file)
            fprintf (dump_file, ";; loop %d has complex initializer\n",
                     loop->loop_no);
          continue;
        }

      /* Scan all the blocks to make sure they don't use iter_reg.  */
      if (mt_scan_loop (loop, iter_reg, loop->dbnz))
        {
          if (dump_file)
            fprintf (dump_file, ";; loop %d uses iterator\n",
                     loop->loop_no);
          continue;
        }

      /* The loop is good for replacement.  */

      /* loop is 1 based, dbnz is zero based.  */
      init_val = GEN_INT (INTVAL (init_val) + 1);

      iter_reg = gen_rtx_REG (SImode, LOOP_FIRST + loop->depth - 1);
      end_label = gen_label_rtx ();
      head_label = XEXP (SET_SRC (XVECEXP (PATTERN (loop->dbnz), 0, 0)), 1);
      loop_end = gen_loop_end (iter_reg, head_label);
      loop_init = gen_loop_init (iter_reg, init_val, end_label);
      loop->init = init_insn;
      loop->end_label = end_label;
      loop->loop_init = loop_init;
      loop->loop_end = loop_end;
      replaced = true;

      if (dump_file)
        {
          fprintf (dump_file, ";; replacing loop %d initializer with\n",
                   loop->loop_no);
          print_rtl_single (dump_file, loop->loop_init);
          fprintf (dump_file, ";; replacing loop %d terminator with\n",
                   loop->loop_no);
          print_rtl_single (dump_file, loop->loop_end);
        }
    }

  /* Now apply the optimizations.  Do it this way so we don't mess up
     the flow graph halfway through.  */
  for (loop = loops; loop; loop = loop->next)
    if (loop->loop_init)
      {
        emit_jump_insn_after (loop->loop_init, BB_END (loop->predecessor));
        delete_insn (loop->init);
        emit_label_before (loop->end_label, loop->dbnz);
        emit_jump_insn_before (loop->loop_end, loop->dbnz);
        delete_insn (loop->dbnz);
      }

  /* Free up the loop structures.  */
  while (loops)
    {
      loop = loops;
      loops = loop->next;
      VEC_free (loop_info, heap, loop->loops);
      VEC_free (basic_block, heap, loop->blocks);
      XDELETE (loop);
    }

  if (replaced && dump_file)
    {
      fprintf (dump_file, ";; Replaced loops\n");
      print_rtl (dump_file, get_insns ());
    }
}

/* Structures to hold branch information during reorg.  */
typedef struct branch_info
{
  rtx insn;  /* The branch insn.  */

  struct branch_info *next;
} branch_info;

typedef struct label_info
{
  rtx label;  /* The label.  */
  branch_info *branches;  /* branches to this label.  */
  struct label_info *next;
} label_info;

/* Chain of labels found in current function, used during reorg.  */
static label_info *mt_labels;

/* If *X is a label, add INSN to the list of branches for that
   label.  */

static int
mt_add_branches (rtx *x, void *insn)
{
  if (GET_CODE (*x) == LABEL_REF)
    {
      branch_info *branch = xmalloc (sizeof (*branch));
      rtx label = XEXP (*x, 0);
      label_info *info;

      for (info = mt_labels; info; info = info->next)
        if (info->label == label)
          break;

      if (!info)
        {
          info = xmalloc (sizeof (*info));
          info->next = mt_labels;
          mt_labels = info;

          info->label = label;
          info->branches = NULL;
        }

      branch->next = info->branches;
      info->branches = branch;
      branch->insn = insn;
    }
  return 0;
}
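
/* mt_add_branches is invoked as a for_each_rtx callback from
   mt_reorg_hazard, with the branch insn passed through the void *
   data pointer; the label-to-branches map it builds lets the hazard
   pass later find every branch that can reach a given label.  */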

/* If BRANCH has a filled delay slot, check if INSN is dependent upon
   it.  If so, undo the delay slot fill.  Return the next insn if we
   patch out the branch.  Return the branch insn if we cannot patch
   out the branch (due to an anti-dependency in the delay slot); in
   that case, the caller must insert nops at the branch target.  */

static rtx
mt_check_delay_slot (rtx branch, rtx insn)
{
  rtx slot;
  rtx tmp;
  rtx p;
  rtx jmp;

  gcc_assert (GET_CODE (PATTERN (branch)) == SEQUENCE);
  if (INSN_DELETED_P (branch))
    return NULL_RTX;
  slot = XVECEXP (PATTERN (branch), 0, 1);

  tmp = PATTERN (insn);
  note_stores (PATTERN (slot), insn_dependent_p_1, &tmp);
  if (tmp)
    /* Not dependent.  */
    return NULL_RTX;

  /* Undo the delay slot.  */
  jmp = XVECEXP (PATTERN (branch), 0, 0);

  tmp = PATTERN (jmp);
  note_stores (PATTERN (slot), insn_dependent_p_1, &tmp);
  if (!tmp)
    /* Anti dependent.  */
    return branch;

  p = PREV_INSN (branch);
  NEXT_INSN (p) = slot;
  PREV_INSN (slot) = p;
  NEXT_INSN (slot) = jmp;
  PREV_INSN (jmp) = slot;
  NEXT_INSN (jmp) = branch;
  PREV_INSN (branch) = jmp;
  XVECEXP (PATTERN (branch), 0, 0) = NULL_RTX;
  XVECEXP (PATTERN (branch), 0, 1) = NULL_RTX;
  delete_insn (branch);
  return jmp;
}
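
/* For instance (a sketch with invented mnemonics): if a branch was
   filled as

       (jmp target) ; delay slot: (add r3, r3, 1)

   and the insn at TARGET reads r3, the add cannot stay in the delay
   slot, so the fill is undone by relinking the slot insn and the bare
   jump back into the insn stream.  If instead the jump itself depends
   on the slot insn (an anti-dependency), the fill cannot be undone
   and the caller pads the branch target with nops.  */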

/* Insert nops to satisfy pipeline constraints.  We only deal with ms2
   constraints here.  Earlier CPUs are dealt with by inserting nops with
   final_prescan (but that can lead to inferior code, and is
   impractical with ms2's JAL hazard).

   ms2 dynamic constraints:
   1) a load and a following use must be separated by one insn
   2) an insn and a following dependent call must be separated by two insns

   Only arith insns are placed in delay slots, so #1 cannot happen with
   a load in a delay slot.  #2 can happen with an arith insn in the
   delay slot.  */
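
/* For example (a sketch; the mnemonics are invented for exposition),
   constraint #1 turns a load immediately followed by a dependent use

       ldw  r5, (r6)
       add  r7, r5, r8

   into

       ldw  r5, (r6)
       nop
       add  r7, r5, r8

   and constraint #2 similarly pads a call whose operands were set by
   one of the two preceding dynamic insns with up to two nops.  */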

static void
mt_reorg_hazard (void)
{
  rtx insn, next;

  /* Find all the branches.  */
  for (insn = get_insns ();
       insn;
       insn = NEXT_INSN (insn))
    {
      rtx jmp;

      if (!INSN_P (insn))
        continue;

      jmp = PATTERN (insn);

      if (GET_CODE (jmp) != SEQUENCE)
        /* If it hasn't got a filled delay slot, then it can't
           conflict.  */
        continue;

      jmp = XVECEXP (jmp, 0, 0);

      if (recog_memoized (jmp) == CODE_FOR_tablejump)
        for (jmp = XEXP (XEXP (XVECEXP (PATTERN (jmp), 0, 1), 0), 0);
             !JUMP_TABLE_DATA_P (jmp);
             jmp = NEXT_INSN (jmp))
          continue;

      for_each_rtx (&PATTERN (jmp), mt_add_branches, insn);
    }

  /* Now scan for dependencies.  */
  for (insn = get_insns ();
       insn && !INSN_P (insn);
       insn = NEXT_INSN (insn))
    continue;

  for (;
       insn;
       insn = next)
    {
      rtx jmp, tmp;
      enum attr_type attr;

      gcc_assert (INSN_P (insn) && !INSN_DELETED_P (insn));
      for (next = NEXT_INSN (insn);
           next;
           next = NEXT_INSN (next))
        {
          if (!INSN_P (next))
            continue;
          if (GET_CODE (PATTERN (next)) != USE)
            break;
        }

      jmp = insn;
      if (GET_CODE (PATTERN (insn)) == SEQUENCE)
        jmp = XVECEXP (PATTERN (insn), 0, 0);

      attr = recog_memoized (jmp) >= 0 ? get_attr_type (jmp) : TYPE_UNKNOWN;

      if (next && attr == TYPE_LOAD)
        {
          /* A load.  See if NEXT is dependent, and if so insert a
             nop.  */

          tmp = PATTERN (next);
          if (GET_CODE (tmp) == SEQUENCE)
            tmp = PATTERN (XVECEXP (tmp, 0, 0));
          note_stores (PATTERN (insn), insn_dependent_p_1, &tmp);
          if (!tmp)
            emit_insn_after (gen_nop (), insn);
        }

      if (attr == TYPE_CALL)
        {
          /* A call.  Make sure we're not dependent on either of the
             previous two dynamic instructions.  */
          int nops = 0;
          int count;
          rtx prev = insn;
          rtx rescan = NULL_RTX;

          for (count = 2; count && !nops;)
            {
              int type;

              prev = PREV_INSN (prev);
              if (!prev)
                {
                  /* If we reach the start of the function, we must
                     presume the caller set the address in the delay
                     slot of the call instruction.  */
                  nops = count;
                  break;
                }

              if (BARRIER_P (prev))
                break;
              if (LABEL_P (prev))
                {
                  /* Look at branches to this label.  */
                  label_info *label;
                  branch_info *branch;

                  for (label = mt_labels;
                       label;
                       label = label->next)
                    if (label->label == prev)
                      {
                        for (branch = label->branches;
                             branch;
                             branch = branch->next)
                          {
                            tmp = mt_check_delay_slot (branch->insn, jmp);

                            if (tmp == branch->insn)
                              {
                                nops = count;
                                break;
                              }

                            if (tmp && branch->insn == next)
                              rescan = tmp;
                          }
                        break;
                      }
                  continue;
                }
              if (!INSN_P (prev) || GET_CODE (PATTERN (prev)) == USE)
                continue;

              if (GET_CODE (PATTERN (prev)) == SEQUENCE)
                {
                  /* Look at the delay slot.  */
                  tmp = mt_check_delay_slot (prev, jmp);
                  if (tmp == prev)
                    nops = count;
                  break;
                }

              type = (INSN_CODE (prev) >= 0 ? get_attr_type (prev)
                      : TYPE_COMPLEX);
              if (type == TYPE_CALL || type == TYPE_BRANCH)
                break;

              if (type == TYPE_LOAD
                  || type == TYPE_ARITH
                  || type == TYPE_COMPLEX)
                {
                  tmp = PATTERN (jmp);
                  note_stores (PATTERN (prev), insn_dependent_p_1, &tmp);
                  if (!tmp)
                    {
                      nops = count;
                      break;
                    }
                }

              if (INSN_CODE (prev) >= 0)
                count--;
            }

          if (rescan)
            for (next = NEXT_INSN (rescan);
                 next && !INSN_P (next);
                 next = NEXT_INSN (next))
              continue;
          while (nops--)
            emit_insn_before (gen_nop (), insn);
        }
    }

  /* Free the data structures.  */
  while (mt_labels)
    {
      label_info *label = mt_labels;
      branch_info *branch, *next;

      mt_labels = label->next;
      for (branch = label->branches; branch; branch = next)
        {
          next = branch->next;
          free (branch);
        }
      free (label);
    }
}

/* Fix up the looping instructions, do delayed-branch scheduling, and
   fix up scheduling hazards.  */

static void
mt_machine_reorg (void)
{
  if (cfun->machine->has_loops && TARGET_MS2)
    mt_reorg_loops (dump_file);

  if (mt_flag_delayed_branch)
    dbr_schedule (get_insns ());

  if (TARGET_MS2)
    {
      /* Force all instructions to be split into their final form.  */
      split_all_insns_noflow ();
      mt_reorg_hazard ();
    }
}

/* Initialize the GCC target structure.  */
const struct attribute_spec mt_attribute_table[];

#undef  TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE          mt_attribute_table
#undef  TARGET_STRUCT_VALUE_RTX
#define TARGET_STRUCT_VALUE_RTX         mt_struct_value_rtx
#undef  TARGET_PROMOTE_PROTOTYPES
#define TARGET_PROMOTE_PROTOTYPES       hook_bool_tree_true
#undef  TARGET_PASS_BY_REFERENCE
#define TARGET_PASS_BY_REFERENCE        mt_pass_by_reference
#undef  TARGET_MUST_PASS_IN_STACK
#define TARGET_MUST_PASS_IN_STACK       mt_pass_in_stack
#undef  TARGET_ARG_PARTIAL_BYTES
#define TARGET_ARG_PARTIAL_BYTES        mt_arg_partial_bytes
#undef  TARGET_SETUP_INCOMING_VARARGS
#define TARGET_SETUP_INCOMING_VARARGS   mt_setup_incoming_varargs
#undef  TARGET_MACHINE_DEPENDENT_REORG
#define TARGET_MACHINE_DEPENDENT_REORG  mt_machine_reorg

struct gcc_target targetm = TARGET_INITIALIZER;

#include "gt-mt.h"