OpenCores
URL https://opencores.org/ocsvn/openrisc/openrisc/trunk

Subversion Repositories openrisc

[/] [openrisc/] [trunk/] [gnu-dev/] [or1k-gcc/] [gcc/] [config/] [mep/] [mep.c] - Blame information for rev 758

Go to most recent revision | Details | Compare with Previous | View Log

Line No. Rev Author Line
1 709 jeremybenn
/* Definitions for Toshiba Media Processor
2
   Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
3
   2011
4
   Free Software Foundation, Inc.
5
   Contributed by Red Hat, Inc.
6
 
7
This file is part of GCC.
8
 
9
GCC is free software; you can redistribute it and/or modify it under
10
the terms of the GNU General Public License as published by the Free
11
Software Foundation; either version 3, or (at your option) any later
12
version.
13
 
14
GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15
WARRANTY; without even the implied warranty of MERCHANTABILITY or
16
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
17
for more details.
18
 
19
You should have received a copy of the GNU General Public License
20
along with GCC; see the file COPYING3.  If not see
21
<http://www.gnu.org/licenses/>.  */
22
 
23
#include "config.h"
24
#include "system.h"
25
#include "coretypes.h"
26
#include "tm.h"
27
#include "rtl.h"
28
#include "tree.h"
29
#include "regs.h"
30
#include "hard-reg-set.h"
31
#include "insn-config.h"
32
#include "conditions.h"
33
#include "insn-flags.h"
34
#include "output.h"
35
#include "insn-attr.h"
36
#include "flags.h"
37
#include "recog.h"
38
#include "obstack.h"
39
#include "tree.h"
40
#include "expr.h"
41
#include "except.h"
42
#include "function.h"
43
#include "optabs.h"
44
#include "reload.h"
45
#include "tm_p.h"
46
#include "ggc.h"
47
#include "diagnostic-core.h"
48
#include "integrate.h"
49
#include "target.h"
50
#include "target-def.h"
51
#include "langhooks.h"
52
#include "df.h"
53
#include "gimple.h"
54
#include "opts.h"
55
 
56
/* Structure of this file:
57
 
58
 + Command Line Option Support
59
 + Pattern support - constraints, predicates, expanders
60
 + Reload Support
61
 + Costs
62
 + Functions to save and restore machine-specific function data.
63
 + Frame/Epilog/Prolog Related
64
 + Operand Printing
65
 + Function args in registers
66
 + Handle pipeline hazards
67
 + Handle attributes
68
 + Trampolines
69
 + Machine-dependent Reorg
70
 + Builtins.  */
71
 
72
/* Symbol encodings:
73
 
74
   Symbols are encoded as @ <char> . <name> where <char> is one of these:
75
 
76
   b - based
77
   t - tiny
78
   n - near
79
   f - far
80
   i - io, near
81
   I - io, far
82
   c - cb (control bus)  */
83
 
84
/* Per-function machine-specific state, created by mep_init_machine_status
   and garbage-collected via GTY.  */
struct GTY(()) machine_function
{
  /* Nonzero if this function needs a frame pointer.  */
  int mep_frame_pointer_needed;

  /* For varargs. */
  int arg_regs_to_save;
  int regsave_filler;
  int frame_filler;
  int frame_locked;

  /* Records __builtin_return address.  */
  /* NOTE(review): the field name suggests an EH stack-adjustment rtx
     rather than a return address -- confirm against its uses.  */
  rtx eh_stack_adjust;

  /* Size of the register-save area, plus a per-hard-register table of
     save slots and a flag for each register that gets saved -- TODO
     confirm slot semantics against the prologue/epilogue code.  */
  int reg_save_size;
  int reg_save_slot[FIRST_PSEUDO_REGISTER];
  unsigned char reg_saved[FIRST_PSEUDO_REGISTER];

  /* 2 if the current function has an interrupt attribute, 1 if not, 0
     if unknown.  This is here because resource.c uses EPILOGUE_USES
     which needs it.  */
  int interrupt_handler;

  /* Likewise, for disinterrupt attribute.  */
  int disable_interrupts;

  /* Number of doloop tags used so far.  */
  int doloop_tags;

  /* True if the last tag was allocated to a doloop_end.  */
  bool doloop_tag_from_end;

  /* True if reload changes $TP.  */
  bool reload_changes_tp;

  /* 2 if there are asm()s without operands, 1 if not, 0 if unknown.
     We only set this if the function is an interrupt handler.  */
  int asms_without_operands;
};
122
 
123
#define MEP_CONTROL_REG(x) \
124
  (GET_CODE (x) == REG && ANY_CONTROL_REGNO_P (REGNO (x)))
125
 
126
static GTY(()) section * based_section;
127
static GTY(()) section * tinybss_section;
128
static GTY(()) section * far_section;
129
static GTY(()) section * farbss_section;
130
static GTY(()) section * frodata_section;
131
static GTY(()) section * srodata_section;
132
 
133
static GTY(()) section * vtext_section;
134
static GTY(()) section * vftext_section;
135
static GTY(()) section * ftext_section;
136
 
137
static void mep_set_leaf_registers (int);
138
static bool symbol_p (rtx);
139
static bool symbolref_p (rtx);
140
static void encode_pattern_1 (rtx);
141
static void encode_pattern (rtx);
142
static bool const_in_range (rtx, int, int);
143
static void mep_rewrite_mult (rtx, rtx);
144
static void mep_rewrite_mulsi3 (rtx, rtx, rtx, rtx);
145
static void mep_rewrite_maddsi3 (rtx, rtx, rtx, rtx, rtx);
146
static bool mep_reuse_lo_p_1 (rtx, rtx, rtx, bool);
147
static bool move_needs_splitting (rtx, rtx, enum machine_mode);
148
static bool mep_expand_setcc_1 (enum rtx_code, rtx, rtx, rtx);
149
static bool mep_nongeneral_reg (rtx);
150
static bool mep_general_copro_reg (rtx);
151
static bool mep_nonregister (rtx);
152
static struct machine_function* mep_init_machine_status (void);
153
static rtx mep_tp_rtx (void);
154
static rtx mep_gp_rtx (void);
155
static bool mep_interrupt_p (void);
156
static bool mep_disinterrupt_p (void);
157
static bool mep_reg_set_p (rtx, rtx);
158
static bool mep_reg_set_in_function (int);
159
static bool mep_interrupt_saved_reg (int);
160
static bool mep_call_saves_register (int);
161
static rtx F (rtx);
162
static void add_constant (int, int, int, int);
163
static rtx maybe_dead_move (rtx, rtx, bool);
164
static void mep_reload_pointer (int, const char *);
165
static void mep_start_function (FILE *, HOST_WIDE_INT);
166
static bool mep_function_ok_for_sibcall (tree, tree);
167
static int unique_bit_in (HOST_WIDE_INT);
168
static int bit_size_for_clip (HOST_WIDE_INT);
169
static int bytesize (const_tree, enum machine_mode);
170
static tree mep_validate_based_tiny (tree *, tree, tree, int, bool *);
171
static tree mep_validate_near_far (tree *, tree, tree, int, bool *);
172
static tree mep_validate_disinterrupt (tree *, tree, tree, int, bool *);
173
static tree mep_validate_interrupt (tree *, tree, tree, int, bool *);
174
static tree mep_validate_io_cb (tree *, tree, tree, int, bool *);
175
static tree mep_validate_vliw (tree *, tree, tree, int, bool *);
176
static bool mep_function_attribute_inlinable_p (const_tree);
177
static bool mep_can_inline_p (tree, tree);
178
static bool mep_lookup_pragma_disinterrupt (const char *);
179
static int mep_multiple_address_regions (tree, bool);
180
static int mep_attrlist_to_encoding (tree, tree);
181
static void mep_insert_attributes (tree, tree *);
182
static void mep_encode_section_info (tree, rtx, int);
183
static section * mep_select_section (tree, int, unsigned HOST_WIDE_INT);
184
static void mep_unique_section (tree, int);
185
static unsigned int mep_section_type_flags (tree, const char *, int);
186
static void mep_asm_named_section (const char *, unsigned int, tree);
187
static bool mep_mentioned_p (rtx, rtx, int);
188
static void mep_reorg_regmove (rtx);
189
static rtx mep_insert_repeat_label_last (rtx, rtx, bool, bool);
190
static void mep_reorg_repeat (rtx);
191
static bool mep_invertable_branch_p (rtx);
192
static void mep_invert_branch (rtx, rtx);
193
static void mep_reorg_erepeat (rtx);
194
static void mep_jmp_return_reorg (rtx);
195
static void mep_reorg_addcombine (rtx);
196
static void mep_reorg (void);
197
static void mep_init_intrinsics (void);
198
static void mep_init_builtins (void);
199
static void mep_intrinsic_unavailable (int);
200
static bool mep_get_intrinsic_insn (int, const struct cgen_insn **);
201
static bool mep_get_move_insn (int, const struct cgen_insn **);
202
static rtx mep_convert_arg (enum machine_mode, rtx);
203
static rtx mep_convert_regnum (const struct cgen_regnum_operand *, rtx);
204
static rtx mep_legitimize_arg (const struct insn_operand_data *, rtx, int);
205
static void mep_incompatible_arg (const struct insn_operand_data *, rtx, int, tree);
206
static rtx mep_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
207
static int mep_adjust_cost (rtx, rtx, rtx, int);
208
static int mep_issue_rate (void);
209
static rtx mep_find_ready_insn (rtx *, int, enum attr_slot, int);
210
static void mep_move_ready_insn (rtx *, int, rtx);
211
static int mep_sched_reorder (FILE *, int, rtx *, int *, int);
212
static rtx mep_make_bundle (rtx, rtx);
213
static void mep_bundle_insns (rtx);
214
static bool mep_rtx_cost (rtx, int, int, int, int *, bool);
215
static int mep_address_cost (rtx, bool);
216
static void mep_setup_incoming_varargs (cumulative_args_t, enum machine_mode,
217
                                        tree, int *, int);
218
static bool mep_pass_by_reference (cumulative_args_t cum, enum machine_mode,
219
                                   const_tree, bool);
220
static rtx mep_function_arg (cumulative_args_t, enum machine_mode,
221
                             const_tree, bool);
222
static void mep_function_arg_advance (cumulative_args_t, enum machine_mode,
223
                                      const_tree, bool);
224
static bool mep_vector_mode_supported_p (enum machine_mode);
225
static rtx  mep_allocate_initial_value (rtx);
226
static void mep_asm_init_sections (void);
227
static int mep_comp_type_attributes (const_tree, const_tree);
228
static bool mep_narrow_volatile_bitfield (void);
229
static rtx mep_expand_builtin_saveregs (void);
230
static tree mep_build_builtin_va_list (void);
231
static void mep_expand_va_start (tree, rtx);
232
static tree mep_gimplify_va_arg_expr (tree, tree, gimple_seq *, gimple_seq *);
233
static bool mep_can_eliminate (const int, const int);
234
static void mep_conditional_register_usage (void);
235
static void mep_trampoline_init (rtx, tree, rtx);
236
 
237
#define WANT_GCC_DEFINITIONS
238
#include "mep-intrin.h"
239
#undef WANT_GCC_DEFINITIONS
240
 
241
 
242
/* Command Line Option Support.  */
243
 
244
char mep_leaf_registers [FIRST_PSEUDO_REGISTER];
245
 
246
/* True if we can use cmov instructions to move values back and forth
247
   between core and coprocessor registers.  */
248
bool mep_have_core_copro_moves_p;
249
 
250
/* True if we can use cmov instructions (or a work-alike) to move
251
   values between coprocessor registers.  */
252
bool mep_have_copro_copro_moves_p;
253
 
254
/* A table of all coprocessor instructions that can act like
255
   a coprocessor-to-coprocessor cmov.  */
256
static const int mep_cmov_insns[] = {
257
  mep_cmov,
258
  mep_cpmov,
259
  mep_fmovs,
260
  mep_caddi3,
261
  mep_csubi3,
262
  mep_candi3,
263
  mep_cori3,
264
  mep_cxori3,
265
  mep_cand3,
266
  mep_cor3
267
};
268
 
269
 
270
static void
271
mep_set_leaf_registers (int enable)
272
{
273
  int i;
274
 
275
  if (mep_leaf_registers[0] != enable)
276
    for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
277
      mep_leaf_registers[i] = enable;
278
}
279
 
280
static void
281
mep_conditional_register_usage (void)
282
{
283
  int i;
284
 
285
  if (!TARGET_OPT_MULT && !TARGET_OPT_DIV)
286
    {
287
      fixed_regs[HI_REGNO] = 1;
288
      fixed_regs[LO_REGNO] = 1;
289
      call_used_regs[HI_REGNO] = 1;
290
      call_used_regs[LO_REGNO] = 1;
291
    }
292
 
293
  for (i = FIRST_SHADOW_REGISTER; i <= LAST_SHADOW_REGISTER; i++)
294
    global_regs[i] = 1;
295
}
296
 
297
/* Implement TARGET_OPTION_OVERRIDE.  Process deferred command-line
   options, diagnose incompatible option combinations, derive
   mep_tiny_cutoff from the -ms/-mm/-ml memory-model options, and
   install the machine_function allocator.  */
static void
mep_option_override (void)
{
  unsigned int i;
  int j;
  cl_deferred_option *opt;
  VEC(cl_deferred_option,heap) *vec
    = (VEC(cl_deferred_option,heap) *) mep_deferred_options;

  /* Handle options whose processing was deferred until all options
     had been parsed.  */
  FOR_EACH_VEC_ELT (cl_deferred_option, vec, i, opt)
    {
      switch (opt->opt_index)
        {
        case OPT_mivc2:
          /* -mivc2: make the 32 coprocessor registers starting at
             hard register 48 allocatable and call-clobbered, except
             the two at offsets 6 and 7, which remain call-saved.
             NOTE(review): 48 is presumably the first coprocessor
             register number -- confirm against the register layout.  */
          for (j = 0; j < 32; j++)
            fixed_regs[j + 48] = 0;
          for (j = 0; j < 32; j++)
            call_used_regs[j + 48] = 1;
          for (j = 6; j < 8; j++)
            call_used_regs[j + 48] = 0;

/* Rename control register N to the IVC2-specific name S.  */
#define RN(n,s) reg_names[FIRST_CCR_REGNO + n] = s
          RN (0, "$csar0");
          RN (1, "$cc");
          RN (4, "$cofr0");
          RN (5, "$cofr1");
          RN (6, "$cofa0");
          RN (7, "$cofa1");
          RN (15, "$csar1");

          RN (16, "$acc0_0");
          RN (17, "$acc0_1");
          RN (18, "$acc0_2");
          RN (19, "$acc0_3");
          RN (20, "$acc0_4");
          RN (21, "$acc0_5");
          RN (22, "$acc0_6");
          RN (23, "$acc0_7");

          RN (24, "$acc1_0");
          RN (25, "$acc1_1");
          RN (26, "$acc1_2");
          RN (27, "$acc1_3");
          RN (28, "$acc1_4");
          RN (29, "$acc1_5");
          RN (30, "$acc1_6");
          RN (31, "$acc1_7");
#undef RN
          break;

        default:
          gcc_unreachable ();
        }
    }

  /* PIC code generation is not supported on this target.  */
  if (flag_pic == 1)
    warning (OPT_fpic, "-fpic is not supported");
  if (flag_pic == 2)
    warning (OPT_fPIC, "-fPIC is not supported");
  /* -ms, -mm, -ml and -mtiny= are mutually exclusive ways of choosing
     the tiny-data cutoff.  */
  if (TARGET_S && TARGET_M)
    error ("only one of -ms and -mm may be given");
  if (TARGET_S && TARGET_L)
    error ("only one of -ms and -ml may be given");
  if (TARGET_M && TARGET_L)
    error ("only one of -mm and -ml may be given");
  if (TARGET_S && global_options_set.x_mep_tiny_cutoff)
    error ("only one of -ms and -mtiny= may be given");
  if (TARGET_M && global_options_set.x_mep_tiny_cutoff)
    error ("only one of -mm and -mtiny= may be given");
  if (TARGET_OPT_CLIP && ! TARGET_OPT_MINMAX)
    warning (0, "-mclip currently has no effect without -mminmax");

  /* Validate the -mc= constant-section name.  */
  if (mep_const_section)
    {
      if (strcmp (mep_const_section, "tiny") != 0
          && strcmp (mep_const_section, "near") != 0
          && strcmp (mep_const_section, "far") != 0)
        error ("-mc= must be -mc=tiny, -mc=near, or -mc=far");
    }

  /* Derive the tiny-data cutoff from the memory model.  */
  if (TARGET_S)
    mep_tiny_cutoff = 65536;
  if (TARGET_M)
    mep_tiny_cutoff = 0;
  if (TARGET_L && ! global_options_set.x_mep_tiny_cutoff)
    mep_tiny_cutoff = 0;

  /* 64-bit coprocessor registers must not be split into word-sized
     pieces.  */
  if (TARGET_64BIT_CR_REGS)
    flag_split_wide_types = 0;

  init_machine_status = mep_init_machine_status;
  mep_init_intrinsics ();
}
390
 
391
/* Pattern Support - constraints, predicates, expanders.  */
392
 
393
/* MEP has very few instructions that can refer to the span of
394
   addresses used by symbols, so it's common to check for them.  */
395
 
396
static bool
397
symbol_p (rtx x)
398
{
399
  int c = GET_CODE (x);
400
 
401
  return (c == CONST_INT
402
          || c == CONST
403
          || c == SYMBOL_REF);
404
}
405
 
406
static bool
407
symbolref_p (rtx x)
408
{
409
  int c;
410
 
411
  if (GET_CODE (x) != MEM)
412
    return false;
413
 
414
  c = GET_CODE (XEXP (x, 0));
415
  return (c == CONST_INT
416
          || c == CONST
417
          || c == SYMBOL_REF);
418
}
419
 
420
/* static const char *reg_class_names[] = REG_CLASS_NAMES; */
421
 
422
#define GEN_REG(R, STRICT)                              \
423
  (GR_REGNO_P (R)                                       \
424
   || (!STRICT                                          \
425
       && ((R) == ARG_POINTER_REGNUM                    \
426
           || (R) >= FIRST_PSEUDO_REGISTER)))
427
 
428
static char pattern[12], *patternp;
429
static GTY(()) rtx patternr[12];
430
#define RTX_IS(x) (strcmp (pattern, x) == 0)
431
 
432
/* Append a one-character encoding of X (and, recursively, of its
   operands) to the `pattern' buffer, recording each visited rtx in
   the parallel patternr array.  The encoding is matched later with
   the RTX_IS macro.  */
static void
encode_pattern_1 (rtx x)
{
  int i;

  /* Keep room for the trailing NUL; once the buffer is nearly full,
     overwrite the last character with '?' and stop recursing.  */
  if (patternp == pattern + sizeof (pattern) - 2)
    {
      patternp[-1] = '?';
      return;
    }

  patternr[patternp-pattern] = x;

  switch (GET_CODE (x))
    {
    case REG:
      *patternp++ = 'r';
      break;
    case MEM:
      *patternp++ = 'm';
      /* FALLTHRU: a MEM's address is encoded the same way as the
         interior of a CONST.  */
    case CONST:
      encode_pattern_1 (XEXP(x, 0));
      break;
    case PLUS:
      *patternp++ = '+';
      encode_pattern_1 (XEXP(x, 0));
      encode_pattern_1 (XEXP(x, 1));
      break;
    case LO_SUM:
      *patternp++ = 'L';
      encode_pattern_1 (XEXP(x, 0));
      encode_pattern_1 (XEXP(x, 1));
      break;
    case HIGH:
      *patternp++ = 'H';
      encode_pattern_1 (XEXP(x, 0));
      break;
    case SYMBOL_REF:
      *patternp++ = 's';
      break;
    case LABEL_REF:
      *patternp++ = 'l';
      break;
    case CONST_INT:
    case CONST_DOUBLE:
      *patternp++ = 'i';
      break;
    case UNSPEC:
      /* 'u' followed by the unspec number as a single ASCII digit,
         then the encodings of all operands in the vector.  */
      *patternp++ = 'u';
      *patternp++ = '0' + XCINT(x, 1, UNSPEC);
      for (i=0; i<XVECLEN (x, 0); i++)
        encode_pattern_1 (XVECEXP (x, 0, i));
      break;
    case USE:
      *patternp++ = 'U';
      break;
    default:
      /* Anything unrecognized encodes as '?'.  */
      *patternp++ = '?';
#if 0
      fprintf (stderr, "can't encode pattern %s\n", GET_RTX_NAME(GET_CODE(x)));
      debug_rtx (x);
      gcc_unreachable ();
#endif
      break;
    }
}
498
 
499
/* Encode rtx X into the static `pattern' buffer as a NUL-terminated
   string, for matching with RTX_IS.  */
static void
encode_pattern (rtx x)
{
  patternp = pattern;
  encode_pattern_1 (x);
  *patternp = 0;
}
506
 
507
/* Return the section tag character encoded in the name of the symbol
   underlying X (see the "Symbol encodings" table above), or 0 if X
   does not resolve to an encoded SYMBOL_REF.  MEM, CONST and UNSPEC
   wrappers and constant-offset PLUSes are stripped first.  The io
   encodings are folded to their near/far equivalents.  */
int
mep_section_tag (rtx x)
{
  const char *name;

  /* Peel wrappers until we reach the underlying expression.  */
  while (1)
    {
      switch (GET_CODE (x))
        {
        case MEM:
        case CONST:
          x = XEXP (x, 0);
          break;
        case UNSPEC:
          x = XVECEXP (x, 0, 0);
          break;
        case PLUS:
          if (GET_CODE (XEXP (x, 1)) != CONST_INT)
            return 0;
          x = XEXP (x, 0);
          break;
        default:
          goto done;
        }
    }
 done:
  if (GET_CODE (x) != SYMBOL_REF)
    return 0;
  name = XSTR (x, 0);
  /* Encoded names look like "@<tag>.<name>".  */
  if (name[0] == '@' && name[2] == '.')
    {
      if (name[1] == 'i' || name[1] == 'I')
        {
          if (name[1] == 'I')
            return 'f'; /* io, far -> far */
          return 'n'; /* io, near -> near */
        }
      return name[1];
    }
  return 0;
}
548
 
549
/* Implement REGNO_REG_CLASS: return the smallest register class
   containing hard register REGNO.  */
int
mep_regno_reg_class (int regno)
{
  /* Registers with a dedicated singleton class.  */
  switch (regno)
    {
    case SP_REGNO:              return SP_REGS;
    case TP_REGNO:              return TP_REGS;
    case GP_REGNO:              return GP_REGS;
    case 0:                      return R0_REGS;
    case HI_REGNO:              return HI_REGS;
    case LO_REGNO:              return LO_REGS;
    case ARG_POINTER_REGNUM:    return GENERAL_REGS;
    }

  /* The first eight general registers are addressable $tp-relative.  */
  if (GR_REGNO_P (regno))
    return regno < FIRST_GR_REGNO + 8 ? TPREL_REGS : GENERAL_REGS;
  if (CONTROL_REGNO_P (regno))
    return CONTROL_REGS;

  if (CR_REGNO_P (regno))
    {
      int i, j;

      /* Search for the register amongst user-defined subclasses of
         the coprocessor registers.  */
      for (i = USER0_REGS; i <= USER3_REGS; ++i)
        {
          if (! TEST_HARD_REG_BIT (reg_class_contents[i], regno))
            continue;
          /* USERi_REGS is the answer only if none of its proper
             subclasses also contains REGNO.  */
          for (j = 0; j < N_REG_CLASSES; ++j)
            {
              enum reg_class sub = reg_class_subclasses[i][j];

              if (sub == LIM_REG_CLASSES)
                return i;
              if (TEST_HARD_REG_BIT (reg_class_contents[sub], regno))
                break;
            }
        }

      return LOADABLE_CR_REGNO_P (regno) ? LOADABLE_CR_REGS : CR_REGS;
    }

  if (CCR_REGNO_P (regno))
    return CCR_REGS;

  /* Only the shadow registers remain; they are not allocatable.  */
  gcc_assert (regno >= FIRST_SHADOW_REGISTER && regno <= LAST_SHADOW_REGISTER);
  return NO_REGS;
}
598
 
599
#if 0
600
int
601
mep_reg_class_from_constraint (int c, const char *str)
602
{
603
  switch (c)
604
    {
605
    case 'a':
606
      return SP_REGS;
607
    case 'b':
608
      return TP_REGS;
609
    case 'c':
610
      return CONTROL_REGS;
611
    case 'd':
612
      return HILO_REGS;
613
    case 'e':
614
      {
615
        switch (str[1])
616
          {
617
          case 'm':
618
            return LOADABLE_CR_REGS;
619
          case 'x':
620
            return mep_have_copro_copro_moves_p ? CR_REGS : NO_REGS;
621
          case 'r':
622
            return mep_have_core_copro_moves_p ? CR_REGS : NO_REGS;
623
          default:
624
            return NO_REGS;
625
          }
626
      }
627
    case 'h':
628
      return HI_REGS;
629
    case 'j':
630
      return RPC_REGS;
631
    case 'l':
632
      return LO_REGS;
633
    case 't':
634
      return TPREL_REGS;
635
    case 'v':
636
      return GP_REGS;
637
    case 'x':
638
      return CR_REGS;
639
    case 'y':
640
      return CCR_REGS;
641
    case 'z':
642
      return R0_REGS;
643
 
644
    case 'A':
645
    case 'B':
646
    case 'C':
647
    case 'D':
648
      {
649
        enum reg_class which = c - 'A' + USER0_REGS;
650
        return (reg_class_size[which] > 0 ? which : NO_REGS);
651
      }
652
 
653
    default:
654
      return NO_REGS;
655
    }
656
}
657
 
658
bool
659
mep_const_ok_for_letter_p (HOST_WIDE_INT value, int c)
660
{
661
  switch (c)
662
    {
663
      case 'I': return value >= -32768 && value <      32768;
664
      case 'J': return value >=      0 && value <      65536;
665
      case 'K': return value >=      0 && value < 0x01000000;
666
      case 'L': return value >=    -32 && value <         32;
667
      case 'M': return value >=      0 && value <         32;
668
      case 'N': return value >=      0 && value <         16;
669
      case 'O':
670
        if (value & 0xffff)
671
          return false;
672
        return value >= -2147483647-1 && value <= 2147483647;
673
    default:
674
      gcc_unreachable ();
675
    }
676
}
677
 
678
bool
679
mep_extra_constraint (rtx value, int c)
680
{
681
  encode_pattern (value);
682
 
683
  switch (c)
684
    {
685
    case 'R':
686
      /* For near symbols, like what call uses.  */
687
      if (GET_CODE (value) == REG)
688
        return 0;
689
      return mep_call_address_operand (value, GET_MODE (value));
690
 
691
    case 'S':
692
      /* For signed 8-bit immediates.  */
693
      return (GET_CODE (value) == CONST_INT
694
              && INTVAL (value) >= -128
695
              && INTVAL (value) <= 127);
696
 
697
    case 'T':
698
      /* For tp/gp relative symbol values.  */
699
      return (RTX_IS ("u3s") || RTX_IS ("u2s")
700
              || RTX_IS ("+u3si") || RTX_IS ("+u2si"));
701
 
702
    case 'U':
703
      /* Non-absolute memories.  */
704
      return GET_CODE (value) == MEM && ! CONSTANT_P (XEXP (value, 0));
705
 
706
    case 'W':
707
      /* %hi(sym) */
708
      return RTX_IS ("Hs");
709
 
710
    case 'Y':
711
      /* Register indirect.  */
712
      return RTX_IS ("mr");
713
 
714
    case 'Z':
715
      return mep_section_tag (value) == 'c' && RTX_IS ("ms");
716
    }
717
 
718
  return false;
719
}
720
#endif
721
 
722
#undef PASS
723
#undef FAIL
724
 
725
static bool
726
const_in_range (rtx x, int minv, int maxv)
727
{
728
  return (GET_CODE (x) == CONST_INT
729
          && INTVAL (x) >= minv
730
          && INTVAL (x) <= maxv);
731
}
732
 
733
/* Given three integer registers DEST, SRC1 and SRC2, return an rtx X
734
   such that "mulr DEST,X" will calculate DEST = SRC1 * SRC2.  If a move
735
   is needed, emit it before INSN if INSN is nonnull, otherwise emit it
736
   at the end of the insn stream.  */
737
 
738
rtx
739
mep_mulr_source (rtx insn, rtx dest, rtx src1, rtx src2)
740
{
741
  if (rtx_equal_p (dest, src1))
742
    return src2;
743
  else if (rtx_equal_p (dest, src2))
744
    return src1;
745
  else
746
    {
747
      if (insn == 0)
748
        emit_insn (gen_movsi (copy_rtx (dest), src1));
749
      else
750
        emit_insn_before (gen_movsi (copy_rtx (dest), src1), insn);
751
      return src2;
752
    }
753
}
754
 
755
/* Replace INSN's pattern with PATTERN, a multiplication PARALLEL.
756
   Change the last element of PATTERN from (clobber (scratch:SI))
757
   to (clobber (reg:SI HI_REGNO)).  */
758
 
759
static void
mep_rewrite_mult (rtx insn, rtx pattern)
{
  rtx hi_clobber;

  /* The scratch clobber is the last element of the PARALLEL; point
     it at the hard $hi register instead.  */
  hi_clobber = XVECEXP (pattern, 0, XVECLEN (pattern, 0) - 1);
  XEXP (hi_clobber, 0) = gen_rtx_REG (SImode, HI_REGNO);
  PATTERN (insn) = pattern;
  /* Force INSN to be re-recognized with its new pattern.  */
  INSN_CODE (insn) = -1;
}
769
 
770
/* Subroutine of mep_reuse_lo_p.  Rewrite instruction INSN so that it
771
   calculates SRC1 * SRC2 and stores the result in $lo.  Also make it
772
   store the result in DEST if nonnull.  */
773
 
774
static void
mep_rewrite_mulsi3 (rtx insn, rtx dest, rtx src1, rtx src2)
{
  rtx lo, pattern;

  lo = gen_rtx_REG (SImode, LO_REGNO);
  if (dest)
    /* Keep DEST live: use the mulr form, which writes both DEST and
       $lo.  mep_mulr_source may emit a setup move before INSN.  */
    pattern = gen_mulsi3r (lo, dest, copy_rtx (dest),
                           mep_mulr_source (insn, dest, src1, src2));
  else
    /* DEST is dead: the $lo-only form suffices.  */
    pattern = gen_mulsi3_lo (lo, src1, src2);
  mep_rewrite_mult (insn, pattern);
}
787
 
788
/* Like mep_rewrite_mulsi3, but calculate SRC1 * SRC2 + SRC3.  First copy
789
   SRC3 into $lo, then use either madd or maddr.  The move into $lo will
790
   be deleted by a peephole2 if SRC3 is already in $lo.  */
791
 
792
static void
mep_rewrite_maddsi3 (rtx insn, rtx dest, rtx src1, rtx src2, rtx src3)
{
  rtx lo, pattern;

  lo = gen_rtx_REG (SImode, LO_REGNO);
  /* Seed $lo with the addend; peephole2 deletes this move when SRC3
     is already in $lo (see the comment above).  */
  emit_insn_before (gen_movsi (copy_rtx (lo), src3), insn);
  if (dest)
    /* Keep DEST live: maddr writes both DEST and $lo.  */
    pattern = gen_maddsi3r (lo, dest, copy_rtx (dest),
                            mep_mulr_source (insn, dest, src1, src2),
                            copy_rtx (lo));
  else
    /* DEST is dead: accumulate into $lo only.  */
    pattern = gen_maddsi3_lo (lo, src1, src2, copy_rtx (lo));
  mep_rewrite_mult (insn, pattern);
}
807
 
808
/* Return true if $lo has the same value as integer register GPR when
809
   instruction INSN is reached.  If necessary, rewrite the instruction
810
   that sets $lo so that it uses a proper SET, not a CLOBBER.  LO is an
811
   rtx for (reg:SI LO_REGNO).
812
 
813
   This function is intended to be used by the peephole2 pass.  Since
814
   that pass goes from the end of a basic block to the beginning, and
815
   propagates liveness information on the way, there is no need to
816
   update register notes here.
817
 
818
   If GPR_DEAD_P is true on entry, and this function returns true,
819
   then the caller will replace _every_ use of GPR in and after INSN
820
   with LO.  This means that if the instruction that sets $lo is a
821
   mulr- or maddr-type instruction, we can rewrite it to use mul or
822
   madd instead.  In combination with the copy progagation pass,
823
   this allows us to replace sequences like:
824
 
825
        mov GPR,R1
826
        mulr GPR,R2
827
 
828
   with:
829
 
830
        mul R1,R2
831
 
832
   if GPR is no longer used.  */
833
 
834
static bool
mep_reuse_lo_p_1 (rtx lo, rtx gpr, rtx insn, bool gpr_dead_p)
{
  /* Scan backwards from INSN looking for the instruction that set
     $lo, stopping at the start of the basic block.  */
  do
    {
      insn = PREV_INSN (insn);
      if (INSN_P (insn))
        switch (recog_memoized (insn))
          {
          case CODE_FOR_mulsi3_1:
            extract_insn (insn);
            if (rtx_equal_p (recog_data.operand[0], gpr))
              {
                /* GPR was set by a multiply; rewrite it to also (or
                   only, if GPR is dead) set $lo.  */
                mep_rewrite_mulsi3 (insn,
                                    gpr_dead_p ? NULL : recog_data.operand[0],
                                    recog_data.operand[1],
                                    recog_data.operand[2]);
                return true;
              }
            return false;

          case CODE_FOR_maddsi3:
            extract_insn (insn);
            if (rtx_equal_p (recog_data.operand[0], gpr))
              {
                /* Likewise for multiply-add.  */
                mep_rewrite_maddsi3 (insn,
                                     gpr_dead_p ? NULL : recog_data.operand[0],
                                     recog_data.operand[1],
                                     recog_data.operand[2],
                                     recog_data.operand[3]);
                return true;
              }
            return false;

          case CODE_FOR_mulsi3r:
          case CODE_FOR_maddsi3r:
            /* These already set $lo; it matches GPR iff GPR is the
               insn's destination operand.  */
            extract_insn (insn);
            return rtx_equal_p (recog_data.operand[1], gpr);

          default:
            /* Any other write to $lo or GPR, or a volatile insn,
               invalidates the equivalence.  */
            if (reg_set_p (lo, insn)
                || reg_set_p (gpr, insn)
                || volatile_insn_p (PATTERN (insn)))
              return false;

            /* A use of GPR before INSN means the caller may not
               rewrite every use of GPR after all.  */
            if (gpr_dead_p && reg_referenced_p (gpr, PATTERN (insn)))
              gpr_dead_p = false;
            break;
          }
    }
  while (!NOTE_INSN_BASIC_BLOCK_P (insn));
  return false;
}
887
 
888
/* A wrapper around mep_reuse_lo_p_1 that preserves recog_data.  */
889
 
890
bool
891
mep_reuse_lo_p (rtx lo, rtx gpr, rtx insn, bool gpr_dead_p)
892
{
893
  bool result = mep_reuse_lo_p_1 (lo, gpr, insn, gpr_dead_p);
894
  extract_insn (insn);
895
  return result;
896
}
897
 
898
/* Return true if SET can be turned into a post-modify load or store
899
   that adds OFFSET to GPR.  In other words, return true if SET can be
900
   changed into:
901
 
902
       (parallel [SET (set GPR (plus:SI GPR OFFSET))]).
903
 
904
   It's OK to change SET to an equivalent operation in order to
905
   make it match.  */
906
 
907
static bool
mep_use_post_modify_for_set_p (rtx set, rtx gpr, rtx offset)
{
  rtx *reg, *mem;
  unsigned int reg_bytes, mem_bytes;
  enum machine_mode reg_mode, mem_mode;

  /* Only simple SETs can be converted.  */
  if (GET_CODE (set) != SET)
    return false;

  /* Point REG to what we hope will be the register side of the set and
     MEM to what we hope will be the memory side.  */
  if (GET_CODE (SET_DEST (set)) == MEM)
    {
      mem = &SET_DEST (set);
      reg = &SET_SRC (set);
    }
  else
    {
      reg = &SET_DEST (set);
      mem = &SET_SRC (set);
      /* Look through an explicit sign-extension of the loaded value.  */
      if (GET_CODE (*mem) == SIGN_EXTEND)
        mem = &XEXP (*mem, 0);
    }

  /* Check that *REG is a suitable coprocessor register.  */
  if (GET_CODE (*reg) != REG || !LOADABLE_CR_REGNO_P (REGNO (*reg)))
    return false;

  /* Check that *MEM is a suitable memory reference.  */
  if (GET_CODE (*mem) != MEM || !rtx_equal_p (XEXP (*mem, 0), gpr))
    return false;

  /* Get the number of bytes in each operand.  */
  mem_bytes = GET_MODE_SIZE (GET_MODE (*mem));
  reg_bytes = GET_MODE_SIZE (GET_MODE (*reg));

  /* Check that OFFSET is suitably aligned.  */
  if (INTVAL (offset) & (mem_bytes - 1))
    return false;

  /* From here on the SET is modified in place; no more failure
     returns are allowed.  */

  /* Convert *MEM to a normal integer mode.  */
  mem_mode = mode_for_size (mem_bytes * BITS_PER_UNIT, MODE_INT, 0);
  *mem = change_address (*mem, mem_mode, NULL);

  /* Adjust *REG as well.  */
  *reg = shallow_copy_rtx (*reg);
  if (reg == &SET_DEST (set) && reg_bytes < UNITS_PER_WORD)
    {
      /* SET is a subword load.  Convert it to an explicit extension.  */
      PUT_MODE (*reg, SImode);
      *mem = gen_rtx_SIGN_EXTEND (SImode, *mem);
    }
  else
    {
      reg_mode = mode_for_size (reg_bytes * BITS_PER_UNIT, MODE_INT, 0);
      PUT_MODE (*reg, reg_mode);
    }
  return true;
}
968
 
969
/* Return the effect of frame-related instruction INSN.  */
970
 
971
static rtx
972
mep_frame_expr (rtx insn)
973
{
974
  rtx note, expr;
975
 
976
  note = find_reg_note (insn, REG_FRAME_RELATED_EXPR, 0);
977
  expr = (note != 0 ? XEXP (note, 0) : copy_rtx (PATTERN (insn)));
978
  RTX_FRAME_RELATED_P (expr) = 1;
979
  return expr;
980
}
981
 
982
/* Merge instructions INSN1 and INSN2 using a PARALLEL.  Store the
983
   new pattern in INSN1; INSN2 will be deleted by the caller.  */
984
 
985
static void
986
mep_make_parallel (rtx insn1, rtx insn2)
987
{
988
  rtx expr;
989
 
990
  if (RTX_FRAME_RELATED_P (insn2))
991
    {
992
      expr = mep_frame_expr (insn2);
993
      if (RTX_FRAME_RELATED_P (insn1))
994
        expr = gen_rtx_SEQUENCE (VOIDmode,
995
                                 gen_rtvec (2, mep_frame_expr (insn1), expr));
996
      set_unique_reg_note (insn1, REG_FRAME_RELATED_EXPR, expr);
997
      RTX_FRAME_RELATED_P (insn1) = 1;
998
    }
999
 
1000
  PATTERN (insn1) = gen_rtx_PARALLEL (VOIDmode,
1001
                                      gen_rtvec (2, PATTERN (insn1),
1002
                                                 PATTERN (insn2)));
1003
  INSN_CODE (insn1) = -1;
1004
}
1005
 
1006
/* SET_INSN is an instruction that adds OFFSET to REG.  Go back through
1007
   the basic block to see if any previous load or store instruction can
1008
   be persuaded to do SET_INSN as a side-effect.  Return true if so.  */
1009
 
1010
static bool
mep_use_post_modify_p_1 (rtx set_insn, rtx reg, rtx offset)
{
  rtx insn;

  /* Walk backwards from SET_INSN, staying inside the basic block.  */
  insn = set_insn;
  do
    {
      insn = PREV_INSN (insn);
      if (INSN_P (insn))
        {
          /* If this earlier load/store can absorb the increment as a
             post-modify side effect, merge SET_INSN into it.  */
          if (mep_use_post_modify_for_set_p (PATTERN (insn), reg, offset))
            {
              mep_make_parallel (insn, set_insn);
              return true;
            }

          /* Stop if REG is written or read here, or if the insn is
             volatile -- moving the increment past it would be unsafe.  */
          if (reg_set_p (reg, insn)
              || reg_referenced_p (reg, PATTERN (insn))
              || volatile_insn_p (PATTERN (insn)))
            return false;
        }
    }
  while (!NOTE_INSN_BASIC_BLOCK_P (insn));
  return false;
}
1036
 
1037
/* A wrapper around mep_use_post_modify_p_1 that preserves recog_data.  */
1038
 
1039
bool
1040
mep_use_post_modify_p (rtx insn, rtx reg, rtx offset)
1041
{
1042
  bool result = mep_use_post_modify_p_1 (insn, reg, offset);
1043
  extract_insn (insn);
1044
  return result;
1045
}
1046
 
1047
bool
1048
mep_allow_clip (rtx ux, rtx lx, int s)
1049
{
1050
  HOST_WIDE_INT u = INTVAL (ux);
1051
  HOST_WIDE_INT l = INTVAL (lx);
1052
  int i;
1053
 
1054
  if (!TARGET_OPT_CLIP)
1055
    return false;
1056
 
1057
  if (s)
1058
    {
1059
      for (i = 0; i < 30; i ++)
1060
        if ((u == ((HOST_WIDE_INT) 1 << i) - 1)
1061
            && (l == - ((HOST_WIDE_INT) 1 << i)))
1062
          return true;
1063
    }
1064
  else
1065
    {
1066
      if (l != 0)
1067
        return false;
1068
 
1069
      for (i = 0; i < 30; i ++)
1070
        if ((u == ((HOST_WIDE_INT) 1 << i) - 1))
1071
          return true;
1072
    }
1073
  return false;
1074
}
1075
 
1076
bool
1077
mep_bit_position_p (rtx x, bool looking_for)
1078
{
1079
  if (GET_CODE (x) != CONST_INT)
1080
    return false;
1081
  switch ((int) INTVAL(x) & 0xff)
1082
    {
1083
    case 0x01: case 0x02: case 0x04: case 0x08:
1084
    case 0x10: case 0x20: case 0x40: case 0x80:
1085
      return looking_for;
1086
    case 0xfe: case 0xfd: case 0xfb: case 0xf7:
1087
    case 0xef: case 0xdf: case 0xbf: case 0x7f:
1088
      return !looking_for;
1089
    }
1090
  return false;
1091
}
1092
 
1093
static bool
1094
move_needs_splitting (rtx dest, rtx src,
1095
                      enum machine_mode mode ATTRIBUTE_UNUSED)
1096
{
1097
  int s = mep_section_tag (src);
1098
 
1099
  while (1)
1100
    {
1101
      if (GET_CODE (src) == CONST
1102
          || GET_CODE (src) == MEM)
1103
        src = XEXP (src, 0);
1104
      else if (GET_CODE (src) == SYMBOL_REF
1105
               || GET_CODE (src) == LABEL_REF
1106
               || GET_CODE (src) == PLUS)
1107
        break;
1108
      else
1109
        return false;
1110
    }
1111
  if (s == 'f'
1112
      || (GET_CODE (src) == PLUS
1113
          && GET_CODE (XEXP (src, 1)) == CONST_INT
1114
          && (INTVAL (XEXP (src, 1)) < -65536
1115
              || INTVAL (XEXP (src, 1)) > 0xffffff))
1116
      || (GET_CODE (dest) == REG
1117
          && REGNO (dest) > 7 && REGNO (dest) < FIRST_PSEUDO_REGISTER))
1118
    return true;
1119
  return false;
1120
}
1121
 
1122
bool
1123
mep_split_mov (rtx *operands, int symbolic)
1124
{
1125
  if (symbolic)
1126
    {
1127
      if (move_needs_splitting (operands[0], operands[1], SImode))
1128
        return true;
1129
      return false;
1130
    }
1131
 
1132
  if (GET_CODE (operands[1]) != CONST_INT)
1133
    return false;
1134
 
1135
  if (constraint_satisfied_p (operands[1], CONSTRAINT_I)
1136
      || constraint_satisfied_p (operands[1], CONSTRAINT_J)
1137
      || constraint_satisfied_p (operands[1], CONSTRAINT_O))
1138
    return false;
1139
 
1140
  if (((!reload_completed && !reload_in_progress)
1141
       || (REG_P (operands[0]) && REGNO (operands[0]) < 8))
1142
      && constraint_satisfied_p (operands[1], CONSTRAINT_K))
1143
    return false;
1144
 
1145
  return true;
1146
}
1147
 
1148
/* Irritatingly, the "jsrv" insn *toggles* PSW.OM rather than set
1149
   it to one specific value.  So the insn chosen depends on whether
1150
   the source and destination modes match.  */
1151
 
1152
bool
1153
mep_vliw_mode_match (rtx tgt)
1154
{
1155
  bool src_vliw = mep_vliw_function_p (cfun->decl);
1156
  bool tgt_vliw = INTVAL (tgt);
1157
 
1158
  return src_vliw == tgt_vliw;
1159
}
1160
 
1161
/* Like the above, but also test for near/far mismatches.  */
1162
 
1163
bool
1164
mep_vliw_jmp_match (rtx tgt)
1165
{
1166
  bool src_vliw = mep_vliw_function_p (cfun->decl);
1167
  bool tgt_vliw = INTVAL (tgt);
1168
 
1169
  if (mep_section_tag (DECL_RTL (cfun->decl)) == 'f')
1170
    return false;
1171
 
1172
  return src_vliw == tgt_vliw;
1173
}
1174
 
1175
bool
1176
mep_multi_slot (rtx x)
1177
{
1178
  return get_attr_slot (x) == SLOT_MULTI;
1179
}
1180
 
1181
/* Implement TARGET_LEGITIMATE_CONSTANT_P.  */
1182
 
1183
static bool
1184
mep_legitimate_constant_p (enum machine_mode mode ATTRIBUTE_UNUSED, rtx x)
1185
{
1186
  /* We can't convert symbol values to gp- or tp-rel values after
1187
     reload, as reload might have used $gp or $tp for other
1188
     purposes.  */
1189
  if (GET_CODE (x) == SYMBOL_REF && (reload_in_progress || reload_completed))
1190
    {
1191
      char e = mep_section_tag (x);
1192
      return (e != 't' && e != 'b');
1193
    }
1194
  return 1;
1195
}
1196
 
1197
/* Be careful not to use macros that need to be compiled one way for
1198
   strict, and another way for not-strict, like REG_OK_FOR_BASE_P.  */
1199
 
1200
bool
mep_legitimate_address (enum machine_mode mode, rtx x, int strict)
{
  int the_tag;

#define DEBUG_LEGIT 0
#if DEBUG_LEGIT
  fprintf (stderr, "legit: mode %s strict %d ", mode_name[mode], strict);
  debug_rtx (x);
#endif

  /* %lo(sym)[reg]: allowed only for accesses of at most 4 bytes,
     since wider accesses would be split and lo_sums are not
     offsettable here.  */
  if (GET_CODE (x) == LO_SUM
      && GET_CODE (XEXP (x, 0)) == REG
      && GEN_REG (REGNO (XEXP (x, 0)), strict)
      && CONSTANT_P (XEXP (x, 1)))
    {
      if (GET_MODE_SIZE (mode) > 4)
        {
          /* We will end up splitting this, and lo_sums are not
             offsettable for us.  */
#if DEBUG_LEGIT
          fprintf(stderr, " - nope, %%lo(sym)[reg] not splittable\n");
#endif
          return false;
        }
#if DEBUG_LEGIT
      fprintf (stderr, " - yup, %%lo(sym)[reg]\n");
#endif
      return true;
    }

  /* Plain register indirect.  */
  if (GET_CODE (x) == REG
      && GEN_REG (REGNO (x), strict))
    {
#if DEBUG_LEGIT
      fprintf (stderr, " - yup, [reg]\n");
#endif
      return true;
    }

  /* Register plus 16-bit signed displacement.  */
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == REG
      && GEN_REG (REGNO (XEXP (x, 0)), strict)
      && const_in_range (XEXP (x, 1), -32768, 32767))
    {
#if DEBUG_LEGIT
      fprintf (stderr, " - yup, [reg+const]\n");
#endif
      return true;
    }

  /* Register plus a tp-rel/gp-rel style UNSPEC constant, optionally
     with an integer offset folded in.  */
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == REG
      && GEN_REG (REGNO (XEXP (x, 0)), strict)
      && GET_CODE (XEXP (x, 1)) == CONST
      && (GET_CODE (XEXP (XEXP (x, 1), 0)) == UNSPEC
          || (GET_CODE (XEXP (XEXP (x, 1), 0)) == PLUS
              && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 0)) == UNSPEC
              && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 1)) == CONST_INT)))
    {
#if DEBUG_LEGIT
      fprintf (stderr, " - yup, [reg+unspec]\n");
#endif
      return true;
    }

  the_tag = mep_section_tag (x);

  /* Far-section symbols are never directly addressable.  */
  if (the_tag == 'f')
    {
#if DEBUG_LEGIT
      fprintf (stderr, " - nope, [far]\n");
#endif
      return false;
    }

  /* A bare symbol as a call target (VOIDmode).  */
  if (mode == VOIDmode
      && GET_CODE (x) == SYMBOL_REF)
    {
#if DEBUG_LEGIT
      fprintf (stderr, " - yup, call [symbol]\n");
#endif
      return true;
    }

  /* Absolute constant addresses for word-sized accesses: any
     non-tiny/based symbolic constant, or a word-aligned integer in
     [0, 0xfffff].  */
  if ((mode == SImode || mode == SFmode)
      && CONSTANT_P (x)
      && mep_legitimate_constant_p (mode, x)
      && the_tag != 't' && the_tag != 'b')
    {
      if (GET_CODE (x) != CONST_INT
          || (INTVAL (x) <= 0xfffff
              && INTVAL (x) >= 0
              && (INTVAL (x) % 4) == 0))
        {
#if DEBUG_LEGIT
          fprintf (stderr, " - yup, [const]\n");
#endif
          return true;
        }
    }

#if DEBUG_LEGIT
  fprintf (stderr, " - nope.\n");
#endif
  return false;
}
1307
 
1308
/* Helper for LEGITIMIZE_RELOAD_ADDRESS.  Fix up *X for reload;
   return 1 if a reload was pushed, 0 to let the generic code handle
   it.  */

int
mep_legitimize_reload_address (rtx *x, enum machine_mode mode, int opnum,
                               int type_i,
                               int ind_levels ATTRIBUTE_UNUSED)
{
  enum reload_type type = (enum reload_type) type_i;

  if (GET_CODE (*x) == PLUS
      && GET_CODE (XEXP (*x, 0)) == MEM
      && GET_CODE (XEXP (*x, 1)) == REG)
    {
      /* GCC will by default copy the MEM into a REG, which results in
         an invalid address.  For us, the best thing to do is move the
         whole expression to a REG.  */
      push_reload (*x, NULL_RTX, x, NULL,
                   GENERAL_REGS, mode, VOIDmode,
                   0, 0, opnum, type);
      return 1;
    }

  if (GET_CODE (*x) == PLUS
      && GET_CODE (XEXP (*x, 0)) == SYMBOL_REF
      && GET_CODE (XEXP (*x, 1)) == CONST_INT)
    {
      char e = mep_section_tag (XEXP (*x, 0));

      /* Tiny ('t') and based ('b') symbols are handled elsewhere.  */
      if (e != 't' && e != 'b')
        {
          /* GCC thinks that (sym+const) is a valid address.  Well,
             sometimes it is, this time it isn't.  The best thing to
             do is reload the symbol to a register, since reg+int
             tends to work, and we can't just add the symbol and
             constant anyway.  */
          push_reload (XEXP (*x, 0), NULL_RTX, &(XEXP(*x, 0)), NULL,
                       GENERAL_REGS, mode, VOIDmode,
                       0, 0, opnum, type);
          return 1;
        }
    }
  return 0;
}
1349
 
1350
/* Return the encoded length in bytes (2 or 4) of the core load/store
   INSN, whose memory operand is operand OPN of its single_set.  */

int
mep_core_address_length (rtx insn, int opn)
{
  rtx set = single_set (insn);
  rtx mem = XEXP (set, opn);
  rtx other = XEXP (set, 1-opn);   /* The register side of the move.  */
  rtx addr = XEXP (mem, 0);

  /* Plain register indirect has a short form.  */
  if (register_operand (addr, Pmode))
    return 2;
  if (GET_CODE (addr) == PLUS)
    {
      rtx addend = XEXP (addr, 1);

      gcc_assert (REG_P (XEXP (addr, 0)));

      switch (REGNO (XEXP (addr, 0)))
        {
        case STACK_POINTER_REGNUM:
          /* SP-relative word access with a 7-bit, 4-aligned offset.  */
          if (GET_MODE_SIZE (GET_MODE (mem)) == 4
              && mep_imm7a4_operand (addend, VOIDmode))
            return 2;
          break;

        case 13: /* TP */
          gcc_assert (REG_P (other));

          /* The short TP-relative forms only reach $0..$7.  */
          if (REGNO (other) >= 8)
            break;

          /* A %tprel symbol reference fits the short form.  */
          if (GET_CODE (addend) == CONST
              && GET_CODE (XEXP (addend, 0)) == UNSPEC
              && XINT (XEXP (addend, 0), 1) == UNS_TPREL)
            return 2;

          /* So does a small, naturally-aligned constant offset.  */
          if (GET_CODE (addend) == CONST_INT
              && INTVAL (addend) >= 0
              && INTVAL (addend) <= 127
              && INTVAL (addend) % GET_MODE_SIZE (GET_MODE (mem)) == 0)
            return 2;
          break;
        }
    }

  return 4;
}
1396
 
1397
int
1398
mep_cop_address_length (rtx insn, int opn)
1399
{
1400
  rtx set = single_set (insn);
1401
  rtx mem = XEXP (set, opn);
1402
  rtx addr = XEXP (mem, 0);
1403
 
1404
  if (GET_CODE (mem) != MEM)
1405
    return 2;
1406
  if (register_operand (addr, Pmode))
1407
    return 2;
1408
  if (GET_CODE (addr) == POST_INC)
1409
    return 2;
1410
 
1411
  return 4;
1412
}
1413
 
1414
#define DEBUG_EXPAND_MOV 0
/* Expand a move of MODE between OPERANDS[0] and OPERANDS[1].
   Return true if the move has been fully emitted here, false if the
   caller's pattern should continue with the (possibly modified)
   operands.  */
bool
mep_expand_mov (rtx *operands, enum machine_mode mode)
{
  int i, t;
  int tag[2];                   /* Section tags of both operands.  */
  rtx tpsym, tpoffs;
  int post_reload = 0;          /* Nonzero when we must not use $tp/$gp.  */

  tag[0] = mep_section_tag (operands[0]);
  tag[1] = mep_section_tag (operands[1]);

  /* Before reload, force mem-to-mem moves through a register.  */
  if (!reload_in_progress
      && !reload_completed
      && GET_CODE (operands[0]) != REG
      && GET_CODE (operands[0]) != SUBREG
      && GET_CODE (operands[1]) != REG
      && GET_CODE (operands[1]) != SUBREG)
    operands[1] = copy_to_mode_reg (mode, operands[1]);

#if DEBUG_EXPAND_MOV
  fprintf(stderr, "expand move %s %d\n", mode_name[mode],
          reload_in_progress || reload_completed);
  debug_rtx (operands[0]);
  debug_rtx (operands[1]);
#endif

  if (mode == DImode || mode == DFmode)
    return false;

  if (reload_in_progress || reload_completed)
    {
      rtx r;

      /* Track reload writing $tp; tp-rel addressing then becomes
         unreliable for the rest of the function.  */
      if (GET_CODE (operands[0]) == REG && REGNO (operands[0]) == TP_REGNO)
        cfun->machine->reload_changes_tp = true;

      /* 't' (tiny) needs a live $gp, 'b' (based) a live $tp; if the
         incoming value wasn't preserved we can't use them.  */
      if (tag[0] == 't' || tag[1] == 't')
        {
          r = has_hard_reg_initial_val (Pmode, GP_REGNO);
          if (!r || GET_CODE (r) != REG || REGNO (r) != GP_REGNO)
            post_reload = 1;
        }
      if (tag[0] == 'b' || tag[1] == 'b')
        {
          r = has_hard_reg_initial_val (Pmode, TP_REGNO);
          if (!r || GET_CODE (r) != REG || REGNO (r) != TP_REGNO)
            post_reload = 1;
        }
      if (cfun->machine->reload_changes_tp == true)
        post_reload = 1;
    }

  if (!post_reload)
    {
      rtx n;
      /* Rewrite a tiny/based symbolic source as $gp/$tp plus a
         %gprel/%tprel UNSPEC and emit the move directly.  */
      if (symbol_p (operands[1]))
        {
          t = mep_section_tag (operands[1]);
          if (t == 'b' || t == 't')
            {

              if (GET_CODE (operands[1]) == SYMBOL_REF)
                {
                  tpsym = operands[1];
                  n = gen_rtx_UNSPEC (mode,
                                      gen_rtvec (1, operands[1]),
                                      t == 'b' ? UNS_TPREL : UNS_GPREL);
                  n = gen_rtx_CONST (mode, n);
                }
              else if (GET_CODE (operands[1]) == CONST
                       && GET_CODE (XEXP (operands[1], 0)) == PLUS
                       && GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == SYMBOL_REF
                       && GET_CODE (XEXP (XEXP (operands[1], 0), 1)) == CONST_INT)
                {
                  /* (const (plus sym int)): wrap just the symbol in
                     the UNSPEC and re-add the offset outside it.  */
                  tpsym = XEXP (XEXP (operands[1], 0), 0);
                  tpoffs = XEXP (XEXP (operands[1], 0), 1);
                  n = gen_rtx_UNSPEC (mode,
                                      gen_rtvec (1, tpsym),
                                      t == 'b' ? UNS_TPREL : UNS_GPREL);
                  n = gen_rtx_PLUS (mode, n, tpoffs);
                  n = gen_rtx_CONST (mode, n);
                }
              else if (GET_CODE (operands[1]) == CONST
                       && GET_CODE (XEXP (operands[1], 0)) == UNSPEC)
                return false;
              else
                {
                  error ("unusual TP-relative address");
                  return false;
                }

              n = gen_rtx_PLUS (mode, (t == 'b' ? mep_tp_rtx ()
                                       : mep_gp_rtx ()), n);
              n = emit_insn (gen_rtx_SET (mode, operands[0], n));
#if DEBUG_EXPAND_MOV
              fprintf(stderr, "mep_expand_mov emitting ");
              debug_rtx(n);
#endif
              return true;
            }
        }

      /* Likewise rewrite tiny/based MEM addresses in either operand
         to base-register-plus-UNSPEC form.  */
      for (i=0; i < 2; i++)
        {
          t = mep_section_tag (operands[i]);
          if (GET_CODE (operands[i]) == MEM && (t == 'b' || t == 't'))
            {
              rtx sym, n, r;
              int u;

              sym = XEXP (operands[i], 0);
              if (GET_CODE (sym) == CONST
                  && GET_CODE (XEXP (sym, 0)) == UNSPEC)
                sym = XVECEXP (XEXP (sym, 0), 0, 0);

              if (t == 'b')
                {
                  r = mep_tp_rtx ();
                  u = UNS_TPREL;
                }
              else
                {
                  r = mep_gp_rtx ();
                  u = UNS_GPREL;
                }

              n = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, sym), u);
              n = gen_rtx_CONST (Pmode, n);
              n = gen_rtx_PLUS (Pmode, r, n);
              operands[i] = replace_equiv_address (operands[i], n);
            }
        }
    }

  /* Control-register moves must have a GPR on the other side; force
     a non-register source through a fresh register.  */
  if ((GET_CODE (operands[1]) != REG
       && MEP_CONTROL_REG (operands[0]))
      || (GET_CODE (operands[0]) != REG
          && MEP_CONTROL_REG (operands[1])))
    {
      rtx temp;
#if DEBUG_EXPAND_MOV
      fprintf (stderr, "cr-mem, forcing op1 to reg\n");
#endif
      temp = gen_reg_rtx (mode);
      emit_move_insn (temp, operands[1]);
      operands[1] = temp;
    }

  /* A store to a far symbol, or a non-word-sized symbolic store,
     needs the address in a register first.  */
  if (symbolref_p (operands[0])
      && (mep_section_tag (XEXP (operands[0], 0)) == 'f'
          || (GET_MODE_SIZE (mode) != 4)))
    {
      rtx temp;

      gcc_assert (!reload_in_progress && !reload_completed);

      temp = force_reg (Pmode, XEXP (operands[0], 0));
      operands[0] = replace_equiv_address (operands[0], temp);
      emit_move_insn (operands[0], operands[1]);
      return true;
    }

  /* Before reload the 't'/'b' cases were handled above, so don't
     treat them as symbolic below.  */
  if (!post_reload && (tag[1] == 't' || tag[1] == 'b'))
    tag[1] = 0;

  /* Symbolic constant source: load it with a top/bot pair.  */
  if (symbol_p (operands[1])
      && (tag[1] == 'f' || tag[1] == 't' || tag[1] == 'b'))
    {
      emit_insn (gen_movsi_topsym_s (operands[0], operands[1]));
      emit_insn (gen_movsi_botsym_s (operands[0], operands[0], operands[1]));
      return true;
    }

  /* Symbolic MEM source: build the address with a top/bot pair, then
     load through it.  */
  if (symbolref_p (operands[1])
      && (tag[1] == 'f' || tag[1] == 't' || tag[1] == 'b'))
    {
      rtx temp;

      /* During/after reload no new pseudos; reuse the destination as
         the scratch.  */
      if (reload_in_progress || reload_completed)
        temp = operands[0];
      else
        temp = gen_reg_rtx (Pmode);

      emit_insn (gen_movsi_topsym_s (temp, operands[1]));
      emit_insn (gen_movsi_botsym_s (temp, temp, operands[1]));
      emit_move_insn (operands[0], replace_equiv_address (operands[1], temp));
      return true;
    }

  return false;
}
1606
 
1607
/* Cases where the pattern can't be made to use at all.  */
1608
 
1609
/* Predicate for the mov patterns: return false for operand
   combinations the patterns must not accept at all.  */
bool
mep_mov_ok (rtx *operands, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  int i;

#define DEBUG_MOV_OK 0
#if DEBUG_MOV_OK
  fprintf (stderr, "mep_mov_ok %s %c=%c\n", mode_name[mode], mep_section_tag (operands[0]),
           mep_section_tag (operands[1]));
  debug_rtx (operands[0]);
  debug_rtx (operands[1]);
#endif

  /* We want the movh patterns to get these.  */
  if (GET_CODE (operands[1]) == HIGH)
    return false;

  /* We can't store a register to a far variable without using a
     scratch register to hold the address.  Using far variables should
     be split by mep_emit_mov anyway.  */
  if (mep_section_tag (operands[0]) == 'f'
      || mep_section_tag (operands[1]) == 'f')
    {
#if DEBUG_MOV_OK
      fprintf (stderr, " - no, f\n");
#endif
      return false;
    }
  i = mep_section_tag (operands[1]);
  if ((i == 'b' || i == 't') && !reload_completed && !reload_in_progress)
    /* These are supposed to be generated with adds of the appropriate
       register.  During and after reload, however, we allow them to
       be accessed as normal symbols because adding a dependency on
       the base register now might cause problems.  */
    {
#if DEBUG_MOV_OK
      fprintf (stderr, " - no, bt\n");
#endif
      return false;
    }

  /* The only moves we can allow involve at least one general
     register, so require it.  */
  for (i = 0; i < 2; i ++)
    {
      /* Allow subregs too, before reload.  */
      rtx x = operands[i];

      if (GET_CODE (x) == SUBREG)
        x = XEXP (x, 0);
      /* A non-control hard or pseudo register qualifies.  */
      if (GET_CODE (x) == REG
          && ! MEP_CONTROL_REG (x))
        {
#if DEBUG_MOV_OK
          fprintf (stderr, " - ok\n");
#endif
          return true;
        }
    }
#if DEBUG_MOV_OK
  fprintf (stderr, " - no, no gen reg\n");
#endif
  return false;
}
1673
 
1674
#define DEBUG_SPLIT_WIDE_MOVE 0
/* Split a double-word move: for each of OPERANDS[0] and OPERANDS[1],
   store the high word in OPERANDS[2]/[3] and the low word in
   OPERANDS[4]/[5], for use by the .md splitters.  */
void
mep_split_wide_move (rtx *operands, enum machine_mode mode)
{
  int i;

#if DEBUG_SPLIT_WIDE_MOVE
  fprintf (stderr, "\n\033[34mmep_split_wide_move\033[0m mode %s\n", mode_name[mode]);
  debug_rtx (operands[0]);
  debug_rtx (operands[1]);
#endif

  for (i = 0; i <= 1; i++)
    {
      rtx op = operands[i], hi, lo;

      switch (GET_CODE (op))
        {
        case REG:
          {
            unsigned int regno = REGNO (op);

            if (TARGET_64BIT_CR_REGS && CR_REGNO_P (regno))
              {
                /* A single 64-bit coprocessor register: the low word
                   is the SImode view, the high word a zero_extract
                   of bits 32..63.  */
                rtx i32;

                lo = gen_rtx_REG (SImode, regno);
                i32 = GEN_INT (32);
                hi = gen_rtx_ZERO_EXTRACT (SImode,
                                           gen_rtx_REG (DImode, regno),
                                           i32, i32);
              }
            else
              {
                /* A register pair; which register holds the high word
                   depends on endianness.  */
                hi = gen_rtx_REG (SImode, regno + TARGET_LITTLE_ENDIAN);
                lo = gen_rtx_REG (SImode, regno + TARGET_BIG_ENDIAN);
              }
          }
          break;

        case CONST_INT:
        case CONST_DOUBLE:
        case MEM:
          /* Subword 0/1 selection likewise depends on endianness.  */
          hi = operand_subword (op, TARGET_LITTLE_ENDIAN, 0, mode);
          lo = operand_subword (op, TARGET_BIG_ENDIAN, 0, mode);
          break;

        default:
          gcc_unreachable ();
        }

      /* The high part of CR <- GPR moves must be done after the low part.  */
      operands [i + 4] = lo;
      operands [i + 2] = hi;
    }

  if (reg_mentioned_p (operands[2], operands[5])
      || GET_CODE (operands[2]) == ZERO_EXTRACT
      || GET_CODE (operands[4]) == ZERO_EXTRACT)
    {
      rtx tmp;

      /* Overlapping register pairs -- make sure we don't
         early-clobber ourselves.  */
      tmp = operands[2];
      operands[2] = operands[4];
      operands[4] = tmp;
      tmp = operands[3];
      operands[3] = operands[5];
      operands[5] = tmp;
    }

#if DEBUG_SPLIT_WIDE_MOVE
  fprintf(stderr, "\033[34m");
  debug_rtx (operands[2]);
  debug_rtx (operands[3]);
  debug_rtx (operands[4]);
  debug_rtx (operands[5]);
  fprintf(stderr, "\033[0m");
#endif
}
1755
 
1756
/* Emit a setcc instruction in its entirety.  */
1757
 
1758
static bool
1759
mep_expand_setcc_1 (enum rtx_code code, rtx dest, rtx op1, rtx op2)
1760
{
1761
  rtx tmp;
1762
 
1763
  switch (code)
1764
    {
1765
    case GT:
1766
    case GTU:
1767
      tmp = op1, op1 = op2, op2 = tmp;
1768
      code = swap_condition (code);
1769
      /* FALLTHRU */
1770
 
1771
    case LT:
1772
    case LTU:
1773
      op1 = force_reg (SImode, op1);
1774
      emit_insn (gen_rtx_SET (VOIDmode, dest,
1775
                              gen_rtx_fmt_ee (code, SImode, op1, op2)));
1776
      return true;
1777
 
1778
    case EQ:
1779
      if (op2 != const0_rtx)
1780
        op1 = expand_binop (SImode, sub_optab, op1, op2, NULL, 1, OPTAB_WIDEN);
1781
      mep_expand_setcc_1 (LTU, dest, op1, const1_rtx);
1782
      return true;
1783
 
1784
    case NE:
1785
      /* Branchful sequence:
1786
                mov dest, 0             16-bit
1787
                beq op1, op2, Lover     16-bit (op2 < 16), 32-bit otherwise
1788
                mov dest, 1             16-bit
1789
 
1790
         Branchless sequence:
1791
                add3 tmp, op1, -op2     32-bit (or mov + sub)
1792
                sltu3 tmp, tmp, 1       16-bit
1793
                xor3 dest, tmp, 1       32-bit
1794
        */
1795
      if (optimize_size && op2 != const0_rtx)
1796
        return false;
1797
 
1798
      if (op2 != const0_rtx)
1799
        op1 = expand_binop (SImode, sub_optab, op1, op2, NULL, 1, OPTAB_WIDEN);
1800
 
1801
      op2 = gen_reg_rtx (SImode);
1802
      mep_expand_setcc_1 (LTU, op2, op1, const1_rtx);
1803
 
1804
      emit_insn (gen_rtx_SET (VOIDmode, dest,
1805
                              gen_rtx_XOR (SImode, op2, const1_rtx)));
1806
      return true;
1807
 
1808
    case LE:
1809
      if (GET_CODE (op2) != CONST_INT
1810
          || INTVAL (op2) == 0x7ffffff)
1811
        return false;
1812
      op2 = GEN_INT (INTVAL (op2) + 1);
1813
      return mep_expand_setcc_1 (LT, dest, op1, op2);
1814
 
1815
    case LEU:
1816
      if (GET_CODE (op2) != CONST_INT
1817
          || INTVAL (op2) == -1)
1818
        return false;
1819
      op2 = GEN_INT (trunc_int_for_mode (INTVAL (op2) + 1, SImode));
1820
      return mep_expand_setcc_1 (LTU, dest, op1, op2);
1821
 
1822
    case GE:
1823
      if (GET_CODE (op2) != CONST_INT
1824
          || INTVAL (op2) == trunc_int_for_mode (0x80000000, SImode))
1825
        return false;
1826
      op2 = GEN_INT (INTVAL (op2) - 1);
1827
      return mep_expand_setcc_1 (GT, dest, op1, op2);
1828
 
1829
    case GEU:
1830
      if (GET_CODE (op2) != CONST_INT
1831
          || op2 == const0_rtx)
1832
        return false;
1833
      op2 = GEN_INT (trunc_int_for_mode (INTVAL (op2) - 1, SImode));
1834
      return mep_expand_setcc_1 (GTU, dest, op1, op2);
1835
 
1836
    default:
1837
      gcc_unreachable ();
1838
    }
1839
}
1840
 
1841
bool
1842
mep_expand_setcc (rtx *operands)
1843
{
1844
  rtx dest = operands[0];
1845
  enum rtx_code code = GET_CODE (operands[1]);
1846
  rtx op0 = operands[2];
1847
  rtx op1 = operands[3];
1848
 
1849
  return mep_expand_setcc_1 (code, dest, op0, op1);
1850
}
1851
 
1852
/* Rewrite the comparison in OPERANDS[0] (over OPERANDS[1] and
   OPERANDS[2]) into a form the branch patterns accept, emitting any
   needed setcc insns, and return the resulting comparison rtx.  */
rtx
mep_expand_cbranch (rtx *operands)
{
  enum rtx_code code = GET_CODE (operands[0]);
  rtx op0 = operands[1];
  rtx op1 = operands[2];
  rtx tmp;

 restart:
  switch (code)
    {
    case LT:
      /* Small immediates can be branched on directly.  */
      if (mep_imm4_operand (op1, SImode))
        break;

      /* Otherwise materialize the comparison and branch on != 0.  */
      tmp = gen_reg_rtx (SImode);
      gcc_assert (mep_expand_setcc_1 (LT, tmp, op0, op1));
      code = NE;
      op0 = tmp;
      op1 = const0_rtx;
      break;

    case GE:
      if (mep_imm4_operand (op1, SImode))
        break;

      /* GE is the negation of LT: compute LT, branch on == 0.  */
      tmp = gen_reg_rtx (SImode);
      gcc_assert (mep_expand_setcc_1 (LT, tmp, op0, op1));

      code = EQ;
      op0 = tmp;
      op1 = const0_rtx;
      break;

    case EQ:
    case NE:
      if (! mep_reg_or_imm4_operand (op1, SImode))
        op1 = force_reg (SImode, op1);
      break;

    case LE:
    case GT:
      /* Prefer adjusting the constant: x <= c is x < c+1,
         x > c is x >= c+1 (safe unless c is INT_MAX).  */
      if (GET_CODE (op1) == CONST_INT
          && INTVAL (op1) != 0x7fffffff)
        {
          op1 = GEN_INT (INTVAL (op1) + 1);
          code = (code == LE ? LT : GE);
          goto restart;
        }

      /* Otherwise x <= y is !(y < x).  */
      tmp = gen_reg_rtx (SImode);
      gcc_assert (mep_expand_setcc_1 (LT, tmp, op1, op0));

      code = (code == LE ? EQ : NE);
      op0 = tmp;
      op1 = const0_rtx;
      break;

    case LTU:
      /* x < 1 (unsigned) is simply x == 0.  */
      if (op1 == const1_rtx)
        {
          code = EQ;
          op1 = const0_rtx;
          break;
        }

      tmp = gen_reg_rtx (SImode);
      gcc_assert (mep_expand_setcc_1 (LTU, tmp, op0, op1));
      code = NE;
      op0 = tmp;
      op1 = const0_rtx;
      break;

    case LEU:
      /* Try LEU directly, else its negation via swapped LTU.  */
      tmp = gen_reg_rtx (SImode);
      if (mep_expand_setcc_1 (LEU, tmp, op0, op1))
        code = NE;
      else if (mep_expand_setcc_1 (LTU, tmp, op1, op0))
        code = EQ;
      else
        gcc_unreachable ();
      op0 = tmp;
      op1 = const0_rtx;
      break;

    case GTU:
      /* GTU x,y is LTU y,x; one of the two must succeed.  */
      tmp = gen_reg_rtx (SImode);
      gcc_assert (mep_expand_setcc_1 (GTU, tmp, op0, op1)
                  || mep_expand_setcc_1 (LTU, tmp, op1, op0));
      code = NE;
      op0 = tmp;
      op1 = const0_rtx;
      break;

    case GEU:
      /* Try GEU directly, else its negation via LTU.  */
      tmp = gen_reg_rtx (SImode);
      if (mep_expand_setcc_1 (GEU, tmp, op0, op1))
        code = NE;
      else if (mep_expand_setcc_1 (LTU, tmp, op0, op1))
        code = EQ;
      else
        gcc_unreachable ();
      op0 = tmp;
      op1 = const0_rtx;
      break;

    default:
      gcc_unreachable ();
    }

  return gen_rtx_fmt_ee (code, VOIDmode, op0, op1);
}
1964
 
1965
const char *
1966
mep_emit_cbranch (rtx *operands, int ne)
1967
{
1968
  if (GET_CODE (operands[1]) == REG)
1969
    return ne ? "bne\t%0, %1, %l2" : "beq\t%0, %1, %l2";
1970
  else if (INTVAL (operands[1]) == 0 && !mep_vliw_function_p(cfun->decl))
1971
    return ne ? "bnez\t%0, %l2" : "beqz\t%0, %l2";
1972
  else
1973
    return ne ? "bnei\t%0, %1, %l2" : "beqi\t%0, %1, %l2";
1974
}
1975
 
1976
/* Expand a call pattern.  OPERANDS follows the layout of the call /
   call_value expanders: when RETURNS_VALUE is nonzero, operands[0] is
   the value destination and operands[1] the MEM naming the callee;
   otherwise operands[0] is the MEM itself.  The call insns emitted here
   also carry the function's $tp and $gp "initial value" registers so
   the callee sees the correct pointers.  */

void
mep_expand_call (rtx *operands, int returns_value)
{
  rtx addr = operands[returns_value];
  rtx tp = mep_tp_rtx ();
  rtx gp = mep_gp_rtx ();

  gcc_assert (GET_CODE (addr) == MEM);

  /* Strip the MEM wrapper to get the call address proper.  */
  addr = XEXP (addr, 0);

  if (! mep_call_address_operand (addr, VOIDmode))
    addr = force_reg (SImode, addr);

  /* The third operand (next-arg / static-chain slot) may be absent;
     normalize it to zero so the generated pattern is well-formed.  */
  if (! operands[returns_value+2])
    operands[returns_value+2] = const0_rtx;

  if (returns_value)
    emit_call_insn (gen_call_value_internal (operands[0], addr, operands[2],
                                             operands[3], tp, gp));
  else
    emit_call_insn (gen_call_internal (addr, operands[1],
                                       operands[2], tp, gp));
}
2000
 
2001
/* Aliasing Support.  */
2002
 
2003
/* If X is a machine specific address (i.e. a symbol or label being
2004
   referenced as a displacement from the GOT implemented using an
2005
   UNSPEC), then return the base term.  Otherwise return X.  */
2006
 
2007
/* Implement FIND_BASE_TERM for MeP.  Recognize (plus BASE (const
   (unspec [SYM] UNS_TPREL/UNS_GPREL))) where BASE is the function's
   $tp or $gp initial-value register, and return the underlying symbol
   SYM; otherwise return X unchanged.  This lets alias analysis see
   through the tp-rel/gp-rel addressing wrappers.  */

rtx
mep_find_base_term (rtx x)
{
  rtx base, term;
  int unspec;

  if (GET_CODE (x) != PLUS)
    return x;
  base = XEXP (x, 0);
  term = XEXP (x, 1);

  /* Decide which unspec we expect from which base register was used.
     has_hard_reg_initial_val is checked first so we don't force the
     creation of an initial-value pseudo just to compare against it.  */
  if (has_hard_reg_initial_val(Pmode, TP_REGNO)
      && base == mep_tp_rtx ())
    unspec = UNS_TPREL;
  else if (has_hard_reg_initial_val(Pmode, GP_REGNO)
           && base == mep_gp_rtx ())
    unspec = UNS_GPREL;
  else
    return x;

  if (GET_CODE (term) != CONST)
    return x;
  term = XEXP (term, 0);

  if (GET_CODE (term) != UNSPEC
      || XINT (term, 1) != unspec)
    return x;

  /* The symbol is the sole element of the unspec's vector.  */
  return XVECEXP (term, 0, 0);
}
2037
 
2038
/* Reload Support.  */
2039
 
2040
/* Return true if the registers in CLASS cannot represent the change from
2041
   modes FROM to TO.  */
2042
 
2043
bool
2044
mep_cannot_change_mode_class (enum machine_mode from, enum machine_mode to,
2045
                               enum reg_class regclass)
2046
{
2047
  if (from == to)
2048
    return false;
2049
 
2050
  /* 64-bit COP regs must remain 64-bit COP regs.  */
2051
  if (TARGET_64BIT_CR_REGS
2052
      && (regclass == CR_REGS
2053
          || regclass == LOADABLE_CR_REGS)
2054
      && (GET_MODE_SIZE (to) < 8
2055
          || GET_MODE_SIZE (from) < 8))
2056
    return true;
2057
 
2058
  return false;
2059
}
2060
 
2061
/* True if register class C contains anything outside the core general
   registers (i.e. is not a subset of GENERAL_REGS).  */
#define MEP_NONGENERAL_CLASS(C) (!reg_class_subset_p (C, GENERAL_REGS))
2062
 
2063
static bool
2064
mep_general_reg (rtx x)
2065
{
2066
  while (GET_CODE (x) == SUBREG)
2067
    x = XEXP (x, 0);
2068
  return GET_CODE (x) == REG && GR_REGNO_P (REGNO (x));
2069
}
2070
 
2071
static bool
2072
mep_nongeneral_reg (rtx x)
2073
{
2074
  while (GET_CODE (x) == SUBREG)
2075
    x = XEXP (x, 0);
2076
  return (GET_CODE (x) == REG
2077
          && !GR_REGNO_P (REGNO (x)) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2078
}
2079
 
2080
static bool
2081
mep_general_copro_reg (rtx x)
2082
{
2083
  while (GET_CODE (x) == SUBREG)
2084
    x = XEXP (x, 0);
2085
  return (GET_CODE (x) == REG && CR_REGNO_P (REGNO (x)));
2086
}
2087
 
2088
static bool
2089
mep_nonregister (rtx x)
2090
{
2091
  while (GET_CODE (x) == SUBREG)
2092
    x = XEXP (x, 0);
2093
  return (GET_CODE (x) != REG || REGNO (x) >= FIRST_PSEUDO_REGISTER);
2094
}
2095
 
2096
/* Set to 1 for stderr tracing of the secondary-reload decisions below.  */
#define DEBUG_RELOAD 0
2097
 
2098
/* Return the secondary reload class needed for moving value X to or
2099
   from a register in coprocessor register class CLASS.  */
2100
 
2101
/* Return the secondary reload class needed to move X into (or out of)
   coprocessor register class RCLASS, or NO_REGS if no intermediate
   register is required.  The answer depends on which of the optional
   core<->copro and copro<->copro move instructions this configuration
   provides.  */

static enum reg_class
mep_secondary_copro_reload_class (enum reg_class rclass, rtx x)
{
  if (mep_general_reg (x))
    /* We can do the move directly if mep_have_core_copro_moves_p,
       otherwise we need to go through memory.  Either way, no secondary
       register is needed.  */
    return NO_REGS;

  if (mep_general_copro_reg (x))
    {
      /* We can do the move directly if mep_have_copro_copro_moves_p.  */
      if (mep_have_copro_copro_moves_p)
        return NO_REGS;

      /* Otherwise we can use a temporary if mep_have_core_copro_moves_p.  */
      if (mep_have_core_copro_moves_p)
        return GENERAL_REGS;

      /* Otherwise we need to do it through memory.  No secondary
         register is needed.  */
      return NO_REGS;
    }

  /* 'U' is the constraint for a memory operand the copro loads/stores
     can address directly.  */
  if (reg_class_subset_p (rclass, LOADABLE_CR_REGS)
      && constraint_satisfied_p (x, CONSTRAINT_U))
    /* X is a memory value that we can access directly.  */
    return NO_REGS;

  /* We have to move X into a GPR first and then copy it to
     the coprocessor register.  The move from the GPR to the
     coprocessor might be done directly or through memory,
     depending on mep_have_core_copro_moves_p. */
  return GENERAL_REGS;
}
2136
 
2137
/* Copying X to register in RCLASS.  */
2138
 
2139
/* Implement SECONDARY_INPUT_RELOAD_CLASS: class of intermediate
   register needed to copy X into a register of class RCLASS, or
   NO_REGS if none is needed.  */

enum reg_class
mep_secondary_input_reload_class (enum reg_class rclass,
                                  enum machine_mode mode ATTRIBUTE_UNUSED,
                                  rtx x)
{
  /* Kept as int so the default NO_REGS assignment and the final cast
     stay warning-free across enum representations.  */
  int rv = NO_REGS;

#if DEBUG_RELOAD
  fprintf (stderr, "secondary input reload copy to %s %s from ", reg_class_names[rclass], mode_name[mode]);
  debug_rtx (x);
#endif

  if (reg_class_subset_p (rclass, CR_REGS))
    rv = mep_secondary_copro_reload_class (rclass, x);
  else if (MEP_NONGENERAL_CLASS (rclass)
           && (mep_nonregister (x) || mep_nongeneral_reg (x)))
    /* Special registers can only be loaded from a GPR.  */
    rv = GENERAL_REGS;

#if DEBUG_RELOAD
  fprintf (stderr, " - requires %s\n", reg_class_names[rv]);
#endif
  return (enum reg_class) rv;
}
2162
 
2163
/* Copying register in RCLASS to X.  */
2164
 
2165
/* Implement SECONDARY_OUTPUT_RELOAD_CLASS: class of intermediate
   register needed to copy a register of class RCLASS out to X, or
   NO_REGS if none is needed.  Mirrors the input-reload logic above.  */

enum reg_class
mep_secondary_output_reload_class (enum reg_class rclass,
                                   enum machine_mode mode ATTRIBUTE_UNUSED,
                                   rtx x)
{
  int rv = NO_REGS;

#if DEBUG_RELOAD
  fprintf (stderr, "secondary output reload copy from %s %s to ", reg_class_names[rclass], mode_name[mode]);
  debug_rtx (x);
#endif

  if (reg_class_subset_p (rclass, CR_REGS))
    rv = mep_secondary_copro_reload_class (rclass, x);
  else if (MEP_NONGENERAL_CLASS (rclass)
           && (mep_nonregister (x) || mep_nongeneral_reg (x)))
    /* Special registers can only be stored via a GPR.  */
    rv = GENERAL_REGS;

#if DEBUG_RELOAD
  fprintf (stderr, " - requires %s\n", reg_class_names[rv]);
#endif

  return (enum reg_class) rv;
}
2189
 
2190
/* Implement SECONDARY_MEMORY_NEEDED.  */
2191
 
2192
bool
2193
mep_secondary_memory_needed (enum reg_class rclass1, enum reg_class rclass2,
2194
                             enum machine_mode mode ATTRIBUTE_UNUSED)
2195
{
2196
  if (!mep_have_core_copro_moves_p)
2197
    {
2198
      if (reg_classes_intersect_p (rclass1, CR_REGS)
2199
          && reg_classes_intersect_p (rclass2, GENERAL_REGS))
2200
        return true;
2201
      if (reg_classes_intersect_p (rclass2, CR_REGS)
2202
          && reg_classes_intersect_p (rclass1, GENERAL_REGS))
2203
        return true;
2204
      if (!mep_have_copro_copro_moves_p
2205
          && reg_classes_intersect_p (rclass1, CR_REGS)
2206
          && reg_classes_intersect_p (rclass2, CR_REGS))
2207
        return true;
2208
    }
2209
  return false;
2210
}
2211
 
2212
/* Expand a secondary reload: operands[0] is the destination,
   operands[1] the source, operands[2] the scratch register supplied by
   reload.  The WHICH encoding below packs two digits per operand:
   tens digit for operands[0], ones digit for operands[1], where
   2 = far-section symbol, 1 = non-general (control/copro) register,
   0 = anything else.  */

void
mep_expand_reload (rtx *operands, enum machine_mode mode)
{
  /* There are three cases for each direction:
     register, farsym
     control, farsym
     control, nearsym */

  int s0 = mep_section_tag (operands[0]) == 'f';
  int s1 = mep_section_tag (operands[1]) == 'f';
  int c0 = mep_nongeneral_reg (operands[0]);
  int c1 = mep_nongeneral_reg (operands[1]);
  int which = (s0 ? 20:0) + (c0 ? 10:0) + (s1 ? 2:0) + (c1 ? 1:0);

#if DEBUG_RELOAD
  fprintf (stderr, "expand_reload %s\n", mode_name[mode]);
  debug_rtx (operands[0]);
  debug_rtx (operands[1]);
#endif

  /* NOTE(review): the leading-zero labels (00, 01, 02) are octal
     constants; that is harmless only because their values are below 8
     and so equal their decimal reading.  */
  switch (which)
    {
    case 00: /* Don't know why this gets here.  */
    case 02: /* general = far */
      emit_move_insn (operands[0], operands[1]);
      return;

    case 10: /* cr = mem */
    case 11: /* cr = cr */
    case 01: /* mem = cr */
    case 12: /* cr = far */
      /* Stage the value through the reload-provided scratch GPR.  */
      emit_move_insn (operands[2], operands[1]);
      emit_move_insn (operands[0], operands[2]);
      return;

    case 20: /* far = general */
      /* Materialize the far address in the scratch, then store
         through it.  */
      emit_move_insn (operands[2], XEXP (operands[1], 0));
      emit_move_insn (operands[0], gen_rtx_MEM (mode, operands[2]));
      return;

    case 21: /* far = cr */
    case 22: /* far = far */
    default:
      fprintf (stderr, "unsupported expand reload case %02d for mode %s\n",
               which, mode_name[mode]);
      debug_rtx (operands[0]);
      debug_rtx (operands[1]);
      gcc_unreachable ();
    }
}
2262
 
2263
/* Implement PREFERRED_RELOAD_CLASS.  See whether X is a constant that
2264
   can be moved directly into registers 0 to 7, but not into the rest.
2265
   If so, and if the required class includes registers 0 to 7, restrict
2266
   it to those registers.  */
2267
 
2268
/* Implement PREFERRED_RELOAD_CLASS.  See whether X is a constant that
   can be moved directly into registers 0 to 7 (TPREL_REGS), but not
   into the rest.  If so, and if the required class includes those
   registers, restrict RCLASS to them.  */

enum reg_class
mep_preferred_reload_class (rtx x, enum reg_class rclass)
{
  switch (GET_CODE (x))
    {
    case CONST_INT:
      /* Mid-range constants (>= 0x10000, < 0x01000000) with nonzero
         low halves need the two-insn sequence only registers 0-7 can
         use efficiently.  */
      if (INTVAL (x) >= 0x10000
          && INTVAL (x) < 0x01000000
          && (INTVAL (x) & 0xffff) != 0
          && reg_class_subset_p (TPREL_REGS, rclass))
        rclass = TPREL_REGS;
      break;

    case CONST:
    case SYMBOL_REF:
    case LABEL_REF:
      /* Near symbols; far ('f'-tagged) symbols can't use the short
         forms.  */
      if (mep_section_tag (x) != 'f'
          && reg_class_subset_p (TPREL_REGS, rclass))
        rclass = TPREL_REGS;
      break;

    default:
      break;
    }
  return rclass;
}
2294
 
2295
/* Implement REGISTER_MOVE_COST.  Return 2 for direct single-register
2296
   moves, 4 for direct double-register moves, and 1000 for anything
2297
   that requires a temporary register or temporary stack slot.  */
2298
 
2299
/* Implement REGISTER_MOVE_COST.  Return 2 for direct single-register
   moves, 4 for direct double-register moves, and 1000 for anything
   that requires a temporary register or temporary stack slot.  */

int
mep_register_move_cost (enum machine_mode mode, enum reg_class from, enum reg_class to)
{
  /* Copro -> copro with direct move instructions available.  */
  if (mep_have_copro_copro_moves_p
      && reg_class_subset_p (from, CR_REGS)
      && reg_class_subset_p (to, CR_REGS))
    {
      if (TARGET_32BIT_CR_REGS && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
        return 4;
      return 2;
    }
  /* Copro -> copro without them: twice the price (staged through a
     core register or memory).  */
  if (reg_class_subset_p (from, CR_REGS)
      && reg_class_subset_p (to, CR_REGS))
    {
      if (TARGET_32BIT_CR_REGS && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
        return 8;
      return 4;
    }
  /* One side is a coprocessor register.  */
  if (reg_class_subset_p (from, CR_REGS)
      || reg_class_subset_p (to, CR_REGS))
    {
      if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
        return 4;
      return 2;
    }
  /* Anything needing a stack temporary, or moves between two
     special-register classes, are strongly discouraged.  */
  if (mep_secondary_memory_needed (from, to, mode))
    return 1000;
  if (MEP_NONGENERAL_CLASS (from) && MEP_NONGENERAL_CLASS (to))
    return 1000;

  if (GET_MODE_SIZE (mode) > 4)
    return 4;

  return 2;
}
2334
 
2335
 
2336
/* Functions to save and restore machine-specific function data.  */
2337
 
2338
/* Allocate a zero-initialized, garbage-collected machine_function for
   the current function (init_machine_status hook).  */

static struct machine_function *
mep_init_machine_status (void)
{
  return ggc_alloc_cleared_machine_function ();
}
2343
 
2344
/* Implement TARGET_ALLOCATE_INITIAL_VALUE: suggest a stack slot (an
   offset below the arg pointer) where the entry value of hard register
   REG can live, assigning one in reg_save_slot if needed.  Return
   NULL_RTX when REG is not a hard register or no slot applies.  */

static rtx
mep_allocate_initial_value (rtx reg)
{
  int rss;

  if (GET_CODE (reg) != REG)
    return NULL_RTX;

  if (REGNO (reg) >= FIRST_PSEUDO_REGISTER)
    return NULL_RTX;

  /* In interrupt functions, the "initial" values of $gp and $tp are
     provided by the prologue.  They are not necessarily the same as
     the values that the caller was using.  */
  if (REGNO (reg) == TP_REGNO || REGNO (reg) == GP_REGNO)
    if (mep_interrupt_p ())
      return NULL_RTX;

  /* Lazily assign a 4-byte save slot; slots are recorded as offsets
     from the start of the register-save area.  */
  if (! cfun->machine->reg_save_slot[REGNO(reg)])
    {
      cfun->machine->reg_save_size += 4;
      cfun->machine->reg_save_slot[REGNO(reg)] = cfun->machine->reg_save_size;
    }

  rss = cfun->machine->reg_save_slot[REGNO(reg)];
  return gen_rtx_MEM (SImode, plus_constant (arg_pointer_rtx, -rss));
}
2371
 
2372
rtx
2373
mep_return_addr_rtx (int count)
2374
{
2375
  if (count != 0)
2376
    return const0_rtx;
2377
 
2378
  return get_hard_reg_initial_val (Pmode, LP_REGNO);
2379
}
2380
 
2381
/* Return the pseudo holding this function's entry value of $tp.  */

static rtx
mep_tp_rtx (void)
{
  return get_hard_reg_initial_val (Pmode, TP_REGNO);
}
2386
 
2387
/* Return the pseudo holding this function's entry value of $gp.  */

static rtx
mep_gp_rtx (void)
{
  return get_hard_reg_initial_val (Pmode, GP_REGNO);
}
2392
 
2393
/* Return true if the current function has the "interrupt" attribute.
   The answer is cached in cfun->machine->interrupt_handler:
   0 = not yet computed, 1 = no, 2 = yes.  */

static bool
mep_interrupt_p (void)
{
  if (cfun->machine->interrupt_handler == 0)
    {
      int interrupt_handler
        = (lookup_attribute ("interrupt",
                             DECL_ATTRIBUTES (current_function_decl))
           != NULL_TREE);
      cfun->machine->interrupt_handler = interrupt_handler ? 2 : 1;
    }
  return cfun->machine->interrupt_handler == 2;
}
2406
 
2407
/* Return true if the current function has the "disinterrupt" attribute
   (run with interrupts disabled).  Cached in
   cfun->machine->disable_interrupts: 0 = unknown, 1 = no, 2 = yes.  */

static bool
mep_disinterrupt_p (void)
{
  if (cfun->machine->disable_interrupts == 0)
    {
      int disable_interrupts
        = (lookup_attribute ("disinterrupt",
                             DECL_ATTRIBUTES (current_function_decl))
           != NULL_TREE);
      cfun->machine->disable_interrupts = disable_interrupts ? 2 : 1;
    }
  return cfun->machine->disable_interrupts == 2;
}
2420
 
2421
 
2422
/* Frame/Epilog/Prolog Related.  */
2423
 
2424
/* Return true if INSN (an insn or a pattern) sets REG.  */

static bool
mep_reg_set_p (rtx reg, rtx insn)
{
  /* Similar to reg_set_p in rtlanal.c, but we ignore calls */
  if (INSN_P (insn))
    {
      /* Auto-increment addressing counts as setting the register.  */
      if (FIND_REG_INC_NOTE (insn, reg))
        return true;
      insn = PATTERN (insn);
    }

  /* A register self-copy is not a meaningful set.  */
  if (GET_CODE (insn) == SET
      && GET_CODE (XEXP (insn, 0)) == REG
      && GET_CODE (XEXP (insn, 1)) == REG
      && REGNO (XEXP (insn, 0)) == REGNO (XEXP (insn, 1)))
    return false;

  return set_of (reg, insn) != NULL_RTX;
}
2443
 
2444
 
2445
/* Cached per-register save decisions (cfun->machine->reg_saved).  */
#define MEP_SAVES_UNKNOWN 0
#define MEP_SAVES_YES 1
#define MEP_SAVES_MAYBE 2
#define MEP_SAVES_NO 3
2449
 
2450
/* Return true if hard register REGNO is (or must be assumed to be)
   written somewhere in the current function's insn stream.  */

static bool
mep_reg_set_in_function (int regno)
{
  rtx reg, insn;

  /* In an interrupt handler, any live register counts as set.  */
  if (mep_interrupt_p () && df_regs_ever_live_p(regno))
    return true;

  /* Profiling code implicitly clobbers the link register.  */
  if (regno == LP_REGNO && (profile_arc_flag > 0 || profile_flag > 0))
    return true;

  push_topmost_sequence ();
  insn = get_insns ();
  pop_topmost_sequence ();

  if (!insn)
    return false;

  reg = gen_rtx_REG (SImode, regno);

  /* Scan every insn for an explicit set of REG.  */
  for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn) && mep_reg_set_p (reg, insn))
      return true;
  return false;
}
2475
 
2476
/* Return true if the current function contains a basic asm (an asm
   with no operands, GET_CODE == ASM_INPUT), which could clobber
   anything.  Cached in cfun->machine->asms_without_operands:
   0 = unknown, 1 = no, 2 = yes.  */

static bool
mep_asm_without_operands_p (void)
{
  if (cfun->machine->asms_without_operands == 0)
    {
      rtx insn;

      push_topmost_sequence ();
      insn = get_insns ();
      pop_topmost_sequence ();

      cfun->machine->asms_without_operands = 1;
      while (insn)
        {
          if (INSN_P (insn)
              && GET_CODE (PATTERN (insn)) == ASM_INPUT)
            {
              cfun->machine->asms_without_operands = 2;
              break;
            }
          insn = NEXT_INSN (insn);
        }

    }
  return cfun->machine->asms_without_operands == 2;
}
2502
 
2503
/* Interrupt functions save/restore every call-preserved register, and
2504
   any call-used register it uses (or all if it calls any function,
2505
   since they may get clobbered there too).  Here we check to see
2506
   which call-used registers need saving.  */
2507
 
2508
/* On IVC2, these coprocessor control registers additionally need
   saving in interrupt handlers: $ccr1, $ccr8-$ccr11, $ccr16-$ccr31.  */
#define IVC2_ISAVED_REG(r) (TARGET_IVC2 \
                           && (r == FIRST_CCR_REGNO + 1 \
                               || (r >= FIRST_CCR_REGNO + 8 && r <= FIRST_CCR_REGNO + 11) \
                               || (r >= FIRST_CCR_REGNO + 16 && r <= FIRST_CCR_REGNO + 31)))
2512
 
2513
/* Return true if interrupt-handler prologue/epilogue must save and
   restore register R.  Only meaningful for interrupt functions; the
   order of the tests below is significant (the leaf/unused early-false
   must come after the mandatory-save cases).  */

static bool
mep_interrupt_saved_reg (int r)
{
  if (!mep_interrupt_p ())
    return false;
  /* The prologue itself clobbers the control-save temporaries.  */
  if (r == REGSAVE_CONTROL_TEMP
      || (TARGET_64BIT_CR_REGS && TARGET_COP && r == REGSAVE_CONTROL_TEMP+1))
    return true;
  /* A basic asm could touch anything not fixed, plus the repeat/link
     registers and the IVC2 extras.  */
  if (mep_asm_without_operands_p ()
      && (!fixed_regs[r]
          || (r == RPB_REGNO || r == RPE_REGNO || r == RPC_REGNO || r == LP_REGNO)
          || IVC2_ISAVED_REG (r)))
    return true;
  if (!current_function_is_leaf)
    /* Function calls mean we need to save $lp.  */
    if (r == LP_REGNO || IVC2_ISAVED_REG (r))
      return true;
  if (!current_function_is_leaf || cfun->machine->doloop_tags > 0)
    /* The interrupt handler might use these registers for repeat blocks,
       or it might call a function that does so.  */
    if (r == RPB_REGNO || r == RPE_REGNO || r == RPC_REGNO)
      return true;
  /* A leaf handler need not save call-used registers it never touches.  */
  if (current_function_is_leaf && call_used_regs[r] && !df_regs_ever_live_p(r))
    return false;
  /* Functions we call might clobber these.  */
  if (call_used_regs[r] && !fixed_regs[r])
    return true;
  /* Additional registers that need to be saved for IVC2.  */
  if (IVC2_ISAVED_REG (r))
    return true;

  return false;
}
2546
 
2547
/* Return true if the prologue must save register R.  The decision is
   computed once per register and cached in reg_saved[]; after the
   frame layout is locked (frame_locked) only the cache is consulted.  */

static bool
mep_call_saves_register (int r)
{
  if (! cfun->machine->frame_locked)
    {
      int rv = MEP_SAVES_NO;

      /* Already assigned a save slot (e.g. by allocate_initial_value).  */
      if (cfun->machine->reg_save_slot[r])
        rv = MEP_SAVES_YES;
      /* Profiling clobbers $lp; see also mep_epilogue_uses.  */
      else if (r == LP_REGNO && (profile_arc_flag > 0 || profile_flag > 0))
        rv = MEP_SAVES_YES;
      else if (r == FRAME_POINTER_REGNUM && frame_pointer_needed)
        rv = MEP_SAVES_YES;
      /* Call-saved registers that are live, and $lp whenever live.  */
      else if ((!call_used_regs[r] || r == LP_REGNO) && df_regs_ever_live_p(r))
        rv = MEP_SAVES_YES;
      else if (crtl->calls_eh_return && (r == 10 || r == 11))
        /* We need these to have stack slots so that they can be set during
           unwinding.  */
        rv = MEP_SAVES_YES;
      else if (mep_interrupt_saved_reg (r))
        rv = MEP_SAVES_YES;
      cfun->machine->reg_saved[r] = rv;
    }
  return cfun->machine->reg_saved[r] == MEP_SAVES_YES;
}
2572
 
2573
/* Return true if epilogue uses register REGNO.  */
2574
 
2575
/* Return true if epilogue uses register REGNO (EPILOGUE_USES hook).  */

bool
mep_epilogue_uses (int regno)
{
  /* Since $lp is a call-saved register, the generic code will normally
     mark it used in the epilogue if it needs to be saved and restored.
     However, when profiling is enabled, the profiling code will implicitly
     clobber $11.  This case has to be handled specially both here and in
     mep_call_saves_register.  */
  if (regno == LP_REGNO && (profile_arc_flag > 0 || profile_flag > 0))
    return true;
  /* Interrupt functions save/restore pretty much everything.  */
  return (reload_completed && mep_interrupt_saved_reg (regno));
}
2588
 
2589
static int
2590
mep_reg_size (int regno)
2591
{
2592
  if (CR_REGNO_P (regno) && TARGET_64BIT_CR_REGS)
2593
    return 8;
2594
  return 4;
2595
}
2596
 
2597
/* Worker function for TARGET_CAN_ELIMINATE.  */
2598
 
2599
bool
2600
mep_can_eliminate (const int from, const int to)
2601
{
2602
  return  (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM
2603
           ? ! frame_pointer_needed
2604
           : true);
2605
}
2606
 
2607
/* Return the offset between registers FROM and TO for register
   elimination (INITIAL_ELIMINATION_OFFSET).  As a side effect this
   computes and caches the regsave/frame padding needed to keep the
   stack 8-byte aligned.  */

int
mep_elimination_offset (int from, int to)
{
  int reg_save_size;
  int i;
  int frame_size = get_frame_size () + crtl->outgoing_args_size;
  int total_size;

  /* Recompute the save decisions from scratch while the layout is
     still fluid.  */
  if (!cfun->machine->frame_locked)
    memset (cfun->machine->reg_saved, 0, sizeof (cfun->machine->reg_saved));

  /* We don't count arg_regs_to_save in the arg pointer offset, because
     gcc thinks the arg pointer has moved along with the saved regs.
     However, we do count it when we adjust $sp in the prologue.  */
  reg_save_size = 0;
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (mep_call_saves_register (i))
      reg_save_size += mep_reg_size (i);

  /* Pad the register-save area to a multiple of 8.  */
  if (reg_save_size % 8)
    cfun->machine->regsave_filler = 8 - (reg_save_size % 8);
  else
    cfun->machine->regsave_filler = 0;

  /* This is what our total stack adjustment looks like.  */
  total_size = (reg_save_size + frame_size + cfun->machine->regsave_filler);

  /* And pad the whole frame to a multiple of 8 as well.  */
  if (total_size % 8)
    cfun->machine->frame_filler = 8 - (total_size % 8);
  else
    cfun->machine->frame_filler = 0;


  if (from == ARG_POINTER_REGNUM && to == FRAME_POINTER_REGNUM)
    return reg_save_size + cfun->machine->regsave_filler;

  if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
    return cfun->machine->frame_filler + frame_size;

  if (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
    return reg_save_size + cfun->machine->regsave_filler + cfun->machine->frame_filler + frame_size;

  gcc_unreachable ();
}
2651
 
2652
/* Mark insn X as frame-related and return it; shorthand used when
   emitting prologue insns.  */

static rtx
F (rtx x)
{
  RTX_FRAME_RELATED_P (x) = 1;
  return x;
}
2658
 
2659
/* Since the prologue/epilogue code is generated after optimization,
2660
   we can't rely on gcc to split constants for us.  So, this code
2661
   captures all the ways to add a constant to a register in one logic
2662
   chunk, including optimizing away insns we just don't need.  This
2663
   makes the prolog/epilog code easier to follow.  */
2664
/* Emit insns computing reg DEST = reg SRC + VALUE, choosing the
   cheapest form (nothing / plain move / add immediate / build the
   constant in REGSAVE_CONTROL_TEMP and add).  If MARK_FRAME is set the
   insns are flagged frame-related for the unwinder.  */
static void
add_constant (int dest, int src, int value, int mark_frame)
{
  rtx insn;
  int hi, lo;

  /* Nothing to do at all.  */
  if (src == dest && value == 0)
    return;

  /* Plain register copy.  */
  if (value == 0)
    {
      insn = emit_move_insn (gen_rtx_REG (SImode, dest),
                             gen_rtx_REG (SImode, src));
      if (mark_frame)
        RTX_FRAME_RELATED_P(insn) = 1;
      return;
    }

  /* VALUE fits in a 16-bit signed immediate add.  */
  if (value >= -32768 && value <= 32767)
    {
      insn = emit_insn (gen_addsi3 (gen_rtx_REG (SImode, dest),
                                    gen_rtx_REG (SImode, src),
                                    GEN_INT (value)));
      if (mark_frame)
        RTX_FRAME_RELATED_P(insn) = 1;
      return;
    }

  /* Big constant, need to use a temp register.  We use
     REGSAVE_CONTROL_TEMP because it's call clobberable (the reg save
     area is always small enough to directly add to).  */

  hi = trunc_int_for_mode (value & 0xffff0000, SImode);
  lo = value & 0xffff;

  /* temp = hi; temp |= lo; dest = src + temp.  */
  insn = emit_move_insn (gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
                         GEN_INT (hi));

  if (lo)
    {
      insn = emit_insn (gen_iorsi3 (gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
                                    gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
                                    GEN_INT (lo)));
    }

  insn = emit_insn (gen_addsi3 (gen_rtx_REG (SImode, dest),
                                gen_rtx_REG (SImode, src),
                                gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP)));
  if (mark_frame)
    {
      RTX_FRAME_RELATED_P(insn) = 1;
      /* The three-insn sequence isn't directly readable by the
         unwinder; describe its net effect explicitly.  */
      add_reg_note (insn, REG_FRAME_RELATED_EXPR,
                    gen_rtx_SET (SImode,
                                 gen_rtx_REG (SImode, dest),
                                 gen_rtx_PLUS (SImode,
                                               gen_rtx_REG (SImode, dest),
                                               GEN_INT (value))));
    }
}
2723
 
2724
/* Move SRC to DEST.  Mark the move as being potentially dead if
2725
   MAYBE_DEAD_P.  */
2726
 
2727
/* Move SRC to DEST and return the insn.  MAYBE_DEAD_P once attached a
   REG_MAYBE_DEAD note; that mechanism is disabled (#if 0) but the
   parameter is retained for the callers.  */

static rtx
maybe_dead_move (rtx dest, rtx src, bool ATTRIBUTE_UNUSED maybe_dead_p)
{
  rtx insn = emit_move_insn (dest, src);
#if 0
  if (maybe_dead_p)
    REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx, NULL);
#endif
  return insn;
}
2737
 
2738
/* Used for interrupt functions, which can't assume that $tp and $gp
2739
   contain the correct pointers.  */
2740
 
2741
/* Load hard register REGNO with the address of SYMBOL, via the
   hi/lo symbol-pair move patterns.  Used for interrupt functions,
   which can't assume that $tp and $gp contain the correct pointers.  */

static void
mep_reload_pointer (int regno, const char *symbol)
{
  rtx reg, sym;

  /* A leaf function that never touches the register doesn't need the
     reload at all.  */
  if (!df_regs_ever_live_p(regno) && current_function_is_leaf)
    return;

  reg = gen_rtx_REG (SImode, regno);
  sym = gen_rtx_SYMBOL_REF (SImode, symbol);
  emit_insn (gen_movsi_topsym_s (reg, sym));
  emit_insn (gen_movsi_botsym_s (reg, reg, sym));
}
2754
 
2755
/* Assign save slots for any register not already saved.  DImode
2756
   registers go at the end of the reg save area; the rest go at the
2757
   beginning.  This is for alignment purposes.  Returns true if a frame
2758
   is really needed.  */
2759
/* Assign save slots for any register not already saved.  DImode
   registers go at the end of the reg save area; the rest go at the
   beginning.  This is for alignment purposes.  Locks the frame layout
   before returning.  Returns true if a frame is really needed.  */
static bool
mep_assign_save_slots (int reg_save_size)
{
  bool really_need_stack_frame = false;
  int di_ofs = 0;
  int i;

  for (i=0; i<FIRST_PSEUDO_REGISTER; i++)
    if (mep_call_saves_register(i))
      {
        int regsize = mep_reg_size (i);

        /* Saving $tp/$gp/$lp that are never written can be handled by
           the initial-value machinery without a real frame.  */
        if ((i != TP_REGNO && i != GP_REGNO && i != LP_REGNO)
            || mep_reg_set_in_function (i))
          really_need_stack_frame = true;

        if (cfun->machine->reg_save_slot[i])
          continue;

        if (regsize < 8)
          {
            cfun->machine->reg_save_size += regsize;
            cfun->machine->reg_save_slot[i] = cfun->machine->reg_save_size;
          }
        else
          {
            /* 8-byte registers fill slots from the top of the area
               downwards.  */
            cfun->machine->reg_save_slot[i] = reg_save_size - di_ofs;
            di_ofs += 8;
          }
      }
  cfun->machine->frame_locked = 1;
  return really_need_stack_frame;
}
2792
 
2793
/* Expand the function prologue: disable interrupts if requested,
   adjust $sp, store every register mep_call_saves_register selects
   (annotated for the unwinder), set up the frame pointer, and for
   interrupt handlers reload $gp/$tp from their well-known symbols.  */

void
mep_expand_prologue (void)
{
  int i, rss, sp_offset = 0;
  int reg_save_size;
  int frame_size;
  int really_need_stack_frame;

  /* We must not allow register renaming in interrupt functions,
     because that invalidates the correctness of the set of call-used
     registers we're going to save/restore.  */
  mep_set_leaf_registers (mep_interrupt_p () ? 0 : 1);

  if (mep_disinterrupt_p ())
    emit_insn (gen_mep_disable_int ());

  cfun->machine->mep_frame_pointer_needed = frame_pointer_needed;

  reg_save_size = mep_elimination_offset (ARG_POINTER_REGNUM, FRAME_POINTER_REGNUM);
  frame_size = mep_elimination_offset (FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM);
  really_need_stack_frame = frame_size;

  /* NOTE(review): really_need_stack_frame is computed but appears not
     to be read again in this function — confirm against the rest of
     the file.  */
  really_need_stack_frame |= mep_assign_save_slots (reg_save_size);

  /* Do the whole adjustment up front when the combined offset still
     fits the short addressing forms.  */
  sp_offset = reg_save_size;
  if (sp_offset + frame_size < 128)
    sp_offset += frame_size ;

  add_constant (SP_REGNO, SP_REGNO, -sp_offset, 1);

  /* Store each saved register into its slot at $sp + sp_offset - rss.  */
  for (i=0; i<FIRST_PSEUDO_REGISTER; i++)
    if (mep_call_saves_register(i))
      {
        rtx mem;
        bool maybe_dead_p;
        enum machine_mode rmode;

        rss = cfun->machine->reg_save_slot[i];

        /* $tp/$gp/$lp need no explicit store in non-interrupt
           functions that never modify them.  */
        if ((i == TP_REGNO || i == GP_REGNO || i == LP_REGNO)
            && (!mep_reg_set_in_function (i)
                && !mep_interrupt_p ()))
          continue;

        if (mep_reg_size (i) == 8)
          rmode = DImode;
        else
          rmode = SImode;

        /* If there is a pseudo associated with this register's initial value,
           reload might have already spilt it to the stack slot suggested by
           ALLOCATE_INITIAL_VALUE.  The moves emitted here can then be safely
           deleted as dead.  */
        mem = gen_rtx_MEM (rmode,
                           plus_constant (stack_pointer_rtx, sp_offset - rss));
        maybe_dead_p = rtx_equal_p (mem, has_hard_reg_initial_val (rmode, i));

        if (GR_REGNO_P (i) || LOADABLE_CR_REGNO_P (i))
          /* Directly storable registers.  */
          F(maybe_dead_move (mem, gen_rtx_REG (rmode, i), maybe_dead_p));
        else if (rmode == DImode)
          {
            /* 64-bit control register: copy both halves through the
               two control temporaries and store them as SImode words,
               honoring endianness via BE.  */
            rtx insn;
            int be = TARGET_BIG_ENDIAN ? 4 : 0;

            mem = gen_rtx_MEM (SImode,
                               plus_constant (stack_pointer_rtx, sp_offset - rss + be));

            maybe_dead_move (gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
                             gen_rtx_REG (SImode, i),
                             maybe_dead_p);
            maybe_dead_move (gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP+1),
                             gen_rtx_ZERO_EXTRACT (SImode,
                                                   gen_rtx_REG (DImode, i),
                                                   GEN_INT (32),
                                                   GEN_INT (32)),
                             maybe_dead_p);
            insn = maybe_dead_move (mem,
                                    gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
                                    maybe_dead_p);
            RTX_FRAME_RELATED_P (insn) = 1;

            /* Tell the unwinder the two stores together save the full
               64-bit register.  */
            add_reg_note (insn, REG_FRAME_RELATED_EXPR,
                          gen_rtx_SET (VOIDmode,
                                       copy_rtx (mem),
                                       gen_rtx_REG (rmode, i)));
            mem = gen_rtx_MEM (SImode,
                               plus_constant (stack_pointer_rtx, sp_offset - rss + (4-be)));
            insn = maybe_dead_move (mem,
                                    gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP+1),
                                    maybe_dead_p);
          }
        else
          {
            /* Other control registers: stage through the control
               temporary.  */
            rtx insn;
            maybe_dead_move (gen_rtx_REG (rmode, REGSAVE_CONTROL_TEMP),
                             gen_rtx_REG (rmode, i),
                             maybe_dead_p);
            insn = maybe_dead_move (mem,
                                    gen_rtx_REG (rmode, REGSAVE_CONTROL_TEMP),
                                    maybe_dead_p);
            RTX_FRAME_RELATED_P (insn) = 1;

            add_reg_note (insn, REG_FRAME_RELATED_EXPR,
                          gen_rtx_SET (VOIDmode,
                                       copy_rtx (mem),
                                       gen_rtx_REG (rmode, i)));
          }
      }

  if (frame_pointer_needed)
    {
      /* We've already adjusted down by sp_offset.  Total $sp change
         is reg_save_size + frame_size.  We want a net change here of
         just reg_save_size.  */
      add_constant (FP_REGNO, SP_REGNO, sp_offset - reg_save_size, 1);
    }

  /* Apply whatever part of the total adjustment is still outstanding.  */
  add_constant (SP_REGNO, SP_REGNO, sp_offset-(reg_save_size+frame_size), 1);

  if (mep_interrupt_p ())
    {
      /* Interrupts may arrive with arbitrary $gp/$tp; reload them.  */
      mep_reload_pointer(GP_REGNO, "__sdabase");
      mep_reload_pointer(TP_REGNO, "__tpbase");
    }
}
2918
 
2919
static void
2920
mep_start_function (FILE *file, HOST_WIDE_INT hwi_local)
2921
{
2922
  int local = hwi_local;
2923
  int frame_size = local + crtl->outgoing_args_size;
2924
  int reg_save_size;
2925
  int ffill;
2926
  int i, sp, skip;
2927
  int sp_offset;
2928
  int slot_map[FIRST_PSEUDO_REGISTER], si, sj;
2929
 
2930
  reg_save_size = mep_elimination_offset (ARG_POINTER_REGNUM, FRAME_POINTER_REGNUM);
2931
  frame_size = mep_elimination_offset (FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM);
2932
  sp_offset = reg_save_size + frame_size;
2933
 
2934
  ffill = cfun->machine->frame_filler;
2935
 
2936
  if (cfun->machine->mep_frame_pointer_needed)
2937
    reg_names[FP_REGNO] = "$fp";
2938
  else
2939
    reg_names[FP_REGNO] = "$8";
2940
 
2941
  if (sp_offset == 0)
2942
    return;
2943
 
2944
  if (debug_info_level == DINFO_LEVEL_NONE)
2945
    {
2946
      fprintf (file, "\t# frame: %d", sp_offset);
2947
      if (reg_save_size)
2948
        fprintf (file, "   %d regs", reg_save_size);
2949
      if (local)
2950
        fprintf (file, "   %d locals", local);
2951
      if (crtl->outgoing_args_size)
2952
        fprintf (file, "   %d args", crtl->outgoing_args_size);
2953
      fprintf (file, "\n");
2954
      return;
2955
    }
2956
 
2957
  fprintf (file, "\t#\n");
2958
  fprintf (file, "\t# Initial Frame Information:\n");
2959
  if (sp_offset || !frame_pointer_needed)
2960
    fprintf (file, "\t# Entry   ---------- 0\n");
2961
 
2962
  /* Sort registers by save slots, so they're printed in the order
2963
     they appear in memory, not the order they're saved in.  */
2964
  for (si=0; si<FIRST_PSEUDO_REGISTER; si++)
2965
    slot_map[si] = si;
2966
  for (si=0; si<FIRST_PSEUDO_REGISTER-1; si++)
2967
    for (sj=si+1; sj<FIRST_PSEUDO_REGISTER; sj++)
2968
      if (cfun->machine->reg_save_slot[slot_map[si]]
2969
          > cfun->machine->reg_save_slot[slot_map[sj]])
2970
        {
2971
          int t = slot_map[si];
2972
          slot_map[si] = slot_map[sj];
2973
          slot_map[sj] = t;
2974
        }
2975
 
2976
  sp = 0;
2977
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2978
    {
2979
      int rsize;
2980
      int r = slot_map[i];
2981
      int rss = cfun->machine->reg_save_slot[r];
2982
 
2983
      if (!mep_call_saves_register (r))
2984
        continue;
2985
 
2986
      if ((r == TP_REGNO || r == GP_REGNO || r == LP_REGNO)
2987
          && (!mep_reg_set_in_function (r)
2988
              && !mep_interrupt_p ()))
2989
        continue;
2990
 
2991
      rsize = mep_reg_size(r);
2992
      skip = rss - (sp+rsize);
2993
      if (skip)
2994
        fprintf (file, "\t#         %3d bytes for alignment\n", skip);
2995
      fprintf (file, "\t#         %3d bytes for saved %-3s   %3d($sp)\n",
2996
               rsize, reg_names[r], sp_offset - rss);
2997
      sp = rss;
2998
    }
2999
 
3000
  skip = reg_save_size - sp;
3001
  if (skip)
3002
    fprintf (file, "\t#         %3d bytes for alignment\n", skip);
3003
 
3004
  if (frame_pointer_needed)
3005
    fprintf (file, "\t# FP ---> ---------- %d (sp-%d)\n", reg_save_size, sp_offset-reg_save_size);
3006
  if (local)
3007
    fprintf (file, "\t#         %3d bytes for local vars\n", local);
3008
  if (ffill)
3009
    fprintf (file, "\t#         %3d bytes for alignment\n", ffill);
3010
  if (crtl->outgoing_args_size)
3011
    fprintf (file, "\t#         %3d bytes for outgoing args\n",
3012
             crtl->outgoing_args_size);
3013
  fprintf (file, "\t# SP ---> ---------- %d\n", sp_offset);
3014
  fprintf (file, "\t#\n");
3015
}
3016
 
3017
 
3018
/* When nonzero, mep_expand_epilogue leaves $lp alone; used by the
   EH-return path, which supplies its own return address.  */
static int mep_prevent_lp_restore = 0;
/* When nonzero, mep_expand_epilogue stops after restoring registers
   and adjusting $sp, leaving the jump to the sibling-call target.  */
static int mep_sibcall_epilogue = 0;
3020
 
3021
/* Expand the function epilogue to RTL: restore call-saved registers
   from their stack slots, deallocate the frame, and emit the
   appropriate return jump.  The module flags mep_prevent_lp_restore
   (EH return) and mep_sibcall_epilogue (sibling call) suppress parts
   of this sequence.  */
void
mep_expand_epilogue (void)
{
  int i, sp_offset = 0;
  int reg_save_size = 0;
  int frame_size;
  /* $lp normally restores via a temp; lp_slot/lp_temp track a restore
     deferred until after the loop (see below).  */
  int lp_temp = LP_REGNO, lp_slot = -1;
  int really_need_stack_frame = get_frame_size() + crtl->outgoing_args_size;
  int interrupt_handler = mep_interrupt_p ();

  if (profile_arc_flag == 2)
    emit_insn (gen_mep_bb_trace_ret ());

  reg_save_size = mep_elimination_offset (ARG_POINTER_REGNUM, FRAME_POINTER_REGNUM);
  frame_size = mep_elimination_offset (FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM);

  /* Called for its side effect of (re)assigning save slots; the value
     folded into really_need_stack_frame is otherwise unused here.  */
  really_need_stack_frame |= mep_assign_save_slots (reg_save_size);

  if (frame_pointer_needed)
    {
      /* If we have a frame pointer, we won't have a reliable stack
         pointer (alloca, you know), so rebase SP from FP */
      emit_move_insn (gen_rtx_REG (SImode, SP_REGNO),
                      gen_rtx_REG (SImode, FP_REGNO));
      sp_offset = reg_save_size;
    }
  else
    {
      /* SP is right under our local variable space.  Adjust it if
         needed.  */
      sp_offset = reg_save_size + frame_size;
      if (sp_offset >= 128)
        {
          /* Keep the remaining offsets small enough for short
             displacement forms by popping the frame first.  */
          add_constant (SP_REGNO, SP_REGNO, frame_size, 0);
          sp_offset -= frame_size;
        }
    }

  /* This is backwards so that we restore the control and coprocessor
     registers before the temporary registers we use to restore
     them.  */
  for (i=FIRST_PSEUDO_REGISTER-1; i>=1; i--)
    if (mep_call_saves_register (i))
      {
        enum machine_mode rmode;
        int rss = cfun->machine->reg_save_slot[i];

        if (mep_reg_size (i) == 8)
          rmode = DImode;
        else
          rmode = SImode;

        /* $tp/$gp/$lp were only saved when set in this function or in
           an interrupt handler -- mirror that here.  */
        if ((i == TP_REGNO || i == GP_REGNO || i == LP_REGNO)
            && !(mep_reg_set_in_function (i) || interrupt_handler))
          continue;
        if (mep_prevent_lp_restore && i == LP_REGNO)
          continue;
        /* $10/$11 are skipped in normal epilogues; restored only for
           interrupt handlers or the EH-return path.  */
        if (!mep_prevent_lp_restore
            && !interrupt_handler
            && (i == 10 || i == 11))
          continue;

        if (GR_REGNO_P (i) || LOADABLE_CR_REGNO_P (i))
          emit_move_insn (gen_rtx_REG (rmode, i),
                          gen_rtx_MEM (rmode,
                                       plus_constant (stack_pointer_rtx,
                                                      sp_offset-rss)));
        else
          {
            if (i == LP_REGNO && !mep_sibcall_epilogue && !interrupt_handler)
              /* Defer this one so we can jump indirect rather than
                 copying the RA to $lp and "ret".  EH epilogues
                 automatically skip this anyway.  */
              lp_slot = sp_offset-rss;
            else
              {
                /* Non-loadable control registers go through the core
                   temp register.  */
                emit_move_insn (gen_rtx_REG (rmode, REGSAVE_CONTROL_TEMP),
                                gen_rtx_MEM (rmode,
                                             plus_constant (stack_pointer_rtx,
                                                            sp_offset-rss)));
                emit_move_insn (gen_rtx_REG (rmode, i),
                                gen_rtx_REG (rmode, REGSAVE_CONTROL_TEMP));
              }
          }
      }
  if (lp_slot != -1)
    {
      /* Restore this one last so we know it will be in the temp
         register when we return by jumping indirectly via the temp.  */
      emit_move_insn (gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
                      gen_rtx_MEM (SImode,
                                   plus_constant (stack_pointer_rtx,
                                                  lp_slot)));
      lp_temp = REGSAVE_CONTROL_TEMP;
    }


  add_constant (SP_REGNO, SP_REGNO, sp_offset, 0);

  /* EH return: apply the runtime stack adjustment computed by the
     unwinder (held in eh_stack_adjust).  */
  if (crtl->calls_eh_return && mep_prevent_lp_restore)
    emit_insn (gen_addsi3 (gen_rtx_REG (SImode, SP_REGNO),
                           gen_rtx_REG (SImode, SP_REGNO),
                           cfun->machine->eh_stack_adjust));

  /* Sibling-call epilogues end here; the call itself is the jump.  */
  if (mep_sibcall_epilogue)
    return;

  if (mep_disinterrupt_p ())
    emit_insn (gen_mep_enable_int ());

  if (mep_prevent_lp_restore)
    {
      emit_jump_insn (gen_eh_return_internal ());
      emit_barrier ();
    }
  else if (interrupt_handler)
    emit_jump_insn (gen_mep_reti ());
  else
    emit_jump_insn (gen_return_internal (gen_rtx_REG (SImode, lp_temp)));
}
3141
 
3142
void
3143
mep_expand_eh_return (rtx *operands)
3144
{
3145
  if (GET_CODE (operands[0]) != REG || REGNO (operands[0]) != LP_REGNO)
3146
    {
3147
      rtx ra = gen_rtx_REG (Pmode, LP_REGNO);
3148
      emit_move_insn (ra, operands[0]);
3149
      operands[0] = ra;
3150
    }
3151
 
3152
  emit_insn (gen_eh_epilogue (operands[0]));
3153
}
3154
 
3155
/* Expand the "eh_epilogue" pattern: a normal epilogue, except that
   $lp is left untouched (the unwinder has already placed the handler
   address there).  The EH stack adjustment is communicated in
   register 0 via cfun->machine->eh_stack_adjust.  */
void
mep_emit_eh_epilogue (rtx *operands ATTRIBUTE_UNUSED)
{
  cfun->machine->eh_stack_adjust = gen_rtx_REG (Pmode, 0);
  /* Suppress the $lp restore for the duration of the expansion.  */
  mep_prevent_lp_restore = 1;
  mep_expand_epilogue ();
  mep_prevent_lp_restore = 0;
}
3163
 
3164
/* Expand the epilogue for a sibling call: restore registers and pop
   the frame, but emit no return jump (the sibcall itself follows).
   Implemented by toggling mep_sibcall_epilogue around the normal
   epilogue expansion.  */
void
mep_expand_sibcall_epilogue (void)
{
  mep_sibcall_epilogue = 1;
  mep_expand_epilogue ();
  mep_sibcall_epilogue = 0;
}
3171
 
3172
static bool
3173
mep_function_ok_for_sibcall (tree decl, tree exp ATTRIBUTE_UNUSED)
3174
{
3175
  if (decl == NULL)
3176
    return false;
3177
 
3178
  if (mep_section_tag (DECL_RTL (decl)) == 'f')
3179
    return false;
3180
 
3181
  /* Can't call to a sibcall from an interrupt or disinterrupt function.  */
3182
  if (mep_interrupt_p () || mep_disinterrupt_p ())
3183
    return false;
3184
 
3185
  return true;
3186
}
3187
 
3188
/* Return the register holding the EH stack adjustment: $10.
   NOTE(review): presumably referenced by EH_RETURN_STACKADJ_RTX in
   mep.h -- confirm against the header.  */
rtx
mep_return_stackadj_rtx (void)
{
  return gen_rtx_REG (SImode, 10);
}
3193
 
3194
/* Return the register holding the EH handler address: the link
   register $lp.  */
rtx
mep_return_handler_rtx (void)
{
  return gen_rtx_REG (SImode, LP_REGNO);
}
3199
 
3200
/* Emit the -p profiling sequence to FILE.  Saves $0 and $lp (via $0)
   on a temporary 8-byte stack block, calls __mep_mcount, then
   restores both and pops the block, so the function body sees its
   registers unchanged.  */
void
mep_function_profiler (FILE *file)
{
  /* Always right at the beginning of the function.  */
  fprintf (file, "\t# mep function profiler\n");
  fprintf (file, "\tadd\t$sp, -8\n");
  /* Save $0, then use it to spill $lp.  */
  fprintf (file, "\tsw\t$0, ($sp)\n");
  fprintf (file, "\tldc\t$0, $lp\n");
  fprintf (file, "\tsw\t$0, 4($sp)\n");
  fprintf (file, "\tbsr\t__mep_mcount\n");
  /* Restore $lp (clobbered by bsr) and $0, then pop.  */
  fprintf (file, "\tlw\t$0, 4($sp)\n");
  fprintf (file, "\tstc\t$0, $lp\n");
  fprintf (file, "\tlw\t$0, ($sp)\n");
  fprintf (file, "\tadd\t$sp, 8\n\n");
}
3215
 
3216
/* Output the basic-block-profiling return hook: same save/restore
   shape as mep_function_profiler, but calling __bb_trace_ret.
   Returns "" so it can be used directly as an insn template.  */
const char *
mep_emit_bb_trace_ret (void)
{
  fprintf (asm_out_file, "\t# end of block profiling\n");
  fprintf (asm_out_file, "\tadd\t$sp, -8\n");
  /* Save $0, then use it to spill $lp around the bsr.  */
  fprintf (asm_out_file, "\tsw\t$0, ($sp)\n");
  fprintf (asm_out_file, "\tldc\t$0, $lp\n");
  fprintf (asm_out_file, "\tsw\t$0, 4($sp)\n");
  fprintf (asm_out_file, "\tbsr\t__bb_trace_ret\n");
  fprintf (asm_out_file, "\tlw\t$0, 4($sp)\n");
  fprintf (asm_out_file, "\tstc\t$0, $lp\n");
  fprintf (asm_out_file, "\tlw\t$0, ($sp)\n");
  fprintf (asm_out_file, "\tadd\t$sp, 8\n\n");
  return "";
}
3231
 
3232
#undef SAVE
3233
#undef RESTORE
3234
 
3235
/* Operand Printing.  */
3236
 
3237
void
3238
mep_print_operand_address (FILE *stream, rtx address)
3239
{
3240
  if (GET_CODE (address) == MEM)
3241
    address = XEXP (address, 0);
3242
  else
3243
    /* cf: gcc.dg/asm-4.c.  */
3244
    gcc_assert (GET_CODE (address) == REG);
3245
 
3246
  mep_print_operand (stream, address, 0);
3247
}
3248
 
3249
/* Table driving mep_print_operand.  Each row maps an (operand code,
   encoded pattern) pair to an output template.  CODE is the '%' code
   this row applies to (0 matches the plain case).  PATTERN is matched
   against the global string produced by encode_pattern () -- letters
   such as 'r', 'i', 'm', 's' describe the shape of the rtx; see
   encode_pattern earlier in this file.  FORMAT is emitted verbatim
   except that a digit N prints sub-rtx patternr[N] and '\\' escapes
   the next character.  */
static struct
{
  char code;
  const char *pattern;
  const char *format;
}
const conversions[] =
{
  { 0, "r", "0" },
  { 0, "m+ri", "3(2)" },
  { 0, "mr", "(1)" },
  { 0, "ms", "(1)" },
  { 0, "ml", "(1)" },
  { 0, "mLrs", "%lo(3)(2)" },
  { 0, "mLr+si", "%lo(4+5)(2)" },
  { 0, "m+ru2s", "%tpoff(5)(2)" },
  { 0, "m+ru3s", "%sdaoff(5)(2)" },
  { 0, "m+r+u2si", "%tpoff(6+7)(2)" },
  { 0, "m+ru2+si", "%tpoff(6+7)(2)" },
  { 0, "m+r+u3si", "%sdaoff(6+7)(2)" },
  { 0, "m+ru3+si", "%sdaoff(6+7)(2)" },
  { 0, "mi", "(1)" },
  { 0, "m+si", "(2+3)" },
  { 0, "m+li", "(2+3)" },
  { 0, "i", "0" },
  { 0, "s", "0" },
  { 0, "+si", "1+2" },
  { 0, "+u2si", "%tpoff(3+4)" },
  { 0, "+u3si", "%sdaoff(3+4)" },
  { 0, "l", "0" },
  { 'b', "i", "0" },
  { 'B', "i", "0" },
  { 'U', "i", "0" },
  { 'h', "i", "0" },
  { 'h', "Hs", "%hi(1)" },
  { 'I', "i", "0" },
  { 'I', "u2s", "%tpoff(2)" },
  { 'I', "u3s", "%sdaoff(2)" },
  { 'I', "+u2si", "%tpoff(3+4)" },
  { 'I', "+u3si", "%sdaoff(3+4)" },
  { 'J', "i", "0" },
  { 'P', "mr", "(1\\+),\\0" },
  { 'x', "i", "0" },
  { 0, 0, 0 }
};
3294
 
3295
static int
3296
unique_bit_in (HOST_WIDE_INT i)
3297
{
3298
  switch (i & 0xff)
3299
    {
3300
    case 0x01: case 0xfe: return 0;
3301
    case 0x02: case 0xfd: return 1;
3302
    case 0x04: case 0xfb: return 2;
3303
    case 0x08: case 0xf7: return 3;
3304
    case 0x10: case 0x7f: return 4;
3305
    case 0x20: case 0xbf: return 5;
3306
    case 0x40: case 0xdf: return 6;
3307
    case 0x80: case 0xef: return 7;
3308
    default:
3309
      gcc_unreachable ();
3310
    }
3311
}
3312
 
3313
static int
3314
bit_size_for_clip (HOST_WIDE_INT i)
3315
{
3316
  int rv;
3317
 
3318
  for (rv = 0; rv < 31; rv ++)
3319
    if (((HOST_WIDE_INT) 1 << rv) > i)
3320
      return rv + 1;
3321
  gcc_unreachable ();
3322
}
3323
 
3324
/* Print an operand to an assembler instruction.  */

/* Implement PRINT_OPERAND: print rtx X to FILE, modified by operand
   code CODE.  '<', 'L' and 'M' are handled specially; everything
   else is driven by the conversions[] table above, matched against
   encode_pattern (X).  */
void
mep_print_operand (FILE *file, rtx x, int code)
{
  int i, j;
  const char *real_name;

  if (code == '<')
    {
      /* Print a mnemonic to do CR <- CR moves.  Find out which intrinsic
         we're using, then skip over the "mep_" part of its name.  */
      const struct cgen_insn *insn;

      if (mep_get_move_insn (mep_cmov, &insn))
        fputs (cgen_intrinsics[insn->intrinsic] + 4, file);
      else
        mep_intrinsic_unavailable (mep_cmov);
      return;
    }
  if (code == 'L')
    {
      /* Print the bit-op suffix (clr/set/not) for the rtx code of a
         masked AND/IOR/XOR operation.  */
      switch (GET_CODE (x))
        {
        case AND:
          fputs ("clr", file);
          return;
        case IOR:
          fputs ("set", file);
          return;
        case XOR:
          fputs ("not", file);
          return;
        default:
          output_operand_lossage ("invalid %%L code");
        }
    }
  if (code == 'M')
    {
      /* Print the second operand of a CR <- CR move.  If we're using
         a two-operand instruction (i.e., a real cmov), then just print
         the operand normally.  If we're using a "reg, reg, immediate"
         instruction such as caddi3, print the operand followed by a
         zero field.  If we're using a three-register instruction,
         print the operand twice.  */
      const struct cgen_insn *insn;

      mep_print_operand (file, x, 0);
      if (mep_get_move_insn (mep_cmov, &insn)
          && insn_data[insn->icode].n_operands == 3)
        {
          fputs (", ", file);
          if (insn_data[insn->icode].operand[2].predicate (x, VOIDmode))
            mep_print_operand (file, x, 0);
          else
            mep_print_operand (file, const0_rtx, 0);
        }
      return;
    }

  /* Table-driven case: find the row matching (CODE, pattern) and
     expand its format string.  */
  encode_pattern (x);
  for (i = 0; conversions[i].pattern; i++)
    if (conversions[i].code == code
        && strcmp(conversions[i].pattern, pattern) == 0)
      {
        for (j = 0; conversions[i].format[j]; j++)
          /* '\\' escapes the next format character.  */
          if (conversions[i].format[j] == '\\')
            {
              fputc (conversions[i].format[j+1], file);
              j++;
            }
          else if (ISDIGIT(conversions[i].format[j]))
            {
              /* A digit N prints sub-rtx patternr[N].  */
              rtx r = patternr[conversions[i].format[j] - '0'];
              switch (GET_CODE (r))
                {
                case REG:
                  fprintf (file, "%s", reg_names [REGNO (r)]);
                  break;
                case CONST_INT:
                  switch (code)
                    {
                    case 'b':
                      fprintf (file, "%d", unique_bit_in (INTVAL (r)));
                      break;
                    case 'B':
                      fprintf (file, "%d", bit_size_for_clip (INTVAL (r)));
                      break;
                    case 'h':
                      fprintf (file, "0x%x", ((int) INTVAL (r) >> 16) & 0xffff);
                      break;
                    case 'U':
                      fprintf (file, "%d", bit_size_for_clip (INTVAL (r)) - 1);
                      break;
                    case 'J':
                      fprintf (file, "0x%x", (int) INTVAL (r) & 0xffff);
                      break;
                    case 'x':
                      /* Hex for values that are a "high bits only"
                         mask, decimal otherwise.  */
                      if (INTVAL (r) & ~(HOST_WIDE_INT)0xff
                          && !(INTVAL (r) & 0xff))
                        fprintf (file, HOST_WIDE_INT_PRINT_HEX, INTVAL(r));
                      else
                        fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL(r));
                      break;
                    case 'I':
                      /* For large standalone constants, append the low
                         16 bits in hex as an assembler comment.  */
                      if (INTVAL (r) & ~(HOST_WIDE_INT)0xff
                          && conversions[i].format[j+1] == 0)
                        {
                          fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (r));
                          fprintf (file, " # 0x%x", (int) INTVAL(r) & 0xffff);
                        }
                      else
                        fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL(r));
                      break;
                    default:
                      fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL(r));
                      break;
                    }
                  break;
                case CONST_DOUBLE:
                  fprintf(file, "[const_double 0x%lx]",
                          (unsigned long) CONST_DOUBLE_HIGH(r));
                  break;
                case SYMBOL_REF:
                  real_name = targetm.strip_name_encoding (XSTR (r, 0));
                  assemble_name (file, real_name);
                  break;
                case LABEL_REF:
                  output_asm_label (r);
                  break;
                default:
                  fprintf (stderr, "don't know how to print this operand:");
                  debug_rtx (r);
                  gcc_unreachable ();
                }
            }
          else
            {
              /* Suppress a '+' before a negative constant so we emit
                 "base-4" rather than "base+-4".  */
              if (conversions[i].format[j] == '+'
                  && (!code || code == 'I')
                  && ISDIGIT (conversions[i].format[j+1])
                  && GET_CODE (patternr[conversions[i].format[j+1] - '0']) == CONST_INT
                  && INTVAL (patternr[conversions[i].format[j+1] - '0']) < 0)
                continue;
              fputc(conversions[i].format[j], file);
            }
        break;
      }
  if (!conversions[i].pattern)
    {
      error ("unconvertible operand %c %qs", code?code:'-', pattern);
      debug_rtx(x);
    }

  return;
}
3480
 
3481
/* Implement FINAL_PRESCAN_INSN: emit a '+' bundling prefix before an
   insn when its mode (BImode) and slot attribute mark it as VLIW
   bundled in a non-core slot.  NOTE(review): the BImode flag
   convention is set by the bundling pass elsewhere in this file --
   confirm there.  */
void
mep_final_prescan_insn (rtx insn, rtx *operands ATTRIBUTE_UNUSED,
                        int noperands ATTRIBUTE_UNUSED)
{
  /* Despite the fact that MeP is perfectly capable of branching and
     doing something else in the same bundle, gcc does jump
     optimization *after* scheduling, so we cannot trust the bundling
     flags on jump instructions.  */
  if (GET_MODE (insn) == BImode
      && get_attr_slots (insn) != SLOTS_CORE)
    fputc ('+', asm_out_file);
}
3493
 
3494
/* Function args in registers.  */
3495
 
3496
static void
3497
mep_setup_incoming_varargs (cumulative_args_t cum,
3498
                            enum machine_mode mode ATTRIBUTE_UNUSED,
3499
                            tree type ATTRIBUTE_UNUSED, int *pretend_size,
3500
                            int second_time ATTRIBUTE_UNUSED)
3501
{
3502
  int nsave = 4 - (get_cumulative_args (cum)->nregs + 1);
3503
 
3504
  if (nsave > 0)
3505
    cfun->machine->arg_regs_to_save = nsave;
3506
  *pretend_size = nsave * 4;
3507
}
3508
 
3509
static int
3510
bytesize (const_tree type, enum machine_mode mode)
3511
{
3512
  if (mode == BLKmode)
3513
    return int_size_in_bytes (type);
3514
  return GET_MODE_SIZE (mode);
3515
}
3516
 
3517
/* Implement TARGET_EXPAND_BUILTIN_SAVEREGS: dump the argument
   registers that may hold unnamed arguments into a stack buffer and
   return the buffer's address.  For IVC2 the buffer additionally
   holds the coprocessor argument registers, 8 bytes each, after a
   64-bit-aligned core-register block.  */
static rtx
mep_expand_builtin_saveregs (void)
{
  int bufsize, i, ns;
  rtx regbuf;

  ns = cfun->machine->arg_regs_to_save;
  if (TARGET_IVC2)
    {
      /* Core block rounded up to 8 bytes, then 8 bytes per
         coprocessor register.  */
      bufsize = 8 * ((ns + 1) / 2) + 8 * ns;
      regbuf = assign_stack_local (SImode, bufsize, 64);
    }
  else
    {
      bufsize = ns * 4;
      regbuf = assign_stack_local (SImode, bufsize, 32);
    }

  /* Arguments live in $1..$4, so the NS registers to dump start at
     $(5-ns).  */
  move_block_from_reg (5-ns, regbuf, ns);

  if (TARGET_IVC2)
    {
      rtx tmp = gen_rtx_MEM (DImode, XEXP (regbuf, 0));
      int ofs = 8 * ((ns+1)/2);

      for (i=0; i<ns; i++)
        {
          /* NOTE(review): 49 appears to be the first coprocessor
             argument register (cf. mep_function_arg) -- confirm.  */
          int rn = (4-ns) + i + 49;
          rtx ptr;

          ptr = offset_address (tmp, GEN_INT (ofs), 2);
          emit_move_insn (ptr, gen_rtx_REG (DImode, rn));
          ofs += 8;
        }
    }
  return XEXP (regbuf, 0);
}
3554
 
3555
/* True iff tree T is a vector type; used below for the IVC2 vector
   argument/return conventions.  */
#define VECTOR_TYPE_P(t) (TREE_CODE(t) == VECTOR_TYPE)
3556
 
3557
/* Implement TARGET_BUILD_BUILTIN_VA_LIST.  The MeP va_list is a
   record of four pointers:
     __va_next_gp       -- next core-register save slot to read,
     __va_next_gp_limit -- end of the core-register save area,
     __va_next_cop      -- next coprocessor-register save slot (IVC2),
     __va_next_stack    -- next stack-passed argument.  */
static tree
mep_build_builtin_va_list (void)
{
  tree f_next_gp, f_next_gp_limit, f_next_cop, f_next_stack;
  tree record;


  record = (*lang_hooks.types.make_type) (RECORD_TYPE);

  f_next_gp = build_decl (BUILTINS_LOCATION, FIELD_DECL,
                          get_identifier ("__va_next_gp"), ptr_type_node);
  f_next_gp_limit = build_decl (BUILTINS_LOCATION, FIELD_DECL,
                                get_identifier ("__va_next_gp_limit"),
                                ptr_type_node);
  f_next_cop = build_decl (BUILTINS_LOCATION, FIELD_DECL, get_identifier ("__va_next_cop"),
                           ptr_type_node);
  f_next_stack = build_decl (BUILTINS_LOCATION, FIELD_DECL, get_identifier ("__va_next_stack"),
                             ptr_type_node);

  DECL_FIELD_CONTEXT (f_next_gp) = record;
  DECL_FIELD_CONTEXT (f_next_gp_limit) = record;
  DECL_FIELD_CONTEXT (f_next_cop) = record;
  DECL_FIELD_CONTEXT (f_next_stack) = record;

  /* Chain the fields in declaration order and lay out the record.  */
  TYPE_FIELDS (record) = f_next_gp;
  DECL_CHAIN (f_next_gp) = f_next_gp_limit;
  DECL_CHAIN (f_next_gp_limit) = f_next_cop;
  DECL_CHAIN (f_next_cop) = f_next_stack;

  layout_type (record);

  return record;
}
3590
 
3591
/* Implement TARGET_EXPAND_BUILTIN_VA_START: initialize the four
   va_list fields.  The register save area comes from
   expand_builtin_saveregs (); NEXTARG is the first stack-passed
   argument.  */
static void
mep_expand_va_start (tree valist, rtx nextarg)
{
  tree f_next_gp, f_next_gp_limit, f_next_cop, f_next_stack;
  tree next_gp, next_gp_limit, next_cop, next_stack;
  tree t, u;
  int ns;

  ns = cfun->machine->arg_regs_to_save;

  /* Field decls, in the order mep_build_builtin_va_list chained
     them.  */
  f_next_gp = TYPE_FIELDS (va_list_type_node);
  f_next_gp_limit = DECL_CHAIN (f_next_gp);
  f_next_cop = DECL_CHAIN (f_next_gp_limit);
  f_next_stack = DECL_CHAIN (f_next_cop);

  next_gp = build3 (COMPONENT_REF, TREE_TYPE (f_next_gp), valist, f_next_gp,
                    NULL_TREE);
  next_gp_limit = build3 (COMPONENT_REF, TREE_TYPE (f_next_gp_limit),
                          valist, f_next_gp_limit, NULL_TREE);
  next_cop = build3 (COMPONENT_REF, TREE_TYPE (f_next_cop), valist, f_next_cop,
                     NULL_TREE);
  next_stack = build3 (COMPONENT_REF, TREE_TYPE (f_next_stack),
                       valist, f_next_stack, NULL_TREE);

  /* va_list.next_gp = expand_builtin_saveregs (); */
  u = make_tree (sizetype, expand_builtin_saveregs ());
  u = fold_convert (ptr_type_node, u);
  t = build2 (MODIFY_EXPR, ptr_type_node, next_gp, u);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  /* va_list.next_gp_limit = va_list.next_gp + 4 * ns; */
  u = fold_build_pointer_plus_hwi (u, 4 * ns);
  t = build2 (MODIFY_EXPR, ptr_type_node, next_gp_limit, u);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  u = fold_build_pointer_plus_hwi (u, 8 * ((ns+1)/2));
  /* va_list.next_cop = ROUND_UP(va_list.next_gp_limit,8); */
  t = build2 (MODIFY_EXPR, ptr_type_node, next_cop, u);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  /* va_list.next_stack = nextarg; */
  u = make_tree (ptr_type_node, nextarg);
  t = build2 (MODIFY_EXPR, ptr_type_node, next_stack, u);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
}
3640
 
3641
/* Implement TARGET_GIMPLIFY_VA_ARG_EXPR: emit gimple that fetches the
   next vararg of TYPE from the va_list VALIST.  Values larger than a
   register (4 bytes, or 8 for IVC2 vectors) are passed by reference,
   so we fetch a pointer and dereference it.  IVC2 vector arguments
   are read from the coprocessor save area (__va_next_cop) instead of
   the core save area.  */
static tree
mep_gimplify_va_arg_expr (tree valist, tree type,
                          gimple_seq *pre_p,
                          gimple_seq *post_p ATTRIBUTE_UNUSED)
{
  HOST_WIDE_INT size, rsize;
  bool by_reference, ivc2_vec;
  tree f_next_gp, f_next_gp_limit, f_next_cop, f_next_stack;
  tree next_gp, next_gp_limit, next_cop, next_stack;
  tree label_sover, label_selse;
  tree tmp, res_addr;

  ivc2_vec = TARGET_IVC2 && VECTOR_TYPE_P (type);

  size = int_size_in_bytes (type);
  by_reference = (size > (ivc2_vec ? 8 : 4)) || (size <= 0);

  if (by_reference)
    {
      /* Fetch a pointer to the value instead of the value itself.  */
      type = build_pointer_type (type);
      size = 4;
    }
  /* Stack slots are word-aligned.  */
  rsize = (size + UNITS_PER_WORD - 1) & -UNITS_PER_WORD;

  f_next_gp = TYPE_FIELDS (va_list_type_node);
  f_next_gp_limit = DECL_CHAIN (f_next_gp);
  f_next_cop = DECL_CHAIN (f_next_gp_limit);
  f_next_stack = DECL_CHAIN (f_next_cop);

  next_gp = build3 (COMPONENT_REF, TREE_TYPE (f_next_gp), valist, f_next_gp,
                    NULL_TREE);
  next_gp_limit = build3 (COMPONENT_REF, TREE_TYPE (f_next_gp_limit),
                          valist, f_next_gp_limit, NULL_TREE);
  next_cop = build3 (COMPONENT_REF, TREE_TYPE (f_next_cop), valist, f_next_cop,
                     NULL_TREE);
  next_stack = build3 (COMPONENT_REF, TREE_TYPE (f_next_stack),
                       valist, f_next_stack, NULL_TREE);

  /* if f_next_gp < f_next_gp_limit
       IF (VECTOR_P && IVC2)
         val = *f_next_cop;
       ELSE
         val = *f_next_gp;
       f_next_gp += 4;
       f_next_cop += 8;
     else
       label_selse:
       val = *f_next_stack;
       f_next_stack += rsize;
     label_sover:
  */

  label_sover = create_artificial_label (UNKNOWN_LOCATION);
  label_selse = create_artificial_label (UNKNOWN_LOCATION);
  res_addr = create_tmp_var (ptr_type_node, NULL);

  /* Branch to the stack case when the register save area is
     exhausted.  */
  tmp = build2 (GE_EXPR, boolean_type_node, next_gp,
                unshare_expr (next_gp_limit));
  tmp = build3 (COND_EXPR, void_type_node, tmp,
                build1 (GOTO_EXPR, void_type_node,
                        unshare_expr (label_selse)),
                NULL_TREE);
  gimplify_and_add (tmp, pre_p);

  if (ivc2_vec)
    {
      tmp = build2 (MODIFY_EXPR, void_type_node, res_addr, next_cop);
      gimplify_and_add (tmp, pre_p);
    }
  else
    {
      tmp = build2 (MODIFY_EXPR, void_type_node, res_addr, next_gp);
      gimplify_and_add (tmp, pre_p);
    }

  /* Both cursors advance together: one core slot (4) and one
     coprocessor slot (8) per argument.  */
  tmp = fold_build_pointer_plus_hwi (unshare_expr (next_gp), 4);
  gimplify_assign (unshare_expr (next_gp), tmp, pre_p);

  tmp = fold_build_pointer_plus_hwi (unshare_expr (next_cop), 8);
  gimplify_assign (unshare_expr (next_cop), tmp, pre_p);

  tmp = build1 (GOTO_EXPR, void_type_node, unshare_expr (label_sover));
  gimplify_and_add (tmp, pre_p);

  /* - - */

  tmp = build1 (LABEL_EXPR, void_type_node, unshare_expr (label_selse));
  gimplify_and_add (tmp, pre_p);

  tmp = build2 (MODIFY_EXPR, void_type_node, res_addr, unshare_expr (next_stack));
  gimplify_and_add (tmp, pre_p);

  tmp = fold_build_pointer_plus_hwi (unshare_expr (next_stack), rsize);
  gimplify_assign (unshare_expr (next_stack), tmp, pre_p);

  /* - - */

  tmp = build1 (LABEL_EXPR, void_type_node, unshare_expr (label_sover));
  gimplify_and_add (tmp, pre_p);

  res_addr = fold_convert (build_pointer_type (type), res_addr);

  /* For by-reference values, RES_ADDR holds the address of a pointer
     to the value; dereference twice.  */
  if (by_reference)
    res_addr = build_va_arg_indirect_ref (res_addr);

  return build_va_arg_indirect_ref (res_addr);
}
3748
 
3749
void
3750
mep_init_cumulative_args (CUMULATIVE_ARGS *pcum, tree fntype,
3751
                          rtx libname ATTRIBUTE_UNUSED,
3752
                          tree fndecl ATTRIBUTE_UNUSED)
3753
{
3754
  pcum->nregs = 0;
3755
 
3756
  if (fntype && lookup_attribute ("vliw", TYPE_ATTRIBUTES (fntype)))
3757
    pcum->vliw = 1;
3758
  else
3759
    pcum->vliw = 0;
3760
}
3761
 
3762
/* The ABI is thus: Arguments are in $1, $2, $3, $4, stack.  Arguments
3763
   larger than 4 bytes are passed indirectly.  Return value in 0,
3764
   unless bigger than 4 bytes, then the caller passes a pointer as the
3765
   first arg.  For varargs, we copy $1..$4 to the stack.  */
3766
 
3767
static rtx
3768
mep_function_arg (cumulative_args_t cum_v, enum machine_mode mode,
3769
                  const_tree type ATTRIBUTE_UNUSED,
3770
                  bool named ATTRIBUTE_UNUSED)
3771
{
3772
  CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
3773
 
3774
  /* VOIDmode is a signal for the backend to pass data to the call
3775
     expander via the second operand to the call pattern.  We use
3776
     this to determine whether to use "jsr" or "jsrv".  */
3777
  if (mode == VOIDmode)
3778
    return GEN_INT (cum->vliw);
3779
 
3780
  /* If we havn't run out of argument registers, return the next.  */
3781
  if (cum->nregs < 4)
3782
    {
3783
      if (type && TARGET_IVC2 && VECTOR_TYPE_P (type))
3784
        return gen_rtx_REG (mode, cum->nregs + 49);
3785
      else
3786
        return gen_rtx_REG (mode, cum->nregs + 1);
3787
    }
3788
 
3789
  /* Otherwise the argument goes on the stack.  */
3790
  return NULL_RTX;
3791
}
3792
 
3793
static bool
3794
mep_pass_by_reference (cumulative_args_t cum ATTRIBUTE_UNUSED,
3795
                       enum machine_mode mode,
3796
                       const_tree        type,
3797
                       bool              named ATTRIBUTE_UNUSED)
3798
{
3799
  int size = bytesize (type, mode);
3800
 
3801
  /* This is non-obvious, but yes, large values passed after we've run
3802
     out of registers are *still* passed by reference - we put the
3803
     address of the parameter on the stack, as well as putting the
3804
     parameter itself elsewhere on the stack.  */
3805
 
3806
  if (size <= 0 || size > 8)
3807
    return true;
3808
  if (size <= 4)
3809
    return false;
3810
  if (TARGET_IVC2 && get_cumulative_args (cum)->nregs < 4
3811
      && type != NULL_TREE && VECTOR_TYPE_P (type))
3812
    return false;
3813
  return true;
3814
}
3815
 
3816
static void
3817
mep_function_arg_advance (cumulative_args_t pcum,
3818
                          enum machine_mode mode ATTRIBUTE_UNUSED,
3819
                          const_tree type ATTRIBUTE_UNUSED,
3820
                          bool named ATTRIBUTE_UNUSED)
3821
{
3822
  get_cumulative_args (pcum)->nregs += 1;
3823
}
3824
 
3825
bool
3826
mep_return_in_memory (const_tree type, const_tree decl ATTRIBUTE_UNUSED)
3827
{
3828
  int size = bytesize (type, BLKmode);
3829
  if (TARGET_IVC2 && VECTOR_TYPE_P (type))
3830
    return size > 0 && size <= 8 ? 0 : 1;
3831
  return size > 0 && size <= 4 ? 0 : 1;
3832
}
3833
 
3834
/* Implement TARGET_NARROW_VOLATILE_BITFIELD: always narrow volatile
   bit-field accesses to the field's declared mode.

   Fix: removed the unreachable "return false;" that followed
   "return true;" (dead code left over from a toggled experiment).  */

static bool
mep_narrow_volatile_bitfield (void)
{
  return true;
}
3840
 
3841
/* Implement FUNCTION_VALUE.  All values are returned in $0, except
   that on IVC2 vector values are returned in coprocessor register
   48.  */

rtx
mep_function_value (const_tree type, const_tree func ATTRIBUTE_UNUSED)
{
  if (TARGET_IVC2 && VECTOR_TYPE_P (type))
    return gen_rtx_REG (TYPE_MODE (type), 48);
  return gen_rtx_REG (TYPE_MODE (type), RETURN_VALUE_REGNUM);
}
3850
 
3851
/* Implement LIBCALL_VALUE, using the same rules as mep_function_value.  */
3852
 
3853
rtx
3854
mep_libcall_value (enum machine_mode mode)
3855
{
3856
  return gen_rtx_REG (mode, RETURN_VALUE_REGNUM);
3857
}
3858
 
3859
/* Handle pipeline hazards.  */

/* Opcode classes that take part in the stc/fsft and stc/ret hazard
   pairs; op_none covers every other instruction.  opnames[] is
   indexed by op_num and used only in the emitted hazard comment.  */
typedef enum { op_none, op_stc, op_fsft, op_ret } op_num;
static const char *opnames[] = { "", "stc", "fsft", "ret" };

/* Class of the most recently emitted opcode, so consecutive
   instructions can be checked for a hazard pair.  */
static int prev_opcode = 0;
3865
 
3866
/* This isn't as optimal as it could be, because we don't know what
   control register the STC opcode is storing in.  We only need to add
   the nop if it's the relevant register, but we add it for irrelevant
   registers also.  */
3870
 
3871
/* Called just before an opcode's text PTR is written to FILE.
   Classify the opcode by mnemonic and, if it forms a hazard pair
   with the previously emitted one (stc followed by fsft or ret),
   emit a "nop" plus an explanatory comment first.  */

void
mep_asm_output_opcode (FILE *file, const char *ptr)
{
  int this_opcode = op_none;
  const char *hazard = 0;

  switch (*ptr)
    {
    case 'f':
      /* The !ISGRAPH check rejects longer mnemonics that merely
         start with the hazard mnemonic.  */
      if (strncmp (ptr, "fsft", 4) == 0 && !ISGRAPH (ptr[4]))
        this_opcode = op_fsft;
      break;
    case 'r':
      if (strncmp (ptr, "ret", 3) == 0 && !ISGRAPH (ptr[3]))
        this_opcode = op_ret;
      break;
    case 's':
      if (strncmp (ptr, "stc", 3) == 0 && !ISGRAPH (ptr[3]))
        this_opcode = op_stc;
      break;
    }

  if (prev_opcode == op_stc && this_opcode == op_fsft)
    hazard = "nop";
  if (prev_opcode == op_stc && this_opcode == op_ret)
    hazard = "nop";

  if (hazard)
    fprintf(file, "%s\t# %s-%s hazard\n\t",
            hazard, opnames[prev_opcode], opnames[this_opcode]);

  prev_opcode = this_opcode;
}
3904
 
3905
/* Handle attributes.  */
3906
 
3907
/* Attribute handler for "based" and "tiny".  Valid only on variables
   (and pointer/type decls); rejects auto-storage variables and
   attributes attached to the pointed-to type of a pointer.  Sets
   *NO_ADD when the attribute should be dropped.  */

static tree
mep_validate_based_tiny (tree *node, tree name, tree args,
                         int flags ATTRIBUTE_UNUSED, bool *no_add)
{
  if (TREE_CODE (*node) != VAR_DECL
      && TREE_CODE (*node) != POINTER_TYPE
      && TREE_CODE (*node) != TYPE_DECL)
    {
      warning (0, "%qE attribute only applies to variables", name);
      *no_add = true;
    }
  else if (args == NULL_TREE && TREE_CODE (*node) == VAR_DECL)
    {
      if (! (TREE_PUBLIC (*node) || TREE_STATIC (*node)))
        {
          warning (0, "address region attributes not allowed with auto storage class");
          *no_add = true;
        }
      /* Ignore storage attribute of pointed to variable: char __far * x;  */
      if (TREE_TYPE (*node) && TREE_CODE (TREE_TYPE (*node)) == POINTER_TYPE)
        {
          warning (0, "address region attributes on pointed-to types ignored");
          *no_add = true;
        }
    }

  return NULL_TREE;
}
3935
 
3936
static int
3937
mep_multiple_address_regions (tree list, bool check_section_attr)
3938
{
3939
  tree a;
3940
  int count_sections = 0;
3941
  int section_attr_count = 0;
3942
 
3943
  for (a = list; a; a = TREE_CHAIN (a))
3944
    {
3945
      if (is_attribute_p ("based", TREE_PURPOSE (a))
3946
          || is_attribute_p ("tiny", TREE_PURPOSE (a))
3947
          || is_attribute_p ("near", TREE_PURPOSE (a))
3948
          || is_attribute_p ("far", TREE_PURPOSE (a))
3949
          || is_attribute_p ("io", TREE_PURPOSE (a)))
3950
        count_sections ++;
3951
      if (check_section_attr)
3952
        section_attr_count += is_attribute_p ("section", TREE_PURPOSE (a));
3953
    }
3954
 
3955
  if (check_section_attr)
3956
    return section_attr_count;
3957
  else
3958
    return count_sections;
3959
}
3960
 
3961
/* Fetch the attribute list relevant to DECL: type attributes if DECL
   is itself a type, otherwise the decl's own attributes, falling back
   to the attributes of the decl's type.
   NOTE(review): the expansion is an unparenthesized conditional
   expression — callers must not embed it in a larger expression
   without adding parentheses.  */
#define MEP_ATTRIBUTES(decl) \
  (TYPE_P (decl)) ? TYPE_ATTRIBUTES (decl) \
                : DECL_ATTRIBUTES (decl) \
                  ? (DECL_ATTRIBUTES (decl)) \
                  : TYPE_ATTRIBUTES (TREE_TYPE (decl))
3966
 
3967
/* Attribute handler for "near" and "far".  Like
   mep_validate_based_tiny, but also valid on functions, and
   additionally rejects a second address-region attribute on the same
   declaration (dropping ALL of its attributes in that case).  */

static tree
mep_validate_near_far (tree *node, tree name, tree args,
                       int flags ATTRIBUTE_UNUSED, bool *no_add)
{
  if (TREE_CODE (*node) != VAR_DECL
      && TREE_CODE (*node) != FUNCTION_DECL
      && TREE_CODE (*node) != METHOD_TYPE
      && TREE_CODE (*node) != POINTER_TYPE
      && TREE_CODE (*node) != TYPE_DECL)
    {
      warning (0, "%qE attribute only applies to variables and functions",
               name);
      *no_add = true;
    }
  else if (args == NULL_TREE && TREE_CODE (*node) == VAR_DECL)
    {
      if (! (TREE_PUBLIC (*node) || TREE_STATIC (*node)))
        {
          warning (0, "address region attributes not allowed with auto storage class");
          *no_add = true;
        }
      /* Ignore storage attribute of pointed to variable: char __far * x;  */
      if (TREE_TYPE (*node) && TREE_CODE (TREE_TYPE (*node)) == POINTER_TYPE)
        {
          warning (0, "address region attributes on pointed-to types ignored");
          *no_add = true;
        }
    }
  else if (mep_multiple_address_regions (MEP_ATTRIBUTES (*node), false) > 0)
    {
      warning (0, "duplicate address region attribute %qE in declaration of %qE on line %d",
               name, DECL_NAME (*node), DECL_SOURCE_LINE (*node));
      DECL_ATTRIBUTES (*node) = NULL_TREE;
    }
  return NULL_TREE;
}
4003
 
4004
static tree
4005
mep_validate_disinterrupt (tree *node, tree name, tree args ATTRIBUTE_UNUSED,
4006
                           int flags ATTRIBUTE_UNUSED, bool *no_add)
4007
{
4008
  if (TREE_CODE (*node) != FUNCTION_DECL
4009
      && TREE_CODE (*node) != METHOD_TYPE)
4010
    {
4011
      warning (0, "%qE attribute only applies to functions", name);
4012
      *no_add = true;
4013
    }
4014
  return NULL_TREE;
4015
}
4016
 
4017
/* Attribute handler for "interrupt".  Only valid on function
   declarations; interrupt handlers must return void, take no
   arguments, and are never inlined.  */

static tree
mep_validate_interrupt (tree *node, tree name, tree args ATTRIBUTE_UNUSED,
                        int flags ATTRIBUTE_UNUSED, bool *no_add)
{
  tree function_type;

  if (TREE_CODE (*node) != FUNCTION_DECL)
    {
      warning (0, "%qE attribute only applies to functions", name);
      *no_add = true;
      return NULL_TREE;
    }

  /* Explicitly-inline interrupt handlers are an error; either way the
     function is marked uninlinable.  */
  if (DECL_DECLARED_INLINE_P (*node))
    error ("cannot inline interrupt function %qE", DECL_NAME (*node));
  DECL_UNINLINABLE (*node) = 1;

  function_type = TREE_TYPE (*node);

  if (TREE_TYPE (function_type) != void_type_node)
    error ("interrupt function must have return type of void");

  /* A prototype must be either empty or exactly (void).  */
  if (prototype_p (function_type)
      && (TREE_VALUE (TYPE_ARG_TYPES (function_type)) != void_type_node
          || TREE_CHAIN (TYPE_ARG_TYPES (function_type)) != NULL_TREE))
    error ("interrupt function must have no arguments");

  return NULL_TREE;
}
4046
 
4047
/* Attribute handler for "io" and "cb".  Valid only on variables; the
   optional argument (the I/O address) must be an integer constant.
   Unless -mio-volatile is disabled, the variable is made volatile.  */

static tree
mep_validate_io_cb (tree *node, tree name, tree args,
                    int flags ATTRIBUTE_UNUSED, bool *no_add)
{
  if (TREE_CODE (*node) != VAR_DECL)
    {
      warning (0, "%qE attribute only applies to variables", name);
      *no_add = true;
    }

  if (args != NULL_TREE)
    {
      /* Strip a NON_LVALUE_EXPR wrapper before checking for a
         constant.  */
      if (TREE_CODE (TREE_VALUE (args)) == NON_LVALUE_EXPR)
        TREE_VALUE (args) = TREE_OPERAND (TREE_VALUE (args), 0);
      if (TREE_CODE (TREE_VALUE (args)) != INTEGER_CST)
        {
          warning (0, "%qE attribute allows only an integer constant argument",
                   name);
          *no_add = true;
        }
    }

  /* Only mark the decl volatile when the attribute is being kept.  */
  if (*no_add == false && !TARGET_IO_NO_VOLATILE)
    TREE_THIS_VOLATILE (*node) = 1;

  return NULL_TREE;
}
4074
 
4075
/* Attribute handler for "vliw".  Valid only on function types/decls
   (and field/type decls); for the common mistakes of attaching it to
   a pointer or array, a one-time hint with the correct syntax is
   emitted.  */

static tree
mep_validate_vliw (tree *node, tree name, tree args ATTRIBUTE_UNUSED,
                   int flags ATTRIBUTE_UNUSED, bool *no_add)
{
  if (TREE_CODE (*node) != FUNCTION_TYPE
      && TREE_CODE (*node) != FUNCTION_DECL
      && TREE_CODE (*node) != METHOD_TYPE
      && TREE_CODE (*node) != FIELD_DECL
      && TREE_CODE (*node) != TYPE_DECL)
    {
      static int gave_pointer_note = 0;
      static int gave_array_note = 0;
      static const char * given_type = NULL;

      given_type = tree_code_name[TREE_CODE (*node)];
      if (TREE_CODE (*node) == POINTER_TYPE)
        given_type = "pointers";
      if (TREE_CODE (*node) == ARRAY_TYPE)
        given_type = "arrays";

      /* NOTE(review): given_type is always non-NULL here (it is
         assigned from tree_code_name above), so the else branch looks
         unreachable — kept as-is.  */
      if (given_type)
        warning (0, "%qE attribute only applies to functions, not %s",
                 name, given_type);
      else
        warning (0, "%qE attribute only applies to functions",
                 name);
      *no_add = true;

      if (TREE_CODE (*node) == POINTER_TYPE
          && !gave_pointer_note)
        {
          inform (input_location,
                  "to describe a pointer to a VLIW function, use syntax like this:\n%s",
                  "   typedef int (__vliw *vfuncptr) ();");
          gave_pointer_note = 1;
        }

      if (TREE_CODE (*node) == ARRAY_TYPE
          && !gave_array_note)
        {
          inform (input_location,
                  "to describe an array of VLIW function pointers, use syntax like this:\n%s",
                  "   typedef int (__vliw *vfuncptr[]) ();");
          gave_array_note = 1;
        }
    }
  if (!TARGET_VLIW)
    error ("VLIW functions are not allowed without a VLIW configuration");
  return NULL_TREE;
}
4125
 
4126
static const struct attribute_spec mep_attribute_table[11] =
4127
{
4128
  /* name         min max decl   type   func   handler
4129
     affects_type_identity */
4130
  { "based",        0, 0, false, false, false, mep_validate_based_tiny, false },
4131
  { "tiny",         0, 0, false, false, false, mep_validate_based_tiny, false },
4132
  { "near",         0, 0, false, false, false, mep_validate_near_far, false },
4133
  { "far",          0, 0, false, false, false, mep_validate_near_far, false },
4134
  { "disinterrupt", 0, 0, false, false, false, mep_validate_disinterrupt,
4135
    false },
4136
  { "interrupt",    0, 0, false, false, false, mep_validate_interrupt, false },
4137
  { "io",           0, 1, false, false, false, mep_validate_io_cb, false },
4138
  { "cb",           0, 1, false, false, false, mep_validate_io_cb, false },
4139
  { "vliw",         0, 0, false, true,  false, mep_validate_vliw, false },
4140
  { NULL,           0, 0, false, false, false, NULL, false }
4141
};
4142
 
4143
static bool
4144
mep_function_attribute_inlinable_p (const_tree callee)
4145
{
4146
  tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (callee));
4147
  if (!attrs) attrs = DECL_ATTRIBUTES (callee);
4148
  return (lookup_attribute ("disinterrupt", attrs) == 0
4149
          && lookup_attribute ("interrupt", attrs) == 0);
4150
}
4151
 
4152
static bool
4153
mep_can_inline_p (tree caller, tree callee)
4154
{
4155
  if (TREE_CODE (callee) == ADDR_EXPR)
4156
    callee = TREE_OPERAND (callee, 0);
4157
 
4158
  if (!mep_vliw_function_p (caller)
4159
      && mep_vliw_function_p (callee))
4160
    {
4161
      return false;
4162
    }
4163
  return true;
4164
}
4165
 
4166
/* Flag bits recorded per function name mentioned in a #pragma call
   or #pragma disinterrupt.  */
#define FUNC_CALL               1
#define FUNC_DISINTERRUPT       2


/* One entry per function name seen in a pragma.  FLAG holds which
   pragmas named the function; USED holds which of those were later
   consulted, so unused pragmas can be warned about at end of file.  */
struct GTY(()) pragma_entry {
  int used;
  int flag;
  const char *funcname;
};
typedef struct pragma_entry pragma_entry;

/* Hash table of pragma_entry records keyed by function name.
   (The original comment said "farcall-tagged sections", which appears
   stale — the table stores pragma flags; verify against history.)  */
static GTY((param_is (pragma_entry))) htab_t pragma_htab;
4179
 
4180
static int
4181
pragma_entry_eq (const void *p1, const void *p2)
4182
{
4183
  const pragma_entry *old = (const pragma_entry *) p1;
4184
  const char *new_name = (const char *) p2;
4185
 
4186
  return strcmp (old->funcname, new_name) == 0;
4187
}
4188
 
4189
static hashval_t
4190
pragma_entry_hash (const void *p)
4191
{
4192
  const pragma_entry *old = (const pragma_entry *) p;
4193
  return htab_hash_string (old->funcname);
4194
}
4195
 
4196
/* Record that FUNCNAME was named in a pragma of kind FLAG
   (FUNC_CALL or FUNC_DISINTERRUPT), creating the hash table and the
   entry on first use.  */

static void
mep_note_pragma_flag (const char *funcname, int flag)
{
  pragma_entry **slot;

  if (!pragma_htab)
    pragma_htab = htab_create_ggc (31, pragma_entry_hash,
                                    pragma_entry_eq, NULL);

  slot = (pragma_entry **)
    htab_find_slot_with_hash (pragma_htab, funcname,
                              htab_hash_string (funcname), INSERT);

  if (!*slot)
    {
      /* GC-allocated so the entry survives across functions; the name
         is copied because the caller's string may be transient.  */
      *slot = ggc_alloc_pragma_entry ();
      (*slot)->flag = 0;
      (*slot)->used = 0;
      (*slot)->funcname = ggc_strdup (funcname);
    }
  (*slot)->flag |= flag;
}
4218
 
4219
/* Return true if FUNCNAME was named in a pragma of kind FLAG, and
   mark that pragma as used so it isn't warned about at end of file.
   A "@X." section-encoding prefix on the name is skipped first.  */

static bool
mep_lookup_pragma_flag (const char *funcname, int flag)
{
  pragma_entry **slot;

  if (!pragma_htab)
    return false;

  if (funcname[0] == '@' && funcname[2] == '.')
    funcname += 3;

  slot = (pragma_entry **)
    htab_find_slot_with_hash (pragma_htab, funcname,
                              htab_hash_string (funcname), NO_INSERT);
  if (slot && *slot && ((*slot)->flag & flag))
    {
      (*slot)->used |= flag;
      return true;
    }
  return false;
}
4240
 
4241
/* Return true if FUNCNAME was named in a #pragma call.  */

bool
mep_lookup_pragma_call (const char *funcname)
{
  return mep_lookup_pragma_flag (funcname, FUNC_CALL);
}

/* Record that FUNCNAME was named in a #pragma call.  */

void
mep_note_pragma_call (const char *funcname)
{
  mep_note_pragma_flag (funcname, FUNC_CALL);
}

/* Return true if FUNCNAME was named in a #pragma disinterrupt.  */

bool
mep_lookup_pragma_disinterrupt (const char *funcname)
{
  return mep_lookup_pragma_flag (funcname, FUNC_DISINTERRUPT);
}

/* Record that FUNCNAME was named in a #pragma disinterrupt.  */

void
mep_note_pragma_disinterrupt (const char *funcname)
{
  mep_note_pragma_flag (funcname, FUNC_DISINTERRUPT);
}
4264
 
4265
/* htab_traverse callback: warn about a "#pragma disinterrupt" whose
   function was never looked up.  Always returns 1 to continue the
   traversal.  */

static int
note_unused_pragma_disinterrupt (void **slot, void *data ATTRIBUTE_UNUSED)
{
  const pragma_entry *d = (const pragma_entry *)(*slot);

  if ((d->flag & FUNC_DISINTERRUPT)
      && !(d->used & FUNC_DISINTERRUPT))
    warning (0, "\"#pragma disinterrupt %s\" not used", d->funcname);
  return 1;
}
4275
 
4276
/* End-of-file hook: report any #pragma disinterrupt that was never
   applied to a function.  */

void
mep_file_cleanups (void)
{
  if (pragma_htab)
    htab_traverse (pragma_htab, note_unused_pragma_disinterrupt, NULL);
}
4282
 
4283
/* These three functions provide a bridge between the pragmas that
   affect register classes, and the functions that maintain them.  We
   can't call those functions directly as pragma handling is part of
   the front end and doesn't have direct access to them.  */
4287
 
4288
/* Pragma bridge: snapshot the current register information.  */

void
mep_save_register_info (void)
{
  save_register_info ();
}

/* Pragma bridge: re-initialize register info after a pragma changed
   register classes.  */

void
mep_reinit_regs (void)
{
  reinit_regs ();
}

/* Pragma bridge: perform the initial register setup.  */

void
mep_init_regs (void)
{
  init_regs ();
}
4305
 
4306
 
4307
 
4308
/* Map DECL's attribute LIST to a one-character section encoding:
   'b' based, 't' tiny, 'n' near, 'f' far, 'i' io-with-valid-address,
   'I' io-without/with-invalid-address, 'c' cb, or 0 for none.  If
   more than one address-region attribute is present, warn and drop
   everything after the first.  */

static int
mep_attrlist_to_encoding (tree list, tree decl)
{
  if (mep_multiple_address_regions (list, false) > 1)
    {
      warning (0, "duplicate address region attribute %qE in declaration of %qE on line %d",
               TREE_PURPOSE (TREE_CHAIN (list)),
               DECL_NAME (decl),
               DECL_SOURCE_LINE (decl));
      TREE_CHAIN (list) = NULL_TREE;
    }

  while (list)
    {
      if (is_attribute_p ("based", TREE_PURPOSE (list)))
        return 'b';
      if (is_attribute_p ("tiny", TREE_PURPOSE (list)))
        return 't';
      if (is_attribute_p ("near", TREE_PURPOSE (list)))
        return 'n';
      if (is_attribute_p ("far", TREE_PURPOSE (list)))
        return 'f';
      if (is_attribute_p ("io", TREE_PURPOSE (list)))
        {
          /* 'i' only for a constant address within the 24-bit I/O
             range; anything else gets 'I'.  */
          if (TREE_VALUE (list)
              && TREE_VALUE (TREE_VALUE (list))
              && TREE_CODE (TREE_VALUE (TREE_VALUE (list))) == INTEGER_CST)
            {
              int location = TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE(list)));
              if (location >= 0
                  && location <= 0x1000000)
                return 'i';
            }
          return 'I';
        }
      if (is_attribute_p ("cb", TREE_PURPOSE (list)))
        return 'c';
      list = TREE_CHAIN (list);
    }
  /* With -mtf, un-sectioned functions default to the far-text
     encoding.  */
  if (TARGET_TF
      && TREE_CODE (decl) == FUNCTION_DECL
      && DECL_SECTION_NAME (decl) == 0)
    return 'f';
  return 0;
}
4353
 
4354
static int
4355
mep_comp_type_attributes (const_tree t1, const_tree t2)
4356
{
4357
  int vliw1, vliw2;
4358
 
4359
  vliw1 = (lookup_attribute ("vliw", TYPE_ATTRIBUTES (t1)) != 0);
4360
  vliw2 = (lookup_attribute ("vliw", TYPE_ATTRIBUTES (t2)) != 0);
4361
 
4362
  if (vliw1 != vliw2)
4363
    return 0;
4364
 
4365
  return 1;
4366
}
4367
 
4368
/* Implement TARGET_INSERT_ATTRIBUTES.  Adds "disinterrupt" to
   functions named in a #pragma disinterrupt, and assigns a default
   address-region attribute (based/tiny/far) to global variables based
   on size cutoffs, -mrand-tpgp hashing, and -mconst-section, unless
   an explicit region attribute is already present.  */

static void
mep_insert_attributes (tree decl, tree *attributes)
{
  int size;
  const char *secname = 0;
  tree attrib, attrlist;
  char encoding;

  if (TREE_CODE (decl) == FUNCTION_DECL)
    {
      const char *funcname = IDENTIFIER_POINTER (DECL_NAME (decl));

      if (mep_lookup_pragma_disinterrupt (funcname))
        {
          attrib = build_tree_list (get_identifier ("disinterrupt"), NULL_TREE);
          *attributes = chainon (*attributes, attrib);
        }
    }

  /* Only static-storage variables get default region attributes.  */
  if (TREE_CODE (decl) != VAR_DECL
      || ! (TREE_PUBLIC (decl) || TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
    return;

  if (TREE_READONLY (decl) && TARGET_DC)
    /* -mdc means that const variables default to the near section,
       regardless of the size cutoff.  */
    return;

  /* User specified an attribute, so override the default.
     Ignore storage attribute of pointed to variable. char __far * x;  */
  if (! (TREE_TYPE (decl) && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE))
    {
      if (TYPE_P (decl) && TYPE_ATTRIBUTES (decl) && *attributes)
        TYPE_ATTRIBUTES (decl) = NULL_TREE;
      else if (DECL_ATTRIBUTES (decl) && *attributes)
        DECL_ATTRIBUTES (decl) = NULL_TREE;
    }

  /* Look for an explicit region encoding: first on the incoming
     attributes / the decl, then on the decl's type.  */
  attrlist = *attributes ? *attributes : DECL_ATTRIBUTES (decl);
  encoding = mep_attrlist_to_encoding (attrlist, decl);
  if (!encoding && TYPE_P (TREE_TYPE (decl)))
    {
      attrlist = TYPE_ATTRIBUTES (TREE_TYPE (decl));
      encoding = mep_attrlist_to_encoding (attrlist, decl);
    }
  if (encoding)
    {
      /* This means that the declaration has a specific section
         attribute, so we should not apply the default rules.  */

      if (encoding == 'i' || encoding == 'I')
        {
          tree attr = lookup_attribute ("io", attrlist);
          if (attr
              && TREE_VALUE (attr)
              && TREE_VALUE (TREE_VALUE(attr)))
            {
              int location = TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE(attr)));
              static tree previous_value = 0;
              static int previous_location = 0;
              static tree previous_name = 0;

              /* We take advantage of the fact that gcc will reuse the
                 same tree pointer when applying an attribute to a
                 list of decls, but produce a new tree for attributes
                 on separate source lines, even when they're textually
                 identical.  This is the behavior we want.  */
              if (TREE_VALUE (attr) == previous_value
                  && location == previous_location)
                {
                  warning(0, "__io address 0x%x is the same for %qE and %qE",
                          location, previous_name, DECL_NAME (decl));
                }
              previous_name = DECL_NAME (decl);
              previous_location = location;
              previous_value = TREE_VALUE (attr);
            }
        }
      return;
    }


  /* Declarations of arrays can change size.  Don't trust them.  */
  if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
    size = 0;
  else
    size = int_size_in_bytes (TREE_TYPE (decl));

  if (TARGET_RAND_TPGP && size <= 4 && size > 0)
    {
      if (TREE_PUBLIC (decl)
          || DECL_EXTERNAL (decl)
          || TREE_STATIC (decl))
        {
          /* -mrand-tpgp: spread small globals pseudo-randomly across
             regions by hashing the name.  */
          const char *name = IDENTIFIER_POINTER (DECL_NAME (decl));
          int key = 0;

          while (*name)
            key += *name++;

          switch (key & 3)
            {
            case 0:
              secname = "based";
              break;
            case 1:
              secname = "tiny";
              break;
            case 2:
              secname = "far";
              break;
            default:
              ;
            }
        }
    }
  else
    {
      if (size <= mep_based_cutoff && size > 0)
        secname = "based";
      else if (size <= mep_tiny_cutoff && size > 0)
        secname = "tiny";
      else if (TARGET_L)
        secname = "far";
    }

  /* -mconst-section overrides the size-based default for read-only
     data ("near" means leave the decl alone).  */
  if (mep_const_section && TREE_READONLY (decl))
    {
      if (strcmp (mep_const_section, "tiny") == 0)
        secname = "tiny";
      else if (strcmp (mep_const_section, "near") == 0)
        return;
      else if (strcmp (mep_const_section, "far") == 0)
        secname = "far";
    }

  if (!secname)
    return;

  if (!mep_multiple_address_regions (*attributes, true)
      && !mep_multiple_address_regions (DECL_ATTRIBUTES (decl), false))
    {
      attrib = build_tree_list (get_identifier (secname), NULL_TREE);

      /* Chain the attribute directly onto the variable's DECL_ATTRIBUTES
         in order to avoid the POINTER_TYPE bypasses in mep_validate_near_far
         and mep_validate_based_tiny.  */
      DECL_ATTRIBUTES (decl) = chainon (DECL_ATTRIBUTES (decl), attrib);
    }
}
4518
 
4519
/* Implement TARGET_ENCODE_SECTION_INFO.  On the first call for a
   variable or function, prefix its assembler name with "@X." where X
   is the address-region encoding letter, and warn if the object is
   too large for the section the encoding implies.  */

static void
mep_encode_section_info (tree decl, rtx rtl, int first)
{
  rtx rtlname;
  const char *oldname;
  const char *secname;
  char encoding;
  char *newname;
  tree idp;
  int maxsize;
  tree type;
  tree mep_attributes;

  if (! first)
    return;

  if (TREE_CODE (decl) != VAR_DECL
      && TREE_CODE (decl) != FUNCTION_DECL)
    return;

  /* The decl's RTL is either the SYMBOL_REF itself or a MEM wrapping
     one.  */
  rtlname = XEXP (rtl, 0);
  if (GET_CODE (rtlname) == SYMBOL_REF)
    oldname = XSTR (rtlname, 0);
  else if (GET_CODE (rtlname) == MEM
           && GET_CODE (XEXP (rtlname, 0)) == SYMBOL_REF)
    oldname = XSTR (XEXP (rtlname, 0), 0);
  else
    gcc_unreachable ();

  type = TREE_TYPE (decl);
  if (type == error_mark_node)
    return;
  mep_attributes = MEP_ATTRIBUTES (decl);

  encoding = mep_attrlist_to_encoding (mep_attributes, decl);

  if (encoding)
    {
      /* "@X." + name + NUL is exactly strlen + 4 bytes.  */
      newname = (char *) alloca (strlen (oldname) + 4);
      sprintf (newname, "@%c.%s", encoding, oldname);
      idp = get_identifier (newname);
      XEXP (rtl, 0) =
        gen_rtx_SYMBOL_REF (Pmode, IDENTIFIER_POINTER (idp));
      SYMBOL_REF_WEAK (XEXP (rtl, 0)) = DECL_WEAK (decl);
      SET_SYMBOL_REF_DECL (XEXP (rtl, 0), decl);

      /* Capacity limits of the encoded sections, for the size
         warning below; 0 means unlimited.  */
      switch (encoding)
        {
        case 'b':
          maxsize = 128;
          secname = "based";
          break;
        case 't':
          maxsize = 65536;
          secname = "tiny";
          break;
        case 'n':
          maxsize = 0x1000000;
          secname = "near";
          break;
        default:
          maxsize = 0;
          secname = 0;
          break;
        }
      if (maxsize && int_size_in_bytes (TREE_TYPE (decl)) > maxsize)
        {
          warning (0, "variable %s (%ld bytes) is too large for the %s section (%d bytes)",
                   oldname,
                   (long) int_size_in_bytes (TREE_TYPE (decl)),
                   secname,
                   maxsize);
        }
    }
}
4594
 
4595
/* Implement TARGET_STRIP_NAME_ENCODING.  Strip any leading '*'
   markers and "@X." section-encoding prefixes from SYM and return a
   pointer to the bare name (no copy is made).  */

const char *
mep_strip_name_encoding (const char *sym)
{
  for (;;)
    {
      if (sym[0] == '*')
        {
          sym++;
          continue;
        }
      if (sym[0] == '@' && sym[2] == '.')
        {
          sym += 3;
          continue;
        }
      return sym;
    }
}
4608
 
4609
/* Implement TARGET_ASM_SELECT_SECTION.  Choose an output section for
   DECL based on its "@X." name encoding, whether it is read-only,
   and (for functions) the "vliw" attribute.  */

static section *
mep_select_section (tree decl, int reloc ATTRIBUTE_UNUSED,
                    unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
{
  int readonly = 1;
  int encoding;

  /* Decide whether DECL's contents are immutable constants.  */
  switch (TREE_CODE (decl))
    {
    case VAR_DECL:
      if (!TREE_READONLY (decl)
          || TREE_SIDE_EFFECTS (decl)
          || !DECL_INITIAL (decl)
          || (DECL_INITIAL (decl) != error_mark_node
              && !TREE_CONSTANT (DECL_INITIAL (decl))))
        readonly = 0;
      break;
    case CONSTRUCTOR:
      if (! TREE_CONSTANT (decl))
        readonly = 0;
      break;

    default:
      break;
    }

  if (TREE_CODE (decl) == FUNCTION_DECL)
    {
      const char *name = XSTR (XEXP (DECL_RTL (decl), 0), 0);

      if (name[0] == '@' && name[2] == '.')
        encoding = name[1];
      else
        encoding = 0;

      /* NOTE(review): the mep_unique_section branch falls through to
         the data-section returns at the bottom rather than returning
         a text section — presumably the caller then uses the unique
         DECL_SECTION_NAME; verify against the upstream hook
         contract.  */
      if (flag_function_sections || DECL_ONE_ONLY (decl))
        mep_unique_section (decl, 0);
      else if (lookup_attribute ("vliw", TYPE_ATTRIBUTES (TREE_TYPE (decl))))
        {
          if (encoding == 'f')
            return vftext_section;
          else
            return vtext_section;
        }
      else if (encoding == 'f')
        return ftext_section;
      else
        return text_section;
    }

  if (TREE_CODE (decl) == VAR_DECL)
    {
      const char *name = XSTR (XEXP (DECL_RTL (decl), 0), 0);

      if (name[0] == '@' && name[2] == '.')
        switch (name[1])
          {
          case 'b':
            return based_section;

          case 't':
            if (readonly)
              return srodata_section;
            if (DECL_INITIAL (decl))
              return sdata_section;
            return tinybss_section;

          case 'f':
            if (readonly)
              return frodata_section;
            return far_section;

          case 'i':
          case 'I':
            error_at (DECL_SOURCE_LOCATION (decl),
                      "variable %D of type %<io%> must be uninitialized", decl);
            return data_section;

          case 'c':
            error_at (DECL_SOURCE_LOCATION (decl),
                      "variable %D of type %<cb%> must be uninitialized", decl);
            return data_section;
          }
    }

  if (readonly)
    return readonly_data_section;

  return data_section;
}
4699
 
4700
/* Implement TARGET_ASM_UNIQUE_SECTION.  Build a per-decl section name
   of the form PREFIX + bare name, where PREFIX is selected by the
   decl's kind, read-only-ness, "vliw" attribute, and "@X." name
   encoding; linkonce prefixes are used for DECL_ONE_ONLY decls.  */

static void
mep_unique_section (tree decl, int reloc)
{
  /* Indexed by [section kind][DECL_ONE_ONLY].  */
  static const char *prefixes[][2] =
  {
    { ".text.",   ".gnu.linkonce.t." },
    { ".rodata.", ".gnu.linkonce.r." },
    { ".data.",   ".gnu.linkonce.d." },
    { ".based.",   ".gnu.linkonce.based." },
    { ".sdata.",   ".gnu.linkonce.s." },
    { ".far.",     ".gnu.linkonce.far." },
    { ".ftext.",   ".gnu.linkonce.ft." },
    { ".frodata.", ".gnu.linkonce.frd." },
    { ".srodata.", ".gnu.linkonce.srd." },
    { ".vtext.",   ".gnu.linkonce.v." },
    { ".vftext.",   ".gnu.linkonce.vf." }
  };
  int sec = 2; /* .data */
  int len;
  const char *name, *prefix;
  char *string;

  /* Prefer the (possibly encoded) assembler name from the RTL.  */
  name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
  if (DECL_RTL (decl))
    name = XSTR (XEXP (DECL_RTL (decl), 0), 0);

  if (TREE_CODE (decl) == FUNCTION_DECL)
    {
      if (lookup_attribute ("vliw", TYPE_ATTRIBUTES (TREE_TYPE (decl))))
        sec = 9; /* .vtext */
      else
        sec = 0; /* .text */
    }
  else if (decl_readonly_section (decl, reloc))
    sec = 1; /* .rodata */

  /* Refine the choice from the "@X." encoding, then drop the
     encoding from the name used in the section suffix.  */
  if (name[0] == '@' && name[2] == '.')
    {
      switch (name[1])
        {
        case 'b':
          sec = 3; /* .based */
          break;
        case 't':
          if (sec == 1)
            sec = 8; /* .srodata */
          else
            sec = 4; /* .sdata */
          break;
        case 'f':
          if (sec == 0)
            sec = 6; /* .ftext */
          else if (sec == 9)
            sec = 10; /* .vftext */
          else if (sec == 1)
            sec = 7; /* .frodata */
          else
            sec = 5; /* .far. */
          break;
        }
      name += 3;
    }

  prefix = prefixes[sec][DECL_ONE_ONLY(decl)];
  len    = strlen (name) + strlen (prefix);
  string = (char *) alloca (len + 1);

  sprintf (string, "%s%s", prefix, name);

  DECL_SECTION_NAME (decl) = build_string (len, string);
}
4771
 
4772
/* Given a decl, a section name, and whether the decl initializer
   has relocs, choose attributes for the section.  */

/* Machine-dependent section flag marking sections that contain VLIW
   code.  */
#define SECTION_MEP_VLIW        SECTION_MACH_DEP
4776
 
4777
static unsigned int
4778
mep_section_type_flags (tree decl, const char *name, int reloc)
4779
{
4780
  unsigned int flags = default_section_type_flags (decl, name, reloc);
4781
 
4782
  if (decl && TREE_CODE (decl) == FUNCTION_DECL
4783
      && lookup_attribute ("vliw", TYPE_ATTRIBUTES (TREE_TYPE (decl))))
4784
    flags |= SECTION_MEP_VLIW;
4785
 
4786
  return flags;
4787
}
4788
 
4789
/* Switch to an arbitrary section NAME with attributes as specified
4790
   by FLAGS.  ALIGN specifies any known alignment requirements for
4791
   the section; 0 if the default should be used.
4792
 
4793
   Differs from the standard ELF version only in support of VLIW mode.  */
4794
 
4795
static void
4796
mep_asm_named_section (const char *name, unsigned int flags, tree decl ATTRIBUTE_UNUSED)
4797
{
4798
  char flagchars[8], *f = flagchars;
4799
  const char *type;
4800
 
4801
  if (!(flags & SECTION_DEBUG))
4802
    *f++ = 'a';
4803
  if (flags & SECTION_WRITE)
4804
    *f++ = 'w';
4805
  if (flags & SECTION_CODE)
4806
    *f++ = 'x';
4807
  if (flags & SECTION_SMALL)
4808
    *f++ = 's';
4809
  if (flags & SECTION_MEP_VLIW)
4810
    *f++ = 'v';
4811
  *f = '\0';
4812
 
4813
  if (flags & SECTION_BSS)
4814
    type = "nobits";
4815
  else
4816
    type = "progbits";
4817
 
4818
  fprintf (asm_out_file, "\t.section\t%s,\"%s\",@%s\n",
4819
           name, flagchars, type);
4820
 
4821
  if (flags & SECTION_CODE)
4822
    fputs ((flags & SECTION_MEP_VLIW ? "\t.vliw\n" : "\t.core\n"),
4823
           asm_out_file);
4824
}
4825
 
4826
/* Output an aligned common (or local) symbol NAME of SIZE bytes with
   alignment ALIGN (in bits) to STREAM.  GLOBAL is nonzero for a global
   symbol.  MeP name encodings ("@x." prefixes) select special handling:
   IO/CB variables become absolute symbol definitions, and based/tiny/far
   variables are emitted into their dedicated BSS-style sections.  */
void
mep_output_aligned_common (FILE *stream, tree decl, const char *name,
                           int size, int align, int global)
{
  /* We intentionally don't use mep_section_tag() here.  */
  /* "@i.", "@I." and "@c." mark io/cb variables.  If the attribute
     carries an explicit address, emit "name = address" instead of
     allocating storage; with no address there is nothing to emit.  */
  if (name[0] == '@'
      && (name[1] == 'i' || name[1] == 'I' || name[1] == 'c')
      && name[2] == '.')
    {
      int location = -1;
      tree attr = lookup_attribute ((name[1] == 'c' ? "cb" : "io"),
                                    DECL_ATTRIBUTES (decl));
      if (attr
          && TREE_VALUE (attr)
          && TREE_VALUE (TREE_VALUE(attr)))
        location = TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE(attr)));
      if (location == -1)
        return;
      if (global)
        {
          fprintf (stream, "\t.globl\t");
          assemble_name (stream, name);
          fprintf (stream, "\n");
        }
      assemble_name (stream, name);
      fprintf (stream, " = %d\n", location);
      return;
    }
  /* Other encoded names go into their dedicated sections rather than
     the generic common block.  */
  if (name[0] == '@' && name[2] == '.')
    {
      const char *sec = 0;
      switch (name[1])
        {
        case 'b':
          switch_to_section (based_section);
          sec = ".based";
          break;
        case 't':
          switch_to_section (tinybss_section);
          sec = ".sbss";
          break;
        case 'f':
          switch_to_section (farbss_section);
          sec = ".farbss";
          break;
        }
      if (sec)
        {
          const char *name2;
          int p2align = 0;

          /* Convert the bit alignment into a power-of-two byte
             alignment for .p2align.  */
          while (align > BITS_PER_UNIT)
            {
              align /= 2;
              p2align ++;
            }
          name2 = targetm.strip_name_encoding (name);
          if (global)
            fprintf (stream, "\t.globl\t%s\n", name2);
          fprintf (stream, "\t.p2align %d\n", p2align);
          fprintf (stream, "\t.type\t%s,@object\n", name2);
          fprintf (stream, "\t.size\t%s,%d\n", name2, size);
          fprintf (stream, "%s:\n\t.zero\t%d\n", name2, size);
          return;
        }
    }

  /* Default case: an ordinary .comm, preceded by .local for
     non-global symbols.  */
  if (!global)
    {
      fprintf (stream, "\t.local\t");
      assemble_name (stream, name);
      fprintf (stream, "\n");
    }
  fprintf (stream, "\t.comm\t");
  assemble_name (stream, name);
  fprintf (stream, ",%u,%u\n", size, align / BITS_PER_UNIT);
}
4903
 
4904
/* Trampolines.  */
4905
 
4906
/* Implement TARGET_TRAMPOLINE_INIT.  Rather than writing the
   trampoline instructions inline, emit a call to the runtime helper
   __mep_trampoline_helper, passing it the trampoline address, the
   target function's address, and the static chain value.  The helper
   is presumably responsible for filling in the trampoline code —
   NOTE(review): defined in the target's libgcc support; confirm there.  */
static void
mep_trampoline_init (rtx m_tramp, tree fndecl, rtx static_chain)
{
  rtx addr = XEXP (m_tramp, 0);
  rtx fnaddr = XEXP (DECL_RTL (fndecl), 0);

  emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__mep_trampoline_helper"),
                     LCT_NORMAL, VOIDmode, 3,
                     addr, Pmode,
                     fnaddr, Pmode,
                     static_chain, Pmode);
}
4918
 
4919
/* Experimental Reorg.  */
4920
 
4921
static bool
4922
mep_mentioned_p (rtx in,
4923
                 rtx reg, /* NULL for mem */
4924
                 int modes_too) /* if nonzero, modes must match also.  */
4925
{
4926
  const char *fmt;
4927
  int i;
4928
  enum rtx_code code;
4929
 
4930
  if (in == 0)
4931
    return false;
4932
  if (reg && GET_CODE (reg) != REG)
4933
    return false;
4934
 
4935
  if (GET_CODE (in) == LABEL_REF)
4936
    return (reg == 0);
4937
 
4938
  code = GET_CODE (in);
4939
 
4940
  switch (code)
4941
    {
4942
    case MEM:
4943
      if (reg)
4944
        return mep_mentioned_p (XEXP (in, 0), reg, modes_too);
4945
      return true;
4946
 
4947
    case REG:
4948
      if (!reg)
4949
        return false;
4950
      if (modes_too && (GET_MODE (in) != GET_MODE (reg)))
4951
        return false;
4952
      return (REGNO (in) == REGNO (reg));
4953
 
4954
    case SCRATCH:
4955
    case CC0:
4956
    case PC:
4957
    case CONST_INT:
4958
    case CONST_DOUBLE:
4959
      return false;
4960
 
4961
    default:
4962
      break;
4963
    }
4964
 
4965
  /* Set's source should be read-only.  */
4966
  if (code == SET && !reg)
4967
    return mep_mentioned_p (SET_DEST (in), reg, modes_too);
4968
 
4969
  fmt = GET_RTX_FORMAT (code);
4970
 
4971
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4972
    {
4973
      if (fmt[i] == 'E')
4974
        {
4975
          register int j;
4976
          for (j = XVECLEN (in, i) - 1; j >= 0; j--)
4977
            if (mep_mentioned_p (XVECEXP (in, i, j), reg, modes_too))
4978
              return true;
4979
        }
4980
      else if (fmt[i] == 'e'
4981
               && mep_mentioned_p (XEXP (in, i), reg, modes_too))
4982
        return true;
4983
    }
4984
  return false;
4985
}
4986
 
4987
#define EXPERIMENTAL_REGMOVE_REORG 1
4988
 
4989
#if EXPERIMENTAL_REGMOVE_REORG
4990
 
4991
/* Return nonzero if hard registers R1 and R2 live in the same register
   bank (both general registers or both coprocessor registers), so a
   move between them is a candidate for the regmove cleanup.  */
static int
mep_compatible_reg_class (int r1, int r2)
{
  return ((GR_REGNO_P (r1) && GR_REGNO_P (r2))
          || (CR_REGNO_P (r1) && CR_REGNO_P (r2))) ? 1 : 0;
}
5000
 
5001
static void
5002
mep_reorg_regmove (rtx insns)
5003
{
5004
  rtx insn, next, pat, follow, *where;
5005
  int count = 0, done = 0, replace, before = 0;
5006
 
5007
  if (dump_file)
5008
    for (insn = insns; insn; insn = NEXT_INSN (insn))
5009
      if (GET_CODE (insn) == INSN)
5010
        before++;
5011
 
5012
  /* We're looking for (set r2 r1) moves where r1 dies, followed by a
5013
     set that uses the r2 and r2 dies there.  We replace r2 with r1
5014
     and see if it's still a valid insn.  If so, delete the first set.
5015
     Copied from reorg.c.  */
5016
 
5017
  while (!done)
5018
    {
5019
      done = 1;
5020
      for (insn = insns; insn; insn = next)
5021
        {
5022
          next = NEXT_INSN (insn);
5023
          if (GET_CODE (insn) != INSN)
5024
            continue;
5025
          pat = PATTERN (insn);
5026
 
5027
          replace = 0;
5028
 
5029
          if (GET_CODE (pat) == SET
5030
              && GET_CODE (SET_SRC (pat)) == REG
5031
              && GET_CODE (SET_DEST (pat)) == REG
5032
              && find_regno_note (insn, REG_DEAD, REGNO (SET_SRC (pat)))
5033
              && mep_compatible_reg_class (REGNO (SET_SRC (pat)), REGNO (SET_DEST (pat))))
5034
            {
5035
              follow = next_nonnote_insn (insn);
5036
              if (dump_file)
5037
                fprintf (dump_file, "superfluous moves: considering %d\n", INSN_UID (insn));
5038
 
5039
              while (follow && GET_CODE (follow) == INSN
5040
                     && GET_CODE (PATTERN (follow)) == SET
5041
                     && !dead_or_set_p (follow, SET_SRC (pat))
5042
                     && !mep_mentioned_p (PATTERN (follow), SET_SRC (pat), 0)
5043
                     && !mep_mentioned_p (PATTERN (follow), SET_DEST (pat), 0))
5044
                {
5045
                  if (dump_file)
5046
                    fprintf (dump_file, "\tskipping %d\n", INSN_UID (follow));
5047
                  follow = next_nonnote_insn (follow);
5048
                }
5049
 
5050
              if (dump_file)
5051
                fprintf (dump_file, "\tfollow is %d\n", INSN_UID (follow));
5052
              if (follow && GET_CODE (follow) == INSN
5053
                  && GET_CODE (PATTERN (follow)) == SET
5054
                  && find_regno_note (follow, REG_DEAD, REGNO (SET_DEST (pat))))
5055
                {
5056
                  if (GET_CODE (SET_DEST (PATTERN (follow))) == REG)
5057
                    {
5058
                      if (mep_mentioned_p (SET_SRC (PATTERN (follow)), SET_DEST (pat), 1))
5059
                        {
5060
                          replace = 1;
5061
                          where = & SET_SRC (PATTERN (follow));
5062
                        }
5063
                    }
5064
                  else if (GET_CODE (SET_DEST (PATTERN (follow))) == MEM)
5065
                    {
5066
                      if (mep_mentioned_p (PATTERN (follow), SET_DEST (pat), 1))
5067
                        {
5068
                          replace = 1;
5069
                          where = & PATTERN (follow);
5070
                        }
5071
                    }
5072
                }
5073
            }
5074
 
5075
          /* If so, follow is the corresponding insn */
5076
          if (replace)
5077
            {
5078
              if (dump_file)
5079
                {
5080
                  rtx x;
5081
 
5082
                  fprintf (dump_file, "----- Candidate for superfluous move deletion:\n\n");
5083
                  for (x = insn; x ;x = NEXT_INSN (x))
5084
                    {
5085
                      print_rtl_single (dump_file, x);
5086
                      if (x == follow)
5087
                        break;
5088
                      fprintf (dump_file, "\n");
5089
                    }
5090
                }
5091
 
5092
              if (validate_replace_rtx_subexp (SET_DEST (pat), SET_SRC (pat),
5093
                                               follow, where))
5094
                {
5095
                  count ++;
5096
                  next = delete_insn (insn);
5097
                  if (dump_file)
5098
                    {
5099
                      fprintf (dump_file, "\n----- Success!  new insn:\n\n");
5100
                      print_rtl_single (dump_file, follow);
5101
                    }
5102
                  done = 0;
5103
                }
5104
            }
5105
        }
5106
    }
5107
 
5108
  if (dump_file)
5109
    {
5110
      fprintf (dump_file, "\n%d insn%s deleted out of %d.\n\n", count, count == 1 ? "" : "s", before);
5111
      fprintf (dump_file, "=====\n");
5112
    }
5113
}
5114
#endif
5115
 
5116
 
5117
/* Figure out where to put LABEL, which is the label for a repeat loop.
5118
   If INCLUDING, LAST_INSN is the last instruction in the loop, otherwise
5119
   the loop ends just before LAST_INSN.  If SHARED, insns other than the
5120
   "repeat" might use LABEL to jump to the loop's continuation point.
5121
 
5122
   Return the last instruction in the adjusted loop.  */
5123
 
5124
static rtx
mep_insert_repeat_label_last (rtx last_insn, rtx label, bool including,
                              bool shared)
{
  rtx next, prev;
  int count = 0, code, icode;

  if (dump_file)
    fprintf (dump_file, "considering end of repeat loop at insn %d\n",
             INSN_UID (last_insn));

  /* Set PREV to the last insn in the loop.  */
  prev = last_insn;
  if (!including)
    prev = PREV_INSN (prev);

  /* Set NEXT to the next insn after the repeat label.  */
  next = last_insn;
  /* When the label is private to the repeat, scan backwards to pull up
     to two suitable insns into the repeat epilogue (NEXT ends up at
     the earliest of them).  A shared label must stay where it is.  */
  if (!shared)
    while (prev != 0)
      {
        code = GET_CODE (prev);
        /* Calls, labels and barriers end the backward scan.  */
        if (code == CALL_INSN || code == CODE_LABEL || code == BARRIER)
          break;

        if (INSN_P (prev))
          {
            /* For a delay-slot SEQUENCE, inspect the slotted insn.  */
            if (GET_CODE (PATTERN (prev)) == SEQUENCE)
              prev = XVECEXP (PATTERN (prev), 0, 1);

            /* Other insns that should not be in the last two opcodes.  */
            icode = recog_memoized (prev);
            if (icode < 0
                || icode == CODE_FOR_repeat
                || icode == CODE_FOR_erepeat
                || get_attr_may_trap (prev) == MAY_TRAP_YES)
              break;

            /* That leaves JUMP_INSN and INSN.  It will have BImode if it
               is the second instruction in a VLIW bundle.  In that case,
               loop again: if the first instruction also satisfies the
               conditions above then we will reach here again and put
               both of them into the repeat epilogue.  Otherwise both
               should remain outside.  */
            if (GET_MODE (prev) != BImode)
              {
                count++;
                next = prev;
                if (dump_file)
                  print_rtl_single (dump_file, next);
                if (count == 2)
                  break;
              }
          }
        prev = PREV_INSN (prev);
      }

  /* See if we're adding the label immediately after the repeat insn.
     If so, we need to separate them with a nop.  */
  prev = prev_real_insn (next);
  if (prev)
    switch (recog_memoized (prev))
      {
      case CODE_FOR_repeat:
      case CODE_FOR_erepeat:
        if (dump_file)
          fprintf (dump_file, "Adding nop inside loop\n");
        emit_insn_before (gen_nop (), next);
        break;

      default:
        break;
      }

  /* Insert the label.  */
  emit_label_before (label, next);

  /* Insert the nops.  The repeat epilogue needs two insns; pad with
     nops if the backward scan found fewer.  */
  if (dump_file && count < 2)
    fprintf (dump_file, "Adding %d nop%s\n\n",
             2 - count, count == 1 ? "" : "s");

  for (; count < 2; count++)
    if (including)
      last_insn = emit_insn_after (gen_nop (), last_insn);
    else
      emit_insn_before (gen_nop (), last_insn);

  return last_insn;
}
5214
 
5215
 
5216
void
5217
mep_emit_doloop (rtx *operands, int is_end)
5218
{
5219
  rtx tag;
5220
 
5221
  if (cfun->machine->doloop_tags == 0
5222
      || cfun->machine->doloop_tag_from_end == is_end)
5223
    {
5224
      cfun->machine->doloop_tags++;
5225
      cfun->machine->doloop_tag_from_end = is_end;
5226
    }
5227
 
5228
  tag = GEN_INT (cfun->machine->doloop_tags - 1);
5229
  if (is_end)
5230
    emit_jump_insn (gen_doloop_end_internal (operands[0], operands[4], tag));
5231
  else
5232
    emit_insn (gen_doloop_begin_internal (operands[0], operands[0], tag));
5233
}
5234
 
5235
 
5236
/* Code for converting doloop_begins and doloop_ends into valid
5237
   MeP instructions.  A doloop_begin is just a placeholder:
5238
 
5239
        $count = unspec ($count)
5240
 
5241
   where $count is initially the number of iterations - 1.
5242
   doloop_end has the form:
5243
 
5244
        if ($count-- == 0) goto label
5245
 
5246
   The counter variable is private to the doloop insns, nothing else
5247
   relies on its value.
5248
 
5249
   There are three cases, in decreasing order of preference:
5250
 
5251
      1. A loop has exactly one doloop_begin and one doloop_end.
5252
         The doloop_end branches to the first instruction after
5253
         the doloop_begin.
5254
 
5255
         In this case we can replace the doloop_begin with a repeat
5256
         instruction and remove the doloop_end.  I.e.:
5257
 
5258
                $count1 = unspec ($count1)
5259
            label:
5260
                ...
5261
                insn1
5262
                insn2
5263
                if ($count2-- == 0) goto label
5264
 
5265
          becomes:
5266
 
5267
                repeat $count1,repeat_label
5268
            label:
5269
                ...
5270
            repeat_label:
5271
                insn1
5272
                insn2
5273
                # end repeat
5274
 
5275
      2. As for (1), except there are several doloop_ends.  One of them
5276
         (call it X) falls through to a label L.  All the others fall
5277
         through to branches to L.
5278
 
5279
         In this case, we remove X and replace the other doloop_ends
5280
         with branches to the repeat label.  For example:
5281
 
5282
                $count1 = unspec ($count1)
5283
            start:
5284
                ...
5285
                if ($count2-- == 0) goto label
5286
            end:
5287
                ...
5288
                if ($count3-- == 0) goto label
5289
                goto end
5290
 
5291
         becomes:
5292
 
5293
                repeat $count1,repeat_label
5294
            start:
5295
                ...
5296
            repeat_label:
5297
                nop
5298
                nop
5299
                # end repeat
5300
            end:
5301
                ...
5302
                goto repeat_label
5303
 
5304
      3. The fallback case.  Replace doloop_begins with:
5305
 
5306
                $count = $count + 1
5307
 
5308
         Replace doloop_ends with the equivalent of:
5309
 
5310
                $count = $count - 1
5311
                if ($count == 0) goto label
5312
 
5313
         Note that this might need a scratch register if $count
5314
         is stored in memory.  */
5315
 
5316
/* A structure describing one doloop_begin.  */
5317
struct mep_doloop_begin {
  /* The next doloop_begin with the same tag.  */
  struct mep_doloop_begin *next;

  /* The instruction itself.  */
  rtx insn;

  /* The initial counter value.  This is known to be a general register.  */
  rtx counter;
};
5327
 
5328
/* A structure describing a doloop_end.  */
5329
struct mep_doloop_end {
  /* The next doloop_end with the same loop tag.  */
  struct mep_doloop_end *next;

  /* The instruction itself.  */
  rtx insn;

  /* The first instruction after INSN when the branch isn't taken.  */
  rtx fallthrough;

  /* The location of the counter value.  Since doloop_end_internal is a
     jump instruction, it has to allow the counter to be stored anywhere
     (any non-fixed register or memory location).  */
  rtx counter;

  /* The target label (the place where the insn branches when the counter
     isn't zero).  */
  rtx label;

  /* A scratch register.  Only available when COUNTER isn't stored
     in a general register.  */
  rtx scratch;
};
5352
 
5353
 
5354
/* One do-while loop.  */
5355
struct mep_doloop {
  /* All the doloop_begins for this loop (in no particular order).  */
  struct mep_doloop_begin *begin;

  /* All the doloop_ends.  When there is more than one, arrange things
     so that the first one is the most likely to be X in case (2) above.  */
  struct mep_doloop_end *end;
};
5363
 
5364
 
5365
/* Return true if LOOP can be converted into repeat/repeat_end form
5366
   (that is, if it matches cases (1) or (2) above).  */
5367
 
5368
/* Return true if LOOP matches case (1) or (2) described above and can
   therefore be converted into hardware repeat/repeat_end form.  */
static bool
mep_repeat_loop_p (struct mep_doloop *loop)
{
  struct mep_doloop_end *end;
  rtx fallthrough;

  /* There must be exactly one doloop_begin and at least one doloop_end.  */
  if (loop->begin == 0 || loop->end == 0 || loop->begin->next != 0)
    return false;

  /* The first doloop_end (X) must branch back to the insn after
     the doloop_begin.  */
  if (prev_real_insn (loop->end->label) != loop->begin->insn)
    return false;

  /* All the other doloop_ends must branch to the same place as X.
     When the branch isn't taken, they must jump to the instruction
     after X.  */
  fallthrough = loop->end->fallthrough;
  for (end = loop->end->next; end != 0; end = end->next)
    if (end->label != loop->end->label
        || !simplejump_p (end->fallthrough)
        || next_real_insn (JUMP_LABEL (end->fallthrough)) != fallthrough)
      return false;

  return true;
}
5395
 
5396
 
5397
/* The main repeat reorg function.  See comment above for details.  */
5398
 
5399
static void
mep_reorg_repeat (rtx insns)
{
  rtx insn;
  struct mep_doloop *loops, *loop;
  struct mep_doloop_begin *begin;
  struct mep_doloop_end *end;

  /* Quick exit if we haven't created any loops.  */
  if (cfun->machine->doloop_tags == 0)
    return;

  /* Create an array of mep_doloop structures, one per tag allocated by
     mep_emit_doloop.  alloca is fine: this pass runs once per function
     and the count is small.  */
  loops = (struct mep_doloop *) alloca (sizeof (loops[0]) * cfun->machine->doloop_tags);
  memset (loops, 0, sizeof (loops[0]) * cfun->machine->doloop_tags);

  /* Search the function for do-while insns and group them by loop tag.  */
  for (insn = insns; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      switch (recog_memoized (insn))
        {
        case CODE_FOR_doloop_begin_internal:
          /* operand[2] is the loop tag; operand[0] the counter.  */
          insn_extract (insn);
          loop = &loops[INTVAL (recog_data.operand[2])];

          begin = (struct mep_doloop_begin *) alloca (sizeof (struct mep_doloop_begin));
          begin->next = loop->begin;
          begin->insn = insn;
          begin->counter = recog_data.operand[0];

          loop->begin = begin;
          break;

        case CODE_FOR_doloop_end_internal:
          insn_extract (insn);
          loop = &loops[INTVAL (recog_data.operand[2])];

          end = (struct mep_doloop_end *) alloca (sizeof (struct mep_doloop_end));
          end->insn = insn;
          end->fallthrough = next_real_insn (insn);
          end->counter = recog_data.operand[0];
          end->label = recog_data.operand[1];
          end->scratch = recog_data.operand[3];

          /* If this insn falls through to an unconditional jump,
             give it a lower priority than the others.  */
          if (loop->end != 0 && simplejump_p (end->fallthrough))
            {
              end->next = loop->end->next;
              loop->end->next = end;
            }
          else
            {
              end->next = loop->end;
              loop->end = end;
            }
          break;
        }

  /* Convert the insns for each loop in turn.  */
  for (loop = loops; loop < loops + cfun->machine->doloop_tags; loop++)
    if (mep_repeat_loop_p (loop))
      {
        /* Case (1) or (2).  */
        rtx repeat_label, label_ref;

        /* Create a new label for the repeat insn.  */
        repeat_label = gen_label_rtx ();

        /* Replace the doloop_begin with a repeat.  */
        label_ref = gen_rtx_LABEL_REF (VOIDmode, repeat_label);
        emit_insn_before (gen_repeat (loop->begin->counter, label_ref),
                          loop->begin->insn);
        delete_insn (loop->begin->insn);

        /* Insert the repeat label before the first doloop_end.
           Fill the gap with nops if there are other doloop_ends.  */
        mep_insert_repeat_label_last (loop->end->insn, repeat_label,
                                      false, loop->end->next != 0);

        /* Emit a repeat_end (to improve the readability of the output).  */
        emit_insn_before (gen_repeat_end (), loop->end->insn);

        /* Delete the first doloop_end.  */
        delete_insn (loop->end->insn);

        /* Replace the others with branches to REPEAT_LABEL.  */
        for (end = loop->end->next; end != 0; end = end->next)
          {
            emit_jump_insn_before (gen_jump (repeat_label), end->insn);
            delete_insn (end->insn);
            delete_insn (end->fallthrough);
          }
      }
    else
      {
        /* Case (3).  First replace all the doloop_begins with increment
           instructions.  */
        for (begin = loop->begin; begin != 0; begin = begin->next)
          {
            emit_insn_before (gen_add3_insn (copy_rtx (begin->counter),
                                             begin->counter, const1_rtx),
                              begin->insn);
            delete_insn (begin->insn);
          }

        /* Replace all the doloop_ends with decrement-and-branch sequences.  */
        for (end = loop->end; end != 0; end = end->next)
          {
            rtx reg;

            start_sequence ();

            /* Load the counter value into a general register.  */
            reg = end->counter;
            if (!REG_P (reg) || REGNO (reg) > 15)
              {
                reg = end->scratch;
                emit_move_insn (copy_rtx (reg), copy_rtx (end->counter));
              }

            /* Decrement the counter.  */
            emit_insn (gen_add3_insn (copy_rtx (reg), copy_rtx (reg),
                                      constm1_rtx));

            /* Copy it back to its original location.  */
            if (reg != end->counter)
              emit_move_insn (copy_rtx (end->counter), copy_rtx (reg));

            /* Jump back to the start label.  */
            insn = emit_jump_insn (gen_mep_bne_true (reg, const0_rtx,
                                                     end->label));
            JUMP_LABEL (insn) = end->label;
            LABEL_NUSES (end->label)++;

            /* Emit the whole sequence before the doloop_end.  */
            insn = get_insns ();
            end_sequence ();
            emit_insn_before (insn, end->insn);

            /* Delete the doloop_end.  */
            delete_insn (end->insn);
          }
      }
}
5544
 
5545
 
5546
/* Return true if INSN is a conditional branch whose condition
   (EQ/NE/LT/GE) can be inverted and still yield a recognizable insn.
   The check works by temporarily flipping the condition code in place,
   re-running recog, and then restoring the original code — INSN is
   left unchanged on return.  */
static bool
mep_invertable_branch_p (rtx insn)
{
  rtx cond, set;
  enum rtx_code old_code;
  int i;

  set = PATTERN (insn);
  if (GET_CODE (set) != SET)
    return false;
  if (GET_CODE (XEXP (set, 1)) != IF_THEN_ELSE)
    return false;
  cond = XEXP (XEXP (set, 1), 0);
  old_code = GET_CODE (cond);
  /* Only these four comparisons are considered invertible; the same
     table is used by mep_invert_branch below.  */
  switch (old_code)
    {
    case EQ:
      PUT_CODE (cond, NE);
      break;
    case NE:
      PUT_CODE (cond, EQ);
      break;
    case LT:
      PUT_CODE (cond, GE);
      break;
    case GE:
      PUT_CODE (cond, LT);
      break;
    default:
      return false;
    }
  /* Force re-recognition with the inverted condition, then undo.  */
  INSN_CODE (insn) = -1;
  i = recog_memoized (insn);
  PUT_CODE (cond, old_code);
  INSN_CODE (insn) = -1;
  return i >= 0;
}
5583
 
5584
/* Invert the condition of branch INSN (which must satisfy
   mep_invertable_branch_p) and retarget it at a fresh label emitted
   after AFTER.  The old target label is deleted if this branch was its
   only user.  */
static void
mep_invert_branch (rtx insn, rtx after)
{
  rtx cond, set, label;
  int i;

  set = PATTERN (insn);

  gcc_assert (GET_CODE (set) == SET);
  gcc_assert (GET_CODE (XEXP (set, 1)) == IF_THEN_ELSE);

  /* Flip the comparison; mirrors mep_invertable_branch_p's table.  */
  cond = XEXP (XEXP (set, 1), 0);
  switch (GET_CODE (cond))
    {
    case EQ:
      PUT_CODE (cond, NE);
      break;
    case NE:
      PUT_CODE (cond, EQ);
      break;
    case LT:
      PUT_CODE (cond, GE);
      break;
    case GE:
      PUT_CODE (cond, LT);
      break;
    default:
      gcc_unreachable ();
    }
  label = gen_label_rtx ();
  emit_label_after (label, after);
  /* Find the LABEL_REF arm of the IF_THEN_ELSE (it may be the "then"
     or the "else" operand) and point it at the new label.  */
  for (i=1; i<=2; i++)
    if (GET_CODE (XEXP (XEXP (set, 1), i)) == LABEL_REF)
      {
        rtx ref = XEXP (XEXP (set, 1), i);
        if (LABEL_NUSES (XEXP (ref, 0)) == 1)
          delete_insn (XEXP (ref, 0));
        XEXP (ref, 0) = label;
        LABEL_NUSES (label) ++;
        JUMP_LABEL (insn) = label;
      }
  /* The modified insn must still recognize.  */
  INSN_CODE (insn) = -1;
  i = recog_memoized (insn);
  gcc_assert (i >= 0);
}
5629
 
5630
/* Convert suitable backward conditional branches into erepeat loops:
   for each invertible branch, scan backwards to its own target label;
   if the intervening insns form a safe loop body, emit an erepeat at
   the loop top and an erepeat_end at the bottom, inverting or deleting
   the original branch as appropriate.  */
static void
mep_reorg_erepeat (rtx insns)
{
  rtx insn, prev, l, x;
  int count;

  for (insn = insns; insn; insn = NEXT_INSN (insn))
    if (JUMP_P (insn)
        && ! JUMP_TABLE_DATA_P (insn)
        && mep_invertable_branch_p (insn))
      {
        if (dump_file)
          {
            fprintf (dump_file, "\n------------------------------\n");
            fprintf (dump_file, "erepeat: considering this jump:\n");
            print_rtl_single (dump_file, insn);
          }
        /* COUNT tracks the loop body size; a conditional branch itself
           counts as one insn.  */
        count = simplejump_p (insn) ? 0 : 1;
        /* Walk backwards looking for the branch's own target label.  */
        for (prev = PREV_INSN (insn); prev; prev = PREV_INSN (prev))
          {
            /* Calls and barriers inside the candidate body disqualify it.  */
            if (GET_CODE (prev) == CALL_INSN
                || BARRIER_P (prev))
              break;

            if (prev == JUMP_LABEL (insn))
              {
                rtx newlast;
                if (dump_file)
                  fprintf (dump_file, "found loop top, %d insns\n", count);

                if (LABEL_NUSES (prev) == 1)
                  /* We're the only user, always safe */ ;
                else if (LABEL_NUSES (prev) == 2)
                  {
                    /* See if there's a barrier before this label.  If
                       so, we know nobody inside the loop uses it.
                       But we must be careful to put the erepeat
                       *after* the label.  */
                    rtx barrier;
                    for (barrier = PREV_INSN (prev);
                         barrier && GET_CODE (barrier) == NOTE;
                         barrier = PREV_INSN (barrier))
                      ;
                    if (barrier && GET_CODE (barrier) != BARRIER)
                      break;
                  }
                else
                  {
                    /* We don't know who else, within or without our loop, uses this */
                    if (dump_file)
                      fprintf (dump_file, "... but there are multiple users, too risky.\n");
                    break;
                  }

                /* Generate a label to be used by the erepeat insn.  */
                l = gen_label_rtx ();

                /* Insert the erepeat after INSN's target label.  */
                x = gen_erepeat (gen_rtx_LABEL_REF (VOIDmode, l));
                LABEL_NUSES (l)++;
                emit_insn_after (x, prev);

                /* Insert the erepeat label.  */
                newlast = (mep_insert_repeat_label_last
                           (insn, l, !simplejump_p (insn), false));
                if (simplejump_p (insn))
                  {
                    /* Unconditional loop branch: the erepeat hardware
                       takes over, so the jump itself is deleted.  */
                    emit_insn_before (gen_erepeat_end (), insn);
                    delete_insn (insn);
                  }
                else
                  {
                    /* Conditional branch: invert it so it exits the
                       loop, and close the erepeat after it.  */
                    mep_invert_branch (insn, newlast);
                    emit_insn_after (gen_erepeat_end (), newlast);
                  }
                break;
              }

            if (LABEL_P (prev))
              {
                /* A label is OK if there is exactly one user, and we
                   can find that user before the next label.  */
                rtx user = 0;
                int safe = 0;
                if (LABEL_NUSES (prev) == 1)
                  {
                    for (user = PREV_INSN (prev);
                         user && (INSN_P (user) || GET_CODE (user) == NOTE);
                         user = PREV_INSN (user))
                      if (GET_CODE (user) == JUMP_INSN
                          && JUMP_LABEL (user) == prev)
                        {
                          safe = INSN_UID (user);
                          break;
                        }
                  }
                if (!safe)
                  break;
                if (dump_file)
                  fprintf (dump_file, "... ignoring jump from insn %d to %d\n",
                           safe, INSN_UID (prev));
              }

            if (INSN_P (prev))
              {
                count ++;
              }
          }
      }
  if (dump_file)
    fprintf (dump_file, "\n==============================\n");
}
5742
 
5743
/* Replace a jump to a return, with a copy of the return.  GCC doesn't
   always do this on its own.  */

static void
mep_jmp_return_reorg (rtx insns)
{
  rtx insn, label, ret;
  int ret_code;

  for (insn = insns; insn; insn = NEXT_INSN (insn))
    if (simplejump_p (insn))
    {
      /* Find the first real insn the jump jumps to.  */
      label = ret = JUMP_LABEL (insn);
      while (ret
             && (GET_CODE (ret) == NOTE
                 || GET_CODE (ret) == CODE_LABEL
                 || GET_CODE (PATTERN (ret)) == USE))
        ret = NEXT_INSN (ret);

      if (ret)
        {
          /* Is it a return?  */
          ret_code = recog_memoized (ret);
          if (ret_code == CODE_FOR_return_internal
              || ret_code == CODE_FOR_eh_return_internal)
            {
              /* It is.  Replace the jump with a return.  The label
                 loses one user; delete it if nobody else jumps
                 there.  */
              LABEL_NUSES (label) --;
              if (LABEL_NUSES (label) == 0)
                delete_insn (label);
              PATTERN (insn) = copy_rtx (PATTERN (ret));
              /* Force the modified insn to be re-recognized.  */
              INSN_CODE (insn) = -1;
            }
        }
    }
}
5780
 
5781
 
5782
/* Merge pairs of adjacent "reg = reg + const" (addsi3) insns that
   target the same register into a single add of the summed constant.
   The insn chain is spliced directly via NEXT_INSN/PREV_INSN; no CFG
   or df updates are done here (this runs from mep_reorg).  */
static void
mep_reorg_addcombine (rtx insns)
{
  rtx i, n;

  for (i = insns; i; i = NEXT_INSN (i))
    if (INSN_P (i)
        && INSN_CODE (i) == CODE_FOR_addsi3
        && GET_CODE (SET_DEST (PATTERN (i))) == REG
        && GET_CODE (XEXP (SET_SRC (PATTERN (i)), 0)) == REG
        && REGNO (SET_DEST (PATTERN (i))) == REGNO (XEXP (SET_SRC (PATTERN (i)), 0))
        && GET_CODE (XEXP (SET_SRC (PATTERN (i)), 1)) == CONST_INT)
      {
        n = NEXT_INSN (i);
        if (INSN_P (n)
            && INSN_CODE (n) == CODE_FOR_addsi3
            && GET_CODE (SET_DEST (PATTERN (n))) == REG
            && GET_CODE (XEXP (SET_SRC (PATTERN (n)), 0)) == REG
            && REGNO (SET_DEST (PATTERN (n))) == REGNO (XEXP (SET_SRC (PATTERN (n)), 0))
            && GET_CODE (XEXP (SET_SRC (PATTERN (n)), 1)) == CONST_INT)
          {
            int ic = INTVAL (XEXP (SET_SRC (PATTERN (i)), 1));
            int nc = INTVAL (XEXP (SET_SRC (PATTERN (n)), 1));
            /* Only combine when both adds hit the same register and
               the sum still fits a signed 16-bit immediate.  NOTE
               (review): the test conservatively excludes +32767
               itself ("< 32767" rather than "<= 32767").  */
            if (REGNO (SET_DEST (PATTERN (i))) == REGNO (SET_DEST (PATTERN (n)))
                && ic + nc < 32767
                && ic + nc > -32768)
              {
                XEXP (SET_SRC (PATTERN (i)), 1) = GEN_INT (ic + nc);
                /* Unlink N from the insn chain, leaving I with the
                   combined constant.  */
                NEXT_INSN (i) = NEXT_INSN (n);
                if (NEXT_INSN (i))
                  PREV_INSN (NEXT_INSN (i)) = i;
              }
          }
      }
}
5817
 
5818
/* If this insn adjusts the stack, return the adjustment, else return
5819
   zero.  */
5820
static int
5821
add_sp_insn_p (rtx insn)
5822
{
5823
  rtx pat;
5824
 
5825
  if (! single_set (insn))
5826
    return 0;
5827
  pat = PATTERN (insn);
5828
  if (GET_CODE (SET_DEST (pat)) != REG)
5829
    return 0;
5830
  if (REGNO (SET_DEST (pat)) != SP_REGNO)
5831
    return 0;
5832
  if (GET_CODE (SET_SRC (pat)) != PLUS)
5833
    return 0;
5834
  if (GET_CODE (XEXP (SET_SRC (pat), 0)) != REG)
5835
    return 0;
5836
  if (REGNO (XEXP (SET_SRC (pat), 0)) != SP_REGNO)
5837
    return 0;
5838
  if (GET_CODE (XEXP (SET_SRC (pat), 1)) != CONST_INT)
5839
    return 0;
5840
  return INTVAL (XEXP (SET_SRC (pat), 1));
5841
}
5842
 
5843
/* Check for trivial functions that set up an unneeded stack
   frame.  If the first real insn is "$sp += N", a matching
   "$sp -= N" exists, and no intervening insn mentions $sp or makes
   a call, delete both adjustments.  */
static void
mep_reorg_noframe (rtx insns)
{
  rtx start_frame_insn;
  rtx end_frame_insn = 0;
  int sp_adjust, sp2;
  rtx sp;

  /* The first insn should be $sp = $sp + N */
  while (insns && ! INSN_P (insns))
    insns = NEXT_INSN (insns);
  if (!insns)
    return;

  sp_adjust = add_sp_insn_p (insns);
  if (sp_adjust == 0)
    return;

  start_frame_insn = insns;
  sp = SET_DEST (PATTERN (start_frame_insn));

  insns = next_real_insn (insns);

  while (insns)
    {
      rtx next = next_real_insn (insns);
      /* Stop before the final real insn — presumably the return;
         TODO(review): confirm the last insn never needs scanning.  */
      if (!next)
        break;

      sp2 = add_sp_insn_p (insns);
      if (sp2)
        {
          /* Give up on a second epilogue adjustment, or if the
             adjustment doesn't exactly undo the prologue's.  */
          if (end_frame_insn)
            return;
          end_frame_insn = insns;
          if (sp2 != -sp_adjust)
            return;
        }
      else if (mep_mentioned_p (insns, sp, 0))
        return;
      else if (CALL_P (insns))
        return;

      insns = next;
    }

  if (end_frame_insn)
    {
      delete_insn (start_frame_insn);
      delete_insn (end_frame_insn);
    }
}
5897
 
5898
/* Machine-dependent reorg pass: run the MeP-specific RTL cleanups.
   The ordering below is load-bearing — see the individual comments.  */
static void
mep_reorg (void)
{
  rtx insns = get_insns ();

  /* We require accurate REG_DEAD notes.  */
  compute_bb_for_insn ();
  df_note_add_problem ();
  df_analyze ();

  mep_reorg_addcombine (insns);
#if EXPERIMENTAL_REGMOVE_REORG
  /* VLIW packing has been done already, so we can't just delete things.  */
  if (!mep_vliw_function_p (cfun->decl))
    mep_reorg_regmove (insns);
#endif
  mep_jmp_return_reorg (insns);
  mep_bundle_insns (insns);
  mep_reorg_repeat (insns);
  /* erepeat uses the RPB register, so in an interrupt handler it is
     only safe if that register is saved.  */
  if (optimize
      && !profile_flag
      && !profile_arc_flag
      && TARGET_OPT_REPEAT
      && (!mep_interrupt_p () || mep_interrupt_saved_reg (RPB_REGNO)))
    mep_reorg_erepeat (insns);

  /* This may delete *insns so make sure it's last.  */
  mep_reorg_noframe (insns);

  df_finish_pass (false);
}
5929
 
5930
 
5931
 
5932
/*----------------------------------------------------------------------*/
5933
/* Builtins                                                             */
5934
/*----------------------------------------------------------------------*/
5935
 
5936
/* Element X gives the index into cgen_insns[] of the most general
   implementation of intrinsic X.  Unimplemented intrinsics are
   mapped to -1.  */
int mep_intrinsic_insn[ARRAY_SIZE (cgen_intrinsics)];

/* Element X gives the index of another instruction that is mapped to
   the same intrinsic as cgen_insns[X].  It is -1 when there is no other
   instruction.

   Things are set up so that mep_intrinsic_chain[X] < X.  */
static int mep_intrinsic_chain[ARRAY_SIZE (cgen_insns)];

/* The bitmask for the current ISA.  The ISA masks are declared
   in mep-intrin.h.  */
unsigned int mep_selected_isa;

/* A coprocessor configuration: its name (compared against
   mep_config_string in mep_init_intrinsics) and its ISA bitmask.  */
struct mep_config {
  const char *config_name;
  unsigned int isa;
};

/* Table of known configurations, populated from the port's
   COPROC_SELECTION_TABLE when available, and terminated by a null
   entry.  */
static struct mep_config mep_configs[] = {
#ifdef COPROC_SELECTION_TABLE
  COPROC_SELECTION_TABLE,
#endif
  { 0, 0 }
};
5963
 
5964
/* Initialize the global intrinsics variables above.  */

static void
mep_init_intrinsics (void)
{
  size_t i;

  /* Set MEP_SELECTED_ISA to the ISA flag for this configuration.  */
  mep_selected_isa = mep_configs[0].isa;
  if (mep_config_string != 0)
    for (i = 0; mep_configs[i].config_name; i++)
      if (strcmp (mep_config_string, mep_configs[i].config_name) == 0)
        {
          mep_selected_isa = mep_configs[i].isa;
          break;
        }

  /* Assume all intrinsics are unavailable.  */
  for (i = 0; i < ARRAY_SIZE (mep_intrinsic_insn); i++)
    mep_intrinsic_insn[i] = -1;

  /* Build up the global intrinsic tables.  Later table entries
     displace earlier ones for the same intrinsic, chaining the
     displaced entry through mep_intrinsic_chain.  */
  for (i = 0; i < ARRAY_SIZE (cgen_insns); i++)
    if ((cgen_insns[i].isas & mep_selected_isa) != 0)
      {
        mep_intrinsic_chain[i] = mep_intrinsic_insn[cgen_insns[i].intrinsic];
        mep_intrinsic_insn[cgen_insns[i].intrinsic] = i;
      }
  /* See whether we can directly move values between one coprocessor
     register and another.  */
  for (i = 0; i < ARRAY_SIZE (mep_cmov_insns); i++)
    if (MEP_INTRINSIC_AVAILABLE_P (mep_cmov_insns[i]))
      mep_have_copro_copro_moves_p = true;

  /* See whether we can directly move values between core and
     coprocessor registers.  */
  mep_have_core_copro_moves_p = (MEP_INTRINSIC_AVAILABLE_P (mep_cmov1)
                                 && MEP_INTRINSIC_AVAILABLE_P (mep_cmov2));

  /* NOTE(review): this unconditionally overrides the value computed
     just above — looks like a leftover debugging override; confirm
     intent before removing.  */
  mep_have_core_copro_moves_p = 1;
}
6005
 
6006
/* Declare all available intrinsic functions.  Called once only.  */
6007
 
6008
/* Tree type nodes for the coprocessor builtin interface; all are
   initialized by mep_init_builtins.  */
static tree cp_data_bus_int_type_node;
static tree opaque_vector_type_node;
static tree v8qi_type_node;
static tree v4hi_type_node;
static tree v2si_type_node;
static tree v8uqi_type_node;
static tree v4uhi_type_node;
static tree v2usi_type_node;
6016
 
6017
static tree
6018
mep_cgen_regnum_to_type (enum cgen_regnum_operand_type cr)
6019
{
6020
  switch (cr)
6021
    {
6022
    case cgen_regnum_operand_type_POINTER:      return ptr_type_node;
6023
    case cgen_regnum_operand_type_LONG:         return long_integer_type_node;
6024
    case cgen_regnum_operand_type_ULONG:        return long_unsigned_type_node;
6025
    case cgen_regnum_operand_type_SHORT:        return short_integer_type_node;
6026
    case cgen_regnum_operand_type_USHORT:       return short_unsigned_type_node;
6027
    case cgen_regnum_operand_type_CHAR:         return char_type_node;
6028
    case cgen_regnum_operand_type_UCHAR:        return unsigned_char_type_node;
6029
    case cgen_regnum_operand_type_SI:           return intSI_type_node;
6030
    case cgen_regnum_operand_type_DI:           return intDI_type_node;
6031
    case cgen_regnum_operand_type_VECTOR:       return opaque_vector_type_node;
6032
    case cgen_regnum_operand_type_V8QI:         return v8qi_type_node;
6033
    case cgen_regnum_operand_type_V4HI:         return v4hi_type_node;
6034
    case cgen_regnum_operand_type_V2SI:         return v2si_type_node;
6035
    case cgen_regnum_operand_type_V8UQI:        return v8uqi_type_node;
6036
    case cgen_regnum_operand_type_V4UHI:        return v4uhi_type_node;
6037
    case cgen_regnum_operand_type_V2USI:        return v2usi_type_node;
6038
    case cgen_regnum_operand_type_CP_DATA_BUS_INT: return cp_data_bus_int_type_node;
6039
    default:
6040
      return void_type_node;
6041
    }
6042
}
6043
 
6044
/* Declare the builtin coprocessor types and intrinsic functions.
   Called once, from TARGET_INIT_BUILTINS.  */
static void
mep_init_builtins (void)
{
  size_t i;

  if (TARGET_64BIT_CR_REGS)
    cp_data_bus_int_type_node = long_long_integer_type_node;
  else
    cp_data_bus_int_type_node = long_integer_type_node;

  opaque_vector_type_node = build_opaque_vector_type (intQI_type_node, 8);
  v8qi_type_node = build_vector_type (intQI_type_node, 8);
  v4hi_type_node = build_vector_type (intHI_type_node, 4);
  v2si_type_node = build_vector_type (intSI_type_node, 2);
  v8uqi_type_node = build_vector_type (unsigned_intQI_type_node, 8);
  v4uhi_type_node = build_vector_type (unsigned_intHI_type_node, 4);
  v2usi_type_node = build_vector_type (unsigned_intSI_type_node, 2);

  /* Make the type names visible to user code.  */
  (*lang_hooks.decls.pushdecl)
    (build_decl (BUILTINS_LOCATION, TYPE_DECL, get_identifier ("cp_data_bus_int"),
                 cp_data_bus_int_type_node));

  (*lang_hooks.decls.pushdecl)
    (build_decl (BUILTINS_LOCATION, TYPE_DECL, get_identifier ("cp_vector"),
                 opaque_vector_type_node));

  (*lang_hooks.decls.pushdecl)
    (build_decl (BUILTINS_LOCATION, TYPE_DECL, get_identifier ("cp_v8qi"),
                 v8qi_type_node));
  (*lang_hooks.decls.pushdecl)
    (build_decl (BUILTINS_LOCATION, TYPE_DECL, get_identifier ("cp_v4hi"),
                 v4hi_type_node));
  (*lang_hooks.decls.pushdecl)
    (build_decl (BUILTINS_LOCATION, TYPE_DECL, get_identifier ("cp_v2si"),
                 v2si_type_node));

  (*lang_hooks.decls.pushdecl)
    (build_decl (BUILTINS_LOCATION, TYPE_DECL, get_identifier ("cp_v8uqi"),
                 v8uqi_type_node));
  (*lang_hooks.decls.pushdecl)
    (build_decl (BUILTINS_LOCATION, TYPE_DECL, get_identifier ("cp_v4uhi"),
                 v4uhi_type_node));
  (*lang_hooks.decls.pushdecl)
    (build_decl (BUILTINS_LOCATION, TYPE_DECL, get_identifier ("cp_v2usi"),
                 v2usi_type_node));

  /* Intrinsics like mep_cadd3 are implemented with two groups of
     instructions, one which uses UNSPECs and one which uses a specific
     rtl code such as PLUS.  Instructions in the latter group belong
     to GROUP_KNOWN_CODE.

     In such cases, the intrinsic will have two entries in the global
     tables above.  The unspec form is accessed using builtin functions
     while the specific form is accessed using the mep_* enum in
     mep-intrin.h.

     The idea is that __cop arithmetic and builtin functions have
     different optimization requirements.  If mep_cadd3() appears in
     the source code, the user will surely expect gcc to use cadd3
     rather than a work-alike such as add3.  However, if the user
     just writes "a + b", where a or b are __cop variables, it is
     reasonable for gcc to choose a core instruction rather than
     cadd3 if it believes that is more optimal.  */
  for (i = 0; i < ARRAY_SIZE (cgen_insns); i++)
    if ((cgen_insns[i].groups & GROUP_KNOWN_CODE) == 0
        && mep_intrinsic_insn[cgen_insns[i].intrinsic] >= 0)
      {
        tree ret_type = void_type_node;
        tree bi_type;

        /* Skip duplicate entries for the same intrinsic.  */
        if (i > 0 && cgen_insns[i].intrinsic == cgen_insns[i-1].intrinsic)
          continue;

        if (cgen_insns[i].cret_p)
          ret_type = mep_cgen_regnum_to_type (cgen_insns[i].regnums[0].type);

        bi_type = build_function_type_list (ret_type, NULL_TREE);
        add_builtin_function (cgen_intrinsics[cgen_insns[i].intrinsic],
                              bi_type,
                              cgen_insns[i].intrinsic, BUILT_IN_MD, NULL, NULL);
      }
}
6126
 
6127
/* Report the unavailability of the given intrinsic, at most once per
   intrinsic per compilation.  */

#if 1
static void
mep_intrinsic_unavailable (int intrinsic)
{
  /* Remembers which intrinsics have already been diagnosed.  */
  static int already_reported_p[ARRAY_SIZE (cgen_intrinsics)];

  if (already_reported_p[intrinsic])
    return;

  /* Distinguish "not in this configuration at all" from "not in
     this (VLIW or non-VLIW) mode".  */
  if (mep_intrinsic_insn[intrinsic] < 0)
    error ("coprocessor intrinsic %qs is not available in this configuration",
           cgen_intrinsics[intrinsic]);
  else if (CGEN_CURRENT_GROUP == GROUP_VLIW)
    error ("%qs is not available in VLIW functions",
           cgen_intrinsics[intrinsic]);
  else
    error ("%qs is not available in non-VLIW functions",
           cgen_intrinsics[intrinsic]);

  already_reported_p[intrinsic] = 1;
}
#endif
6151
 
6152
 
6153
/* See if any implementation of INTRINSIC is available to the
6154
   current function.  If so, store the most general implementation
6155
   in *INSN_PTR and return true.  Return false otherwise.  */
6156
 
6157
static bool
6158
mep_get_intrinsic_insn (int intrinsic ATTRIBUTE_UNUSED, const struct cgen_insn **insn_ptr ATTRIBUTE_UNUSED)
6159
{
6160
  int i;
6161
 
6162
  i = mep_intrinsic_insn[intrinsic];
6163
  while (i >= 0 && !CGEN_ENABLE_INSN_P (i))
6164
    i = mep_intrinsic_chain[i];
6165
 
6166
  if (i >= 0)
6167
    {
6168
      *insn_ptr = &cgen_insns[i];
6169
      return true;
6170
    }
6171
  return false;
6172
}
6173
 
6174
 
6175
/* Like mep_get_intrinsic_insn, but with extra handling for moves.
6176
   If INTRINSIC is mep_cmov, but there is no pure CR <- CR move insn,
6177
   try using a work-alike instead.  In this case, the returned insn
6178
   may have three operands rather than two.  */
6179
 
6180
static bool
6181
mep_get_move_insn (int intrinsic, const struct cgen_insn **cgen_insn)
6182
{
6183
  size_t i;
6184
 
6185
  if (intrinsic == mep_cmov)
6186
    {
6187
      for (i = 0; i < ARRAY_SIZE (mep_cmov_insns); i++)
6188
        if (mep_get_intrinsic_insn (mep_cmov_insns[i], cgen_insn))
6189
          return true;
6190
      return false;
6191
    }
6192
  return mep_get_intrinsic_insn (intrinsic, cgen_insn);
6193
}
6194
 
6195
 
6196
/* If ARG is a register operand that is the same size as MODE, convert it
6197
   to MODE using a subreg.  Otherwise return ARG as-is.  */
6198
 
6199
static rtx
6200
mep_convert_arg (enum machine_mode mode, rtx arg)
6201
{
6202
  if (GET_MODE (arg) != mode
6203
      && register_operand (arg, VOIDmode)
6204
      && GET_MODE_SIZE (GET_MODE (arg)) == GET_MODE_SIZE (mode))
6205
    return simplify_gen_subreg (mode, arg, GET_MODE (arg), 0);
6206
  return arg;
6207
}
6208
 
6209
 
6210
/* Apply regnum conversions to ARG using the description given by REGNUM.
6211
   Return the new argument on success and null on failure.  */
6212
 
6213
static rtx
6214
mep_convert_regnum (const struct cgen_regnum_operand *regnum, rtx arg)
6215
{
6216
  if (regnum->count == 0)
6217
    return arg;
6218
 
6219
  if (GET_CODE (arg) != CONST_INT
6220
      || INTVAL (arg) < 0
6221
      || INTVAL (arg) >= regnum->count)
6222
    return 0;
6223
 
6224
  return gen_rtx_REG (SImode, INTVAL (arg) + regnum->base);
6225
}
6226
 
6227
 
6228
/* Try to make intrinsic argument ARG match the given operand.
   UNSIGNED_P is true if the argument has an unsigned type.
   Return the legitimized rtx, or null if ARG cannot be made to
   match.  */

static rtx
mep_legitimize_arg (const struct insn_operand_data *operand, rtx arg,
                    int unsigned_p)
{
  if (GET_CODE (arg) == CONST_INT)
    {
      /* CONST_INTs can only be bound to integer operands.  */
      if (GET_MODE_CLASS (operand->mode) != MODE_INT)
        return 0;
    }
  else if (GET_CODE (arg) == CONST_DOUBLE)
    /* These hold vector constants.  */;
  else if (GET_MODE_SIZE (GET_MODE (arg)) != GET_MODE_SIZE (operand->mode))
    {
      /* If the argument is a different size from what's expected, we must
         have a value in the right mode class in order to convert it.  */
      if (GET_MODE_CLASS (operand->mode) != GET_MODE_CLASS (GET_MODE (arg)))
        return 0;

      /* If the operand is an rvalue, promote or demote it to match the
         operand's size.  This might not need extra instructions when
         ARG is a register value.  */
      if (operand->constraint[0] != '=')
        arg = convert_to_mode (operand->mode, arg, unsigned_p);
    }

  /* If the operand is an lvalue, bind the operand to a new register.
     The caller will copy this value into ARG after the main
     instruction.  By doing this always, we produce slightly more
     optimal code.  */
  /* But not for control registers.  */
  if (operand->constraint[0] == '='
      && (! REG_P (arg)
          || ! (CONTROL_REGNO_P (REGNO (arg))
                || CCR_REGNO_P (REGNO (arg))
                || CR_REGNO_P (REGNO (arg)))
          ))
    return gen_reg_rtx (operand->mode);

  /* Try simple mode punning.  */
  arg = mep_convert_arg (operand->mode, arg);
  if (operand->predicate (arg, operand->mode))
    return arg;

  /* See if forcing the argument into a register will make it match.  */
  if (GET_CODE (arg) == CONST_INT || GET_CODE (arg) == CONST_DOUBLE)
    arg = force_reg (operand->mode, arg);
  else
    arg = mep_convert_arg (operand->mode, force_reg (GET_MODE (arg), arg));
  if (operand->predicate (arg, operand->mode))
    return arg;

  return 0;
}
6285
 
6286
 
6287
/* Report that ARG cannot be passed to argument ARGNUM of intrinsic
   function FNNAME.  OPERAND describes the operand to which ARGNUM
   is mapped.  Emits a range/alignment diagnostic when the operand
   uses a known immediate predicate, a generic one otherwise.  */

static void
mep_incompatible_arg (const struct insn_operand_data *operand, rtx arg,
                      int argnum, tree fnname)
{
  size_t i;

  if (GET_CODE (arg) == CONST_INT)
    for (i = 0; i < ARRAY_SIZE (cgen_immediate_predicates); i++)
      if (operand->predicate == cgen_immediate_predicates[i].predicate)
        {
          const struct cgen_immediate_predicate *predicate;
          HOST_WIDE_INT argval;

          predicate = &cgen_immediate_predicates[i];
          argval = INTVAL (arg);
          /* Out of range, or in range but misaligned.  */
          if (argval < predicate->lower || argval >= predicate->upper)
            error ("argument %d of %qE must be in the range %d...%d",
                   argnum, fnname, predicate->lower, predicate->upper - 1);
          else
            error ("argument %d of %qE must be a multiple of %d",
                   argnum, fnname, predicate->align);
          return;
        }

  error ("incompatible type for argument %d of %qE", argnum, fnname);
}
6317
 
6318
/* Expand a call to a MeP coprocessor intrinsic builtin (the
   TARGET_EXPAND_BUILTIN hook).  EXP is the CALL_EXPR; TARGET, if
   non-null, is a suggested place for the result.  Returns the result
   rtx, or NULL_RTX after emitting a diagnostic.  */
static rtx
mep_expand_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
                    rtx subtarget ATTRIBUTE_UNUSED,
                    enum machine_mode mode ATTRIBUTE_UNUSED,
                    int ignore ATTRIBUTE_UNUSED)
{
  rtx pat, op[10], arg[10];
  unsigned int a;
  int opindex, unsigned_p[10];
  tree fndecl, args;
  unsigned int n_args;
  tree fnname;
  const struct cgen_insn *cgen_insn;
  const struct insn_data_d *idata;
  /* Number of leading operands that hold the return value rather
     than user-supplied arguments.  */
  unsigned int first_arg = 0;
  unsigned int builtin_n_args;

  fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
  fnname = DECL_NAME (fndecl);

  /* Find out which instruction we should emit.  Note that some coprocessor
     intrinsics may only be available in VLIW mode, or only in normal mode.  */
  if (!mep_get_intrinsic_insn (DECL_FUNCTION_CODE (fndecl), &cgen_insn))
    {
      mep_intrinsic_unavailable (DECL_FUNCTION_CODE (fndecl));
      return NULL_RTX;
    }
  idata = &insn_data[cgen_insn->icode];

  builtin_n_args = cgen_insn->num_args;

  /* When the insn produces a result (cret_p), operand 0 is the
     result, not an argument.  NOTE(review): the exact semantics of
     cret_p > 1 come from the generated mep-intrin.h — confirm
     there.  */
  if (cgen_insn->cret_p)
    {
      if (cgen_insn->cret_p > 1)
        builtin_n_args ++;
      first_arg = 1;
      mep_cgen_regnum_to_type (cgen_insn->regnums[0].type);
      builtin_n_args --;
    }

  /* Evaluate each argument.  */
  n_args = call_expr_nargs (exp);

  if (n_args < builtin_n_args)
    {
      error ("too few arguments to %qE", fnname);
      return NULL_RTX;
    }
  if (n_args > builtin_n_args)
    {
      error ("too many arguments to %qE", fnname);
      return NULL_RTX;
    }

  for (a = first_arg; a < builtin_n_args + first_arg; a++)
    {
      tree value;

      args = CALL_EXPR_ARG (exp, a - first_arg);

      value = args;

#if 0
      if (cgen_insn->regnums[a].reference_p)
        {
          if (TREE_CODE (value) != ADDR_EXPR)
            {
              debug_tree(value);
              error ("argument %d of %qE must be an address", a+1, fnname);
              return NULL_RTX;
            }
          value = TREE_OPERAND (value, 0);
        }
#endif

      /* If the argument has been promoted to int, get the unpromoted
         value.  This is necessary when sub-int memory values are bound
         to reference parameters.  */
      if (TREE_CODE (value) == NOP_EXPR
          && TREE_TYPE (value) == integer_type_node
          && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (value, 0)))
          && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (value, 0)))
              < TYPE_PRECISION (TREE_TYPE (value))))
        value = TREE_OPERAND (value, 0);

      /* If the argument has been promoted to double, get the unpromoted
         SFmode value.  This is necessary for FMAX support, for example.  */
      if (TREE_CODE (value) == NOP_EXPR
          && SCALAR_FLOAT_TYPE_P (TREE_TYPE (value))
          && SCALAR_FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (value, 0)))
          && TYPE_MODE (TREE_TYPE (value)) == DFmode
          && TYPE_MODE (TREE_TYPE (TREE_OPERAND (value, 0))) == SFmode)
        value = TREE_OPERAND (value, 0);

      unsigned_p[a] = TYPE_UNSIGNED (TREE_TYPE (value));
      arg[a] = expand_expr (value, NULL, VOIDmode, EXPAND_NORMAL);
      /* mep_convert_regnum returns null for an out-of-range register
         index; that is diagnosed below.  */
      arg[a] = mep_convert_regnum (&cgen_insn->regnums[a], arg[a]);
      if (cgen_insn->regnums[a].reference_p)
        {
          tree pointed_to = TREE_TYPE (TREE_TYPE (value));
          enum machine_mode pointed_mode = TYPE_MODE (pointed_to);

          arg[a] = gen_rtx_MEM (pointed_mode, arg[a]);
        }
      if (arg[a] == 0)
        {
          error ("argument %d of %qE must be in the range %d...%d",
                 a + 1, fnname, 0, cgen_insn->regnums[a].count - 1);
          return NULL_RTX;
        }
    }

  /* Bind the result operand(s): reuse TARGET when it has the right
     mode, otherwise use a fresh pseudo.  */
  for (a = 0; a < first_arg; a++)
    {
      if (a == 0 && target && GET_MODE (target) == idata->operand[0].mode)
        arg[a] = target;
      else
        arg[a] = gen_reg_rtx (idata->operand[0].mode);
    }

  /* Convert the arguments into a form suitable for the intrinsic.
     Report an error if this isn't possible.  */
  for (opindex = 0; opindex < idata->n_operands; opindex++)
    {
      a = cgen_insn->op_mapping[opindex];
      op[opindex] = mep_legitimize_arg (&idata->operand[opindex],
                                        arg[a], unsigned_p[a]);
      if (op[opindex] == 0)
        {
          mep_incompatible_arg (&idata->operand[opindex],
                                arg[a], a + 1 - first_arg, fnname);
          return NULL_RTX;
        }
    }

  /* Emit the instruction.  */
  pat = idata->genfun (op[0], op[1], op[2], op[3], op[4],
                       op[5], op[6], op[7], op[8], op[9]);

  /* Conditional-branch patterns must be emitted as jump insns.  */
  if (GET_CODE (pat) == SET
      && GET_CODE (SET_DEST (pat)) == PC
      && GET_CODE (SET_SRC (pat)) == IF_THEN_ELSE)
    emit_jump_insn (pat);
  else
    emit_insn (pat);

  /* Copy lvalues back to their final locations.  */
  for (opindex = 0; opindex < idata->n_operands; opindex++)
    if (idata->operand[opindex].constraint[0] == '=')
      {
        a = cgen_insn->op_mapping[opindex];
        if (a >= first_arg)
          {
            if (GET_MODE_CLASS (GET_MODE (arg[a]))
                != GET_MODE_CLASS (GET_MODE (op[opindex])))
              emit_move_insn (arg[a], gen_lowpart (GET_MODE (arg[a]),
                                                   op[opindex]));
            else
              {
                /* First convert the operand to the right mode, then copy it
                   into the destination.  Doing the conversion as a separate
                   step (rather than using convert_move) means that we can
                   avoid creating no-op moves when ARG[A] and OP[OPINDEX]
                   refer to the same register.  */
                op[opindex] = convert_to_mode (GET_MODE (arg[a]),
                                               op[opindex], unsigned_p[a]);
                if (!rtx_equal_p (arg[a], op[opindex]))
                  emit_move_insn (arg[a], op[opindex]);
              }
          }
      }

  if (first_arg > 0 && target && target != op[0])
    {
      emit_move_insn (target, op[0]);
    }

  return target;
}
6497
 
6498
/* Target hook: the MeP port does not advertise support for any
   vector mode.  */
static bool
mep_vector_mode_supported_p (enum machine_mode mode ATTRIBUTE_UNUSED)
{
  return false;
}
6503
 
6504
/* A subroutine of global_reg_mentioned_p, returns 1 if *LOC mentions
   a global register.  Used as a for_each_rtx callback, so returning
   nonzero stops the walk.  */

static int
global_reg_mentioned_p_1 (rtx *loc, void *data ATTRIBUTE_UNUSED)
{
  int regno;
  rtx x = *loc;

  if (! x)
    return 0;

  switch (GET_CODE (x))
    {
    case SUBREG:
      if (REG_P (SUBREG_REG (x)))
        {
          /* Map the subreg back to the hard register it occupies
             before consulting global_regs.  */
          if (REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER
              && global_regs[subreg_regno (x)])
            return 1;
          return 0;
        }
      break;

    case REG:
      regno = REGNO (x);
      if (regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
        return 1;
      return 0;

    case SCRATCH:
    case PC:
    case CC0:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST:
    case LABEL_REF:
      /* None of these can mention a register.  */
      return 0;

    case CALL:
      /* A non-constant call might use a global register.  */
      return 1;

    default:
      break;
    }

  return 0;
}
6553
 
6554
/* Returns nonzero if X mentions a global register.  X may be an insn
   or an arbitrary rtx; for insns, the pattern (or, for calls, the
   function-usage list) is scanned.  */

static int
global_reg_mentioned_p (rtx x)
{
  if (INSN_P (x))
    {
      if (CALL_P (x))
        {
          /* A call that is neither const nor pure may touch any
             global register.  */
          if (! RTL_CONST_OR_PURE_CALL_P (x))
            return 1;
          /* Const/pure calls: only what the usage list mentions.  */
          x = CALL_INSN_FUNCTION_USAGE (x);
          if (x == 0)
            return 0;
        }
      else
        x = PATTERN (x);
    }

  return for_each_rtx (&x, global_reg_mentioned_p_1, NULL);
}
6575
/* Scheduling hooks for VLIW mode.
6576
 
6577
   Conceptually this is very simple: we have a two-pack architecture
6578
   that takes one core insn and one coprocessor insn to make up either
6579
   a 32- or 64-bit instruction word (depending on the option bit set in
6580
   the chip).  I.e. in VL32 mode, we can pack one 16-bit core insn and
6581
   one 16-bit cop insn; in VL64 mode we can pack one 16-bit core insn
6582
   and one 48-bit cop insn or two 32-bit core/cop insns.
6583
 
6584
   In practice, instruction selection will be a bear.  Consider in
6585
   VL64 mode the following insns
6586
 
6587
        add $1, 1
6588
        cmov $cr0, $0
6589
 
6590
   these cannot pack, since the add is a 16-bit core insn and cmov
6591
   is a 32-bit cop insn.  However,
6592
 
6593
        add3 $1, $1, 1
6594
        cmov $cr0, $0
6595
 
6596
   packs just fine.  For good VLIW code generation in VL64 mode, we
6597
   will have to have 32-bit alternatives for many of the common core
6598
   insns.  Not implemented.  */
6599
 
6600
static int
6601
mep_adjust_cost (rtx insn, rtx link, rtx dep_insn, int cost)
6602
{
6603
  int cost_specified;
6604
 
6605
  if (REG_NOTE_KIND (link) != 0)
6606
    {
6607
      /* See whether INSN and DEP_INSN are intrinsics that set the same
6608
         hard register.  If so, it is more important to free up DEP_INSN
6609
         than it is to free up INSN.
6610
 
6611
         Note that intrinsics like mep_mulr are handled differently from
6612
         the equivalent mep.md patterns.  In mep.md, if we don't care
6613
         about the value of $lo and $hi, the pattern will just clobber
6614
         the registers, not set them.  Since clobbers don't count as
6615
         output dependencies, it is often possible to reorder two mulrs,
6616
         even after reload.
6617
 
6618
         In contrast, mep_mulr() sets both $lo and $hi to specific values,
6619
         so any pair of mep_mulr()s will be inter-dependent.   We should
6620
         therefore give the first mep_mulr() a higher priority.  */
6621
      if (REG_NOTE_KIND (link) == REG_DEP_OUTPUT
6622
          && global_reg_mentioned_p (PATTERN (insn))
6623
          && global_reg_mentioned_p (PATTERN (dep_insn)))
6624
        return 1;
6625
 
6626
      /* If the dependence is an anti or output dependence, assume it
6627
         has no cost.  */
6628
      return 0;
6629
    }
6630
 
6631
  /* If we can't recognize the insns, we can't really do anything.  */
6632
  if (recog_memoized (dep_insn) < 0)
6633
    return cost;
6634
 
6635
  /* The latency attribute doesn't apply to MeP-h1: we use the stall
6636
     attribute instead.  */
6637
  if (!TARGET_H1)
6638
    {
6639
      cost_specified = get_attr_latency (dep_insn);
6640
      if (cost_specified != 0)
6641
        return cost_specified;
6642
    }
6643
 
6644
  return cost;
6645
}
6646
 
6647
/* ??? We don't properly compute the length of a load/store insn,
6648
   taking into account the addressing mode.  */
6649
 
6650
static int
6651
mep_issue_rate (void)
6652
{
6653
  return TARGET_IVC2 ? 3 : 2;
6654
}
6655
 
6656
/* Return true if function DECL was declared with the vliw attribute.  */
6657
 
6658
bool
6659
mep_vliw_function_p (tree decl)
6660
{
6661
  return lookup_attribute ("vliw", TYPE_ATTRIBUTES (TREE_TYPE (decl))) != 0;
6662
}
6663
 
6664
static rtx
6665
mep_find_ready_insn (rtx *ready, int nready, enum attr_slot slot, int length)
6666
{
6667
  int i;
6668
 
6669
  for (i = nready - 1; i >= 0; --i)
6670
    {
6671
      rtx insn = ready[i];
6672
      if (recog_memoized (insn) >= 0
6673
          && get_attr_slot (insn) == slot
6674
          && get_attr_length (insn) == length)
6675
        return insn;
6676
    }
6677
 
6678
  return NULL_RTX;
6679
}
6680
 
6681
static void
6682
mep_move_ready_insn (rtx *ready, int nready, rtx insn)
6683
{
6684
  int i;
6685
 
6686
  for (i = 0; i < nready; ++i)
6687
    if (ready[i] == insn)
6688
      {
6689
        for (; i < nready - 1; ++i)
6690
          ready[i] = ready[i + 1];
6691
        ready[i] = insn;
6692
        return;
6693
      }
6694
 
6695
  gcc_unreachable ();
6696
}
6697
 
6698
static void
6699
mep_print_sched_insn (FILE *dump, rtx insn)
6700
{
6701
  const char *slots = "none";
6702
  const char *name = NULL;
6703
  int code;
6704
  char buf[30];
6705
 
6706
  if (GET_CODE (PATTERN (insn)) == SET
6707
      || GET_CODE (PATTERN (insn)) == PARALLEL)
6708
    {
6709
      switch (get_attr_slots (insn))
6710
        {
6711
        case SLOTS_CORE: slots = "core"; break;
6712
        case SLOTS_C3: slots = "c3"; break;
6713
        case SLOTS_P0: slots = "p0"; break;
6714
        case SLOTS_P0_P0S: slots = "p0,p0s"; break;
6715
        case SLOTS_P0_P1: slots = "p0,p1"; break;
6716
        case SLOTS_P0S: slots = "p0s"; break;
6717
        case SLOTS_P0S_P1: slots = "p0s,p1"; break;
6718
        case SLOTS_P1: slots = "p1"; break;
6719
        default:
6720
          sprintf(buf, "%d", get_attr_slots (insn));
6721
          slots = buf;
6722
          break;
6723
        }
6724
    }
6725
  if (GET_CODE (PATTERN (insn)) == USE)
6726
    slots = "use";
6727
 
6728
  code = INSN_CODE (insn);
6729
  if (code >= 0)
6730
    name = get_insn_name (code);
6731
  if (!name)
6732
    name = "{unknown}";
6733
 
6734
  fprintf (dump,
6735
           "insn %4d %4d  %8s  %s\n",
6736
           code,
6737
           INSN_UID (insn),
6738
           name,
6739
           slots);
6740
}
6741
 
6742
static int
6743
mep_sched_reorder (FILE *dump ATTRIBUTE_UNUSED,
6744
                   int sched_verbose ATTRIBUTE_UNUSED, rtx *ready,
6745
                   int *pnready, int clock ATTRIBUTE_UNUSED)
6746
{
6747
  int nready = *pnready;
6748
  rtx core_insn, cop_insn;
6749
  int i;
6750
 
6751
  if (dump && sched_verbose > 1)
6752
    {
6753
      fprintf (dump, "\nsched_reorder: clock %d nready %d\n", clock, nready);
6754
      for (i=0; i<nready; i++)
6755
        mep_print_sched_insn (dump, ready[i]);
6756
      fprintf (dump, "\n");
6757
    }
6758
 
6759
  if (!mep_vliw_function_p (cfun->decl))
6760
    return 1;
6761
  if (nready < 2)
6762
    return 1;
6763
 
6764
  /* IVC2 uses a DFA to determine what's ready and what's not. */
6765
  if (TARGET_IVC2)
6766
    return nready;
6767
 
6768
  /* We can issue either a core or coprocessor instruction.
6769
     Look for a matched pair of insns to reorder.  If we don't
6770
     find any, don't second-guess the scheduler's priorities.  */
6771
 
6772
  if ((core_insn = mep_find_ready_insn (ready, nready, SLOT_CORE, 2))
6773
      && (cop_insn = mep_find_ready_insn (ready, nready, SLOT_COP,
6774
                                          TARGET_OPT_VL64 ? 6 : 2)))
6775
    ;
6776
  else if (TARGET_OPT_VL64
6777
           && (core_insn = mep_find_ready_insn (ready, nready, SLOT_CORE, 4))
6778
           && (cop_insn = mep_find_ready_insn (ready, nready, SLOT_COP, 4)))
6779
    ;
6780
  else
6781
    /* We didn't find a pair.  Issue the single insn at the head
6782
       of the ready list.  */
6783
    return 1;
6784
 
6785
  /* Reorder the two insns first.  */
6786
  mep_move_ready_insn (ready, nready, core_insn);
6787
  mep_move_ready_insn (ready, nready - 1, cop_insn);
6788
  return 2;
6789
}
6790
 
6791
/* A for_each_rtx callback.  Return true if *X is a register that is
6792
   set by insn PREV.  */
6793
 
6794
static int
6795
mep_store_find_set (rtx *x, void *prev)
6796
{
6797
  return REG_P (*x) && reg_set_p (*x, (const_rtx) prev);
6798
}
6799
 
6800
/* Like mep_store_bypass_p, but takes a pattern as the second argument,
6801
   not the containing insn.  */
6802
 
6803
static bool
6804
mep_store_data_bypass_1 (rtx prev, rtx pat)
6805
{
6806
  /* Cope with intrinsics like swcpa.  */
6807
  if (GET_CODE (pat) == PARALLEL)
6808
    {
6809
      int i;
6810
 
6811
      for (i = 0; i < XVECLEN (pat, 0); i++)
6812
        if (mep_store_data_bypass_p (prev, XVECEXP (pat, 0, i)))
6813
          return true;
6814
 
6815
      return false;
6816
    }
6817
 
6818
  /* Check for some sort of store.  */
6819
  if (GET_CODE (pat) != SET
6820
      || GET_CODE (SET_DEST (pat)) != MEM)
6821
    return false;
6822
 
6823
  /* Intrinsics use patterns of the form (set (mem (scratch)) (unspec ...)).
6824
     The first operand to the unspec is the store data and the other operands
6825
     are used to calculate the address.  */
6826
  if (GET_CODE (SET_SRC (pat)) == UNSPEC)
6827
    {
6828
      rtx src;
6829
      int i;
6830
 
6831
      src = SET_SRC (pat);
6832
      for (i = 1; i < XVECLEN (src, 0); i++)
6833
        if (for_each_rtx (&XVECEXP (src, 0, i), mep_store_find_set, prev))
6834
          return false;
6835
 
6836
      return true;
6837
    }
6838
 
6839
  /* Otherwise just check that PREV doesn't modify any register mentioned
6840
     in the memory destination.  */
6841
  return !for_each_rtx (&SET_DEST (pat), mep_store_find_set, prev);
6842
}
6843
 
6844
/* Return true if INSN is a store instruction and if the store address
6845
   has no true dependence on PREV.  */
6846
 
6847
bool
6848
mep_store_data_bypass_p (rtx prev, rtx insn)
6849
{
6850
  return INSN_P (insn) ? mep_store_data_bypass_1 (prev, PATTERN (insn)) : false;
6851
}
6852
 
6853
/* A for_each_rtx subroutine of mep_mul_hilo_bypass_p.  Return 1 if *X
6854
   is a register other than LO or HI and if PREV sets *X.  */
6855
 
6856
static int
6857
mep_mul_hilo_bypass_1 (rtx *x, void *prev)
6858
{
6859
  return (REG_P (*x)
6860
          && REGNO (*x) != LO_REGNO
6861
          && REGNO (*x) != HI_REGNO
6862
          && reg_set_p (*x, (const_rtx) prev));
6863
}
6864
 
6865
/* Return true if, apart from HI/LO, there are no true dependencies
6866
   between multiplication instructions PREV and INSN.  */
6867
 
6868
bool
6869
mep_mul_hilo_bypass_p (rtx prev, rtx insn)
6870
{
6871
  rtx pat;
6872
 
6873
  pat = PATTERN (insn);
6874
  if (GET_CODE (pat) == PARALLEL)
6875
    pat = XVECEXP (pat, 0, 0);
6876
  return (GET_CODE (pat) == SET
6877
          && !for_each_rtx (&SET_SRC (pat), mep_mul_hilo_bypass_1, prev));
6878
}
6879
 
6880
/* Return true if INSN is an ldc instruction that issues to the
6881
   MeP-h1 integer pipeline.  This is true for instructions that
6882
   read from PSW, LP, SAR, HI and LO.  */
6883
 
6884
bool
6885
mep_ipipe_ldc_p (rtx insn)
6886
{
6887
  rtx pat, src;
6888
 
6889
  pat = PATTERN (insn);
6890
 
6891
  /* Cope with instrinsics that set both a hard register and its shadow.
6892
     The set of the hard register comes first.  */
6893
  if (GET_CODE (pat) == PARALLEL)
6894
    pat = XVECEXP (pat, 0, 0);
6895
 
6896
  if (GET_CODE (pat) == SET)
6897
    {
6898
      src = SET_SRC (pat);
6899
 
6900
      /* Cope with intrinsics.  The first operand to the unspec is
6901
         the source register.  */
6902
      if (GET_CODE (src) == UNSPEC || GET_CODE (src) == UNSPEC_VOLATILE)
6903
        src = XVECEXP (src, 0, 0);
6904
 
6905
      if (REG_P (src))
6906
        switch (REGNO (src))
6907
          {
6908
          case PSW_REGNO:
6909
          case LP_REGNO:
6910
          case SAR_REGNO:
6911
          case HI_REGNO:
6912
          case LO_REGNO:
6913
            return true;
6914
          }
6915
    }
6916
  return false;
6917
}
6918
 
6919
/* Create a VLIW bundle from core instruction CORE and coprocessor
6920
   instruction COP.  COP always satisfies INSN_P, but CORE can be
6921
   either a new pattern or an existing instruction.
6922
 
6923
   Emit the bundle in place of COP and return it.  */
6924
 
6925
static rtx
6926
mep_make_bundle (rtx core, rtx cop)
6927
{
6928
  rtx insn;
6929
 
6930
  /* If CORE is an existing instruction, remove it, otherwise put
6931
     the new pattern in an INSN harness.  */
6932
  if (INSN_P (core))
6933
    remove_insn (core);
6934
  else
6935
    core = make_insn_raw (core);
6936
 
6937
  /* Generate the bundle sequence and replace COP with it.  */
6938
  insn = gen_rtx_SEQUENCE (VOIDmode, gen_rtvec (2, core, cop));
6939
  insn = emit_insn_after (insn, cop);
6940
  remove_insn (cop);
6941
 
6942
  /* Set up the links of the insns inside the SEQUENCE.  */
6943
  PREV_INSN (core) = PREV_INSN (insn);
6944
  NEXT_INSN (core) = cop;
6945
  PREV_INSN (cop) = core;
6946
  NEXT_INSN (cop) = NEXT_INSN (insn);
6947
 
6948
  /* Set the VLIW flag for the coprocessor instruction.  */
6949
  PUT_MODE (core, VOIDmode);
6950
  PUT_MODE (cop, BImode);
6951
 
6952
  /* Derive a location for the bundle.  Individual instructions cannot
6953
     have their own location because there can be no assembler labels
6954
     between CORE and COP.  */
6955
  INSN_LOCATOR (insn) = INSN_LOCATOR (INSN_LOCATOR (core) ? core : cop);
6956
  INSN_LOCATOR (core) = 0;
6957
  INSN_LOCATOR (cop) = 0;
6958
 
6959
  return insn;
6960
}
6961
 
6962
/* A helper routine for ms1_insn_dependent_p called through note_stores.  */
6963
 
6964
static void
6965
mep_insn_dependent_p_1 (rtx x, const_rtx pat ATTRIBUTE_UNUSED, void *data)
6966
{
6967
  rtx * pinsn = (rtx *) data;
6968
 
6969
  if (*pinsn && reg_mentioned_p (x, *pinsn))
6970
    *pinsn = NULL_RTX;
6971
}
6972
 
6973
/* Return true if anything in insn X is (anti,output,true) dependent on
6974
   anything in insn Y.  */
6975
 
6976
static int
6977
mep_insn_dependent_p (rtx x, rtx y)
6978
{
6979
  rtx tmp;
6980
 
6981
  gcc_assert (INSN_P (x));
6982
  gcc_assert (INSN_P (y));
6983
 
6984
  tmp = PATTERN (y);
6985
  note_stores (PATTERN (x), mep_insn_dependent_p_1, &tmp);
6986
  if (tmp == NULL_RTX)
6987
    return 1;
6988
 
6989
  tmp = PATTERN (x);
6990
  note_stores (PATTERN (y), mep_insn_dependent_p_1, &tmp);
6991
  if (tmp == NULL_RTX)
6992
    return 1;
6993
 
6994
  return 0;
6995
}
6996
 
6997
static int
6998
core_insn_p (rtx insn)
6999
{
7000
  if (GET_CODE (PATTERN (insn)) == USE)
7001
    return 0;
7002
  if (get_attr_slot (insn) == SLOT_CORE)
7003
    return 1;
7004
  return 0;
7005
}
7006
 
7007
/* Mark coprocessor instructions that can be bundled together with
7008
   the immediately preceeding core instruction.  This is later used
7009
   to emit the "+" that tells the assembler to create a VLIW insn.
7010
 
7011
   For unbundled insns, the assembler will automatically add coprocessor
7012
   nops, and 16-bit core nops.  Due to an apparent oversight in the
7013
   spec, the assembler will _not_ automatically add 32-bit core nops,
7014
   so we have to emit those here.
7015
 
7016
   Called from mep_insn_reorg.  */
7017
 
7018
static void
7019
mep_bundle_insns (rtx insns)
7020
{
7021
  rtx insn, last = NULL_RTX, first = NULL_RTX;
7022
  int saw_scheduling = 0;
7023
 
7024
  /* Only do bundling if we're in vliw mode.  */
7025
  if (!mep_vliw_function_p (cfun->decl))
7026
    return;
7027
 
7028
  /* The first insn in a bundle are TImode, the remainder are
7029
     VOIDmode.  After this function, the first has VOIDmode and the
7030
     rest have BImode.  */
7031
 
7032
  /* Note: this doesn't appear to be true for JUMP_INSNs.  */
7033
 
7034
  /* First, move any NOTEs that are within a bundle, to the beginning
7035
     of the bundle.  */
7036
  for (insn = insns; insn ; insn = NEXT_INSN (insn))
7037
    {
7038
      if (NOTE_P (insn) && first)
7039
        /* Don't clear FIRST.  */;
7040
 
7041
      else if (NONJUMP_INSN_P (insn) && GET_MODE (insn) == TImode)
7042
        first = insn;
7043
 
7044
      else if (NONJUMP_INSN_P (insn) && GET_MODE (insn) == VOIDmode && first)
7045
        {
7046
          rtx note, prev;
7047
 
7048
          /* INSN is part of a bundle; FIRST is the first insn in that
7049
             bundle.  Move all intervening notes out of the bundle.
7050
             In addition, since the debug pass may insert a label
7051
             whenever the current line changes, set the location info
7052
             for INSN to match FIRST.  */
7053
 
7054
          INSN_LOCATOR (insn) = INSN_LOCATOR (first);
7055
 
7056
          note = PREV_INSN (insn);
7057
          while (note && note != first)
7058
            {
7059
              prev = PREV_INSN (note);
7060
 
7061
              if (NOTE_P (note))
7062
                {
7063
                  /* Remove NOTE from here... */
7064
                  PREV_INSN (NEXT_INSN (note)) = PREV_INSN (note);
7065
                  NEXT_INSN (PREV_INSN (note)) = NEXT_INSN (note);
7066
                  /* ...and put it in here.  */
7067
                  NEXT_INSN (note) = first;
7068
                  PREV_INSN (note) = PREV_INSN (first);
7069
                  NEXT_INSN (PREV_INSN (note)) = note;
7070
                  PREV_INSN (NEXT_INSN (note)) = note;
7071
                }
7072
 
7073
              note = prev;
7074
            }
7075
        }
7076
 
7077
      else if (!NONJUMP_INSN_P (insn))
7078
        first = 0;
7079
    }
7080
 
7081
  /* Now fix up the bundles.  */
7082
  for (insn = insns; insn ; insn = NEXT_INSN (insn))
7083
    {
7084
      if (NOTE_P (insn))
7085
        continue;
7086
 
7087
      if (!NONJUMP_INSN_P (insn))
7088
        {
7089
          last = 0;
7090
          continue;
7091
        }
7092
 
7093
      /* If we're not optimizing enough, there won't be scheduling
7094
         info.  We detect that here.  */
7095
      if (GET_MODE (insn) == TImode)
7096
        saw_scheduling = 1;
7097
      if (!saw_scheduling)
7098
        continue;
7099
 
7100
      if (TARGET_IVC2)
7101
        {
7102
          rtx core_insn = NULL_RTX;
7103
 
7104
          /* IVC2 slots are scheduled by DFA, so we just accept
7105
             whatever the scheduler gives us.  However, we must make
7106
             sure the core insn (if any) is the first in the bundle.
7107
             The IVC2 assembler can insert whatever NOPs are needed,
7108
             and allows a COP insn to be first.  */
7109
 
7110
          if (NONJUMP_INSN_P (insn)
7111
              && GET_CODE (PATTERN (insn)) != USE
7112
              && GET_MODE (insn) == TImode)
7113
            {
7114
              for (last = insn;
7115
                   NEXT_INSN (last)
7116
                     && GET_MODE (NEXT_INSN (last)) == VOIDmode
7117
                     && NONJUMP_INSN_P (NEXT_INSN (last));
7118
                   last = NEXT_INSN (last))
7119
                {
7120
                  if (core_insn_p (last))
7121
                    core_insn = last;
7122
                }
7123
              if (core_insn_p (last))
7124
                core_insn = last;
7125
 
7126
              if (core_insn && core_insn != insn)
7127
                {
7128
                  /* Swap core insn to first in the bundle.  */
7129
 
7130
                  /* Remove core insn.  */
7131
                  if (PREV_INSN (core_insn))
7132
                    NEXT_INSN (PREV_INSN (core_insn)) = NEXT_INSN (core_insn);
7133
                  if (NEXT_INSN (core_insn))
7134
                    PREV_INSN (NEXT_INSN (core_insn)) = PREV_INSN (core_insn);
7135
 
7136
                  /* Re-insert core insn.  */
7137
                  PREV_INSN (core_insn) = PREV_INSN (insn);
7138
                  NEXT_INSN (core_insn) = insn;
7139
 
7140
                  if (PREV_INSN (core_insn))
7141
                    NEXT_INSN (PREV_INSN (core_insn)) = core_insn;
7142
                  PREV_INSN (insn) = core_insn;
7143
 
7144
                  PUT_MODE (core_insn, TImode);
7145
                  PUT_MODE (insn, VOIDmode);
7146
                }
7147
            }
7148
 
7149
          /* The first insn has TImode, the rest have VOIDmode */
7150
          if (GET_MODE (insn) == TImode)
7151
            PUT_MODE (insn, VOIDmode);
7152
          else
7153
            PUT_MODE (insn, BImode);
7154
          continue;
7155
        }
7156
 
7157
      PUT_MODE (insn, VOIDmode);
7158
      if (recog_memoized (insn) >= 0
7159
          && get_attr_slot (insn) == SLOT_COP)
7160
        {
7161
          if (GET_CODE (insn) == JUMP_INSN
7162
              || ! last
7163
              || recog_memoized (last) < 0
7164
              || get_attr_slot (last) != SLOT_CORE
7165
              || (get_attr_length (insn)
7166
                  != (TARGET_OPT_VL64 ? 8 : 4) - get_attr_length (last))
7167
              || mep_insn_dependent_p (insn, last))
7168
            {
7169
              switch (get_attr_length (insn))
7170
                {
7171
                case 8:
7172
                  break;
7173
                case 6:
7174
                  insn = mep_make_bundle (gen_nop (), insn);
7175
                  break;
7176
                case 4:
7177
                  if (TARGET_OPT_VL64)
7178
                    insn = mep_make_bundle (gen_nop32 (), insn);
7179
                  break;
7180
                case 2:
7181
                  if (TARGET_OPT_VL64)
7182
                    error ("2 byte cop instructions are"
7183
                           " not allowed in 64-bit VLIW mode");
7184
                  else
7185
                    insn = mep_make_bundle (gen_nop (), insn);
7186
                  break;
7187
                default:
7188
                  error ("unexpected %d byte cop instruction",
7189
                         get_attr_length (insn));
7190
                  break;
7191
                }
7192
            }
7193
          else
7194
            insn = mep_make_bundle (last, insn);
7195
        }
7196
 
7197
      last = insn;
7198
    }
7199
}
7200
 
7201
 
7202
/* Try to instantiate INTRINSIC with the operands given in OPERANDS.
7203
   Return true on success.  This function can fail if the intrinsic
7204
   is unavailable or if the operands don't satisfy their predicates.  */
7205
 
7206
bool
7207
mep_emit_intrinsic (int intrinsic, const rtx *operands)
7208
{
7209
  const struct cgen_insn *cgen_insn;
7210
  const struct insn_data_d *idata;
7211
  rtx newop[10];
7212
  int i;
7213
 
7214
  if (!mep_get_intrinsic_insn (intrinsic, &cgen_insn))
7215
    return false;
7216
 
7217
  idata = &insn_data[cgen_insn->icode];
7218
  for (i = 0; i < idata->n_operands; i++)
7219
    {
7220
      newop[i] = mep_convert_arg (idata->operand[i].mode, operands[i]);
7221
      if (!idata->operand[i].predicate (newop[i], idata->operand[i].mode))
7222
        return false;
7223
    }
7224
 
7225
  emit_insn (idata->genfun (newop[0], newop[1], newop[2],
7226
                            newop[3], newop[4], newop[5],
7227
                            newop[6], newop[7], newop[8]));
7228
 
7229
  return true;
7230
}
7231
 
7232
 
7233
/* Apply the given unary intrinsic to OPERANDS[1] and store it on
7234
   OPERANDS[0].  Report an error if the instruction could not
7235
   be synthesized.  OPERANDS[1] is a register_operand.  For sign
7236
   and zero extensions, it may be smaller than SImode.  */
7237
 
7238
bool
7239
mep_expand_unary_intrinsic (int ATTRIBUTE_UNUSED intrinsic,
7240
                            rtx * operands ATTRIBUTE_UNUSED)
7241
{
7242
  return false;
7243
}
7244
 
7245
 
7246
/* Likewise, but apply a binary operation to OPERANDS[1] and
7247
   OPERANDS[2].  OPERANDS[1] is a register_operand, OPERANDS[2]
7248
   can be a general_operand.
7249
 
7250
   IMMEDIATE and IMMEDIATE3 are intrinsics that take an immediate
7251
   third operand.  REG and REG3 take register operands only.  */
7252
 
7253
bool
7254
mep_expand_binary_intrinsic (int ATTRIBUTE_UNUSED immediate,
7255
                             int ATTRIBUTE_UNUSED immediate3,
7256
                             int ATTRIBUTE_UNUSED reg,
7257
                             int ATTRIBUTE_UNUSED reg3,
7258
                             rtx * operands ATTRIBUTE_UNUSED)
7259
{
7260
  return false;
7261
}
7262
 
7263
static bool
7264
mep_rtx_cost (rtx x, int code, int outer_code ATTRIBUTE_UNUSED,
7265
              int opno ATTRIBUTE_UNUSED, int *total,
7266
              bool ATTRIBUTE_UNUSED speed_t)
7267
{
7268
  switch (code)
7269
    {
7270
    case CONST_INT:
7271
      if (INTVAL (x) >= -128 && INTVAL (x) < 127)
7272
        *total = 0;
7273
      else if (INTVAL (x) >= -32768 && INTVAL (x) < 65536)
7274
        *total = 1;
7275
      else
7276
        *total = 3;
7277
      return true;
7278
 
7279
    case SYMBOL_REF:
7280
      *total = optimize_size ? COSTS_N_INSNS (0) : COSTS_N_INSNS (1);
7281
      return true;
7282
 
7283
    case MULT:
7284
      *total = (GET_CODE (XEXP (x, 1)) == CONST_INT
7285
                ? COSTS_N_INSNS (3)
7286
                : COSTS_N_INSNS (2));
7287
      return true;
7288
    }
7289
  return false;
7290
}
7291
 
7292
static int
7293
mep_address_cost (rtx addr ATTRIBUTE_UNUSED, bool ATTRIBUTE_UNUSED speed_p)
7294
{
7295
  return 1;
7296
}
7297
 
7298
static void
7299
mep_asm_init_sections (void)
7300
{
7301
  based_section
7302
    = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
7303
                           "\t.section .based,\"aw\"");
7304
 
7305
  tinybss_section
7306
    = get_unnamed_section (SECTION_WRITE | SECTION_BSS, output_section_asm_op,
7307
                           "\t.section .sbss,\"aw\"");
7308
 
7309
  sdata_section
7310
    = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
7311
                           "\t.section .sdata,\"aw\",@progbits");
7312
 
7313
  far_section
7314
    = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
7315
                           "\t.section .far,\"aw\"");
7316
 
7317
  farbss_section
7318
    = get_unnamed_section (SECTION_WRITE | SECTION_BSS, output_section_asm_op,
7319
                           "\t.section .farbss,\"aw\"");
7320
 
7321
  frodata_section
7322
    = get_unnamed_section (0, output_section_asm_op,
7323
                           "\t.section .frodata,\"a\"");
7324
 
7325
  srodata_section
7326
    = get_unnamed_section (0, output_section_asm_op,
7327
                           "\t.section .srodata,\"a\"");
7328
 
7329
  vtext_section
7330
    = get_unnamed_section (SECTION_CODE | SECTION_MEP_VLIW, output_section_asm_op,
7331
                           "\t.section .vtext,\"axv\"\n\t.vliw");
7332
 
7333
  vftext_section
7334
    = get_unnamed_section (SECTION_CODE | SECTION_MEP_VLIW, output_section_asm_op,
7335
                           "\t.section .vftext,\"axv\"\n\t.vliw");
7336
 
7337
  ftext_section
7338
    = get_unnamed_section (SECTION_CODE, output_section_asm_op,
7339
                           "\t.section .ftext,\"ax\"\n\t.core");
7340
 
7341
}
7342
 
7343
/* Initialize the GCC target structure.  */
7344
 
7345
#undef  TARGET_ASM_FUNCTION_PROLOGUE
7346
#define TARGET_ASM_FUNCTION_PROLOGUE    mep_start_function
7347
#undef  TARGET_ATTRIBUTE_TABLE
7348
#define TARGET_ATTRIBUTE_TABLE          mep_attribute_table
7349
#undef  TARGET_COMP_TYPE_ATTRIBUTES
7350
#define TARGET_COMP_TYPE_ATTRIBUTES     mep_comp_type_attributes
7351
#undef  TARGET_INSERT_ATTRIBUTES
7352
#define TARGET_INSERT_ATTRIBUTES        mep_insert_attributes
7353
#undef  TARGET_FUNCTION_ATTRIBUTE_INLINABLE_P
7354
#define TARGET_FUNCTION_ATTRIBUTE_INLINABLE_P   mep_function_attribute_inlinable_p
7355
#undef  TARGET_CAN_INLINE_P
7356
#define TARGET_CAN_INLINE_P             mep_can_inline_p
7357
#undef  TARGET_SECTION_TYPE_FLAGS
7358
#define TARGET_SECTION_TYPE_FLAGS       mep_section_type_flags
7359
#undef  TARGET_ASM_NAMED_SECTION
7360
#define TARGET_ASM_NAMED_SECTION        mep_asm_named_section
7361
#undef  TARGET_INIT_BUILTINS
7362
#define TARGET_INIT_BUILTINS            mep_init_builtins
7363
#undef  TARGET_EXPAND_BUILTIN
7364
#define TARGET_EXPAND_BUILTIN           mep_expand_builtin
7365
#undef  TARGET_SCHED_ADJUST_COST
7366
#define TARGET_SCHED_ADJUST_COST        mep_adjust_cost
7367
#undef  TARGET_SCHED_ISSUE_RATE
7368
#define TARGET_SCHED_ISSUE_RATE         mep_issue_rate
7369
#undef  TARGET_SCHED_REORDER
7370
#define TARGET_SCHED_REORDER            mep_sched_reorder
7371
#undef  TARGET_STRIP_NAME_ENCODING
7372
#define TARGET_STRIP_NAME_ENCODING      mep_strip_name_encoding
7373
#undef  TARGET_ASM_SELECT_SECTION
7374
#define TARGET_ASM_SELECT_SECTION       mep_select_section
7375
#undef  TARGET_ASM_UNIQUE_SECTION
7376
#define TARGET_ASM_UNIQUE_SECTION       mep_unique_section
7377
#undef  TARGET_ENCODE_SECTION_INFO
7378
#define TARGET_ENCODE_SECTION_INFO      mep_encode_section_info
7379
#undef  TARGET_FUNCTION_OK_FOR_SIBCALL
7380
#define TARGET_FUNCTION_OK_FOR_SIBCALL  mep_function_ok_for_sibcall
7381
#undef  TARGET_RTX_COSTS
7382
#define TARGET_RTX_COSTS                mep_rtx_cost
7383
#undef  TARGET_ADDRESS_COST
7384
#define TARGET_ADDRESS_COST             mep_address_cost
7385
#undef  TARGET_MACHINE_DEPENDENT_REORG
7386
#define TARGET_MACHINE_DEPENDENT_REORG  mep_reorg
7387
#undef  TARGET_SETUP_INCOMING_VARARGS
7388
#define TARGET_SETUP_INCOMING_VARARGS   mep_setup_incoming_varargs
7389
#undef  TARGET_PASS_BY_REFERENCE
7390
#define TARGET_PASS_BY_REFERENCE        mep_pass_by_reference
7391
#undef  TARGET_FUNCTION_ARG
7392
#define TARGET_FUNCTION_ARG             mep_function_arg
7393
#undef  TARGET_FUNCTION_ARG_ADVANCE
7394
#define TARGET_FUNCTION_ARG_ADVANCE     mep_function_arg_advance
7395
#undef  TARGET_VECTOR_MODE_SUPPORTED_P
7396
#define TARGET_VECTOR_MODE_SUPPORTED_P  mep_vector_mode_supported_p
7397
#undef  TARGET_OPTION_OVERRIDE
7398
#define TARGET_OPTION_OVERRIDE          mep_option_override
7399
#undef  TARGET_ALLOCATE_INITIAL_VALUE
7400
#define TARGET_ALLOCATE_INITIAL_VALUE   mep_allocate_initial_value
7401
#undef  TARGET_ASM_INIT_SECTIONS
7402
#define TARGET_ASM_INIT_SECTIONS        mep_asm_init_sections
7403
#undef  TARGET_RETURN_IN_MEMORY
7404
#define TARGET_RETURN_IN_MEMORY         mep_return_in_memory
7405
#undef  TARGET_NARROW_VOLATILE_BITFIELD
7406
#define TARGET_NARROW_VOLATILE_BITFIELD mep_narrow_volatile_bitfield
7407
#undef  TARGET_EXPAND_BUILTIN_SAVEREGS
7408
#define TARGET_EXPAND_BUILTIN_SAVEREGS  mep_expand_builtin_saveregs
7409
#undef  TARGET_BUILD_BUILTIN_VA_LIST
7410
#define TARGET_BUILD_BUILTIN_VA_LIST    mep_build_builtin_va_list
7411
#undef  TARGET_EXPAND_BUILTIN_VA_START
7412
#define TARGET_EXPAND_BUILTIN_VA_START  mep_expand_va_start
7413
#undef  TARGET_GIMPLIFY_VA_ARG_EXPR
7414
#define TARGET_GIMPLIFY_VA_ARG_EXPR     mep_gimplify_va_arg_expr
7415
#undef  TARGET_CAN_ELIMINATE
7416
#define TARGET_CAN_ELIMINATE            mep_can_eliminate
7417
#undef  TARGET_CONDITIONAL_REGISTER_USAGE
7418
#define TARGET_CONDITIONAL_REGISTER_USAGE       mep_conditional_register_usage
7419
#undef  TARGET_TRAMPOLINE_INIT
7420
#define TARGET_TRAMPOLINE_INIT          mep_trampoline_init
7421
#undef  TARGET_LEGITIMATE_CONSTANT_P
7422
#define TARGET_LEGITIMATE_CONSTANT_P    mep_legitimate_constant_p
7423
 
7424
struct gcc_target targetm = TARGET_INITIALIZER;
7425
 
7426
#include "gt-mep.h"

powered by: WebSVN 2.1.0

© copyright 1999-2024 OpenCores.org, equivalent to Oliscience, all rights reserved. OpenCores®, registered trademark.