OpenCores
URL https://opencores.org/ocsvn/openrisc_me/openrisc_me/trunk

Subversion Repositories openrisc_me

[/] [openrisc/] [trunk/] [gnu-src/] [gcc-4.5.1/] [gcc/] [config/] [mep/] [mep.c] - Blame information for rev 290

Go to most recent revision | Details | Compare with Previous | View Log

Line No. Rev Author Line
1 282 jeremybenn
/* Definitions for Toshiba Media Processor
2
   Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
3
   Free Software Foundation, Inc.
4
   Contributed by Red Hat, Inc.
5
 
6
This file is part of GCC.
7
 
8
GCC is free software; you can redistribute it and/or modify it under
9
the terms of the GNU General Public License as published by the Free
10
Software Foundation; either version 3, or (at your option) any later
11
version.
12
 
13
GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14
WARRANTY; without even the implied warranty of MERCHANTABILITY or
15
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
16
for more details.
17
 
18
You should have received a copy of the GNU General Public License
19
along with GCC; see the file COPYING3.  If not see
20
<http://www.gnu.org/licenses/>.  */
21
 
22
#include "config.h"
23
#include "system.h"
24
#include "coretypes.h"
25
#include "tm.h"
26
#include "rtl.h"
27
#include "tree.h"
28
#include "regs.h"
29
#include "hard-reg-set.h"
30
#include "real.h"
31
#include "insn-config.h"
32
#include "conditions.h"
33
#include "insn-flags.h"
34
#include "output.h"
35
#include "insn-attr.h"
36
#include "flags.h"
37
#include "recog.h"
38
#include "obstack.h"
39
#include "tree.h"
40
#include "expr.h"
41
#include "except.h"
42
#include "function.h"
43
#include "optabs.h"
44
#include "reload.h"
45
#include "tm_p.h"
46
#include "ggc.h"
47
#include "toplev.h"
48
#include "integrate.h"
49
#include "target.h"
50
#include "target-def.h"
51
#include "langhooks.h"
52
#include "df.h"
53
#include "gimple.h"
54
 
55
/* Structure of this file:
56
 
57
 + Command Line Option Support
58
 + Pattern support - constraints, predicates, expanders
59
 + Reload Support
60
 + Costs
61
 + Functions to save and restore machine-specific function data.
62
 + Frame/Epilog/Prolog Related
63
 + Operand Printing
64
 + Function args in registers
65
 + Handle pipeline hazards
66
 + Handle attributes
67
 + Trampolines
68
 + Machine-dependent Reorg
69
 + Builtins.  */
70
 
71
/* Symbol encodings:
72
 
73
   Symbols are encoded as @ <char> . <name> where <char> is one of these:
74
 
75
   b - based
76
   t - tiny
77
   n - near
78
   f - far
79
   i - io, near
80
   I - io, far
81
   c - cb (control bus)  */
82
 
83
/* Per-function machine-specific state, allocated by
   mep_init_machine_status and GC-tracked via GTY.  */
struct GTY(()) machine_function
{
  /* Per-function record of whether a frame pointer is needed.
     NOTE(review): the setter is not visible in this chunk — confirm it
     mirrors frame_pointer_needed.  */
  int mep_frame_pointer_needed;

  /* For varargs. */
  int arg_regs_to_save;
  /* Padding amounts for the register-save area and the frame proper;
     presumably in bytes — TODO confirm against the prologue code.  */
  int regsave_filler;
  int frame_filler;
  /* Nonzero once the frame layout must no longer change.  */
  int frame_locked;

  /* Records __builtin_return address.  */
  rtx eh_stack_adjust;

  /* Size of the register-save area, plus a per-hard-register map of
     save slots and saved-ness flags.  */
  int reg_save_size;
  int reg_save_slot[FIRST_PSEUDO_REGISTER];
  unsigned char reg_saved[FIRST_PSEUDO_REGISTER];

  /* 2 if the current function has an interrupt attribute, 1 if not, 0
     if unknown.  This is here because resource.c uses EPILOGUE_USES
     which needs it.  */
  int interrupt_handler;

  /* Likewise, for disinterrupt attribute.  */
  int disable_interrupts;

  /* Number of doloop tags used so far.  */
  int doloop_tags;

  /* True if the last tag was allocated to a doloop_end.  */
  bool doloop_tag_from_end;

  /* True if reload changes $TP.  */
  bool reload_changes_tp;

  /* 2 if there are asm()s without operands, 1 if not, 0 if unknown.
     We only set this if the function is an interrupt handler.  */
  int asms_without_operands;
};
121
 
122
#define MEP_CONTROL_REG(x) \
123
  (GET_CODE (x) == REG && ANY_CONTROL_REGNO_P (REGNO (x)))
124
 
125
static const struct attribute_spec mep_attribute_table[11];
126
 
127
static GTY(()) section * based_section;
128
static GTY(()) section * tinybss_section;
129
static GTY(()) section * far_section;
130
static GTY(()) section * farbss_section;
131
static GTY(()) section * frodata_section;
132
static GTY(()) section * srodata_section;
133
 
134
static GTY(()) section * vtext_section;
135
static GTY(()) section * vftext_section;
136
static GTY(()) section * ftext_section;
137
 
138
static void mep_set_leaf_registers (int);
139
static bool symbol_p (rtx);
140
static bool symbolref_p (rtx);
141
static void encode_pattern_1 (rtx);
142
static void encode_pattern (rtx);
143
static bool const_in_range (rtx, int, int);
144
static void mep_rewrite_mult (rtx, rtx);
145
static void mep_rewrite_mulsi3 (rtx, rtx, rtx, rtx);
146
static void mep_rewrite_maddsi3 (rtx, rtx, rtx, rtx, rtx);
147
static bool mep_reuse_lo_p_1 (rtx, rtx, rtx, bool);
148
static bool move_needs_splitting (rtx, rtx, enum machine_mode);
149
static bool mep_expand_setcc_1 (enum rtx_code, rtx, rtx, rtx);
150
static bool mep_nongeneral_reg (rtx);
151
static bool mep_general_copro_reg (rtx);
152
static bool mep_nonregister (rtx);
153
static struct machine_function* mep_init_machine_status (void);
154
static rtx mep_tp_rtx (void);
155
static rtx mep_gp_rtx (void);
156
static bool mep_interrupt_p (void);
157
static bool mep_disinterrupt_p (void);
158
static bool mep_reg_set_p (rtx, rtx);
159
static bool mep_reg_set_in_function (int);
160
static bool mep_interrupt_saved_reg (int);
161
static bool mep_call_saves_register (int);
162
static rtx F (rtx);
163
static void add_constant (int, int, int, int);
164
static rtx maybe_dead_move (rtx, rtx, bool);
165
static void mep_reload_pointer (int, const char *);
166
static void mep_start_function (FILE *, HOST_WIDE_INT);
167
static bool mep_function_ok_for_sibcall (tree, tree);
168
static int unique_bit_in (HOST_WIDE_INT);
169
static int bit_size_for_clip (HOST_WIDE_INT);
170
static int bytesize (const_tree, enum machine_mode);
171
static tree mep_validate_based_tiny (tree *, tree, tree, int, bool *);
172
static tree mep_validate_near_far (tree *, tree, tree, int, bool *);
173
static tree mep_validate_disinterrupt (tree *, tree, tree, int, bool *);
174
static tree mep_validate_interrupt (tree *, tree, tree, int, bool *);
175
static tree mep_validate_io_cb (tree *, tree, tree, int, bool *);
176
static tree mep_validate_vliw (tree *, tree, tree, int, bool *);
177
static bool mep_function_attribute_inlinable_p (const_tree);
178
static bool mep_can_inline_p (tree, tree);
179
static bool mep_lookup_pragma_disinterrupt (const char *);
180
static int mep_multiple_address_regions (tree, bool);
181
static int mep_attrlist_to_encoding (tree, tree);
182
static void mep_insert_attributes (tree, tree *);
183
static void mep_encode_section_info (tree, rtx, int);
184
static section * mep_select_section (tree, int, unsigned HOST_WIDE_INT);
185
static void mep_unique_section (tree, int);
186
static unsigned int mep_section_type_flags (tree, const char *, int);
187
static void mep_asm_named_section (const char *, unsigned int, tree);
188
static bool mep_mentioned_p (rtx, rtx, int);
189
static void mep_reorg_regmove (rtx);
190
static rtx mep_insert_repeat_label_last (rtx, rtx, bool, bool);
191
static void mep_reorg_repeat (rtx);
192
static bool mep_invertable_branch_p (rtx);
193
static void mep_invert_branch (rtx, rtx);
194
static void mep_reorg_erepeat (rtx);
195
static void mep_jmp_return_reorg (rtx);
196
static void mep_reorg_addcombine (rtx);
197
static void mep_reorg (void);
198
static void mep_init_intrinsics (void);
199
static void mep_init_builtins (void);
200
static void mep_intrinsic_unavailable (int);
201
static bool mep_get_intrinsic_insn (int, const struct cgen_insn **);
202
static bool mep_get_move_insn (int, const struct cgen_insn **);
203
static rtx mep_convert_arg (enum machine_mode, rtx);
204
static rtx mep_convert_regnum (const struct cgen_regnum_operand *, rtx);
205
static rtx mep_legitimize_arg (const struct insn_operand_data *, rtx, int);
206
static void mep_incompatible_arg (const struct insn_operand_data *, rtx, int, tree);
207
static rtx mep_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
208
static int mep_adjust_cost (rtx, rtx, rtx, int);
209
static int mep_issue_rate (void);
210
static rtx mep_find_ready_insn (rtx *, int, enum attr_slot, int);
211
static void mep_move_ready_insn (rtx *, int, rtx);
212
static int mep_sched_reorder (FILE *, int, rtx *, int *, int);
213
static rtx mep_make_bundle (rtx, rtx);
214
static void mep_bundle_insns (rtx);
215
static bool mep_rtx_cost (rtx, int, int, int *, bool);
216
static int mep_address_cost (rtx, bool);
217
static void mep_setup_incoming_varargs (CUMULATIVE_ARGS *, enum machine_mode,
218
                                        tree, int *, int);
219
static bool mep_pass_by_reference (CUMULATIVE_ARGS * cum, enum machine_mode,
220
                                   const_tree, bool);
221
static bool mep_vector_mode_supported_p (enum machine_mode);
222
static bool mep_handle_option (size_t, const char *, int);
223
static rtx  mep_allocate_initial_value (rtx);
224
static void mep_asm_init_sections (void);
225
static int mep_comp_type_attributes (const_tree, const_tree);
226
static bool mep_narrow_volatile_bitfield (void);
227
static rtx mep_expand_builtin_saveregs (void);
228
static tree mep_build_builtin_va_list (void);
229
static void mep_expand_va_start (tree, rtx);
230
static tree mep_gimplify_va_arg_expr (tree, tree, gimple_seq *, gimple_seq *);
231
static bool mep_can_eliminate (const int, const int);
232
static void mep_trampoline_init (rtx, tree, rtx);
233
 
234
/* Initialize the GCC target structure.  */
235
 
236
#undef  TARGET_ASM_FUNCTION_PROLOGUE
237
#define TARGET_ASM_FUNCTION_PROLOGUE    mep_start_function
238
#undef  TARGET_ATTRIBUTE_TABLE
239
#define TARGET_ATTRIBUTE_TABLE          mep_attribute_table
240
#undef  TARGET_COMP_TYPE_ATTRIBUTES
241
#define TARGET_COMP_TYPE_ATTRIBUTES     mep_comp_type_attributes
242
#undef  TARGET_INSERT_ATTRIBUTES
243
#define TARGET_INSERT_ATTRIBUTES        mep_insert_attributes
244
#undef  TARGET_FUNCTION_ATTRIBUTE_INLINABLE_P
245
#define TARGET_FUNCTION_ATTRIBUTE_INLINABLE_P   mep_function_attribute_inlinable_p
246
#undef  TARGET_CAN_INLINE_P
247
#define TARGET_CAN_INLINE_P             mep_can_inline_p
248
#undef  TARGET_SECTION_TYPE_FLAGS
249
#define TARGET_SECTION_TYPE_FLAGS       mep_section_type_flags
250
#undef  TARGET_ASM_NAMED_SECTION
251
#define TARGET_ASM_NAMED_SECTION        mep_asm_named_section
252
#undef  TARGET_INIT_BUILTINS
253
#define TARGET_INIT_BUILTINS            mep_init_builtins
254
#undef  TARGET_EXPAND_BUILTIN
255
#define TARGET_EXPAND_BUILTIN           mep_expand_builtin
256
#undef  TARGET_SCHED_ADJUST_COST
257
#define TARGET_SCHED_ADJUST_COST        mep_adjust_cost
258
#undef  TARGET_SCHED_ISSUE_RATE
259
#define TARGET_SCHED_ISSUE_RATE         mep_issue_rate
260
#undef  TARGET_SCHED_REORDER
261
#define TARGET_SCHED_REORDER            mep_sched_reorder
262
#undef  TARGET_STRIP_NAME_ENCODING
263
#define TARGET_STRIP_NAME_ENCODING      mep_strip_name_encoding
264
#undef  TARGET_ASM_SELECT_SECTION
265
#define TARGET_ASM_SELECT_SECTION       mep_select_section
266
#undef  TARGET_ASM_UNIQUE_SECTION
267
#define TARGET_ASM_UNIQUE_SECTION       mep_unique_section
268
#undef  TARGET_ENCODE_SECTION_INFO
269
#define TARGET_ENCODE_SECTION_INFO      mep_encode_section_info
270
#undef  TARGET_FUNCTION_OK_FOR_SIBCALL
271
#define TARGET_FUNCTION_OK_FOR_SIBCALL  mep_function_ok_for_sibcall
272
#undef  TARGET_RTX_COSTS
273
#define TARGET_RTX_COSTS                mep_rtx_cost
274
#undef  TARGET_ADDRESS_COST
275
#define TARGET_ADDRESS_COST             mep_address_cost
276
#undef  TARGET_MACHINE_DEPENDENT_REORG
277
#define TARGET_MACHINE_DEPENDENT_REORG  mep_reorg
278
#undef  TARGET_SETUP_INCOMING_VARARGS
279
#define TARGET_SETUP_INCOMING_VARARGS   mep_setup_incoming_varargs
280
#undef  TARGET_PASS_BY_REFERENCE
281
#define TARGET_PASS_BY_REFERENCE        mep_pass_by_reference
282
#undef  TARGET_VECTOR_MODE_SUPPORTED_P
283
#define TARGET_VECTOR_MODE_SUPPORTED_P  mep_vector_mode_supported_p
284
#undef  TARGET_HANDLE_OPTION
285
#define TARGET_HANDLE_OPTION            mep_handle_option
286
#undef  TARGET_DEFAULT_TARGET_FLAGS
287
#define TARGET_DEFAULT_TARGET_FLAGS     TARGET_DEFAULT
288
#undef  TARGET_ALLOCATE_INITIAL_VALUE
289
#define TARGET_ALLOCATE_INITIAL_VALUE   mep_allocate_initial_value
290
#undef  TARGET_ASM_INIT_SECTIONS
291
#define TARGET_ASM_INIT_SECTIONS        mep_asm_init_sections
292
#undef  TARGET_RETURN_IN_MEMORY
293
#define TARGET_RETURN_IN_MEMORY         mep_return_in_memory
294
#undef  TARGET_NARROW_VOLATILE_BITFIELD
295
#define TARGET_NARROW_VOLATILE_BITFIELD mep_narrow_volatile_bitfield
296
#undef  TARGET_EXPAND_BUILTIN_SAVEREGS
297
#define TARGET_EXPAND_BUILTIN_SAVEREGS  mep_expand_builtin_saveregs
298
#undef  TARGET_BUILD_BUILTIN_VA_LIST
299
#define TARGET_BUILD_BUILTIN_VA_LIST    mep_build_builtin_va_list
300
#undef  TARGET_EXPAND_BUILTIN_VA_START
301
#define TARGET_EXPAND_BUILTIN_VA_START  mep_expand_va_start
302
#undef  TARGET_GIMPLIFY_VA_ARG_EXPR
303
#define TARGET_GIMPLIFY_VA_ARG_EXPR     mep_gimplify_va_arg_expr
304
#undef  TARGET_CAN_ELIMINATE
305
#define TARGET_CAN_ELIMINATE            mep_can_eliminate
306
#undef  TARGET_TRAMPOLINE_INIT
307
#define TARGET_TRAMPOLINE_INIT          mep_trampoline_init
308
 
309
struct gcc_target targetm = TARGET_INITIALIZER;
310
 
311
#define WANT_GCC_DEFINITIONS
312
#include "mep-intrin.h"
313
#undef WANT_GCC_DEFINITIONS
314
 
315
 
316
/* Command Line Option Support.  */
317
 
318
/* Per-hard-register leaf flags; all entries are written together by
   mep_set_leaf_registers.  Presumably consumed by the LEAF_REGISTERS
   target macro — confirm in mep.h.  */
char mep_leaf_registers [FIRST_PSEUDO_REGISTER];

/* True if we can use cmov instructions to move values back and forth
   between core and coprocessor registers.  */
bool mep_have_core_copro_moves_p;

/* True if we can use cmov instructions (or a work-alike) to move
   values between coprocessor registers.  */
bool mep_have_copro_copro_moves_p;

/* A table of all coprocessor instructions that can act like
   a coprocessor-to-coprocessor cmov.  */
static const int mep_cmov_insns[] = {
  mep_cmov,
  mep_cpmov,
  mep_fmovs,
  mep_caddi3,
  mep_csubi3,
  mep_candi3,
  mep_cori3,
  mep_cxori3,
  mep_cand3,
  mep_cor3
};

/* Nonzero once -mtiny= has been given explicitly; used by
   mep_override_options to diagnose conflicts with -ms/-mm.  */
static int option_mtiny_specified = 0;
344
 
345
 
346
static void
347
mep_set_leaf_registers (int enable)
348
{
349
  int i;
350
 
351
  if (mep_leaf_registers[0] != enable)
352
    for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
353
      mep_leaf_registers[i] = enable;
354
}
355
 
356
void
357
mep_conditional_register_usage (char *fixed_regs, char *call_used_regs)
358
{
359
  int i;
360
 
361
  if (!TARGET_OPT_MULT && !TARGET_OPT_DIV)
362
    {
363
      fixed_regs[HI_REGNO] = 1;
364
      fixed_regs[LO_REGNO] = 1;
365
      call_used_regs[HI_REGNO] = 1;
366
      call_used_regs[LO_REGNO] = 1;
367
    }
368
 
369
  for (i = FIRST_SHADOW_REGISTER; i <= LAST_SHADOW_REGISTER; i++)
370
    global_regs[i] = 1;
371
}
372
 
373
void
374
mep_optimization_options (void)
375
{
376
  /* The first scheduling pass often increases register pressure and tends
377
     to result in more spill code.  Only run it when specifically asked.  */
378
  flag_schedule_insns = 0;
379
 
380
  /* Using $fp doesn't gain us much, even when debugging is important.  */
381
  flag_omit_frame_pointer = 1;
382
}
383
 
384
void
385
mep_override_options (void)
386
{
387
  if (flag_pic == 1)
388
    warning (OPT_fpic, "-fpic is not supported");
389
  if (flag_pic == 2)
390
    warning (OPT_fPIC, "-fPIC is not supported");
391
  if (TARGET_S && TARGET_M)
392
    error ("only one of -ms and -mm may be given");
393
  if (TARGET_S && TARGET_L)
394
    error ("only one of -ms and -ml may be given");
395
  if (TARGET_M && TARGET_L)
396
    error ("only one of -mm and -ml may be given");
397
  if (TARGET_S && option_mtiny_specified)
398
    error ("only one of -ms and -mtiny= may be given");
399
  if (TARGET_M && option_mtiny_specified)
400
    error ("only one of -mm and -mtiny= may be given");
401
  if (TARGET_OPT_CLIP && ! TARGET_OPT_MINMAX)
402
    warning (0, "-mclip currently has no effect without -mminmax");
403
 
404
  if (mep_const_section)
405
    {
406
      if (strcmp (mep_const_section, "tiny") != 0
407
          && strcmp (mep_const_section, "near") != 0
408
          && strcmp (mep_const_section, "far") != 0)
409
        error ("-mc= must be -mc=tiny, -mc=near, or -mc=far");
410
    }
411
 
412
  if (TARGET_S)
413
    mep_tiny_cutoff = 65536;
414
  if (TARGET_M)
415
    mep_tiny_cutoff = 0;
416
  if (TARGET_L && ! option_mtiny_specified)
417
    mep_tiny_cutoff = 0;
418
 
419
  if (TARGET_64BIT_CR_REGS)
420
    flag_split_wide_types = 0;
421
 
422
  init_machine_status = mep_init_machine_status;
423
  mep_init_intrinsics ();
424
}
425
 
426
/* Pattern Support - constraints, predicates, expanders.  */
427
 
428
/* MEP has very few instructions that can refer to the span of
429
   addresses used by symbols, so it's common to check for them.  */
430
 
431
static bool
432
symbol_p (rtx x)
433
{
434
  int c = GET_CODE (x);
435
 
436
  return (c == CONST_INT
437
          || c == CONST
438
          || c == SYMBOL_REF);
439
}
440
 
441
static bool
442
symbolref_p (rtx x)
443
{
444
  int c;
445
 
446
  if (GET_CODE (x) != MEM)
447
    return false;
448
 
449
  c = GET_CODE (XEXP (x, 0));
450
  return (c == CONST_INT
451
          || c == CONST
452
          || c == SYMBOL_REF);
453
}
454
 
455
/* static const char *reg_class_names[] = REG_CLASS_NAMES; */
456
 
457
#define GEN_REG(R, STRICT)                              \
458
  (GR_REGNO_P (R)                                       \
459
   || (!STRICT                                          \
460
       && ((R) == ARG_POINTER_REGNUM                    \
461
           || (R) >= FIRST_PSEUDO_REGISTER)))
462
 
463
static char pattern[12], *patternp;
464
static GTY(()) rtx patternr[12];
465
#define RTX_IS(x) (strcmp (pattern, x) == 0)
466
 
467
static void
468
encode_pattern_1 (rtx x)
469
{
470
  int i;
471
 
472
  if (patternp == pattern + sizeof (pattern) - 2)
473
    {
474
      patternp[-1] = '?';
475
      return;
476
    }
477
 
478
  patternr[patternp-pattern] = x;
479
 
480
  switch (GET_CODE (x))
481
    {
482
    case REG:
483
      *patternp++ = 'r';
484
      break;
485
    case MEM:
486
      *patternp++ = 'm';
487
    case CONST:
488
      encode_pattern_1 (XEXP(x, 0));
489
      break;
490
    case PLUS:
491
      *patternp++ = '+';
492
      encode_pattern_1 (XEXP(x, 0));
493
      encode_pattern_1 (XEXP(x, 1));
494
      break;
495
    case LO_SUM:
496
      *patternp++ = 'L';
497
      encode_pattern_1 (XEXP(x, 0));
498
      encode_pattern_1 (XEXP(x, 1));
499
      break;
500
    case HIGH:
501
      *patternp++ = 'H';
502
      encode_pattern_1 (XEXP(x, 0));
503
      break;
504
    case SYMBOL_REF:
505
      *patternp++ = 's';
506
      break;
507
    case LABEL_REF:
508
      *patternp++ = 'l';
509
      break;
510
    case CONST_INT:
511
    case CONST_DOUBLE:
512
      *patternp++ = 'i';
513
      break;
514
    case UNSPEC:
515
      *patternp++ = 'u';
516
      *patternp++ = '0' + XCINT(x, 1, UNSPEC);
517
      for (i=0; i<XVECLEN (x, 0); i++)
518
        encode_pattern_1 (XVECEXP (x, 0, i));
519
      break;
520
    case USE:
521
      *patternp++ = 'U';
522
      break;
523
    default:
524
      *patternp++ = '?';
525
#if 0
526
      fprintf (stderr, "can't encode pattern %s\n", GET_RTX_NAME(GET_CODE(x)));
527
      debug_rtx (x);
528
      gcc_unreachable ();
529
#endif
530
      break;
531
    }
532
}
533
 
534
static void
535
encode_pattern (rtx x)
536
{
537
  patternp = pattern;
538
  encode_pattern_1 (x);
539
  *patternp = 0;
540
}
541
 
542
int
543
mep_section_tag (rtx x)
544
{
545
  const char *name;
546
 
547
  while (1)
548
    {
549
      switch (GET_CODE (x))
550
        {
551
        case MEM:
552
        case CONST:
553
          x = XEXP (x, 0);
554
          break;
555
        case UNSPEC:
556
          x = XVECEXP (x, 0, 0);
557
          break;
558
        case PLUS:
559
          if (GET_CODE (XEXP (x, 1)) != CONST_INT)
560
            return 0;
561
          x = XEXP (x, 0);
562
          break;
563
        default:
564
          goto done;
565
        }
566
    }
567
 done:
568
  if (GET_CODE (x) != SYMBOL_REF)
569
    return 0;
570
  name = XSTR (x, 0);
571
  if (name[0] == '@' && name[2] == '.')
572
    {
573
      if (name[1] == 'i' || name[1] == 'I')
574
        {
575
          if (name[1] == 'I')
576
            return 'f'; /* near */
577
          return 'n'; /* far */
578
        }
579
      return name[1];
580
    }
581
  return 0;
582
}
583
 
584
int
585
mep_regno_reg_class (int regno)
586
{
587
  switch (regno)
588
    {
589
    case SP_REGNO:              return SP_REGS;
590
    case TP_REGNO:              return TP_REGS;
591
    case GP_REGNO:              return GP_REGS;
592
    case 0:                      return R0_REGS;
593
    case HI_REGNO:              return HI_REGS;
594
    case LO_REGNO:              return LO_REGS;
595
    case ARG_POINTER_REGNUM:    return GENERAL_REGS;
596
    }
597
 
598
  if (GR_REGNO_P (regno))
599
    return regno < FIRST_GR_REGNO + 8 ? TPREL_REGS : GENERAL_REGS;
600
  if (CONTROL_REGNO_P (regno))
601
    return CONTROL_REGS;
602
 
603
  if (CR_REGNO_P (regno))
604
    {
605
      int i, j;
606
 
607
      /* Search for the register amongst user-defined subclasses of
608
         the coprocessor registers.  */
609
      for (i = USER0_REGS; i <= USER3_REGS; ++i)
610
        {
611
          if (! TEST_HARD_REG_BIT (reg_class_contents[i], regno))
612
            continue;
613
          for (j = 0; j < N_REG_CLASSES; ++j)
614
            {
615
              enum reg_class sub = reg_class_subclasses[i][j];
616
 
617
              if (sub == LIM_REG_CLASSES)
618
                return i;
619
              if (TEST_HARD_REG_BIT (reg_class_contents[sub], regno))
620
                break;
621
            }
622
        }
623
 
624
      return LOADABLE_CR_REGNO_P (regno) ? LOADABLE_CR_REGS : CR_REGS;
625
    }
626
 
627
  if (CCR_REGNO_P (regno))
628
    return CCR_REGS;
629
 
630
  gcc_assert (regno >= FIRST_SHADOW_REGISTER && regno <= LAST_SHADOW_REGISTER);
631
  return NO_REGS;
632
}
633
 
634
#if 0
635
int
636
mep_reg_class_from_constraint (int c, const char *str)
637
{
638
  switch (c)
639
    {
640
    case 'a':
641
      return SP_REGS;
642
    case 'b':
643
      return TP_REGS;
644
    case 'c':
645
      return CONTROL_REGS;
646
    case 'd':
647
      return HILO_REGS;
648
    case 'e':
649
      {
650
        switch (str[1])
651
          {
652
          case 'm':
653
            return LOADABLE_CR_REGS;
654
          case 'x':
655
            return mep_have_copro_copro_moves_p ? CR_REGS : NO_REGS;
656
          case 'r':
657
            return mep_have_core_copro_moves_p ? CR_REGS : NO_REGS;
658
          default:
659
            return NO_REGS;
660
          }
661
      }
662
    case 'h':
663
      return HI_REGS;
664
    case 'j':
665
      return RPC_REGS;
666
    case 'l':
667
      return LO_REGS;
668
    case 't':
669
      return TPREL_REGS;
670
    case 'v':
671
      return GP_REGS;
672
    case 'x':
673
      return CR_REGS;
674
    case 'y':
675
      return CCR_REGS;
676
    case 'z':
677
      return R0_REGS;
678
 
679
    case 'A':
680
    case 'B':
681
    case 'C':
682
    case 'D':
683
      {
684
        enum reg_class which = c - 'A' + USER0_REGS;
685
        return (reg_class_size[which] > 0 ? which : NO_REGS);
686
      }
687
 
688
    default:
689
      return NO_REGS;
690
    }
691
}
692
 
693
bool
694
mep_const_ok_for_letter_p (HOST_WIDE_INT value, int c)
695
{
696
  switch (c)
697
    {
698
      case 'I': return value >= -32768 && value <      32768;
699
      case 'J': return value >=      0 && value <      65536;
700
      case 'K': return value >=      0 && value < 0x01000000;
701
      case 'L': return value >=    -32 && value <         32;
702
      case 'M': return value >=      0 && value <         32;
703
      case 'N': return value >=      0 && value <         16;
704
      case 'O':
705
        if (value & 0xffff)
706
          return false;
707
        return value >= -2147483647-1 && value <= 2147483647;
708
    default:
709
      gcc_unreachable ();
710
    }
711
}
712
 
713
bool
714
mep_extra_constraint (rtx value, int c)
715
{
716
  encode_pattern (value);
717
 
718
  switch (c)
719
    {
720
    case 'R':
721
      /* For near symbols, like what call uses.  */
722
      if (GET_CODE (value) == REG)
723
        return 0;
724
      return mep_call_address_operand (value, GET_MODE (value));
725
 
726
    case 'S':
727
      /* For signed 8-bit immediates.  */
728
      return (GET_CODE (value) == CONST_INT
729
              && INTVAL (value) >= -128
730
              && INTVAL (value) <= 127);
731
 
732
    case 'T':
733
      /* For tp/gp relative symbol values.  */
734
      return (RTX_IS ("u3s") || RTX_IS ("u2s")
735
              || RTX_IS ("+u3si") || RTX_IS ("+u2si"));
736
 
737
    case 'U':
738
      /* Non-absolute memories.  */
739
      return GET_CODE (value) == MEM && ! CONSTANT_P (XEXP (value, 0));
740
 
741
    case 'W':
742
      /* %hi(sym) */
743
      return RTX_IS ("Hs");
744
 
745
    case 'Y':
746
      /* Register indirect.  */
747
      return RTX_IS ("mr");
748
 
749
    case 'Z':
750
      return mep_section_tag (value) == 'c' && RTX_IS ("ms");
751
    }
752
 
753
  return false;
754
}
755
#endif
756
 
757
#undef PASS
758
#undef FAIL
759
 
760
static bool
761
const_in_range (rtx x, int minv, int maxv)
762
{
763
  return (GET_CODE (x) == CONST_INT
764
          && INTVAL (x) >= minv
765
          && INTVAL (x) <= maxv);
766
}
767
 
768
/* Given three integer registers DEST, SRC1 and SRC2, return an rtx X
769
   such that "mulr DEST,X" will calculate DEST = SRC1 * SRC2.  If a move
770
   is needed, emit it before INSN if INSN is nonnull, otherwise emit it
771
   at the end of the insn stream.  */
772
 
773
rtx
774
mep_mulr_source (rtx insn, rtx dest, rtx src1, rtx src2)
775
{
776
  if (rtx_equal_p (dest, src1))
777
    return src2;
778
  else if (rtx_equal_p (dest, src2))
779
    return src1;
780
  else
781
    {
782
      if (insn == 0)
783
        emit_insn (gen_movsi (copy_rtx (dest), src1));
784
      else
785
        emit_insn_before (gen_movsi (copy_rtx (dest), src1), insn);
786
      return src2;
787
    }
788
}
789
 
790
/* Replace INSN's pattern with PATTERN, a multiplication PARALLEL.
791
   Change the last element of PATTERN from (clobber (scratch:SI))
792
   to (clobber (reg:SI HI_REGNO)).  */
793
 
794
static void
795
mep_rewrite_mult (rtx insn, rtx pattern)
796
{
797
  rtx hi_clobber;
798
 
799
  hi_clobber = XVECEXP (pattern, 0, XVECLEN (pattern, 0) - 1);
800
  XEXP (hi_clobber, 0) = gen_rtx_REG (SImode, HI_REGNO);
801
  PATTERN (insn) = pattern;
802
  INSN_CODE (insn) = -1;
803
}
804
 
805
/* Subroutine of mep_reuse_lo_p.  Rewrite instruction INSN so that it
806
   calculates SRC1 * SRC2 and stores the result in $lo.  Also make it
807
   store the result in DEST if nonnull.  */
808
 
809
static void
810
mep_rewrite_mulsi3 (rtx insn, rtx dest, rtx src1, rtx src2)
811
{
812
  rtx lo, pattern;
813
 
814
  lo = gen_rtx_REG (SImode, LO_REGNO);
815
  if (dest)
816
    pattern = gen_mulsi3r (lo, dest, copy_rtx (dest),
817
                           mep_mulr_source (insn, dest, src1, src2));
818
  else
819
    pattern = gen_mulsi3_lo (lo, src1, src2);
820
  mep_rewrite_mult (insn, pattern);
821
}
822
 
823
/* Like mep_rewrite_mulsi3, but calculate SRC1 * SRC2 + SRC3.  First copy
824
   SRC3 into $lo, then use either madd or maddr.  The move into $lo will
825
   be deleted by a peephole2 if SRC3 is already in $lo.  */
826
 
827
static void
828
mep_rewrite_maddsi3 (rtx insn, rtx dest, rtx src1, rtx src2, rtx src3)
829
{
830
  rtx lo, pattern;
831
 
832
  lo = gen_rtx_REG (SImode, LO_REGNO);
833
  emit_insn_before (gen_movsi (copy_rtx (lo), src3), insn);
834
  if (dest)
835
    pattern = gen_maddsi3r (lo, dest, copy_rtx (dest),
836
                            mep_mulr_source (insn, dest, src1, src2),
837
                            copy_rtx (lo));
838
  else
839
    pattern = gen_maddsi3_lo (lo, src1, src2, copy_rtx (lo));
840
  mep_rewrite_mult (insn, pattern);
841
}
842
 
843
/* Return true if $lo has the same value as integer register GPR when
844
   instruction INSN is reached.  If necessary, rewrite the instruction
845
   that sets $lo so that it uses a proper SET, not a CLOBBER.  LO is an
846
   rtx for (reg:SI LO_REGNO).
847
 
848
   This function is intended to be used by the peephole2 pass.  Since
849
   that pass goes from the end of a basic block to the beginning, and
850
   propagates liveness information on the way, there is no need to
851
   update register notes here.
852
 
853
   If GPR_DEAD_P is true on entry, and this function returns true,
854
   then the caller will replace _every_ use of GPR in and after INSN
855
   with LO.  This means that if the instruction that sets $lo is a
856
   mulr- or maddr-type instruction, we can rewrite it to use mul or
857
   madd instead.  In combination with the copy progagation pass,
858
   this allows us to replace sequences like:
859
 
860
        mov GPR,R1
861
        mulr GPR,R2
862
 
863
   with:
864
 
865
        mul R1,R2
866
 
867
   if GPR is no longer used.  */
868
 
869
/* Worker for mep_reuse_lo_p: scan backwards from INSN within the basic
   block for the instruction that set $lo (LO) and decide whether that
   value equals GPR's.  May rewrite the defining multiplication via
   mep_rewrite_mulsi3/mep_rewrite_maddsi3 (dropping its GPR destination
   when GPR_DEAD_P says every later use will be replaced with $lo).
   Clobbers recog_data; the caller re-extracts.  */

static bool
mep_reuse_lo_p_1 (rtx lo, rtx gpr, rtx insn, bool gpr_dead_p)
{
  do
    {
      insn = PREV_INSN (insn);
      if (INSN_P (insn))
        switch (recog_memoized (insn))
          {
          case CODE_FOR_mulsi3_1:
            extract_insn (insn);
            if (rtx_equal_p (recog_data.operand[0], gpr))
              {
                /* This multiplication defines GPR; rewrite it to put
                   the product in $lo (and still in GPR unless GPR is
                   dead afterwards).  */
                mep_rewrite_mulsi3 (insn,
                                    gpr_dead_p ? NULL : recog_data.operand[0],
                                    recog_data.operand[1],
                                    recog_data.operand[2]);
                return true;
              }
            return false;

          case CODE_FOR_maddsi3:
            extract_insn (insn);
            if (rtx_equal_p (recog_data.operand[0], gpr))
              {
                /* Likewise for multiply-add.  */
                mep_rewrite_maddsi3 (insn,
                                     gpr_dead_p ? NULL : recog_data.operand[0],
                                     recog_data.operand[1],
                                     recog_data.operand[2],
                                     recog_data.operand[3]);
                return true;
              }
            return false;

          case CODE_FOR_mulsi3r:
          case CODE_FOR_maddsi3r:
            /* Already a mulr/maddr form: $lo is reusable exactly when
               GPR is its operand 1.  */
            extract_insn (insn);
            return rtx_equal_p (recog_data.operand[1], gpr);

          default:
            /* Any other insn that writes $lo or GPR, or has volatile
               effects, ends the search unsuccessfully.  */
            if (reg_set_p (lo, insn)
                || reg_set_p (gpr, insn)
                || volatile_insn_p (PATTERN (insn)))
              return false;

            /* A use of GPR between the definition and INSN means the
               caller cannot drop GPR as a destination after all.  */
            if (gpr_dead_p && reg_referenced_p (gpr, PATTERN (insn)))
              gpr_dead_p = false;
            break;
          }
    }
  while (!NOTE_INSN_BASIC_BLOCK_P (insn));
  return false;
}
922
 
923
/* A wrapper around mep_reuse_lo_p_1 that preserves recog_data.  */
924
 
925
bool
926
mep_reuse_lo_p (rtx lo, rtx gpr, rtx insn, bool gpr_dead_p)
927
{
928
  bool result = mep_reuse_lo_p_1 (lo, gpr, insn, gpr_dead_p);
929
  extract_insn (insn);
930
  return result;
931
}
932
 
933
/* Return true if SET can be turned into a post-modify load or store
   that adds OFFSET to GPR.  In other words, return true if SET can be
   changed into:

       (parallel [SET (set GPR (plus:SI GPR OFFSET))]).

   It's OK to change SET to an equivalent operation in order to
   make it match.

   NOTE: on success this function rewrites SET in place (it replaces
   the MEM and the register operand), so a "true" return commits the
   caller to using the modified pattern.  */

static bool
mep_use_post_modify_for_set_p (rtx set, rtx gpr, rtx offset)
{
  rtx *reg, *mem;
  unsigned int reg_bytes, mem_bytes;
  enum machine_mode reg_mode, mem_mode;

  /* Only simple SETs can be converted.  */
  if (GET_CODE (set) != SET)
    return false;

  /* Point REG to what we hope will be the register side of the set and
     MEM to what we hope will be the memory side.  */
  if (GET_CODE (SET_DEST (set)) == MEM)
    {
      mem = &SET_DEST (set);
      reg = &SET_SRC (set);
    }
  else
    {
      reg = &SET_DEST (set);
      mem = &SET_SRC (set);
      /* Look through a sign extension on the load side.  */
      if (GET_CODE (*mem) == SIGN_EXTEND)
        mem = &XEXP (*mem, 0);
    }

  /* Check that *REG is a suitable coprocessor register.  */
  if (GET_CODE (*reg) != REG || !LOADABLE_CR_REGNO_P (REGNO (*reg)))
    return false;

  /* Check that *MEM is a suitable memory reference: a plain
     dereference of GPR, which the post-modify will increment.  */
  if (GET_CODE (*mem) != MEM || !rtx_equal_p (XEXP (*mem, 0), gpr))
    return false;

  /* Get the number of bytes in each operand.  */
  mem_bytes = GET_MODE_SIZE (GET_MODE (*mem));
  reg_bytes = GET_MODE_SIZE (GET_MODE (*reg));

  /* Check that OFFSET is suitably aligned for the access size.  */
  if (INTVAL (offset) & (mem_bytes - 1))
    return false;

  /* Convert *MEM to a normal integer mode.  */
  mem_mode = mode_for_size (mem_bytes * BITS_PER_UNIT, MODE_INT, 0);
  *mem = change_address (*mem, mem_mode, NULL);

  /* Adjust *REG as well.  Copy first so we don't mutate a shared rtx.  */
  *reg = shallow_copy_rtx (*reg);
  if (reg == &SET_DEST (set) && reg_bytes < UNITS_PER_WORD)
    {
      /* SET is a subword load.  Convert it to an explicit extension.  */
      PUT_MODE (*reg, SImode);
      *mem = gen_rtx_SIGN_EXTEND (SImode, *mem);
    }
  else
    {
      reg_mode = mode_for_size (reg_bytes * BITS_PER_UNIT, MODE_INT, 0);
      PUT_MODE (*reg, reg_mode);
    }
  return true;
}
1003
 
1004
/* Return the effect of frame-related instruction INSN.  */
1005
 
1006
static rtx
1007
mep_frame_expr (rtx insn)
1008
{
1009
  rtx note, expr;
1010
 
1011
  note = find_reg_note (insn, REG_FRAME_RELATED_EXPR, 0);
1012
  expr = (note != 0 ? XEXP (note, 0) : copy_rtx (PATTERN (insn)));
1013
  RTX_FRAME_RELATED_P (expr) = 1;
1014
  return expr;
1015
}
1016
 
1017
/* Merge instructions INSN1 and INSN2 using a PARALLEL.  Store the
   new pattern in INSN1; INSN2 will be deleted by the caller.  */

static void
mep_make_parallel (rtx insn1, rtx insn2)
{
  rtx expr;

  /* If INSN2 affects the frame, carry its (and possibly INSN1's)
     frame-related effect over onto the merged insn as a
     REG_FRAME_RELATED_EXPR note, so unwind info stays correct.  */
  if (RTX_FRAME_RELATED_P (insn2))
    {
      expr = mep_frame_expr (insn2);
      if (RTX_FRAME_RELATED_P (insn1))
        /* Both insns matter for unwinding; record them in order.  */
        expr = gen_rtx_SEQUENCE (VOIDmode,
                                 gen_rtvec (2, mep_frame_expr (insn1), expr));
      set_unique_reg_note (insn1, REG_FRAME_RELATED_EXPR, expr);
      RTX_FRAME_RELATED_P (insn1) = 1;
    }

  PATTERN (insn1) = gen_rtx_PARALLEL (VOIDmode,
                                      gen_rtvec (2, PATTERN (insn1),
                                                 PATTERN (insn2)));
  /* The pattern changed, so force INSN1 to be re-recognized.  */
  INSN_CODE (insn1) = -1;
}
1040
 
1041
/* SET_INSN is an instruction that adds OFFSET to REG.  Go back through
   the basic block to see if any previous load or store instruction can
   be persuaded to do SET_INSN as a side-effect.  Return true if so.  */

static bool
mep_use_post_modify_p_1 (rtx set_insn, rtx reg, rtx offset)
{
  rtx insn;

  /* Walk backwards from SET_INSN to the start of the basic block.  */
  insn = set_insn;
  do
    {
      insn = PREV_INSN (insn);
      if (INSN_P (insn))
        {
          /* If this insn is a load/store through REG that can absorb
             the increment, merge SET_INSN into it as a post-modify.  */
          if (mep_use_post_modify_for_set_p (PATTERN (insn), reg, offset))
            {
              mep_make_parallel (insn, set_insn);
              return true;
            }

          /* Stop if hoisting the increment past this insn would change
             semantics: it writes or reads REG, or is volatile.  */
          if (reg_set_p (reg, insn)
              || reg_referenced_p (reg, PATTERN (insn))
              || volatile_insn_p (PATTERN (insn)))
            return false;
        }
    }
  while (!NOTE_INSN_BASIC_BLOCK_P (insn));
  return false;
}
1071
 
1072
/* A wrapper around mep_use_post_modify_p_1 that preserves recog_data.  */
1073
 
1074
bool
1075
mep_use_post_modify_p (rtx insn, rtx reg, rtx offset)
1076
{
1077
  bool result = mep_use_post_modify_p_1 (insn, reg, offset);
1078
  extract_insn (insn);
1079
  return result;
1080
}
1081
 
1082
bool
1083
mep_allow_clip (rtx ux, rtx lx, int s)
1084
{
1085
  HOST_WIDE_INT u = INTVAL (ux);
1086
  HOST_WIDE_INT l = INTVAL (lx);
1087
  int i;
1088
 
1089
  if (!TARGET_OPT_CLIP)
1090
    return false;
1091
 
1092
  if (s)
1093
    {
1094
      for (i = 0; i < 30; i ++)
1095
        if ((u == ((HOST_WIDE_INT) 1 << i) - 1)
1096
            && (l == - ((HOST_WIDE_INT) 1 << i)))
1097
          return true;
1098
    }
1099
  else
1100
    {
1101
      if (l != 0)
1102
        return false;
1103
 
1104
      for (i = 0; i < 30; i ++)
1105
        if ((u == ((HOST_WIDE_INT) 1 << i) - 1))
1106
          return true;
1107
    }
1108
  return false;
1109
}
1110
 
1111
bool
1112
mep_bit_position_p (rtx x, bool looking_for)
1113
{
1114
  if (GET_CODE (x) != CONST_INT)
1115
    return false;
1116
  switch ((int) INTVAL(x) & 0xff)
1117
    {
1118
    case 0x01: case 0x02: case 0x04: case 0x08:
1119
    case 0x10: case 0x20: case 0x40: case 0x80:
1120
      return looking_for;
1121
    case 0xfe: case 0xfd: case 0xfb: case 0xf7:
1122
    case 0xef: case 0xdf: case 0xbf: case 0x7f:
1123
      return !looking_for;
1124
    }
1125
  return false;
1126
}
1127
 
1128
/* Return true if moving SRC into DEST must be done with a multi-insn
   sequence rather than a single move: far-section ('f') sources,
   symbol-plus-offset sums whose offset is outside [-65536, 0xffffff],
   and moves into hard registers above r7 (which presumably lack the
   short-form move encodings -- TODO confirm against the ISA).  */

static bool
move_needs_splitting (rtx dest, rtx src,
                      enum machine_mode mode ATTRIBUTE_UNUSED)
{
  int s = mep_section_tag (src);

  /* Strip CONST and MEM wrappers until we reach a symbolic term;
     anything else (e.g. a plain register or constant) never needs
     splitting.  */
  while (1)
    {
      if (GET_CODE (src) == CONST
          || GET_CODE (src) == MEM)
        src = XEXP (src, 0);
      else if (GET_CODE (src) == SYMBOL_REF
               || GET_CODE (src) == LABEL_REF
               || GET_CODE (src) == PLUS)
        break;
      else
        return false;
    }
  if (s == 'f'
      || (GET_CODE (src) == PLUS
          && GET_CODE (XEXP (src, 1)) == CONST_INT
          && (INTVAL (XEXP (src, 1)) < -65536
              || INTVAL (XEXP (src, 1)) > 0xffffff))
      || (GET_CODE (dest) == REG
          && REGNO (dest) > 7 && REGNO (dest) < FIRST_PSEUDO_REGISTER))
    return true;
  return false;
}
1156
 
1157
/* Return true if the move described by OPERANDS must be split into
   multiple instructions.  SYMBOLIC is nonzero when operand 1 is
   symbolic; otherwise the decision is based on which immediate
   constraints (I/J/O/K) the constant satisfies.  */

bool
mep_split_mov (rtx *operands, int symbolic)
{
  if (symbolic)
    {
      if (move_needs_splitting (operands[0], operands[1], SImode))
        return true;
      return false;
    }

  /* Non-constant sources never need splitting here.  */
  if (GET_CODE (operands[1]) != CONST_INT)
    return false;

  /* Constants matching the I, J or O constraints fit a single insn.  */
  if (constraint_satisfied_p (operands[1], CONSTRAINT_I)
      || constraint_satisfied_p (operands[1], CONSTRAINT_J)
      || constraint_satisfied_p (operands[1], CONSTRAINT_O))
    return false;

  /* K constants are only single-insn when the destination is (or can
     still become) one of the low registers r0-r7.  */
  if (((!reload_completed && !reload_in_progress)
       || (REG_P (operands[0]) && REGNO (operands[0]) < 8))
      && constraint_satisfied_p (operands[1], CONSTRAINT_K))
    return false;

  return true;
}
1182
 
1183
/* Irritatingly, the "jsrv" insn *toggles* PSW.OM rather than set
1184
   it to one specific value.  So the insn chosen depends on whether
1185
   the source and destination modes match.  */
1186
 
1187
bool
1188
mep_vliw_mode_match (rtx tgt)
1189
{
1190
  bool src_vliw = mep_vliw_function_p (cfun->decl);
1191
  bool tgt_vliw = INTVAL (tgt);
1192
 
1193
  return src_vliw == tgt_vliw;
1194
}
1195
 
1196
/* Like the above, but also test for near/far mismatches.  */
1197
 
1198
bool
1199
mep_vliw_jmp_match (rtx tgt)
1200
{
1201
  bool src_vliw = mep_vliw_function_p (cfun->decl);
1202
  bool tgt_vliw = INTVAL (tgt);
1203
 
1204
  if (mep_section_tag (DECL_RTL (cfun->decl)) == 'f')
1205
    return false;
1206
 
1207
  return src_vliw == tgt_vliw;
1208
}
1209
 
1210
bool
1211
mep_multi_slot (rtx x)
1212
{
1213
  return get_attr_slot (x) == SLOT_MULTI;
1214
}
1215
 
1216
 
1217
bool
1218
mep_legitimate_constant_p (rtx x)
1219
{
1220
  /* We can't convert symbol values to gp- or tp-rel values after
1221
     reload, as reload might have used $gp or $tp for other
1222
     purposes.  */
1223
  if (GET_CODE (x) == SYMBOL_REF && (reload_in_progress || reload_completed))
1224
    {
1225
      char e = mep_section_tag (x);
1226
      return (e != 't' && e != 'b');
1227
    }
1228
  return 1;
1229
}
1230
 
1231
/* Return true if X is a legitimate address for a MODE-sized access;
   STRICT selects strict register checking via GEN_REG.

   Be careful not to use macros that need to be compiled one way for
   strict, and another way for not-strict, like REG_OK_FOR_BASE_P.  */

bool
mep_legitimate_address (enum machine_mode mode, rtx x, int strict)
{
  int the_tag;

#define DEBUG_LEGIT 0
#if DEBUG_LEGIT
  fprintf (stderr, "legit: mode %s strict %d ", mode_name[mode], strict);
  debug_rtx (x);
#endif

  /* %lo(sym)[reg]: a LO_SUM of a base register and a constant.  */
  if (GET_CODE (x) == LO_SUM
      && GET_CODE (XEXP (x, 0)) == REG
      && GEN_REG (REGNO (XEXP (x, 0)), strict)
      && CONSTANT_P (XEXP (x, 1)))
    {
      if (GET_MODE_SIZE (mode) > 4)
        {
          /* We will end up splitting this, and lo_sums are not
             offsettable for us.  */
#if DEBUG_LEGIT
          fprintf(stderr, " - nope, %%lo(sym)[reg] not splittable\n");
#endif
          return false;
        }
#if DEBUG_LEGIT
      fprintf (stderr, " - yup, %%lo(sym)[reg]\n");
#endif
      return true;
    }

  /* Plain register indirect: [reg].  */
  if (GET_CODE (x) == REG
      && GEN_REG (REGNO (x), strict))
    {
#if DEBUG_LEGIT
      fprintf (stderr, " - yup, [reg]\n");
#endif
      return true;
    }

  /* Base plus 16-bit signed displacement: [reg+const].  */
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == REG
      && GEN_REG (REGNO (XEXP (x, 0)), strict)
      && const_in_range (XEXP (x, 1), -32768, 32767))
    {
#if DEBUG_LEGIT
      fprintf (stderr, " - yup, [reg+const]\n");
#endif
      return true;
    }

  /* Base plus a gp-/tp-relative UNSPEC, optionally with a constant
     offset folded in: [reg+unspec] or [reg+(unspec+const)].  */
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == REG
      && GEN_REG (REGNO (XEXP (x, 0)), strict)
      && GET_CODE (XEXP (x, 1)) == CONST
      && (GET_CODE (XEXP (XEXP (x, 1), 0)) == UNSPEC
          || (GET_CODE (XEXP (XEXP (x, 1), 0)) == PLUS
              && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 0)) == UNSPEC
              && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 1)) == CONST_INT)))
    {
#if DEBUG_LEGIT
      fprintf (stderr, " - yup, [reg+unspec]\n");
#endif
      return true;
    }

  the_tag = mep_section_tag (x);

  /* Far-section symbols are never directly addressable.  */
  if (the_tag == 'f')
    {
#if DEBUG_LEGIT
      fprintf (stderr, " - nope, [far]\n");
#endif
      return false;
    }

  /* A bare symbol is acceptable as a call target (VOIDmode).  */
  if (mode == VOIDmode
      && GET_CODE (x) == SYMBOL_REF)
    {
#if DEBUG_LEGIT
      fprintf (stderr, " - yup, call [symbol]\n");
#endif
      return true;
    }

  /* Absolute constant addresses for word-sized accesses; CONST_INTs
     must be non-negative, word-aligned and fit in 20 bits.  */
  if ((mode == SImode || mode == SFmode)
      && CONSTANT_P (x)
      && LEGITIMATE_CONSTANT_P (x)
      && the_tag != 't' && the_tag != 'b')
    {
      if (GET_CODE (x) != CONST_INT
          || (INTVAL (x) <= 0xfffff
              && INTVAL (x) >= 0
              && (INTVAL (x) % 4) == 0))
        {
#if DEBUG_LEGIT
          fprintf (stderr, " - yup, [const]\n");
#endif
          return true;
        }
    }

#if DEBUG_LEGIT
  fprintf (stderr, " - nope.\n");
#endif
  return false;
}
1341
 
1342
/* Fix up address *X for reload.  Return 1 if a reload was pushed
   (and *X handled), 0 to let the generic reload machinery deal with
   it.  Called via LEGITIMIZE_RELOAD_ADDRESS.  */

int
mep_legitimize_reload_address (rtx *x, enum machine_mode mode, int opnum,
                               enum reload_type type,
                               int ind_levels ATTRIBUTE_UNUSED)
{
  if (GET_CODE (*x) == PLUS
      && GET_CODE (XEXP (*x, 0)) == MEM
      && GET_CODE (XEXP (*x, 1)) == REG)
    {
      /* GCC will by default copy the MEM into a REG, which results in
         an invalid address.  For us, the best thing to do is move the
         whole expression to a REG.  */
      push_reload (*x, NULL_RTX, x, NULL,
                   GENERAL_REGS, mode, VOIDmode,
                   0, 0, opnum, type);
      return 1;
    }

  if (GET_CODE (*x) == PLUS
      && GET_CODE (XEXP (*x, 0)) == SYMBOL_REF
      && GET_CODE (XEXP (*x, 1)) == CONST_INT)
    {
      char e = mep_section_tag (XEXP (*x, 0));

      /* tp-/gp-relative symbols are left for the generic code.  */
      if (e != 't' && e != 'b')
        {
          /* GCC thinks that (sym+const) is a valid address.  Well,
             sometimes it is, this time it isn't.  The best thing to
             do is reload the symbol to a register, since reg+int
             tends to work, and we can't just add the symbol and
             constant anyway.  */
          push_reload (XEXP (*x, 0), NULL_RTX, &(XEXP(*x, 0)), NULL,
                       GENERAL_REGS, mode, VOIDmode,
                       0, 0, opnum, type);
          return 1;
        }
    }
  return 0;
}
1381
 
1382
/* Return the encoded length in bytes (2 or 4) of the core load/store
   INSN whose memory operand is operand OPN of its single SET.  Short
   (2-byte) forms exist for plain register addressing and for certain
   small SP- and TP-relative displacements.  */

int
mep_core_address_length (rtx insn, int opn)
{
  rtx set = single_set (insn);
  rtx mem = XEXP (set, opn);
  rtx other = XEXP (set, 1-opn);   /* The register side of the move.  */
  rtx addr = XEXP (mem, 0);

  /* [reg] always has a short form.  */
  if (register_operand (addr, Pmode))
    return 2;
  if (GET_CODE (addr) == PLUS)
    {
      rtx addend = XEXP (addr, 1);

      gcc_assert (REG_P (XEXP (addr, 0)));

      switch (REGNO (XEXP (addr, 0)))
        {
        case STACK_POINTER_REGNUM:
          /* Word accesses with a 7-bit, 4-aligned SP offset are short.  */
          if (GET_MODE_SIZE (GET_MODE (mem)) == 4
              && mep_imm7a4_operand (addend, VOIDmode))
            return 2;
          break;

        case 13: /* TP */
          gcc_assert (REG_P (other));

          /* Short TP-relative forms only reach registers r0-r7.  */
          if (REGNO (other) >= 8)
            break;

          /* A tp-relative UNSPEC symbol reference is short.  */
          if (GET_CODE (addend) == CONST
              && GET_CODE (XEXP (addend, 0)) == UNSPEC
              && XINT (XEXP (addend, 0), 1) == UNS_TPREL)
            return 2;

          /* So is a small, naturally-aligned constant offset.  */
          if (GET_CODE (addend) == CONST_INT
              && INTVAL (addend) >= 0
              && INTVAL (addend) <= 127
              && INTVAL (addend) % GET_MODE_SIZE (GET_MODE (mem)) == 0)
            return 2;
          break;
        }
    }

  return 4;
}
1428
 
1429
int
1430
mep_cop_address_length (rtx insn, int opn)
1431
{
1432
  rtx set = single_set (insn);
1433
  rtx mem = XEXP (set, opn);
1434
  rtx addr = XEXP (mem, 0);
1435
 
1436
  if (GET_CODE (mem) != MEM)
1437
    return 2;
1438
  if (register_operand (addr, Pmode))
1439
    return 2;
1440
  if (GET_CODE (addr) == POST_INC)
1441
    return 2;
1442
 
1443
  return 4;
1444
}
1445
 
1446
#define DEBUG_EXPAND_MOV 0
/* Expand a move of mode MODE between OPERANDS[0] and OPERANDS[1].
   Return true if this function emitted the whole sequence itself,
   false to let the caller fall through to the normal move pattern
   (possibly with OPERANDS rewritten in place).  Handles tp-/gp-
   relative symbol rewriting, control-register restrictions and
   far-section destinations.  */
bool
mep_expand_mov (rtx *operands, enum machine_mode mode)
{
  int i, t;
  int tag[2];                 /* Section tags of the two operands.  */
  rtx tpsym, tpoffs;
  int post_reload = 0;        /* Nonzero if tp/gp bases are unusable.  */

  tag[0] = mep_section_tag (operands[0]);
  tag[1] = mep_section_tag (operands[1]);

  /* Before reload, a mem-to-mem move needs the source in a register.  */
  if (!reload_in_progress
      && !reload_completed
      && GET_CODE (operands[0]) != REG
      && GET_CODE (operands[0]) != SUBREG
      && GET_CODE (operands[1]) != REG
      && GET_CODE (operands[1]) != SUBREG)
    operands[1] = copy_to_mode_reg (mode, operands[1]);

#if DEBUG_EXPAND_MOV
  fprintf(stderr, "expand move %s %d\n", mode_name[mode],
          reload_in_progress || reload_completed);
  debug_rtx (operands[0]);
  debug_rtx (operands[1]);
#endif

  /* Wide moves are handled elsewhere.  */
  if (mode == DImode || mode == DFmode)
    return false;

  if (reload_in_progress || reload_completed)
    {
      rtx r;

      /* Record that reload has started writing $tp.  */
      if (GET_CODE (operands[0]) == REG && REGNO (operands[0]) == TP_REGNO)
        cfun->machine->reload_changes_tp = true;

      /* During/after reload we may only use the gp-/tp-relative forms
         if the corresponding base register was captured at function
         entry and still lives in its hard register.  */
      if (tag[0] == 't' || tag[1] == 't')
        {
          r = has_hard_reg_initial_val (Pmode, GP_REGNO);
          if (!r || GET_CODE (r) != REG || REGNO (r) != GP_REGNO)
            post_reload = 1;
        }
      if (tag[0] == 'b' || tag[1] == 'b')
        {
          r = has_hard_reg_initial_val (Pmode, TP_REGNO);
          if (!r || GET_CODE (r) != REG || REGNO (r) != TP_REGNO)
            post_reload = 1;
        }
      if (cfun->machine->reload_changes_tp == true)
        post_reload = 1;
    }

  if (!post_reload)
    {
      rtx n;
      /* Rewrite a tp-/gp-section symbolic source as base-register plus
         an UNS_TPREL/UNS_GPREL UNSPEC and emit the add directly.  */
      if (symbol_p (operands[1]))
        {
          t = mep_section_tag (operands[1]);
          if (t == 'b' || t == 't')
            {

              if (GET_CODE (operands[1]) == SYMBOL_REF)
                {
                  tpsym = operands[1];
                  n = gen_rtx_UNSPEC (mode,
                                      gen_rtvec (1, operands[1]),
                                      t == 'b' ? UNS_TPREL : UNS_GPREL);
                  n = gen_rtx_CONST (mode, n);
                }
              else if (GET_CODE (operands[1]) == CONST
                       && GET_CODE (XEXP (operands[1], 0)) == PLUS
                       && GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == SYMBOL_REF
                       && GET_CODE (XEXP (XEXP (operands[1], 0), 1)) == CONST_INT)
                {
                  /* (const (plus sym const_int)): keep the offset
                     outside the UNSPEC.  */
                  tpsym = XEXP (XEXP (operands[1], 0), 0);
                  tpoffs = XEXP (XEXP (operands[1], 0), 1);
                  n = gen_rtx_UNSPEC (mode,
                                      gen_rtvec (1, tpsym),
                                      t == 'b' ? UNS_TPREL : UNS_GPREL);
                  n = gen_rtx_PLUS (mode, n, tpoffs);
                  n = gen_rtx_CONST (mode, n);
                }
              else if (GET_CODE (operands[1]) == CONST
                       && GET_CODE (XEXP (operands[1], 0)) == UNSPEC)
                /* Already in the rewritten form.  */
                return false;
              else
                {
                  error ("unusual TP-relative address");
                  return false;
                }

              n = gen_rtx_PLUS (mode, (t == 'b' ? mep_tp_rtx ()
                                       : mep_gp_rtx ()), n);
              n = emit_insn (gen_rtx_SET (mode, operands[0], n));
#if DEBUG_EXPAND_MOV
              fprintf(stderr, "mep_expand_mov emitting ");
              debug_rtx(n);
#endif
              return true;
            }
        }

      /* Rewrite tp-/gp-section MEM operands in place as
         [base + UNSPEC] addresses.  */
      for (i=0; i < 2; i++)
        {
          t = mep_section_tag (operands[i]);
          if (GET_CODE (operands[i]) == MEM && (t == 'b' || t == 't'))
            {
              rtx sym, n, r;
              int u;

              sym = XEXP (operands[i], 0);
              /* Unwrap an existing UNSPEC to get at the raw symbol.  */
              if (GET_CODE (sym) == CONST
                  && GET_CODE (XEXP (sym, 0)) == UNSPEC)
                sym = XVECEXP (XEXP (sym, 0), 0, 0);

              if (t == 'b')
                {
                  r = mep_tp_rtx ();
                  u = UNS_TPREL;
                }
              else
                {
                  r = mep_gp_rtx ();
                  u = UNS_GPREL;
                }

              n = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, sym), u);
              n = gen_rtx_CONST (Pmode, n);
              n = gen_rtx_PLUS (Pmode, r, n);
              operands[i] = replace_equiv_address (operands[i], n);
            }
        }
    }

  /* Control registers can only move to/from plain registers; force a
     non-register counterpart through a temporary.  */
  if ((GET_CODE (operands[1]) != REG
       && MEP_CONTROL_REG (operands[0]))
      || (GET_CODE (operands[0]) != REG
          && MEP_CONTROL_REG (operands[1])))
    {
      rtx temp;
#if DEBUG_EXPAND_MOV
      fprintf (stderr, "cr-mem, forcing op1 to reg\n");
#endif
      temp = gen_reg_rtx (mode);
      emit_move_insn (temp, operands[1]);
      operands[1] = temp;
    }

  /* Stores to far symbols (or of non-word size) need the address in a
     register first.  */
  if (symbolref_p (operands[0])
      && (mep_section_tag (XEXP (operands[0], 0)) == 'f'
          || (GET_MODE_SIZE (mode) != 4)))
    {
      rtx temp;

      gcc_assert (!reload_in_progress && !reload_completed);

      temp = force_reg (Pmode, XEXP (operands[0], 0));
      operands[0] = replace_equiv_address (operands[0], temp);
      emit_move_insn (operands[0], operands[1]);
      return true;
    }

  /* Before reload, tp-/gp-section sources were already rewritten
     above; clear the tag so they aren't handled again below.  */
  if (!post_reload && (tag[1] == 't' || tag[1] == 'b'))
    tag[1] = 0;

  /* Load a symbol's address with a top/bottom half pair.  */
  if (symbol_p (operands[1])
      && (tag[1] == 'f' || tag[1] == 't' || tag[1] == 'b'))
    {
      emit_insn (gen_movsi_topsym_s (operands[0], operands[1]));
      emit_insn (gen_movsi_botsym_s (operands[0], operands[0], operands[1]));
      return true;
    }

  /* Load through a symbolic MEM: build the address with a top/bottom
     pair, then load through it.  */
  if (symbolref_p (operands[1])
      && (tag[1] == 'f' || tag[1] == 't' || tag[1] == 'b'))
    {
      rtx temp;

      /* During reload we cannot create a new pseudo; reuse the
         destination as the address scratch.  */
      if (reload_in_progress || reload_completed)
        temp = operands[0];
      else
        temp = gen_reg_rtx (Pmode);

      emit_insn (gen_movsi_topsym_s (temp, operands[1]));
      emit_insn (gen_movsi_botsym_s (temp, temp, operands[1]));
      emit_move_insn (operands[0], replace_equiv_address (operands[1], temp));
      return true;
    }

  return false;
}
1638
 
1639
/* Cases where the pattern can't be made to use at all.  Return true if
   the move described by OPERANDS can be matched by the mov patterns;
   false if it must be handled some other way (or is invalid here).  */

bool
mep_mov_ok (rtx *operands, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  int i;

#define DEBUG_MOV_OK 0
#if DEBUG_MOV_OK
  fprintf (stderr, "mep_mov_ok %s %c=%c\n", mode_name[mode], mep_section_tag (operands[0]),
           mep_section_tag (operands[1]));
  debug_rtx (operands[0]);
  debug_rtx (operands[1]);
#endif

  /* We want the movh patterns to get these.  */
  if (GET_CODE (operands[1]) == HIGH)
    return false;

  /* We can't store a register to a far variable without using a
     scratch register to hold the address.  Using far variables should
     be split by mep_emit_mov anyway.  */
  if (mep_section_tag (operands[0]) == 'f'
      || mep_section_tag (operands[1]) == 'f')
    {
#if DEBUG_MOV_OK
      fprintf (stderr, " - no, f\n");
#endif
      return false;
    }
  i = mep_section_tag (operands[1]);
  if ((i == 'b' || i == 't') && !reload_completed && !reload_in_progress)
    /* These are supposed to be generated with adds of the appropriate
       register.  During and after reload, however, we allow them to
       be accessed as normal symbols because adding a dependency on
       the base register now might cause problems.  */
    {
#if DEBUG_MOV_OK
      fprintf (stderr, " - no, bt\n");
#endif
      return false;
    }

  /* The only moves we can allow involve at least one general
     register, so require it.  */
  for (i = 0; i < 2; i ++)
    {
      /* Allow subregs too, before reload.  */
      rtx x = operands[i];

      if (GET_CODE (x) == SUBREG)
        x = XEXP (x, 0);
      if (GET_CODE (x) == REG
          && ! MEP_CONTROL_REG (x))
        {
#if DEBUG_MOV_OK
          fprintf (stderr, " - ok\n");
#endif
          return true;
        }
    }
#if DEBUG_MOV_OK
  fprintf (stderr, " - no, no gen reg\n");
#endif
  return false;
}
1705
 
1706
#define DEBUG_SPLIT_WIDE_MOVE 0
/* Split the double-word move OPERANDS[0] <- OPERANDS[1] (mode MODE)
   into two single-word moves, storing the high parts in
   OPERANDS[2]/OPERANDS[3] and the low parts in OPERANDS[4]/OPERANDS[5].
   The two halves are swapped when needed so the split sequence never
   clobbers a source half before reading it.  */
void
mep_split_wide_move (rtx *operands, enum machine_mode mode)
{
  int i;

#if DEBUG_SPLIT_WIDE_MOVE
  fprintf (stderr, "\n\033[34mmep_split_wide_move\033[0m mode %s\n", mode_name[mode]);
  debug_rtx (operands[0]);
  debug_rtx (operands[1]);
#endif

  for (i = 0; i <= 1; i++)
    {
      rtx op = operands[i], hi, lo;

      switch (GET_CODE (op))
        {
        case REG:
          {
            unsigned int regno = REGNO (op);

            if (TARGET_64BIT_CR_REGS && CR_REGNO_P (regno))
              {
                rtx i32;

                /* A 64-bit coprocessor register: the low word is the
                   register itself, the high word a zero_extract of
                   bits 32..63.  */
                lo = gen_rtx_REG (SImode, regno);
                i32 = GEN_INT (32);
                hi = gen_rtx_ZERO_EXTRACT (SImode,
                                           gen_rtx_REG (DImode, regno),
                                           i32, i32);
              }
            else
              {
                /* A register pair; which member is the high word
                   depends on endianness.  */
                hi = gen_rtx_REG (SImode, regno + TARGET_LITTLE_ENDIAN);
                lo = gen_rtx_REG (SImode, regno + TARGET_BIG_ENDIAN);
              }
          }
          break;

        case CONST_INT:
        case CONST_DOUBLE:
        case MEM:
          /* Let operand_subword pick apart constants and memory.  */
          hi = operand_subword (op, TARGET_LITTLE_ENDIAN, 0, mode);
          lo = operand_subword (op, TARGET_BIG_ENDIAN, 0, mode);
          break;

        default:
          gcc_unreachable ();
        }

      /* The high part of CR <- GPR moves must be done after the low part.  */
      operands [i + 4] = lo;
      operands [i + 2] = hi;
    }

  if (reg_mentioned_p (operands[2], operands[5])
      || GET_CODE (operands[2]) == ZERO_EXTRACT
      || GET_CODE (operands[4]) == ZERO_EXTRACT)
    {
      rtx tmp;

      /* Overlapping register pairs -- make sure we don't
         early-clobber ourselves.  */
      tmp = operands[2];
      operands[2] = operands[4];
      operands[4] = tmp;
      tmp = operands[3];
      operands[3] = operands[5];
      operands[5] = tmp;
    }

#if DEBUG_SPLIT_WIDE_MOVE
  fprintf(stderr, "\033[34m");
  debug_rtx (operands[2]);
  debug_rtx (operands[3]);
  debug_rtx (operands[4]);
  debug_rtx (operands[5]);
  fprintf(stderr, "\033[0m");
#endif
}
1787
 
1788
/* Emit a setcc instruction in its entirity.  */
1789
 
1790
static bool
1791
mep_expand_setcc_1 (enum rtx_code code, rtx dest, rtx op1, rtx op2)
1792
{
1793
  rtx tmp;
1794
 
1795
  switch (code)
1796
    {
1797
    case GT:
1798
    case GTU:
1799
      tmp = op1, op1 = op2, op2 = tmp;
1800
      code = swap_condition (code);
1801
      /* FALLTHRU */
1802
 
1803
    case LT:
1804
    case LTU:
1805
      op1 = force_reg (SImode, op1);
1806
      emit_insn (gen_rtx_SET (VOIDmode, dest,
1807
                              gen_rtx_fmt_ee (code, SImode, op1, op2)));
1808
      return true;
1809
 
1810
    case EQ:
1811
      if (op2 != const0_rtx)
1812
        op1 = expand_binop (SImode, sub_optab, op1, op2, NULL, 1, OPTAB_WIDEN);
1813
      mep_expand_setcc_1 (LTU, dest, op1, const1_rtx);
1814
      return true;
1815
 
1816
    case NE:
1817
      /* Branchful sequence:
1818
                mov dest, 0             16-bit
1819
                beq op1, op2, Lover     16-bit (op2 < 16), 32-bit otherwise
1820
                mov dest, 1             16-bit
1821
 
1822
         Branchless sequence:
1823
                add3 tmp, op1, -op2     32-bit (or mov + sub)
1824
                sltu3 tmp, tmp, 1       16-bit
1825
                xor3 dest, tmp, 1       32-bit
1826
        */
1827
      if (optimize_size && op2 != const0_rtx)
1828
        return false;
1829
 
1830
      if (op2 != const0_rtx)
1831
        op1 = expand_binop (SImode, sub_optab, op1, op2, NULL, 1, OPTAB_WIDEN);
1832
 
1833
      op2 = gen_reg_rtx (SImode);
1834
      mep_expand_setcc_1 (LTU, op2, op1, const1_rtx);
1835
 
1836
      emit_insn (gen_rtx_SET (VOIDmode, dest,
1837
                              gen_rtx_XOR (SImode, op2, const1_rtx)));
1838
      return true;
1839
 
1840
    case LE:
1841
      if (GET_CODE (op2) != CONST_INT
1842
          || INTVAL (op2) == 0x7ffffff)
1843
        return false;
1844
      op2 = GEN_INT (INTVAL (op2) + 1);
1845
      return mep_expand_setcc_1 (LT, dest, op1, op2);
1846
 
1847
    case LEU:
1848
      if (GET_CODE (op2) != CONST_INT
1849
          || INTVAL (op2) == -1)
1850
        return false;
1851
      op2 = GEN_INT (trunc_int_for_mode (INTVAL (op2) + 1, SImode));
1852
      return mep_expand_setcc_1 (LTU, dest, op1, op2);
1853
 
1854
    case GE:
1855
      if (GET_CODE (op2) != CONST_INT
1856
          || INTVAL (op2) == trunc_int_for_mode (0x80000000, SImode))
1857
        return false;
1858
      op2 = GEN_INT (INTVAL (op2) - 1);
1859
      return mep_expand_setcc_1 (GT, dest, op1, op2);
1860
 
1861
    case GEU:
1862
      if (GET_CODE (op2) != CONST_INT
1863
          || op2 == const0_rtx)
1864
        return false;
1865
      op2 = GEN_INT (trunc_int_for_mode (INTVAL (op2) - 1, SImode));
1866
      return mep_expand_setcc_1 (GTU, dest, op1, op2);
1867
 
1868
    default:
1869
      gcc_unreachable ();
1870
    }
1871
}
1872
 
1873
bool
1874
mep_expand_setcc (rtx *operands)
1875
{
1876
  rtx dest = operands[0];
1877
  enum rtx_code code = GET_CODE (operands[1]);
1878
  rtx op0 = operands[2];
1879
  rtx op1 = operands[3];
1880
 
1881
  return mep_expand_setcc_1 (code, dest, op0, op1);
1882
}
1883
 
1884
/* Massage the comparison OPERANDS[0] (code) applied to OPERANDS[1] and
   OPERANDS[2] into a form the branch patterns accept (EQ/NE/LT/GE
   against a register or small immediate), emitting any setcc insns
   needed along the way.  Return the rewritten comparison rtx.  */

rtx
mep_expand_cbranch (rtx *operands)
{
  enum rtx_code code = GET_CODE (operands[0]);
  rtx op0 = operands[1];
  rtx op1 = operands[2];
  rtx tmp;

 restart:
  switch (code)
    {
    case LT:
      /* LT against a 4-bit immediate is directly branchable.  */
      if (mep_imm4_operand (op1, SImode))
        break;

      /* Otherwise materialize the comparison and branch on != 0.  */
      tmp = gen_reg_rtx (SImode);
      gcc_assert (mep_expand_setcc_1 (LT, tmp, op0, op1));
      code = NE;
      op0 = tmp;
      op1 = const0_rtx;
      break;

    case GE:
      if (mep_imm4_operand (op1, SImode))
        break;

      /* a >= b  ==  !(a < b): materialize LT and branch on == 0.  */
      tmp = gen_reg_rtx (SImode);
      gcc_assert (mep_expand_setcc_1 (LT, tmp, op0, op1));

      code = EQ;
      op0 = tmp;
      op1 = const0_rtx;
      break;

    case EQ:
    case NE:
      /* EQ/NE need a register or 4-bit immediate comparand.  */
      if (! mep_reg_or_imm4_operand (op1, SImode))
        op1 = force_reg (SImode, op1);
      break;

    case LE:
    case GT:
      /* a <= c == a < c+1, a > c == a >= c+1 when c+1 can't overflow.  */
      if (GET_CODE (op1) == CONST_INT
          && INTVAL (op1) != 0x7fffffff)
        {
          op1 = GEN_INT (INTVAL (op1) + 1);
          code = (code == LE ? LT : GE);
          goto restart;
        }

      /* Otherwise compute op1 < op0 and branch on the result.  */
      tmp = gen_reg_rtx (SImode);
      gcc_assert (mep_expand_setcc_1 (LT, tmp, op1, op0));

      code = (code == LE ? EQ : NE);
      op0 = tmp;
      op1 = const0_rtx;
      break;

    case LTU:
      /* a <u 1  ==  a == 0.  */
      if (op1 == const1_rtx)
        {
          code = EQ;
          op1 = const0_rtx;
          break;
        }

      tmp = gen_reg_rtx (SImode);
      gcc_assert (mep_expand_setcc_1 (LTU, tmp, op0, op1));
      code = NE;
      op0 = tmp;
      op1 = const0_rtx;
      break;

    case LEU:
      /* Try LEU directly; otherwise use !(op1 <u op0).  */
      tmp = gen_reg_rtx (SImode);
      if (mep_expand_setcc_1 (LEU, tmp, op0, op1))
        code = NE;
      else if (mep_expand_setcc_1 (LTU, tmp, op1, op0))
        code = EQ;
      else
        gcc_unreachable ();
      op0 = tmp;
      op1 = const0_rtx;
      break;

    case GTU:
      /* a >u b  ==  b <u a.  */
      tmp = gen_reg_rtx (SImode);
      gcc_assert (mep_expand_setcc_1 (GTU, tmp, op0, op1)
                  || mep_expand_setcc_1 (LTU, tmp, op1, op0));
      code = NE;
      op0 = tmp;
      op1 = const0_rtx;
      break;

    case GEU:
      /* Try GEU directly; otherwise use !(op0 <u op1).  */
      tmp = gen_reg_rtx (SImode);
      if (mep_expand_setcc_1 (GEU, tmp, op0, op1))
        code = NE;
      else if (mep_expand_setcc_1 (LTU, tmp, op0, op1))
        code = EQ;
      else
        gcc_unreachable ();
      op0 = tmp;
      op1 = const0_rtx;
      break;

    default:
      gcc_unreachable ();
    }

  return gen_rtx_fmt_ee (code, VOIDmode, op0, op1);
}
1996
 
1997
const char *
1998
mep_emit_cbranch (rtx *operands, int ne)
1999
{
2000
  if (GET_CODE (operands[1]) == REG)
2001
    return ne ? "bne\t%0, %1, %l2" : "beq\t%0, %1, %l2";
2002
  else if (INTVAL (operands[1]) == 0 && !mep_vliw_function_p(cfun->decl))
2003
    return ne ? "bnez\t%0, %l2" : "beqz\t%0, %l2";
2004
  else
2005
    return ne ? "bnei\t%0, %1, %l2" : "beqi\t%0, %1, %l2";
2006
}
2007
 
2008
/* Expand a call.  OPERANDS is the operand array from the call or
   call_value pattern; RETURNS_VALUE is nonzero for call_value, in
   which case operands[0] is the result and the remaining operands
   are shifted up by one.  The emitted patterns also carry the cached
   $tp and $gp values so the pointers stay live across the call.  */

void
mep_expand_call (rtx *operands, int returns_value)
{
  rtx addr = operands[returns_value];
  rtx tp = mep_tp_rtx ();
  rtx gp = mep_gp_rtx ();

  /* The address operand always arrives wrapped in a MEM.  */
  gcc_assert (GET_CODE (addr) == MEM);

  addr = XEXP (addr, 0);

  /* Force addresses the call patterns cannot accept into a register.  */
  if (! mep_call_address_operand (addr, VOIDmode))
    addr = force_reg (SImode, addr);

  /* Make sure the (optional) third operand is at least const0_rtx.  */
  if (! operands[returns_value+2])
    operands[returns_value+2] = const0_rtx;

  if (returns_value)
    emit_call_insn (gen_call_value_internal (operands[0], addr, operands[2],
                                             operands[3], tp, gp));
  else
    emit_call_insn (gen_call_internal (addr, operands[1],
                                       operands[2], tp, gp));
}
2032
 
2033
/* Aliasing Support.  */
2034
 
2035
/* If X is a machine specific address (i.e. a symbol or label being
2036
   referenced as a displacement from the GOT implemented using an
2037
   UNSPEC), then return the base term.  Otherwise return X.  */
2038
 
2039
/* Implementation of FIND_BASE_TERM for $tp/$gp-relative addresses.
   See the block comment above: returns the symbol inside a
   (plus $tp/$gp (const (unspec [sym] UNS_TPREL/UNS_GPREL))) address,
   or X itself when X does not have that shape.  */

rtx
mep_find_base_term (rtx x)
{
  rtx base, term;
  int unspec;

  if (GET_CODE (x) != PLUS)
    return x;
  base = XEXP (x, 0);
  term = XEXP (x, 1);

  /* Decide which unspec to expect from which base register the
     address is relative to.  */
  if (has_hard_reg_initial_val(Pmode, TP_REGNO)
      && base == mep_tp_rtx ())
    unspec = UNS_TPREL;
  else if (has_hard_reg_initial_val(Pmode, GP_REGNO)
           && base == mep_gp_rtx ())
    unspec = UNS_GPREL;
  else
    return x;

  if (GET_CODE (term) != CONST)
    return x;
  term = XEXP (term, 0);

  if (GET_CODE (term) != UNSPEC
      || XINT (term, 1) != unspec)
    return x;

  /* The symbol or label is the sole element of the unspec vector.  */
  return XVECEXP (term, 0, 0);
}
2069
 
2070
/* Reload Support.  */
2071
 
2072
/* Return true if the registers in CLASS cannot represent the change from
2073
   modes FROM to TO.  */
2074
 
2075
bool
2076
mep_cannot_change_mode_class (enum machine_mode from, enum machine_mode to,
2077
                               enum reg_class regclass)
2078
{
2079
  if (from == to)
2080
    return false;
2081
 
2082
  /* 64-bit COP regs must remain 64-bit COP regs.  */
2083
  if (TARGET_64BIT_CR_REGS
2084
      && (regclass == CR_REGS
2085
          || regclass == LOADABLE_CR_REGS)
2086
      && (GET_MODE_SIZE (to) < 8
2087
          || GET_MODE_SIZE (from) < 8))
2088
    return true;
2089
 
2090
  return false;
2091
}
2092
 
2093
#define MEP_NONGENERAL_CLASS(C) (!reg_class_subset_p (C, GENERAL_REGS))
2094
 
2095
static bool
2096
mep_general_reg (rtx x)
2097
{
2098
  while (GET_CODE (x) == SUBREG)
2099
    x = XEXP (x, 0);
2100
  return GET_CODE (x) == REG && GR_REGNO_P (REGNO (x));
2101
}
2102
 
2103
static bool
2104
mep_nongeneral_reg (rtx x)
2105
{
2106
  while (GET_CODE (x) == SUBREG)
2107
    x = XEXP (x, 0);
2108
  return (GET_CODE (x) == REG
2109
          && !GR_REGNO_P (REGNO (x)) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2110
}
2111
 
2112
static bool
2113
mep_general_copro_reg (rtx x)
2114
{
2115
  while (GET_CODE (x) == SUBREG)
2116
    x = XEXP (x, 0);
2117
  return (GET_CODE (x) == REG && CR_REGNO_P (REGNO (x)));
2118
}
2119
 
2120
static bool
2121
mep_nonregister (rtx x)
2122
{
2123
  while (GET_CODE (x) == SUBREG)
2124
    x = XEXP (x, 0);
2125
  return (GET_CODE (x) != REG || REGNO (x) >= FIRST_PSEUDO_REGISTER);
2126
}
2127
 
2128
#define DEBUG_RELOAD 0
2129
 
2130
/* Return the secondary reload class needed for moving value X to or
2131
   from a register in coprocessor register class CLASS.  */
2132
 
2133
/* X is being copied to or from a register in coprocessor class RCLASS;
   return the class of secondary (intermediate) register required, or
   NO_REGS when the copy can be done directly or through memory.  */

static enum reg_class
mep_secondary_copro_reload_class (enum reg_class rclass, rtx x)
{
  if (mep_general_reg (x))
    /* We can do the move directly if mep_have_core_copro_moves_p,
       otherwise we need to go through memory.  Either way, no secondary
       register is needed.  */
    return NO_REGS;

  if (mep_general_copro_reg (x))
    {
      /* We can do the move directly if mep_have_copro_copro_moves_p.  */
      if (mep_have_copro_copro_moves_p)
        return NO_REGS;

      /* Otherwise we can use a temporary if mep_have_core_copro_moves_p.  */
      if (mep_have_core_copro_moves_p)
        return GENERAL_REGS;

      /* Otherwise we need to do it through memory.  No secondary
         register is needed.  */
      return NO_REGS;
    }

  if (reg_class_subset_p (rclass, LOADABLE_CR_REGS)
      && constraint_satisfied_p (x, CONSTRAINT_U))
    /* X is a memory value that we can access directly.  */
    return NO_REGS;

  /* We have to move X into a GPR first and then copy it to
     the coprocessor register.  The move from the GPR to the
     coprocessor might be done directly or through memory,
     depending on mep_have_core_copro_moves_p. */
  return GENERAL_REGS;
}
2168
 
2169
/* Copying X to register in RCLASS.  */
2170
 
2171
int
mep_secondary_input_reload_class (enum reg_class rclass,
                                  enum machine_mode mode ATTRIBUTE_UNUSED,
                                  rtx x)
{
  int rv = NO_REGS;

#if DEBUG_RELOAD
  fprintf (stderr, "secondary input reload copy to %s %s from ", reg_class_names[rclass], mode_name[mode]);
  debug_rtx (x);
#endif

  /* Coprocessor destinations have their own rules.  */
  if (reg_class_subset_p (rclass, CR_REGS))
    rv = mep_secondary_copro_reload_class (rclass, x);
  else if (MEP_NONGENERAL_CLASS (rclass)
           && (mep_nonregister (x) || mep_nongeneral_reg (x)))
    /* Other non-general destinations can only be loaded from a
       general register, so stage the value through one.  */
    rv = GENERAL_REGS;

#if DEBUG_RELOAD
  fprintf (stderr, " - requires %s\n", reg_class_names[rv]);
#endif
  return rv;
}
2194
 
2195
/* Copying register in RCLASS to X.  */
2196
 
2197
int
mep_secondary_output_reload_class (enum reg_class rclass,
                                   enum machine_mode mode ATTRIBUTE_UNUSED,
                                   rtx x)
{
  int rv = NO_REGS;

#if DEBUG_RELOAD
  fprintf (stderr, "secondary output reload copy from %s %s to ", reg_class_names[rclass], mode_name[mode]);
  debug_rtx (x);
#endif

  /* Coprocessor sources have their own rules; the helper is symmetric
     enough to serve both the input and output directions.  */
  if (reg_class_subset_p (rclass, CR_REGS))
    rv = mep_secondary_copro_reload_class (rclass, x);
  else if (MEP_NONGENERAL_CLASS (rclass)
           && (mep_nonregister (x) || mep_nongeneral_reg (x)))
    /* Other non-general sources can only be stored via a general
       register.  */
    rv = GENERAL_REGS;

#if DEBUG_RELOAD
  fprintf (stderr, " - requires %s\n", reg_class_names[rv]);
#endif

  return rv;
}
2221
 
2222
/* Implement SECONDARY_MEMORY_NEEDED.  */
2223
 
2224
bool
2225
mep_secondary_memory_needed (enum reg_class rclass1, enum reg_class rclass2,
2226
                             enum machine_mode mode ATTRIBUTE_UNUSED)
2227
{
2228
  if (!mep_have_core_copro_moves_p)
2229
    {
2230
      if (reg_classes_intersect_p (rclass1, CR_REGS)
2231
          && reg_classes_intersect_p (rclass2, GENERAL_REGS))
2232
        return true;
2233
      if (reg_classes_intersect_p (rclass2, CR_REGS)
2234
          && reg_classes_intersect_p (rclass1, GENERAL_REGS))
2235
        return true;
2236
      if (!mep_have_copro_copro_moves_p
2237
          && reg_classes_intersect_p (rclass1, CR_REGS)
2238
          && reg_classes_intersect_p (rclass2, CR_REGS))
2239
        return true;
2240
    }
2241
  return false;
2242
}
2243
 
2244
/* Expand a secondary reload move: operands[0] = dest, operands[1] =
   src, operands[2] = scratch register.  The WHICH code below packs
   the classification of both operands into two decimal digits
   (tens digit = dest, ones digit = src: 0 = general, 1 = control reg,
   2 = far symbol).  NOTE: the case labels written with a leading zero
   (00, 01, 02) are octal literals, which happen to equal their
   decimal values for digits 0-2, so the switch is correct as is.  */

void
mep_expand_reload (rtx *operands, enum machine_mode mode)
{
  /* There are three cases for each direction:
     register, farsym
     control, farsym
     control, nearsym */

  int s0 = mep_section_tag (operands[0]) == 'f';
  int s1 = mep_section_tag (operands[1]) == 'f';
  int c0 = mep_nongeneral_reg (operands[0]);
  int c1 = mep_nongeneral_reg (operands[1]);
  int which = (s0 ? 20:0) + (c0 ? 10:0) + (s1 ? 2:0) + (c1 ? 1:0);

#if DEBUG_RELOAD
  fprintf (stderr, "expand_reload %s\n", mode_name[mode]);
  debug_rtx (operands[0]);
  debug_rtx (operands[1]);
#endif

  switch (which)
    {
    case 00: /* Don't know why this gets here.  */
    case 02: /* general = far */
      emit_move_insn (operands[0], operands[1]);
      return;

    case 10: /* cr = mem */
    case 11: /* cr = cr */
    case 01: /* mem = cr */
    case 12: /* cr = far */
      /* Stage the value through the general-purpose scratch reg.  */
      emit_move_insn (operands[2], operands[1]);
      emit_move_insn (operands[0], operands[2]);
      return;

    case 20: /* far = general */
      /* Load the far address into the scratch reg, then load through it.  */
      emit_move_insn (operands[2], XEXP (operands[1], 0));
      emit_move_insn (operands[0], gen_rtx_MEM (mode, operands[2]));
      return;

    case 21: /* far = cr */
    case 22: /* far = far */
    default:
      fprintf (stderr, "unsupported expand reload case %02d for mode %s\n",
               which, mode_name[mode]);
      debug_rtx (operands[0]);
      debug_rtx (operands[1]);
      gcc_unreachable ();
    }
}
2294
 
2295
/* Implement PREFERRED_RELOAD_CLASS.  See whether X is a constant that
2296
   can be moved directly into registers 0 to 7, but not into the rest.
2297
   If so, and if the required class includes registers 0 to 7, restrict
2298
   it to those registers.  */
2299
 
2300
enum reg_class
mep_preferred_reload_class (rtx x, enum reg_class rclass)
{
  switch (GET_CODE (x))
    {
    case CONST_INT:
      /* Constants in [0x10000, 0x1000000) with nonzero low half
         cannot be built with a single immediate move into an
         arbitrary register; restrict to TPREL_REGS (regs 0-7) when
         the requested class allows them.  */
      if (INTVAL (x) >= 0x10000
          && INTVAL (x) < 0x01000000
          && (INTVAL (x) & 0xffff) != 0
          && reg_class_subset_p (TPREL_REGS, rclass))
        rclass = TPREL_REGS;
      break;

    case CONST:
    case SYMBOL_REF:
    case LABEL_REF:
      /* Near (non-'f'-section) symbolic addresses likewise prefer
         registers 0-7.  */
      if (mep_section_tag (x) != 'f'
          && reg_class_subset_p (TPREL_REGS, rclass))
        rclass = TPREL_REGS;
      break;

    default:
      break;
    }
  return rclass;
}
2326
 
2327
/* Implement REGISTER_MOVE_COST.  Return 2 for direct single-register
2328
   moves, 4 for direct double-register moves, and 1000 for anything
2329
   that requires a temporary register or temporary stack slot.  */
2330
 
2331
int
mep_register_move_cost (enum machine_mode mode, enum reg_class from, enum reg_class to)
{
  /* Copro <-> copro with direct move insns available.  */
  if (mep_have_copro_copro_moves_p
      && reg_class_subset_p (from, CR_REGS)
      && reg_class_subset_p (to, CR_REGS))
    {
      if (TARGET_32BIT_CR_REGS && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
        return 4;
      return 2;
    }
  /* Copro <-> copro without direct moves: twice as expensive.  */
  if (reg_class_subset_p (from, CR_REGS)
      && reg_class_subset_p (to, CR_REGS))
    {
      if (TARGET_32BIT_CR_REGS && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
        return 8;
      return 4;
    }
  /* Core <-> copro in either direction.  */
  if (reg_class_subset_p (from, CR_REGS)
      || reg_class_subset_p (to, CR_REGS))
    {
      if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
        return 4;
      return 2;
    }
  /* Moves that must be staged through memory or a temporary register
     are effectively prohibitive.  */
  if (mep_secondary_memory_needed (from, to, mode))
    return 1000;
  if (MEP_NONGENERAL_CLASS (from) && MEP_NONGENERAL_CLASS (to))
    return 1000;

  if (GET_MODE_SIZE (mode) > 4)
    return 4;

  return 2;
}
2366
 
2367
 
2368
/* Functions to save and restore machine-specific function data.  */
2369
 
2370
static struct machine_function *
2371
mep_init_machine_status (void)
2372
{
2373
  struct machine_function *f;
2374
 
2375
  f = (struct machine_function *) ggc_alloc_cleared (sizeof (struct machine_function));
2376
 
2377
  return f;
2378
}
2379
 
2380
/* Implement ALLOCATE_INITIAL_VALUE: suggest a stack slot (relative to
   the arg pointer) where the entry value of hard register REG may be
   found, assigning a save slot on first use.  Return NULL_RTX when no
   slot applies.  */

static rtx
mep_allocate_initial_value (rtx reg)
{
  int rss;

  if (GET_CODE (reg) != REG)
    return NULL_RTX;

  if (REGNO (reg) >= FIRST_PSEUDO_REGISTER)
    return NULL_RTX;

  /* In interrupt functions, the "initial" values of $gp and $tp are
     provided by the prologue.  They are not necessarily the same as
     the values that the caller was using.  */
  if (REGNO (reg) == TP_REGNO || REGNO (reg) == GP_REGNO)
    if (mep_interrupt_p ())
      return NULL_RTX;

  /* Lazily assign a 4-byte save slot for this register.  */
  if (! cfun->machine->reg_save_slot[REGNO(reg)])
    {
      cfun->machine->reg_save_size += 4;
      cfun->machine->reg_save_slot[REGNO(reg)] = cfun->machine->reg_save_size;
    }

  rss = cfun->machine->reg_save_slot[REGNO(reg)];
  return gen_rtx_MEM (SImode, plus_constant (arg_pointer_rtx, -rss));
}
2407
 
2408
rtx
2409
mep_return_addr_rtx (int count)
2410
{
2411
  if (count != 0)
2412
    return const0_rtx;
2413
 
2414
  return get_hard_reg_initial_val (Pmode, LP_REGNO);
2415
}
2416
 
2417
/* Return the pseudo holding the entry value of $tp, via the
   hard-reg initial-value machinery.  */

static rtx
mep_tp_rtx (void)
{
  return get_hard_reg_initial_val (Pmode, TP_REGNO);
}
2422
 
2423
/* Return the pseudo holding the entry value of $gp, via the
   hard-reg initial-value machinery.  */

static rtx
mep_gp_rtx (void)
{
  return get_hard_reg_initial_val (Pmode, GP_REGNO);
}
2428
 
2429
/* Return true if the current function has the "interrupt" attribute.
   The answer is computed once and cached in cfun->machine
   (0 = not yet known, 1 = no, 2 = yes).  */

static bool
mep_interrupt_p (void)
{
  if (cfun->machine->interrupt_handler == 0)
    {
      int interrupt_handler
        = (lookup_attribute ("interrupt",
                             DECL_ATTRIBUTES (current_function_decl))
           != NULL_TREE);
      cfun->machine->interrupt_handler = interrupt_handler ? 2 : 1;
    }
  return cfun->machine->interrupt_handler == 2;
}
2442
 
2443
/* Return true if the current function has the "disinterrupt"
   attribute (interrupts disabled for its duration).  Cached in
   cfun->machine with the same 0/1/2 scheme as mep_interrupt_p.  */

static bool
mep_disinterrupt_p (void)
{
  if (cfun->machine->disable_interrupts == 0)
    {
      int disable_interrupts
        = (lookup_attribute ("disinterrupt",
                             DECL_ATTRIBUTES (current_function_decl))
           != NULL_TREE);
      cfun->machine->disable_interrupts = disable_interrupts ? 2 : 1;
    }
  return cfun->machine->disable_interrupts == 2;
}
2456
 
2457
 
2458
/* Frame/Epilog/Prolog Related.  */
2459
 
2460
/* Return true if INSN (an insn or a bare pattern) sets REG.  */

static bool
mep_reg_set_p (rtx reg, rtx insn)
{
  /* Similar to reg_set_p in rtlanal.c, but we ignore calls */
  if (INSN_P (insn))
    {
      if (FIND_REG_INC_NOTE (insn, reg))
        return true;
      insn = PATTERN (insn);
    }

  /* A register copied onto itself does not count as a set.  */
  if (GET_CODE (insn) == SET
      && GET_CODE (XEXP (insn, 0)) == REG
      && GET_CODE (XEXP (insn, 1)) == REG
      && REGNO (XEXP (insn, 0)) == REGNO (XEXP (insn, 1)))
    return false;

  return set_of (reg, insn) != NULL_RTX;
}
2479
 
2480
 
2481
#define MEP_SAVES_UNKNOWN 0
2482
#define MEP_SAVES_YES 1
2483
#define MEP_SAVES_MAYBE 2
2484
#define MEP_SAVES_NO 3
2485
 
2486
/* Return true if hard register REGNO is (or must be assumed to be)
   set somewhere in the current function's insn stream.  */

static bool
mep_reg_set_in_function (int regno)
{
  rtx reg, insn;

  /* Interrupt handlers conservatively treat any live register as set.  */
  if (mep_interrupt_p () && df_regs_ever_live_p(regno))
    return true;

  /* Profiling code implicitly clobbers $lp.  */
  if (regno == LP_REGNO && (profile_arc_flag > 0 || profile_flag > 0))
    return true;

  push_topmost_sequence ();
  insn = get_insns ();
  pop_topmost_sequence ();

  if (!insn)
    return false;

  reg = gen_rtx_REG (SImode, regno);

  /* NOTE(review): the scan starts at NEXT_INSN (insn), skipping the
     very first insn of the stream — presumably that one can never be
     a real set; confirm before relying on this.  */
  for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn) && mep_reg_set_p (reg, insn))
      return true;
  return false;
}
2511
 
2512
/* Return true if the current function contains an asm statement with
   no operands (ASM_INPUT pattern); such asms may touch any register.
   The answer is cached in cfun->machine (0 = unknown, 1 = no, 2 = yes).  */

static bool
mep_asm_without_operands_p (void)
{
  if (cfun->machine->asms_without_operands == 0)
    {
      rtx insn;

      push_topmost_sequence ();
      insn = get_insns ();
      pop_topmost_sequence ();

      /* Assume "no" until an ASM_INPUT is found.  */
      cfun->machine->asms_without_operands = 1;
      while (insn)
        {
          if (INSN_P (insn)
              && GET_CODE (PATTERN (insn)) == ASM_INPUT)
            {
              cfun->machine->asms_without_operands = 2;
              break;
            }
          insn = NEXT_INSN (insn);
        }

    }
  return cfun->machine->asms_without_operands == 2;
}
2538
 
2539
/* Interrupt functions save/restore every call-preserved register, and
2540
   any call-used register it uses (or all if it calls any function,
2541
   since they may get clobbered there too).  Here we check to see
2542
   which call-used registers need saving.  */
2543
 
2544
#define IVC2_ISAVED_REG(r) (TARGET_IVC2 \
2545
                           && (r == FIRST_CCR_REGNO + 1 \
2546
                               || (r >= FIRST_CCR_REGNO + 8 && r <= FIRST_CCR_REGNO + 11) \
2547
                               || (r >= FIRST_CCR_REGNO + 16 && r <= FIRST_CCR_REGNO + 31)))
2548
 
2549
/* Return true if register R must be saved by the prologue of an
   interrupt handler (always false for non-interrupt functions).  */

static bool
mep_interrupt_saved_reg (int r)
{
  if (!mep_interrupt_p ())
    return false;
  /* The prologue/epilogue themselves clobber the control temp(s).  */
  if (r == REGSAVE_CONTROL_TEMP
      || (TARGET_64BIT_CR_REGS && TARGET_COP && r == REGSAVE_CONTROL_TEMP+1))
    return true;
  /* An operand-less asm may use nearly anything; save broadly.  */
  if (mep_asm_without_operands_p ()
      && (!fixed_regs[r]
          || (r == RPB_REGNO || r == RPE_REGNO || r == RPC_REGNO || r == LP_REGNO)
          || IVC2_ISAVED_REG (r)))
    return true;
  if (!current_function_is_leaf)
    /* Function calls mean we need to save $lp.  */
    if (r == LP_REGNO || IVC2_ISAVED_REG (r))
      return true;
  if (!current_function_is_leaf || cfun->machine->doloop_tags > 0)
    /* The interrupt handler might use these registers for repeat blocks,
       or it might call a function that does so.  */
    if (r == RPB_REGNO || r == RPE_REGNO || r == RPC_REGNO)
      return true;
  /* A leaf handler need not save call-used registers it never touches.  */
  if (current_function_is_leaf && call_used_regs[r] && !df_regs_ever_live_p(r))
    return false;
  /* Functions we call might clobber these.  */
  if (call_used_regs[r] && !fixed_regs[r])
    return true;
  /* Additional registers that need to be saved for IVC2.  */
  if (IVC2_ISAVED_REG (r))
    return true;

  return false;
}
2582
 
2583
/* Return true if register R must be saved across this function.
   Until the frame is locked, the decision is (re)computed and cached
   in cfun->machine->reg_saved[]; afterwards the cached answer is
   authoritative.  */

static bool
mep_call_saves_register (int r)
{
  if (! cfun->machine->frame_locked)
    {
      int rv = MEP_SAVES_NO;

      /* A slot already assigned (e.g. by mep_allocate_initial_value)
         forces a save.  */
      if (cfun->machine->reg_save_slot[r])
        rv = MEP_SAVES_YES;
      /* Profiling clobbers $lp; see mep_epilogue_uses.  */
      else if (r == LP_REGNO && (profile_arc_flag > 0 || profile_flag > 0))
        rv = MEP_SAVES_YES;
      else if (r == FRAME_POINTER_REGNUM && frame_pointer_needed)
        rv = MEP_SAVES_YES;
      else if ((!call_used_regs[r] || r == LP_REGNO) && df_regs_ever_live_p(r))
        rv = MEP_SAVES_YES;
      else if (crtl->calls_eh_return && (r == 10 || r == 11))
        /* We need these to have stack slots so that they can be set during
           unwinding.  */
        rv = MEP_SAVES_YES;
      else if (mep_interrupt_saved_reg (r))
        rv = MEP_SAVES_YES;
      cfun->machine->reg_saved[r] = rv;
    }
  return cfun->machine->reg_saved[r] == MEP_SAVES_YES;
}
2608
 
2609
/* Return true if epilogue uses register REGNO.  */
2610
 
2611
bool
2612
mep_epilogue_uses (int regno)
2613
{
2614
  /* Since $lp is a call-saved register, the generic code will normally
2615
     mark it used in the epilogue if it needs to be saved and restored.
2616
     However, when profiling is enabled, the profiling code will implicitly
2617
     clobber $11.  This case has to be handled specially both here and in
2618
     mep_call_saves_register.  */
2619
  if (regno == LP_REGNO && (profile_arc_flag > 0 || profile_flag > 0))
2620
    return true;
2621
  /* Interrupt functions save/restore pretty much everything.  */
2622
  return (reload_completed && mep_interrupt_saved_reg (regno));
2623
}
2624
 
2625
static int
2626
mep_reg_size (int regno)
2627
{
2628
  if (CR_REGNO_P (regno) && TARGET_64BIT_CR_REGS)
2629
    return 8;
2630
  return 4;
2631
}
2632
 
2633
/* Worker function for TARGET_CAN_ELIMINATE.  */
2634
 
2635
bool
2636
mep_can_eliminate (const int from, const int to)
2637
{
2638
  return  (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM
2639
           ? ! frame_pointer_needed
2640
           : true);
2641
}
2642
 
2643
/* Return the offset between elimination registers FROM and TO.  As a
   side effect (unless the frame is locked) this recomputes the save
   decisions and the 8-byte alignment fillers cached in cfun->machine.  */

int
mep_elimination_offset (int from, int to)
{
  int reg_save_size;
  int i;
  int frame_size = get_frame_size () + crtl->outgoing_args_size;
  int total_size;

  /* Invalidate the cached save decisions so they are recomputed below.  */
  if (!cfun->machine->frame_locked)
    memset (cfun->machine->reg_saved, 0, sizeof (cfun->machine->reg_saved));

  /* We don't count arg_regs_to_save in the arg pointer offset, because
     gcc thinks the arg pointer has moved along with the saved regs.
     However, we do count it when we adjust $sp in the prologue.  */
  reg_save_size = 0;
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (mep_call_saves_register (i))
      reg_save_size += mep_reg_size (i);

  /* Pad the register-save area to an 8-byte boundary.  */
  if (reg_save_size % 8)
    cfun->machine->regsave_filler = 8 - (reg_save_size % 8);
  else
    cfun->machine->regsave_filler = 0;

  /* This is what our total stack adjustment looks like.  */
  total_size = (reg_save_size + frame_size + cfun->machine->regsave_filler);

  /* Pad the whole frame to an 8-byte boundary as well.  */
  if (total_size % 8)
    cfun->machine->frame_filler = 8 - (total_size % 8);
  else
    cfun->machine->frame_filler = 0;


  if (from == ARG_POINTER_REGNUM && to == FRAME_POINTER_REGNUM)
    return reg_save_size + cfun->machine->regsave_filler;

  if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
    return cfun->machine->frame_filler + frame_size;

  if (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
    return reg_save_size + cfun->machine->regsave_filler + cfun->machine->frame_filler + frame_size;

  gcc_unreachable ();
}
2687
 
2688
/* Mark X (an insn) as frame-related and return it; shorthand used
   throughout the prologue/epilogue expanders.  */

static rtx
F (rtx x)
{
  RTX_FRAME_RELATED_P (x) = 1;
  return x;
}
2694
 
2695
/* Since the prologue/epilogue code is generated after optimization,
2696
   we can't rely on gcc to split constants for us.  So, this code
2697
   captures all the ways to add a constant to a register in one logic
2698
   chunk, including optimizing away insns we just don't need.  This
2699
   makes the prolog/epilog code easier to follow.  */
2700
static void
2701
add_constant (int dest, int src, int value, int mark_frame)
2702
{
2703
  rtx insn;
2704
  int hi, lo;
2705
 
2706
  if (src == dest && value == 0)
2707
    return;
2708
 
2709
  if (value == 0)
2710
    {
2711
      insn = emit_move_insn (gen_rtx_REG (SImode, dest),
2712
                             gen_rtx_REG (SImode, src));
2713
      if (mark_frame)
2714
        RTX_FRAME_RELATED_P(insn) = 1;
2715
      return;
2716
    }
2717
 
2718
  if (value >= -32768 && value <= 32767)
2719
    {
2720
      insn = emit_insn (gen_addsi3 (gen_rtx_REG (SImode, dest),
2721
                                    gen_rtx_REG (SImode, src),
2722
                                    GEN_INT (value)));
2723
      if (mark_frame)
2724
        RTX_FRAME_RELATED_P(insn) = 1;
2725
      return;
2726
    }
2727
 
2728
  /* Big constant, need to use a temp register.  We use
2729
     REGSAVE_CONTROL_TEMP because it's call clobberable (the reg save
2730
     area is always small enough to directly add to).  */
2731
 
2732
  hi = trunc_int_for_mode (value & 0xffff0000, SImode);
2733
  lo = value & 0xffff;
2734
 
2735
  insn = emit_move_insn (gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
2736
                         GEN_INT (hi));
2737
 
2738
  if (lo)
2739
    {
2740
      insn = emit_insn (gen_iorsi3 (gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
2741
                                    gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
2742
                                    GEN_INT (lo)));
2743
    }
2744
 
2745
  insn = emit_insn (gen_addsi3 (gen_rtx_REG (SImode, dest),
2746
                                gen_rtx_REG (SImode, src),
2747
                                gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP)));
2748
  if (mark_frame)
2749
    {
2750
      RTX_FRAME_RELATED_P(insn) = 1;
2751
      add_reg_note (insn, REG_FRAME_RELATED_EXPR,
2752
                    gen_rtx_SET (SImode,
2753
                                 gen_rtx_REG (SImode, dest),
2754
                                 gen_rtx_PLUS (SImode,
2755
                                               gen_rtx_REG (SImode, dest),
2756
                                               GEN_INT (value))));
2757
    }
2758
}
2759
 
2760
/* Move SRC to DEST.  Mark the move as being potentially dead if
2761
   MAYBE_DEAD_P.  */
2762
 
2763
/* Move SRC to DEST and return the emitted insn.  MAYBE_DEAD_P is
   currently unused: the REG_MAYBE_DEAD annotation below has been
   disabled (#if 0), so the parameter only documents intent at the
   call sites.  */

static rtx
maybe_dead_move (rtx dest, rtx src, bool ATTRIBUTE_UNUSED maybe_dead_p)
{
  rtx insn = emit_move_insn (dest, src);
#if 0
  if (maybe_dead_p)
    REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx, NULL);
#endif
  return insn;
}
2773
 
2774
/* Used for interrupt functions, which can't assume that $tp and $gp
2775
   contain the correct pointers.  */
2776
 
2777
/* Emit insns loading the address of SYMBOL into hard register REGNO
   (high half then low half).  Used by interrupt prologues, which
   cannot assume $tp and $gp already hold the correct pointers.  The
   load is skipped when the register is provably unused.  */

static void
mep_reload_pointer (int regno, const char *symbol)
{
  rtx reg, sym;

  if (!df_regs_ever_live_p(regno) && current_function_is_leaf)
    return;

  reg = gen_rtx_REG (SImode, regno);
  sym = gen_rtx_SYMBOL_REF (SImode, symbol);
  emit_insn (gen_movsi_topsym_s (reg, sym));
  emit_insn (gen_movsi_botsym_s (reg, reg, sym));
}
2790
 
2791
/* Assign save slots for any register not already saved.  DImode
2792
   registers go at the end of the reg save area; the rest go at the
2793
   beginning.  This is for alignment purposes.  Returns true if a frame
2794
   is really needed.  */
2795
/* Assign save slots for any register not already saved.  DImode
   registers go at the end of the reg save area; the rest go at the
   beginning.  This is for alignment purposes.  Returns true if a frame
   is really needed.  */
static bool
mep_assign_save_slots (int reg_save_size)
{
  bool really_need_stack_frame = false;
  int di_ofs = 0;
  int i;

  for (i=0; i<FIRST_PSEUDO_REGISTER; i++)
    if (mep_call_saves_register(i))
      {
        int regsize = mep_reg_size (i);

        /* $tp/$gp/$lp only force a frame when actually written.  */
        if ((i != TP_REGNO && i != GP_REGNO && i != LP_REGNO)
            || mep_reg_set_in_function (i))
          really_need_stack_frame = true;

        /* Skip registers that already have a slot (e.g. from
           mep_allocate_initial_value).  */
        if (cfun->machine->reg_save_slot[i])
          continue;

        if (regsize < 8)
          {
            /* 4-byte slots grow from the start of the save area.  */
            cfun->machine->reg_save_size += regsize;
            cfun->machine->reg_save_slot[i] = cfun->machine->reg_save_size;
          }
        else
          {
            /* 8-byte slots are allocated from the end, downwards.  */
            cfun->machine->reg_save_slot[i] = reg_save_size - di_ofs;
            di_ofs += 8;
          }
      }
  /* From here on the layout is frozen; cached decisions are final.  */
  cfun->machine->frame_locked = 1;
  return really_need_stack_frame;
}
2828
 
2829
/* Expand the prologue for the current function: adjust $sp, save the
   call-saved/interrupt-saved registers, set up the frame pointer, and
   (for interrupt handlers) reload $gp and $tp.  */

void
mep_expand_prologue (void)
{
  int i, rss, sp_offset = 0;
  int reg_save_size;
  int frame_size;
  int really_need_stack_frame;

  /* We must not allow register renaming in interrupt functions,
     because that invalidates the correctness of the set of call-used
     registers we're going to save/restore.  */
  mep_set_leaf_registers (mep_interrupt_p () ? 0 : 1);

  if (mep_disinterrupt_p ())
    emit_insn (gen_mep_disable_int ());

  cfun->machine->mep_frame_pointer_needed = frame_pointer_needed;

  reg_save_size = mep_elimination_offset (ARG_POINTER_REGNUM, FRAME_POINTER_REGNUM);
  frame_size = mep_elimination_offset (FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM);
  really_need_stack_frame = frame_size;

  really_need_stack_frame |= mep_assign_save_slots (reg_save_size);

  /* When the whole frame is small, do the full $sp adjustment up
     front so the saves below use short offsets.  */
  sp_offset = reg_save_size;
  if (sp_offset + frame_size < 128)
    sp_offset += frame_size ;

  add_constant (SP_REGNO, SP_REGNO, -sp_offset, 1);

  for (i=0; i<FIRST_PSEUDO_REGISTER; i++)
    if (mep_call_saves_register(i))
      {
        rtx mem;
        bool maybe_dead_p;
        enum machine_mode rmode;

        rss = cfun->machine->reg_save_slot[i];

        /* $tp/$gp/$lp need no save unless written, except in
           interrupt handlers.  */
        if ((i == TP_REGNO || i == GP_REGNO || i == LP_REGNO)
            && (!mep_reg_set_in_function (i)
                && !mep_interrupt_p ()))
          continue;

        if (mep_reg_size (i) == 8)
          rmode = DImode;
        else
          rmode = SImode;

        /* If there is a pseudo associated with this register's initial value,
           reload might have already spilt it to the stack slot suggested by
           ALLOCATE_INITIAL_VALUE.  The moves emitted here can then be safely
           deleted as dead.  */
        mem = gen_rtx_MEM (rmode,
                           plus_constant (stack_pointer_rtx, sp_offset - rss));
        maybe_dead_p = rtx_equal_p (mem, has_hard_reg_initial_val (rmode, i));

        if (GR_REGNO_P (i) || LOADABLE_CR_REGNO_P (i))
          /* Directly storable registers: one frame-related store.  */
          F(maybe_dead_move (mem, gen_rtx_REG (rmode, i), maybe_dead_p));
        else if (rmode == DImode)
          {
            /* 64-bit control register: copy both halves through the
               core temp regs and store them as two SImode words.  */
            rtx insn;
            int be = TARGET_BIG_ENDIAN ? 4 : 0;

            mem = gen_rtx_MEM (SImode,
                               plus_constant (stack_pointer_rtx, sp_offset - rss + be));

            maybe_dead_move (gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
                             gen_rtx_REG (SImode, i),
                             maybe_dead_p);
            maybe_dead_move (gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP+1),
                             gen_rtx_ZERO_EXTRACT (SImode,
                                                   gen_rtx_REG (DImode, i),
                                                   GEN_INT (32),
                                                   GEN_INT (32)),
                             maybe_dead_p);
            insn = maybe_dead_move (mem,
                                    gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
                                    maybe_dead_p);
            RTX_FRAME_RELATED_P (insn) = 1;

            /* Tell the unwinder the net effect: the whole DImode
               register was saved to its slot.  */
            add_reg_note (insn, REG_FRAME_RELATED_EXPR,
                          gen_rtx_SET (VOIDmode,
                                       copy_rtx (mem),
                                       gen_rtx_REG (rmode, i)));
            mem = gen_rtx_MEM (SImode,
                               plus_constant (stack_pointer_rtx, sp_offset - rss + (4-be)));
            insn = maybe_dead_move (mem,
                                    gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP+1),
                                    maybe_dead_p);
          }
        else
          {
            /* Other control registers: stage through the core temp.  */
            rtx insn;
            maybe_dead_move (gen_rtx_REG (rmode, REGSAVE_CONTROL_TEMP),
                             gen_rtx_REG (rmode, i),
                             maybe_dead_p);
            insn = maybe_dead_move (mem,
                                    gen_rtx_REG (rmode, REGSAVE_CONTROL_TEMP),
                                    maybe_dead_p);
            RTX_FRAME_RELATED_P (insn) = 1;

            add_reg_note (insn, REG_FRAME_RELATED_EXPR,
                          gen_rtx_SET (VOIDmode,
                                       copy_rtx (mem),
                                       gen_rtx_REG (rmode, i)));
          }
      }

  if (frame_pointer_needed)
    {
      /* We've already adjusted down by sp_offset.  Total $sp change
         is reg_save_size + frame_size.  We want a net change here of
         just reg_save_size.  */
      add_constant (FP_REGNO, SP_REGNO, sp_offset - reg_save_size, 1);
    }

  /* Apply whatever part of the total adjustment was not done above.  */
  add_constant (SP_REGNO, SP_REGNO, sp_offset-(reg_save_size+frame_size), 1);

  if (mep_interrupt_p ())
    {
      mep_reload_pointer(GP_REGNO, "__sdabase");
      mep_reload_pointer(TP_REGNO, "__tpbase");
    }
}
2954
 
2955
/* Emit a human-readable summary of the stack frame layout, as
   assembly comments, at the start of a function.  FILE is the
   assembly output stream and HWI_LOCAL the size of local variables
   in bytes.  */
static void
mep_start_function (FILE *file, HOST_WIDE_INT hwi_local)
{
  int local = hwi_local;
  int frame_size = local + crtl->outgoing_args_size;
  int reg_save_size;
  int ffill;
  int i, sp, skip;
  int sp_offset;
  int slot_map[FIRST_PSEUDO_REGISTER], si, sj;

  /* NOTE(review): the initializer of frame_size above is dead; both
     reg_save_size and frame_size are recomputed here from the
     elimination offsets.  */
  reg_save_size = mep_elimination_offset (ARG_POINTER_REGNUM, FRAME_POINTER_REGNUM);
  frame_size = mep_elimination_offset (FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM);
  sp_offset = reg_save_size + frame_size;

  ffill = cfun->machine->frame_filler;

  /* Use the symbolic name for $8 only when it actually serves as the
     frame pointer in this function.  */
  if (cfun->machine->mep_frame_pointer_needed)
    reg_names[FP_REGNO] = "$fp";
  else
    reg_names[FP_REGNO] = "$8";

  if (sp_offset == 0)
    return;

  /* Without debug info, emit only a one-line summary.  */
  if (debug_info_level == DINFO_LEVEL_NONE)
    {
      fprintf (file, "\t# frame: %d", sp_offset);
      if (reg_save_size)
        fprintf (file, "   %d regs", reg_save_size);
      if (local)
        fprintf (file, "   %d locals", local);
      if (crtl->outgoing_args_size)
        fprintf (file, "   %d args", crtl->outgoing_args_size);
      fprintf (file, "\n");
      return;
    }

  fprintf (file, "\t#\n");
  fprintf (file, "\t# Initial Frame Information:\n");
  if (sp_offset || !frame_pointer_needed)
    fprintf (file, "\t# Entry   ---------- 0\n");

  /* Sort registers by save slots, so they're printed in the order
     they appear in memory, not the order they're saved in.  Simple
     O(n^2) selection sort; FIRST_PSEUDO_REGISTER is small and this
     only runs when emitting verbose frame comments.  */
  for (si=0; si<FIRST_PSEUDO_REGISTER; si++)
    slot_map[si] = si;
  for (si=0; si<FIRST_PSEUDO_REGISTER-1; si++)
    for (sj=si+1; sj<FIRST_PSEUDO_REGISTER; sj++)
      if (cfun->machine->reg_save_slot[slot_map[si]]
          > cfun->machine->reg_save_slot[slot_map[sj]])
        {
          int t = slot_map[si];
          slot_map[si] = slot_map[sj];
          slot_map[sj] = t;
        }

  sp = 0;
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      int rsize;
      int r = slot_map[i];
      int rss = cfun->machine->reg_save_slot[r];

      if (!mep_call_saves_register (r))
        continue;

      /* $tp, $gp and $lp are only saved when written to, or in
         interrupt handlers — mirror the skip logic of the save
         loops.  */
      if ((r == TP_REGNO || r == GP_REGNO || r == LP_REGNO)
          && (!mep_reg_set_in_function (r)
              && !mep_interrupt_p ()))
        continue;

      rsize = mep_reg_size(r);
      /* Gap between the previous slot's end and this slot.  */
      skip = rss - (sp+rsize);
      if (skip)
        fprintf (file, "\t#         %3d bytes for alignment\n", skip);
      fprintf (file, "\t#         %3d bytes for saved %-3s   %3d($sp)\n",
               rsize, reg_names[r], sp_offset - rss);
      sp = rss;
    }

  skip = reg_save_size - sp;
  if (skip)
    fprintf (file, "\t#         %3d bytes for alignment\n", skip);

  if (frame_pointer_needed)
    fprintf (file, "\t# FP ---> ---------- %d (sp-%d)\n", reg_save_size, sp_offset-reg_save_size);
  if (local)
    fprintf (file, "\t#         %3d bytes for local vars\n", local);
  if (ffill)
    fprintf (file, "\t#         %3d bytes for alignment\n", ffill);
  if (crtl->outgoing_args_size)
    fprintf (file, "\t#         %3d bytes for outgoing args\n",
             crtl->outgoing_args_size);
  fprintf (file, "\t# SP ---> ---------- %d\n", sp_offset);
  fprintf (file, "\t#\n");
}
3052
 
3053
 
3054
/* When nonzero, mep_expand_epilogue leaves $lp untouched; set by
   mep_emit_eh_epilogue, since the EH return path supplies the return
   address itself.  */
static int mep_prevent_lp_restore = 0;
/* When nonzero, mep_expand_epilogue stops after restoring registers
   and adjusting $sp; the sibcall performs the jump itself.  */
static int mep_sibcall_epilogue = 0;
3056
 
3057
/* Emit RTL for the function epilogue: rebase $sp (from $fp when a
   frame pointer exists), restore call-saved registers from their
   stack slots, deallocate the frame, and emit the appropriate return
   (ret, reti, or an indirect jump through the restored return
   address).  Behavior is modified by the mep_prevent_lp_restore and
   mep_sibcall_epilogue flags above.  */
void
mep_expand_epilogue (void)
{
  int i, sp_offset = 0;
  int reg_save_size = 0;
  int frame_size;
  /* Register holding the return address at the final jump, and the
     stack offset of the deferred $lp restore (-1 = not deferred).  */
  int lp_temp = LP_REGNO, lp_slot = -1;
  /* NOTE(review): really_need_stack_frame is computed and updated
     below but never read in this function.  */
  int really_need_stack_frame = get_frame_size() + crtl->outgoing_args_size;
  int interrupt_handler = mep_interrupt_p ();

  if (profile_arc_flag == 2)
    emit_insn (gen_mep_bb_trace_ret ());

  reg_save_size = mep_elimination_offset (ARG_POINTER_REGNUM, FRAME_POINTER_REGNUM);
  frame_size = mep_elimination_offset (FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM);

  really_need_stack_frame |= mep_assign_save_slots (reg_save_size);

  if (frame_pointer_needed)
    {
      /* If we have a frame pointer, we won't have a reliable stack
         pointer (alloca, you know), so rebase SP from FP */
      emit_move_insn (gen_rtx_REG (SImode, SP_REGNO),
                      gen_rtx_REG (SImode, FP_REGNO));
      sp_offset = reg_save_size;
    }
  else
    {
      /* SP is right under our local variable space.  Adjust it if
         needed.  */
      sp_offset = reg_save_size + frame_size;
      if (sp_offset >= 128)
        {
          add_constant (SP_REGNO, SP_REGNO, frame_size, 0);
          sp_offset -= frame_size;
        }
    }

  /* This is backwards so that we restore the control and coprocessor
     registers before the temporary registers we use to restore
     them.  */
  for (i=FIRST_PSEUDO_REGISTER-1; i>=1; i--)
    if (mep_call_saves_register (i))
      {
        enum machine_mode rmode;
        int rss = cfun->machine->reg_save_slot[i];

        if (mep_reg_size (i) == 8)
          rmode = DImode;
        else
          rmode = SImode;

        /* $tp, $gp and $lp were only saved if written to, or in
           interrupt handlers.  */
        if ((i == TP_REGNO || i == GP_REGNO || i == LP_REGNO)
            && !(mep_reg_set_in_function (i) || interrupt_handler))
          continue;
        if (mep_prevent_lp_restore && i == LP_REGNO)
          continue;
        /* Registers 10 and 11 are skipped on the normal path; see
           mep_return_stackadj_rtx for register 10's EH role.  */
        if (!mep_prevent_lp_restore
            && !interrupt_handler
            && (i == 10 || i == 11))
          continue;

        if (GR_REGNO_P (i) || LOADABLE_CR_REGNO_P (i))
          emit_move_insn (gen_rtx_REG (rmode, i),
                          gen_rtx_MEM (rmode,
                                       plus_constant (stack_pointer_rtx,
                                                      sp_offset-rss)));
        else
          {
            if (i == LP_REGNO && !mep_sibcall_epilogue && !interrupt_handler)
              /* Defer this one so we can jump indirect rather than
                 copying the RA to $lp and "ret".  EH epilogues
                 automatically skip this anyway.  */
              lp_slot = sp_offset-rss;
            else
              {
                /* Non-loadable control/coprocessor registers must be
                   restored via a core-register temporary.  */
                emit_move_insn (gen_rtx_REG (rmode, REGSAVE_CONTROL_TEMP),
                                gen_rtx_MEM (rmode,
                                             plus_constant (stack_pointer_rtx,
                                                            sp_offset-rss)));
                emit_move_insn (gen_rtx_REG (rmode, i),
                                gen_rtx_REG (rmode, REGSAVE_CONTROL_TEMP));
              }
          }
      }
  if (lp_slot != -1)
    {
      /* Restore this one last so we know it will be in the temp
         register when we return by jumping indirectly via the temp.  */
      emit_move_insn (gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
                      gen_rtx_MEM (SImode,
                                   plus_constant (stack_pointer_rtx,
                                                  lp_slot)));
      lp_temp = REGSAVE_CONTROL_TEMP;
    }


  add_constant (SP_REGNO, SP_REGNO, sp_offset, 0);

  /* Apply the EH stack adjustment recorded by mep_emit_eh_epilogue.  */
  if (crtl->calls_eh_return && mep_prevent_lp_restore)
    emit_insn (gen_addsi3 (gen_rtx_REG (SImode, SP_REGNO),
                           gen_rtx_REG (SImode, SP_REGNO),
                           cfun->machine->eh_stack_adjust));

  /* Sibcall epilogues stop here; the call itself jumps.  */
  if (mep_sibcall_epilogue)
    return;

  if (mep_disinterrupt_p ())
    emit_insn (gen_mep_enable_int ());

  if (mep_prevent_lp_restore)
    {
      emit_jump_insn (gen_eh_return_internal ());
      emit_barrier ();
    }
  else if (interrupt_handler)
    emit_jump_insn (gen_mep_reti ());
  else
    emit_jump_insn (gen_return_internal (gen_rtx_REG (SImode, lp_temp)));
}
3177
 
3178
void
3179
mep_expand_eh_return (rtx *operands)
3180
{
3181
  if (GET_CODE (operands[0]) != REG || REGNO (operands[0]) != LP_REGNO)
3182
    {
3183
      rtx ra = gen_rtx_REG (Pmode, LP_REGNO);
3184
      emit_move_insn (ra, operands[0]);
3185
      operands[0] = ra;
3186
    }
3187
 
3188
  emit_insn (gen_eh_epilogue (operands[0]));
3189
}
3190
 
3191
/* Emit an epilogue suitable for the eh_return pattern: record that
   the stack adjustment lives in register 0, then expand a normal
   epilogue with the $lp restore suppressed (the EH machinery supplies
   the return address itself).  OPERANDS is unused.  */
void
mep_emit_eh_epilogue (rtx *operands ATTRIBUTE_UNUSED)
{
  cfun->machine->eh_stack_adjust = gen_rtx_REG (Pmode, 0);
  /* Flag is reset immediately after; it only modifies this one
     epilogue expansion.  */
  mep_prevent_lp_restore = 1;
  mep_expand_epilogue ();
  mep_prevent_lp_restore = 0;
}
3199
 
3200
/* Expand an epilogue for a sibling call: restore registers and
   deallocate the frame, but emit no return — the sibcall itself
   jumps.  (mep_expand_epilogue returns early when
   mep_sibcall_epilogue is set.)  */
void
mep_expand_sibcall_epilogue (void)
{
  mep_sibcall_epilogue = 1;
  mep_expand_epilogue ();
  mep_sibcall_epilogue = 0;
}
3207
 
3208
static bool
3209
mep_function_ok_for_sibcall (tree decl, tree exp ATTRIBUTE_UNUSED)
3210
{
3211
  if (decl == NULL)
3212
    return false;
3213
 
3214
  if (mep_section_tag (DECL_RTL (decl)) == 'f')
3215
    return false;
3216
 
3217
  /* Can't call to a sibcall from an interrupt or disinterrupt function.  */
3218
  if (mep_interrupt_p () || mep_disinterrupt_p ())
3219
    return false;
3220
 
3221
  return true;
3222
}
3223
 
3224
/* Return the register used to communicate the EH stack adjustment,
   register 10.  NOTE(review): register 10's EH role is inferred from
   its exclusion (with 11) in mep_expand_epilogue's normal restore
   path — confirm against EH_RETURN_STACKADJ_RTX in mep.h.  */
rtx
mep_return_stackadj_rtx (void)
{
  return gen_rtx_REG (SImode, 10);
}
3229
 
3230
/* Return the RTX for the return-address register, $lp.  */
rtx
mep_return_handler_rtx (void)
{
  return gen_rtx_REG (SImode, LP_REGNO);
}
3235
 
3236
/* Emit the profiling prologue: call __mep_mcount while preserving $0
   and $lp on the stack.  Always emitted right at the beginning of
   the function.  */
void
mep_function_profiler (FILE *file)
{
  static const char *const mcount_seq[] = {
    "\t# mep function profiler\n",
    "\tadd\t$sp, -8\n",
    "\tsw\t$0, ($sp)\n",
    "\tldc\t$0, $lp\n",
    "\tsw\t$0, 4($sp)\n",
    "\tbsr\t__mep_mcount\n",
    "\tlw\t$0, 4($sp)\n",
    "\tstc\t$0, $lp\n",
    "\tlw\t$0, ($sp)\n",
    "\tadd\t$sp, 8\n\n"
  };
  size_t i;

  for (i = 0; i < sizeof mcount_seq / sizeof mcount_seq[0]; i++)
    fputs (mcount_seq[i], file);
}
3251
 
3252
const char *
3253
mep_emit_bb_trace_ret (void)
3254
{
3255
  fprintf (asm_out_file, "\t# end of block profiling\n");
3256
  fprintf (asm_out_file, "\tadd\t$sp, -8\n");
3257
  fprintf (asm_out_file, "\tsw\t$0, ($sp)\n");
3258
  fprintf (asm_out_file, "\tldc\t$0, $lp\n");
3259
  fprintf (asm_out_file, "\tsw\t$0, 4($sp)\n");
3260
  fprintf (asm_out_file, "\tbsr\t__bb_trace_ret\n");
3261
  fprintf (asm_out_file, "\tlw\t$0, 4($sp)\n");
3262
  fprintf (asm_out_file, "\tstc\t$0, $lp\n");
3263
  fprintf (asm_out_file, "\tlw\t$0, ($sp)\n");
3264
  fprintf (asm_out_file, "\tadd\t$sp, 8\n\n");
3265
  return "";
3266
}
3267
 
3268
#undef SAVE
3269
#undef RESTORE
3270
 
3271
/* Operand Printing.  */
3272
 
3273
void
3274
mep_print_operand_address (FILE *stream, rtx address)
3275
{
3276
  if (GET_CODE (address) == MEM)
3277
    address = XEXP (address, 0);
3278
  else
3279
    /* cf: gcc.dg/asm-4.c.  */
3280
    gcc_assert (GET_CODE (address) == REG);
3281
 
3282
  mep_print_operand (stream, address, 0);
3283
}
3284
 
3285
/* Table mapping (operand-modifier code, encoded operand pattern)
   pairs to output templates; the interpreter is mep_print_operand.
   PATTERN is matched against the string built by encode_pattern
   (defined earlier in this file).  In FORMAT, a digit N prints
   patternr[N] (a sub-rtx captured during encoding), a backslash
   escapes the following literal character, and any other character
   is emitted verbatim.  A zero CODE matches a plain operand with no
   modifier.  */
static struct
{
  char code;            /* Operand-modifier letter, or 0 for none.  */
  const char *pattern;  /* Encoded operand shape to match.  */
  const char *format;   /* Output template; digits index patternr[].  */
}
const conversions[] =
{
  { 0, "r", "0" },
  { 0, "m+ri", "3(2)" },
  { 0, "mr", "(1)" },
  { 0, "ms", "(1)" },
  { 0, "ml", "(1)" },
  { 0, "mLrs", "%lo(3)(2)" },
  { 0, "mLr+si", "%lo(4+5)(2)" },
  { 0, "m+ru2s", "%tpoff(5)(2)" },
  { 0, "m+ru3s", "%sdaoff(5)(2)" },
  { 0, "m+r+u2si", "%tpoff(6+7)(2)" },
  { 0, "m+ru2+si", "%tpoff(6+7)(2)" },
  { 0, "m+r+u3si", "%sdaoff(6+7)(2)" },
  { 0, "m+ru3+si", "%sdaoff(6+7)(2)" },
  { 0, "mi", "(1)" },
  { 0, "m+si", "(2+3)" },
  { 0, "m+li", "(2+3)" },
  { 0, "i", "0" },
  { 0, "s", "0" },
  { 0, "+si", "1+2" },
  { 0, "+u2si", "%tpoff(3+4)" },
  { 0, "+u3si", "%sdaoff(3+4)" },
  { 0, "l", "0" },
  { 'b', "i", "0" },
  { 'B', "i", "0" },
  { 'U', "i", "0" },
  { 'h', "i", "0" },
  { 'h', "Hs", "%hi(1)" },
  { 'I', "i", "0" },
  { 'I', "u2s", "%tpoff(2)" },
  { 'I', "u3s", "%sdaoff(2)" },
  { 'I', "+u2si", "%tpoff(3+4)" },
  { 'I', "+u3si", "%sdaoff(3+4)" },
  { 'J', "i", "0" },
  { 'P', "mr", "(1\\+),\\0" },
  { 'x', "i", "0" },
  { 0, 0, 0 }
};
3330
 
3331
static int
3332
unique_bit_in (HOST_WIDE_INT i)
3333
{
3334
  switch (i & 0xff)
3335
    {
3336
    case 0x01: case 0xfe: return 0;
3337
    case 0x02: case 0xfd: return 1;
3338
    case 0x04: case 0xfb: return 2;
3339
    case 0x08: case 0xf7: return 3;
3340
    case 0x10: case 0x7f: return 4;
3341
    case 0x20: case 0xbf: return 5;
3342
    case 0x40: case 0xdf: return 6;
3343
    case 0x80: case 0xef: return 7;
3344
    default:
3345
      gcc_unreachable ();
3346
    }
3347
}
3348
 
3349
static int
3350
bit_size_for_clip (HOST_WIDE_INT i)
3351
{
3352
  int rv;
3353
 
3354
  for (rv = 0; rv < 31; rv ++)
3355
    if (((HOST_WIDE_INT) 1 << rv) > i)
3356
      return rv + 1;
3357
  gcc_unreachable ();
3358
}
3359
 
3360
/* Print an operand to an assembler instruction.  */
3361
 
3362
/* Print operand X to FILE according to the operand-modifier CODE.
   The '<', 'L' and 'M' modifiers (coprocessor move mnemonics and
   operands) are handled specially; every other operand is encoded by
   encode_pattern and printed through the conversions[] template
   table above.  */
void
mep_print_operand (FILE *file, rtx x, int code)
{
  int i, j;
  const char *real_name;

  if (code == '<')
    {
      /* Print a mnemonic to do CR <- CR moves.  Find out which intrinsic
         we're using, then skip over the "mep_" part of its name.  */
      const struct cgen_insn *insn;

      if (mep_get_move_insn (mep_cmov, &insn))
        fputs (cgen_intrinsics[insn->intrinsic] + 4, file);
      else
        mep_intrinsic_unavailable (mep_cmov);
      return;
    }
  if (code == 'L')
    {
      /* Print the bit-op suffix matching the rtx code.  */
      switch (GET_CODE (x))
        {
        case AND:
          fputs ("clr", file);
          return;
        case IOR:
          fputs ("set", file);
          return;
        case XOR:
          fputs ("not", file);
          return;
        default:
          output_operand_lossage ("invalid %%L code");
        }
    }
  if (code == 'M')
    {
      /* Print the second operand of a CR <- CR move.  If we're using
         a two-operand instruction (i.e., a real cmov), then just print
         the operand normally.  If we're using a "reg, reg, immediate"
         instruction such as caddi3, print the operand followed by a
         zero field.  If we're using a three-register instruction,
         print the operand twice.  */
      const struct cgen_insn *insn;

      mep_print_operand (file, x, 0);
      if (mep_get_move_insn (mep_cmov, &insn)
          && insn_data[insn->icode].n_operands == 3)
        {
          fputs (", ", file);
          if (insn_data[insn->icode].operand[2].predicate (x, VOIDmode))
            mep_print_operand (file, x, 0);
          else
            mep_print_operand (file, const0_rtx, 0);
        }
      return;
    }

  /* Generic path: find the template whose code and encoded pattern
     match, then interpret its format string.  */
  encode_pattern (x);
  for (i = 0; conversions[i].pattern; i++)
    if (conversions[i].code == code
        && strcmp(conversions[i].pattern, pattern) == 0)
      {
        for (j = 0; conversions[i].format[j]; j++)
          /* Backslash escapes the next literal character.  */
          if (conversions[i].format[j] == '\\')
            {
              fputc (conversions[i].format[j+1], file);
              j++;
            }
          /* A digit prints the captured sub-rtx patternr[digit].  */
          else if (ISDIGIT(conversions[i].format[j]))
            {
              rtx r = patternr[conversions[i].format[j] - '0'];
              switch (GET_CODE (r))
                {
                case REG:
                  fprintf (file, "%s", reg_names [REGNO (r)]);
                  break;
                case CONST_INT:
                  switch (code)
                    {
                    case 'b':
                      fprintf (file, "%d", unique_bit_in (INTVAL (r)));
                      break;
                    case 'B':
                      fprintf (file, "%d", bit_size_for_clip (INTVAL (r)));
                      break;
                    case 'h':
                      fprintf (file, "0x%x", ((int) INTVAL (r) >> 16) & 0xffff);
                      break;
                    case 'U':
                      fprintf (file, "%d", bit_size_for_clip (INTVAL (r)) - 1);
                      break;
                    case 'J':
                      fprintf (file, "0x%x", (int) INTVAL (r) & 0xffff);
                      break;
                    case 'x':
                      /* Hex for pure high-byte masks, decimal otherwise.  */
                      if (INTVAL (r) & ~(HOST_WIDE_INT)0xff
                          && !(INTVAL (r) & 0xff))
                        fprintf (file, HOST_WIDE_INT_PRINT_HEX, INTVAL(r));
                      else
                        fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL(r));
                      break;
                    case 'I':
                      if (INTVAL (r) & ~(HOST_WIDE_INT)0xff
                          && conversions[i].format[j+1] == 0)
                        {
                          fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (r));
                          fprintf (file, " # 0x%x", (int) INTVAL(r) & 0xffff);
                        }
                      else
                        fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL(r));
                      break;
                    default:
                      fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL(r));
                      break;
                    }
                  break;
                case CONST_DOUBLE:
                  fprintf(file, "[const_double 0x%lx]",
                          (unsigned long) CONST_DOUBLE_HIGH(r));
                  break;
                case SYMBOL_REF:
                  real_name = TARGET_STRIP_NAME_ENCODING (XSTR (r, 0));
                  assemble_name (file, real_name);
                  break;
                case LABEL_REF:
                  output_asm_label (r);
                  break;
                default:
                  fprintf (stderr, "don't know how to print this operand:");
                  debug_rtx (r);
                  gcc_unreachable ();
                }
            }
          else
            {
              /* Suppress the '+' between a register and a negative
                 constant; the constant's own sign serves as the
                 separator.  */
              if (conversions[i].format[j] == '+'
                  && (!code || code == 'I')
                  && ISDIGIT (conversions[i].format[j+1])
                  && GET_CODE (patternr[conversions[i].format[j+1] - '0']) == CONST_INT
                  && INTVAL (patternr[conversions[i].format[j+1] - '0']) < 0)
                continue;
              fputc(conversions[i].format[j], file);
            }
        break;
      }
  if (!conversions[i].pattern)
    {
      error ("unconvertible operand %c %qs", code?code:'-', pattern);
      debug_rtx(x);
    }

  return;
}
3516
 
3517
/* FINAL_PRESCAN_INSN hook: prefix INSN with '+' when it occupies a
   non-core VLIW slot.  NOTE(review): GET_MODE (insn) == BImode
   appears to be how this port marks bundled insns — confirm against
   the bundling code earlier in this file.  */
void
mep_final_prescan_insn (rtx insn, rtx *operands ATTRIBUTE_UNUSED,
                        int noperands ATTRIBUTE_UNUSED)
{
  /* Despite the fact that MeP is perfectly capable of branching and
     doing something else in the same bundle, gcc does jump
     optimization *after* scheduling, so we cannot trust the bundling
     flags on jump instructions.  */
  if (GET_MODE (insn) == BImode
      && get_attr_slots (insn) != SLOTS_CORE)
    fputc ('+', asm_out_file);
}
3529
 
3530
/* Function args in registers.  */
3531
 
3532
/* TARGET_SETUP_INCOMING_VARARGS: record how many of the four argument
   registers remain unnamed (so mep_expand_builtin_saveregs can spill
   them) and report the resulting pretend-args size.  CUM counts the
   named arguments already assigned.  */
static void
mep_setup_incoming_varargs (CUMULATIVE_ARGS *cum,
                            enum machine_mode mode ATTRIBUTE_UNUSED,
                            tree type ATTRIBUTE_UNUSED, int *pretend_size,
                            int second_time ATTRIBUTE_UNUSED)
{
  int nsave = 4 - (cum->nregs + 1);

  if (nsave > 0)
    cfun->machine->arg_regs_to_save = nsave;
  /* NOTE(review): when more than four arguments are named, nsave is
     negative and this reports a negative pretend size rather than 0
     — confirm callers tolerate that.  */
  *pretend_size = nsave * 4;
}
3544
 
3545
static int
3546
bytesize (const_tree type, enum machine_mode mode)
3547
{
3548
  if (mode == BLKmode)
3549
    return int_size_in_bytes (type);
3550
  return GET_MODE_SIZE (mode);
3551
}
3552
 
3553
/* EXPAND_BUILTIN_SAVEREGS: spill the unnamed argument registers to a
   stack buffer and return the buffer's address.  For IVC2 the buffer
   additionally holds the 64-bit argument registers starting at
   register 49, stored after an 8-byte-aligned core-register area.  */
static rtx
mep_expand_builtin_saveregs (void)
{
  int bufsize, i, ns;
  rtx regbuf;

  ns = cfun->machine->arg_regs_to_save;
  if (TARGET_IVC2)
    {
      /* Core registers rounded up to an 8-byte boundary, then one
         8-byte slot per register-49+ argument.  */
      bufsize = 8 * ((ns + 1) / 2) + 8 * ns;
      regbuf = assign_stack_local (SImode, bufsize, 64);
    }
  else
    {
      bufsize = ns * 4;
      regbuf = assign_stack_local (SImode, bufsize, 32);
    }

  /* The unnamed arguments occupy the last NS of registers $1..$4.  */
  move_block_from_reg (5-ns, regbuf, ns);

  if (TARGET_IVC2)
    {
      rtx tmp = gen_rtx_MEM (DImode, XEXP (regbuf, 0));
      int ofs = 8 * ((ns+1)/2);

      for (i=0; i<ns; i++)
        {
          int rn = (4-ns) + i + 49;
          rtx ptr;

          ptr = offset_address (tmp, GEN_INT (ofs), 2);
          emit_move_insn (ptr, gen_rtx_REG (DImode, rn));
          ofs += 8;
        }
    }
  return XEXP (regbuf, 0);
}
3590
 
3591
/* Nonzero iff T is a vector type; used below for IVC2 vector
   argument and return-value handling.  */
#define VECTOR_TYPE_P(t) (TREE_CODE(t) == VECTOR_TYPE)
3592
 
3593
static tree
3594
mep_build_builtin_va_list (void)
3595
{
3596
  tree f_next_gp, f_next_gp_limit, f_next_cop, f_next_stack;
3597
  tree record;
3598
 
3599
 
3600
  record = (*lang_hooks.types.make_type) (RECORD_TYPE);
3601
 
3602
  f_next_gp = build_decl (BUILTINS_LOCATION, FIELD_DECL,
3603
                          get_identifier ("__va_next_gp"), ptr_type_node);
3604
  f_next_gp_limit = build_decl (BUILTINS_LOCATION, FIELD_DECL,
3605
                                get_identifier ("__va_next_gp_limit"),
3606
                                ptr_type_node);
3607
  f_next_cop = build_decl (BUILTINS_LOCATION, FIELD_DECL, get_identifier ("__va_next_cop"),
3608
                           ptr_type_node);
3609
  f_next_stack = build_decl (BUILTINS_LOCATION, FIELD_DECL, get_identifier ("__va_next_stack"),
3610
                             ptr_type_node);
3611
 
3612
  DECL_FIELD_CONTEXT (f_next_gp) = record;
3613
  DECL_FIELD_CONTEXT (f_next_gp_limit) = record;
3614
  DECL_FIELD_CONTEXT (f_next_cop) = record;
3615
  DECL_FIELD_CONTEXT (f_next_stack) = record;
3616
 
3617
  TYPE_FIELDS (record) = f_next_gp;
3618
  TREE_CHAIN (f_next_gp) = f_next_gp_limit;
3619
  TREE_CHAIN (f_next_gp_limit) = f_next_cop;
3620
  TREE_CHAIN (f_next_cop) = f_next_stack;
3621
 
3622
  layout_type (record);
3623
 
3624
  return record;
3625
}
3626
 
3627
/* TARGET_EXPAND_BUILTIN_VA_START: fill in the four va_list fields
   from the register-save buffer built by mep_expand_builtin_saveregs
   and from NEXTARG (the first stack argument).  */
static void
mep_expand_va_start (tree valist, rtx nextarg)
{
  tree f_next_gp, f_next_gp_limit, f_next_cop, f_next_stack;
  tree next_gp, next_gp_limit, next_cop, next_stack;
  tree t, u;
  int ns;

  ns = cfun->machine->arg_regs_to_save;

  f_next_gp = TYPE_FIELDS (va_list_type_node);
  f_next_gp_limit = TREE_CHAIN (f_next_gp);
  f_next_cop = TREE_CHAIN (f_next_gp_limit);
  f_next_stack = TREE_CHAIN (f_next_cop);

  next_gp = build3 (COMPONENT_REF, TREE_TYPE (f_next_gp), valist, f_next_gp,
                    NULL_TREE);
  next_gp_limit = build3 (COMPONENT_REF, TREE_TYPE (f_next_gp_limit),
                          valist, f_next_gp_limit, NULL_TREE);
  next_cop = build3 (COMPONENT_REF, TREE_TYPE (f_next_cop), valist, f_next_cop,
                     NULL_TREE);
  next_stack = build3 (COMPONENT_REF, TREE_TYPE (f_next_stack),
                       valist, f_next_stack, NULL_TREE);

  /* va_list.next_gp = expand_builtin_saveregs (); */
  u = make_tree (sizetype, expand_builtin_saveregs ());
  u = fold_convert (ptr_type_node, u);
  t = build2 (MODIFY_EXPR, ptr_type_node, next_gp, u);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  /* va_list.next_gp_limit = va_list.next_gp + 4 * ns; */
  u = fold_build2 (POINTER_PLUS_EXPR, ptr_type_node, u,
                   size_int (4 * ns));
  t = build2 (MODIFY_EXPR, ptr_type_node, next_gp_limit, u);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  /* The coprocessor save area starts at the 8-byte-aligned offset
     past the GPR area; see mep_expand_builtin_saveregs.  */
  u = fold_build2 (POINTER_PLUS_EXPR, ptr_type_node, u,
                   size_int (8 * ((ns+1)/2)));
  /* va_list.next_cop = ROUND_UP(va_list.next_gp_limit,8); */
  t = build2 (MODIFY_EXPR, ptr_type_node, next_cop, u);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  /* va_list.next_stack = nextarg; */
  u = make_tree (ptr_type_node, nextarg);
  t = build2 (MODIFY_EXPR, ptr_type_node, next_stack, u);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
}
3678
 
3679
/* TARGET_GIMPLIFY_VA_ARG_EXPR: emit gimple for va_arg of TYPE.
   Values still inside the GPR save area come from next_gp (or, for
   IVC2 vectors, from the parallel next_cop area); afterwards they
   come from next_stack.  Values larger than a register slot (4
   bytes, or 8 for IVC2 vectors) are passed by reference and read
   through an extra indirection.  */
static tree
mep_gimplify_va_arg_expr (tree valist, tree type,
                          gimple_seq *pre_p,
                          gimple_seq *post_p ATTRIBUTE_UNUSED)
{
  HOST_WIDE_INT size, rsize;
  bool by_reference, ivc2_vec;
  tree f_next_gp, f_next_gp_limit, f_next_cop, f_next_stack;
  tree next_gp, next_gp_limit, next_cop, next_stack;
  tree label_sover, label_selse;
  tree tmp, res_addr;

  ivc2_vec = TARGET_IVC2 && VECTOR_TYPE_P (type);

  size = int_size_in_bytes (type);
  by_reference = (size > (ivc2_vec ? 8 : 4)) || (size <= 0);

  if (by_reference)
    {
      /* By-reference arguments are fetched as a pointer to the
         real value.  */
      type = build_pointer_type (type);
      size = 4;
    }
  /* Stack slots are rounded up to whole words.  */
  rsize = (size + UNITS_PER_WORD - 1) & -UNITS_PER_WORD;

  f_next_gp = TYPE_FIELDS (va_list_type_node);
  f_next_gp_limit = TREE_CHAIN (f_next_gp);
  f_next_cop = TREE_CHAIN (f_next_gp_limit);
  f_next_stack = TREE_CHAIN (f_next_cop);

  next_gp = build3 (COMPONENT_REF, TREE_TYPE (f_next_gp), valist, f_next_gp,
                    NULL_TREE);
  next_gp_limit = build3 (COMPONENT_REF, TREE_TYPE (f_next_gp_limit),
                          valist, f_next_gp_limit, NULL_TREE);
  next_cop = build3 (COMPONENT_REF, TREE_TYPE (f_next_cop), valist, f_next_cop,
                     NULL_TREE);
  next_stack = build3 (COMPONENT_REF, TREE_TYPE (f_next_stack),
                       valist, f_next_stack, NULL_TREE);

  /* if f_next_gp < f_next_gp_limit
       IF (VECTOR_P && IVC2)
         val = *f_next_cop;
       ELSE
         val = *f_next_gp;
       f_next_gp += 4;
       f_next_cop += 8;
     else
       label_selse:
       val = *f_next_stack;
       f_next_stack += rsize;
     label_sover:
  */

  label_sover = create_artificial_label (UNKNOWN_LOCATION);
  label_selse = create_artificial_label (UNKNOWN_LOCATION);
  res_addr = create_tmp_var (ptr_type_node, NULL);

  /* Branch to the stack path once the register area is exhausted.  */
  tmp = build2 (GE_EXPR, boolean_type_node, next_gp,
                unshare_expr (next_gp_limit));
  tmp = build3 (COND_EXPR, void_type_node, tmp,
                build1 (GOTO_EXPR, void_type_node,
                        unshare_expr (label_selse)),
                NULL_TREE);
  gimplify_and_add (tmp, pre_p);

  if (ivc2_vec)
    {
      tmp = build2 (MODIFY_EXPR, void_type_node, res_addr, next_cop);
      gimplify_and_add (tmp, pre_p);
    }
  else
    {
      tmp = build2 (MODIFY_EXPR, void_type_node, res_addr, next_gp);
      gimplify_and_add (tmp, pre_p);
    }

  /* Both save-area cursors advance in lock-step regardless of which
     one was read.  */
  tmp = build2 (POINTER_PLUS_EXPR, ptr_type_node,
                unshare_expr (next_gp), size_int (4));
  gimplify_assign (unshare_expr (next_gp), tmp, pre_p);

  tmp = build2 (POINTER_PLUS_EXPR, ptr_type_node,
                unshare_expr (next_cop), size_int (8));
  gimplify_assign (unshare_expr (next_cop), tmp, pre_p);

  tmp = build1 (GOTO_EXPR, void_type_node, unshare_expr (label_sover));
  gimplify_and_add (tmp, pre_p);

  /* - - */

  tmp = build1 (LABEL_EXPR, void_type_node, unshare_expr (label_selse));
  gimplify_and_add (tmp, pre_p);

  tmp = build2 (MODIFY_EXPR, void_type_node, res_addr, unshare_expr (next_stack));
  gimplify_and_add (tmp, pre_p);

  tmp = build2 (POINTER_PLUS_EXPR, ptr_type_node,
                unshare_expr (next_stack), size_int (rsize));
  gimplify_assign (unshare_expr (next_stack), tmp, pre_p);

  /* - - */

  tmp = build1 (LABEL_EXPR, void_type_node, unshare_expr (label_sover));
  gimplify_and_add (tmp, pre_p);

  res_addr = fold_convert (build_pointer_type (type), res_addr);

  if (by_reference)
    res_addr = build_va_arg_indirect_ref (res_addr);

  return build_va_arg_indirect_ref (res_addr);
}
3789
 
3790
void
3791
mep_init_cumulative_args (CUMULATIVE_ARGS *pcum, tree fntype,
3792
                          rtx libname ATTRIBUTE_UNUSED,
3793
                          tree fndecl ATTRIBUTE_UNUSED)
3794
{
3795
  pcum->nregs = 0;
3796
 
3797
  if (fntype && lookup_attribute ("vliw", TYPE_ATTRIBUTES (fntype)))
3798
    pcum->vliw = 1;
3799
  else
3800
    pcum->vliw = 0;
3801
}
3802
 
3803
rtx
3804
mep_function_arg (CUMULATIVE_ARGS cum, enum machine_mode mode,
3805
                  tree type ATTRIBUTE_UNUSED, int named ATTRIBUTE_UNUSED)
3806
{
3807
  /* VOIDmode is a signal for the backend to pass data to the call
3808
     expander via the second operand to the call pattern.  We use
3809
     this to determine whether to use "jsr" or "jsrv".  */
3810
  if (mode == VOIDmode)
3811
    return GEN_INT (cum.vliw);
3812
 
3813
  /* If we havn't run out of argument registers, return the next.  */
3814
  if (cum.nregs < 4)
3815
    {
3816
      if (type && TARGET_IVC2 && VECTOR_TYPE_P (type))
3817
        return gen_rtx_REG (mode, cum.nregs + 49);
3818
      else
3819
        return gen_rtx_REG (mode, cum.nregs + 1);
3820
    }
3821
 
3822
  /* Otherwise the argument goes on the stack.  */
3823
  return NULL_RTX;
3824
}
3825
 
3826
static bool
3827
mep_pass_by_reference (CUMULATIVE_ARGS * cum ATTRIBUTE_UNUSED,
3828
                       enum machine_mode mode,
3829
                       const_tree        type,
3830
                       bool              named ATTRIBUTE_UNUSED)
3831
{
3832
  int size = bytesize (type, mode);
3833
 
3834
  /* This is non-obvious, but yes, large values passed after we've run
3835
     out of registers are *still* passed by reference - we put the
3836
     address of the parameter on the stack, as well as putting the
3837
     parameter itself elsewhere on the stack.  */
3838
 
3839
  if (size <= 0 || size > 8)
3840
    return true;
3841
  if (size <= 4)
3842
    return false;
3843
  if (TARGET_IVC2 && cum->nregs < 4 && type != NULL_TREE && VECTOR_TYPE_P (type))
3844
    return false;
3845
  return true;
3846
}
3847
 
3848
/* FUNCTION_ARG_ADVANCE: every argument, regardless of mode or type,
   consumes exactly one argument register slot.  */
void
mep_arg_advance (CUMULATIVE_ARGS *pcum,
                 enum machine_mode mode ATTRIBUTE_UNUSED,
                 tree type ATTRIBUTE_UNUSED, int named ATTRIBUTE_UNUSED)
{
  pcum->nregs += 1;
}
3855
 
3856
bool
3857
mep_return_in_memory (const_tree type, const_tree decl ATTRIBUTE_UNUSED)
3858
{
3859
  int size = bytesize (type, BLKmode);
3860
  if (TARGET_IVC2 && VECTOR_TYPE_P (type))
3861
    return size > 0 && size <= 8 ? 0 : 1;
3862
  return size > 0 && size <= 4 ? 0 : 1;
3863
}
3864
 
3865
/* Implement TARGET_NARROW_VOLATILE_BITFIELD: volatile bitfields are
   always accessed in their declared (narrow) width on this target.
   The original body contained an unreachable "return false;" after the
   unconditional "return true;"; the dead statement has been removed.  */

static bool
mep_narrow_volatile_bitfield (void)
{
  return true;
}
3871
 
3872
/* Implement FUNCTION_VALUE.  All values are returned in $0.  */
3873
 
3874
rtx
3875
mep_function_value (tree type, tree func ATTRIBUTE_UNUSED)
3876
{
3877
  if (TARGET_IVC2 && VECTOR_TYPE_P (type))
3878
    return gen_rtx_REG (TYPE_MODE (type), 48);
3879
  return gen_rtx_REG (TYPE_MODE (type), RETURN_VALUE_REGNUM);
3880
}
3881
 
3882
/* Implement LIBCALL_VALUE, using the same rules as mep_function_value.  */
3883
 
3884
rtx
3885
mep_libcall_value (enum machine_mode mode)
3886
{
3887
  return gen_rtx_REG (mode, RETURN_VALUE_REGNUM);
3888
}
3889
 
3890
/* Handle pipeline hazards.  */

/* The opcode classes we track for stc/fsft and stc/ret hazards.  */
typedef enum { op_none, op_stc, op_fsft, op_ret } op_num;
/* Printable names, indexed by op_num, used in the hazard comment.  */
static const char *opnames[] = { "", "stc", "fsft", "ret" };

/* Class of the previously emitted opcode (an op_num value; starts as
   op_none).  */
static int prev_opcode = 0;
3896
 
3897
/* This isn't as optimal as it could be, because we don't know what
3898
   control register the STC opcode is storing in.  We only need to add
3899
   the nop if it's the relevent register, but we add it for irrelevent
3900
   registers also.  */
3901
 
3902
void
3903
mep_asm_output_opcode (FILE *file, const char *ptr)
3904
{
3905
  int this_opcode = op_none;
3906
  const char *hazard = 0;
3907
 
3908
  switch (*ptr)
3909
    {
3910
    case 'f':
3911
      if (strncmp (ptr, "fsft", 4) == 0 && !ISGRAPH (ptr[4]))
3912
        this_opcode = op_fsft;
3913
      break;
3914
    case 'r':
3915
      if (strncmp (ptr, "ret", 3) == 0 && !ISGRAPH (ptr[3]))
3916
        this_opcode = op_ret;
3917
      break;
3918
    case 's':
3919
      if (strncmp (ptr, "stc", 3) == 0 && !ISGRAPH (ptr[3]))
3920
        this_opcode = op_stc;
3921
      break;
3922
    }
3923
 
3924
  if (prev_opcode == op_stc && this_opcode == op_fsft)
3925
    hazard = "nop";
3926
  if (prev_opcode == op_stc && this_opcode == op_ret)
3927
    hazard = "nop";
3928
 
3929
  if (hazard)
3930
    fprintf(file, "%s\t# %s-%s hazard\n\t",
3931
            hazard, opnames[prev_opcode], opnames[this_opcode]);
3932
 
3933
  prev_opcode = this_opcode;
3934
}
3935
 
3936
/* Handle attributes.  */
3937
 
3938
static tree
3939
mep_validate_based_tiny (tree *node, tree name, tree args,
3940
                         int flags ATTRIBUTE_UNUSED, bool *no_add)
3941
{
3942
  if (TREE_CODE (*node) != VAR_DECL
3943
      && TREE_CODE (*node) != POINTER_TYPE
3944
      && TREE_CODE (*node) != TYPE_DECL)
3945
    {
3946
      warning (0, "%qE attribute only applies to variables", name);
3947
      *no_add = true;
3948
    }
3949
  else if (args == NULL_TREE && TREE_CODE (*node) == VAR_DECL)
3950
    {
3951
      if (! (TREE_PUBLIC (*node) || TREE_STATIC (*node)))
3952
        {
3953
          warning (0, "address region attributes not allowed with auto storage class");
3954
          *no_add = true;
3955
        }
3956
      /* Ignore storage attribute of pointed to variable: char __far * x;  */
3957
      if (TREE_TYPE (*node) && TREE_CODE (TREE_TYPE (*node)) == POINTER_TYPE)
3958
        {
3959
          warning (0, "address region attributes on pointed-to types ignored");
3960
          *no_add = true;
3961
        }
3962
    }
3963
 
3964
  return NULL_TREE;
3965
}
3966
 
3967
static int
3968
mep_multiple_address_regions (tree list, bool check_section_attr)
3969
{
3970
  tree a;
3971
  int count_sections = 0;
3972
  int section_attr_count = 0;
3973
 
3974
  for (a = list; a; a = TREE_CHAIN (a))
3975
    {
3976
      if (is_attribute_p ("based", TREE_PURPOSE (a))
3977
          || is_attribute_p ("tiny", TREE_PURPOSE (a))
3978
          || is_attribute_p ("near", TREE_PURPOSE (a))
3979
          || is_attribute_p ("far", TREE_PURPOSE (a))
3980
          || is_attribute_p ("io", TREE_PURPOSE (a)))
3981
        count_sections ++;
3982
      if (check_section_attr)
3983
        section_attr_count += is_attribute_p ("section", TREE_PURPOSE (a));
3984
    }
3985
 
3986
  if (check_section_attr)
3987
    return section_attr_count;
3988
  else
3989
    return count_sections;
3990
}
3991
 
3992
/* Fetch the attribute list relevant to DECL: for a type node, its type
   attributes; for a decl, its own attributes, falling back to the
   attributes of its type when the decl has none.  The whole expansion
   is now parenthesized (the original was not), so the macro is safe to
   use inside larger expressions.  */
#define MEP_ATTRIBUTES(decl)                            \
  ((TYPE_P (decl)) ? TYPE_ATTRIBUTES (decl)             \
                : DECL_ATTRIBUTES (decl)                \
                  ? (DECL_ATTRIBUTES (decl))            \
                  : TYPE_ATTRIBUTES (TREE_TYPE (decl)))
3997
 
3998
/* Attribute handler for "near" and "far".  Like mep_validate_based_tiny
   but also accepted on functions, and additionally rejects a second
   address-region attribute on the same declaration.  */

static tree
mep_validate_near_far (tree *node, tree name, tree args,
                       int flags ATTRIBUTE_UNUSED, bool *no_add)
{
  if (TREE_CODE (*node) != VAR_DECL
      && TREE_CODE (*node) != FUNCTION_DECL
      && TREE_CODE (*node) != METHOD_TYPE
      && TREE_CODE (*node) != POINTER_TYPE
      && TREE_CODE (*node) != TYPE_DECL)
    {
      warning (0, "%qE attribute only applies to variables and functions",
               name);
      *no_add = true;
    }
  else if (args == NULL_TREE && TREE_CODE (*node) == VAR_DECL)
    {
      /* Address-region attributes require static storage duration.  */
      if (! (TREE_PUBLIC (*node) || TREE_STATIC (*node)))
        {
          warning (0, "address region attributes not allowed with auto storage class");
          *no_add = true;
        }
      /* Ignore storage attribute of pointed to variable: char __far * x;  */
      if (TREE_TYPE (*node) && TREE_CODE (TREE_TYPE (*node)) == POINTER_TYPE)
        {
          warning (0, "address region attributes on pointed-to types ignored");
          *no_add = true;
        }
    }
  else if (mep_multiple_address_regions (MEP_ATTRIBUTES (*node), false) > 0)
    {
      /* A region attribute is already present: warn, and drop the old
         attributes so the new one wins.  */
      warning (0, "duplicate address region attribute %qE in declaration of %qE on line %d",
               name, DECL_NAME (*node), DECL_SOURCE_LINE (*node));
      DECL_ATTRIBUTES (*node) = NULL_TREE;
    }
  return NULL_TREE;
}
4034
 
4035
static tree
4036
mep_validate_disinterrupt (tree *node, tree name, tree args ATTRIBUTE_UNUSED,
4037
                           int flags ATTRIBUTE_UNUSED, bool *no_add)
4038
{
4039
  if (TREE_CODE (*node) != FUNCTION_DECL
4040
      && TREE_CODE (*node) != METHOD_TYPE)
4041
    {
4042
      warning (0, "%qE attribute only applies to functions", name);
4043
      *no_add = true;
4044
    }
4045
  return NULL_TREE;
4046
}
4047
 
4048
/* Attribute handler for "interrupt".  Interrupt handlers may not be
   inlined (they need the special prologue/epilogue), must return void,
   and must take no arguments.  */

static tree
mep_validate_interrupt (tree *node, tree name, tree args ATTRIBUTE_UNUSED,
                        int flags ATTRIBUTE_UNUSED, bool *no_add)
{
  tree function_type;

  if (TREE_CODE (*node) != FUNCTION_DECL)
    {
      warning (0, "%qE attribute only applies to functions", name);
      *no_add = true;
      return NULL_TREE;
    }

  /* An inlined copy would not get the interrupt entry/exit code.  */
  if (DECL_DECLARED_INLINE_P (*node))
    error ("cannot inline interrupt function %qE", DECL_NAME (*node));
  DECL_UNINLINABLE (*node) = 1;

  function_type = TREE_TYPE (*node);

  if (TREE_TYPE (function_type) != void_type_node)
    error ("interrupt function must have return type of void");

  /* Accept an unprototyped arg list or exactly "(void)"; anything
     else is an argument list, which is not allowed.  */
  if (TYPE_ARG_TYPES (function_type)
      && (TREE_VALUE (TYPE_ARG_TYPES (function_type)) != void_type_node
          || TREE_CHAIN (TYPE_ARG_TYPES (function_type)) != NULL_TREE))
    error ("interrupt function must have no arguments");

  return NULL_TREE;
}
4077
 
4078
/* Attribute handler for "io" and "cb".  Both apply only to variables
   and take an optional integer-constant address.  Variables so tagged
   are made volatile unless TARGET_IO_NO_VOLATILE is set.  */

static tree
mep_validate_io_cb (tree *node, tree name, tree args,
                    int flags ATTRIBUTE_UNUSED, bool *no_add)
{
  if (TREE_CODE (*node) != VAR_DECL)
    {
      warning (0, "%qE attribute only applies to variables", name);
      *no_add = true;
    }

  if (args != NULL_TREE)
    {
      /* Strip a NON_LVALUE_EXPR wrapper (e.g. a parenthesized constant)
         so the INTEGER_CST check below sees the constant itself.  */
      if (TREE_CODE (TREE_VALUE (args)) == NON_LVALUE_EXPR)
        TREE_VALUE (args) = TREE_OPERAND (TREE_VALUE (args), 0);
      if (TREE_CODE (TREE_VALUE (args)) != INTEGER_CST)
        {
          warning (0, "%qE attribute allows only an integer constant argument",
                   name);
          *no_add = true;
        }
    }

  if (*no_add == false && !TARGET_IO_NO_VOLATILE)
    TREE_THIS_VOLATILE (*node) = 1;

  return NULL_TREE;
}
4105
 
4106
/* Attribute handler for "vliw".  Only function-ish nodes may carry it;
   for the common mistakes (applying it to a pointer or array) we emit
   a one-time hint showing the correct typedef syntax.  */

static tree
mep_validate_vliw (tree *node, tree name, tree args ATTRIBUTE_UNUSED,
                   int flags ATTRIBUTE_UNUSED, bool *no_add)
{
  if (TREE_CODE (*node) != FUNCTION_TYPE
      && TREE_CODE (*node) != FUNCTION_DECL
      && TREE_CODE (*node) != METHOD_TYPE
      && TREE_CODE (*node) != FIELD_DECL
      && TREE_CODE (*node) != TYPE_DECL)
    {
      /* Notes are emitted at most once per compilation.  */
      static int gave_pointer_note = 0;
      static int gave_array_note = 0;
      static const char * given_type = NULL;

      given_type = tree_code_name[TREE_CODE (*node)];
      if (TREE_CODE (*node) == POINTER_TYPE)
        given_type = "pointers";
      if (TREE_CODE (*node) == ARRAY_TYPE)
        given_type = "arrays";

      /* NOTE(review): given_type is always non-NULL after the
         assignment above, so the else branch below appears to be
         unreachable — confirm tree_code_name entries can't be NULL.  */
      if (given_type)
        warning (0, "%qE attribute only applies to functions, not %s",
                 name, given_type);
      else
        warning (0, "%qE attribute only applies to functions",
                 name);
      *no_add = true;

      if (TREE_CODE (*node) == POINTER_TYPE
          && !gave_pointer_note)
        {
          inform (input_location, "To describe a pointer to a VLIW function, use syntax like this:");
          inform (input_location, "  typedef int (__vliw *vfuncptr) ();");
          gave_pointer_note = 1;
        }

      if (TREE_CODE (*node) == ARRAY_TYPE
          && !gave_array_note)
        {
          inform (input_location, "To describe an array of VLIW function pointers, use syntax like this:");
          inform (input_location, "  typedef int (__vliw *vfuncptr[]) ();");
          gave_array_note = 1;
        }
    }
  /* Even a well-placed attribute is an error on non-VLIW configs.  */
  if (!TARGET_VLIW)
    error ("VLIW functions are not allowed without a VLIW configuration");
  return NULL_TREE;
}
4154
 
4155
static const struct attribute_spec mep_attribute_table[11] =
4156
{
4157
  /* name         min max decl   type   func   handler */
4158
  { "based",        0, 0, false, false, false, mep_validate_based_tiny },
4159
  { "tiny",         0, 0, false, false, false, mep_validate_based_tiny },
4160
  { "near",         0, 0, false, false, false, mep_validate_near_far },
4161
  { "far",          0, 0, false, false, false, mep_validate_near_far },
4162
  { "disinterrupt", 0, 0, false, false, false, mep_validate_disinterrupt },
4163
  { "interrupt",    0, 0, false, false, false, mep_validate_interrupt },
4164
  { "io",           0, 1, false, false, false, mep_validate_io_cb },
4165
  { "cb",           0, 1, false, false, false, mep_validate_io_cb },
4166
  { "vliw",         0, 0, false, true,  false, mep_validate_vliw },
4167
  { NULL,           0, 0, false, false, false, NULL }
4168
};
4169
 
4170
static bool
4171
mep_function_attribute_inlinable_p (const_tree callee)
4172
{
4173
  tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (callee));
4174
  if (!attrs) attrs = DECL_ATTRIBUTES (callee);
4175
  return (lookup_attribute ("disinterrupt", attrs) == 0
4176
          && lookup_attribute ("interrupt", attrs) == 0);
4177
}
4178
 
4179
static bool
4180
mep_can_inline_p (tree caller, tree callee)
4181
{
4182
  if (TREE_CODE (callee) == ADDR_EXPR)
4183
    callee = TREE_OPERAND (callee, 0);
4184
 
4185
  if (!mep_vliw_function_p (caller)
4186
      && mep_vliw_function_p (callee))
4187
    {
4188
      return false;
4189
    }
4190
  return true;
4191
}
4192
 
4193
/* Bit flags recorded per function name for "#pragma call" and
   "#pragma disinterrupt" (see struct pragma_entry below).  */
#define FUNC_CALL               1
#define FUNC_DISINTERRUPT       2
4195
 
4196
 
4197
/* One GC-allocated record per function name mentioned in a
   "#pragma call" or "#pragma disinterrupt" directive.  */
struct GTY(()) pragma_entry {
  int used;                     /* FUNC_* flags that were acted upon.  */
  int flag;                     /* FUNC_* flags requested by pragmas.  */
  const char *funcname;         /* GC-strdup'd key.  */
};
typedef struct pragma_entry pragma_entry;
4203
 
4204
/* Hash table mapping function names to their pragma_entry records.
   (The keys are function names from #pragma call/disinterrupt, not
   section names as the original comment claimed.)  */
static GTY((param_is (pragma_entry))) htab_t pragma_htab;
4206
 
4207
static int
4208
pragma_entry_eq (const void *p1, const void *p2)
4209
{
4210
  const pragma_entry *old = (const pragma_entry *) p1;
4211
  const char *new_name = (const char *) p2;
4212
 
4213
  return strcmp (old->funcname, new_name) == 0;
4214
}
4215
 
4216
static hashval_t
4217
pragma_entry_hash (const void *p)
4218
{
4219
  const pragma_entry *old = (const pragma_entry *) p;
4220
  return htab_hash_string (old->funcname);
4221
}
4222
 
4223
/* Record FLAG (FUNC_CALL or FUNC_DISINTERRUPT) for FUNCNAME, creating
   the hash table and/or the entry on first use.  */

static void
mep_note_pragma_flag (const char *funcname, int flag)
{
  pragma_entry **slot;

  if (!pragma_htab)
    pragma_htab = htab_create_ggc (31, pragma_entry_hash,
                                    pragma_entry_eq, NULL);

  slot = (pragma_entry **)
    htab_find_slot_with_hash (pragma_htab, funcname,
                              htab_hash_string (funcname), INSERT);

  if (!*slot)
    {
      /* First pragma for this name: allocate a fresh GC'd entry with
         its own copy of the name.  */
      *slot = GGC_NEW (pragma_entry);
      (*slot)->flag = 0;
      (*slot)->used = 0;
      (*slot)->funcname = ggc_strdup (funcname);
    }
  (*slot)->flag |= flag;
}
4245
 
4246
/* Return true if FUNCNAME was tagged with FLAG by a pragma, marking
   the flag as used so unused pragmas can be reported at end of file.
   FUNCNAME may carry an "@x." section-encoding prefix, which is
   skipped before the lookup.  */

static bool
mep_lookup_pragma_flag (const char *funcname, int flag)
{
  pragma_entry **slot;

  if (!pragma_htab)
    return false;

  if (funcname[0] == '@' && funcname[2] == '.')
    funcname += 3;

  slot = (pragma_entry **)
    htab_find_slot_with_hash (pragma_htab, funcname,
                              htab_hash_string (funcname), NO_INSERT);
  if (slot && *slot && ((*slot)->flag & flag))
    {
      (*slot)->used |= flag;
      return true;
    }
  return false;
}
4267
 
4268
/* Return true if FUNCNAME was named in a "#pragma call".  */
bool
mep_lookup_pragma_call (const char *funcname)
{
  return mep_lookup_pragma_flag (funcname, FUNC_CALL);
}
4273
 
4274
/* Record that FUNCNAME was named in a "#pragma call".  */
void
mep_note_pragma_call (const char *funcname)
{
  mep_note_pragma_flag (funcname, FUNC_CALL);
}
4279
 
4280
/* Return true if FUNCNAME was named in a "#pragma disinterrupt".  */
bool
mep_lookup_pragma_disinterrupt (const char *funcname)
{
  return mep_lookup_pragma_flag (funcname, FUNC_DISINTERRUPT);
}
4285
 
4286
/* Record that FUNCNAME was named in a "#pragma disinterrupt".  */
void
mep_note_pragma_disinterrupt (const char *funcname)
{
  mep_note_pragma_flag (funcname, FUNC_DISINTERRUPT);
}
4291
 
4292
/* htab_traverse callback: warn about a "#pragma disinterrupt" that was
   never matched by a function definition.  Returns 1 so the traversal
   continues over all entries.  */

static int
note_unused_pragma_disinterrupt (void **slot, void *data ATTRIBUTE_UNUSED)
{
  const pragma_entry *d = (const pragma_entry *)(*slot);

  if ((d->flag & FUNC_DISINTERRUPT)
      && !(d->used & FUNC_DISINTERRUPT))
    warning (0, "\"#pragma disinterrupt %s\" not used", d->funcname);
  return 1;
}
4302
 
4303
/* End-of-compilation diagnostics: report any #pragma disinterrupt
   directives that never matched a function.  */
void
mep_file_cleanups (void)
{
  if (pragma_htab)
    htab_traverse (pragma_htab, note_unused_pragma_disinterrupt, NULL);
}
4309
 
4310
 
4311
/* Map the first address-region attribute in LIST to its one-letter
   section encoding: 'b' based, 't' tiny, 'n' near, 'f' far, 'i' io
   with an in-range constant address, 'I' io otherwise, 'c' cb.
   Returns 0 when no region attribute applies.  DECL is used for
   diagnostics and for the -mtf function default.  */

static int
mep_attrlist_to_encoding (tree list, tree decl)
{
  if (mep_multiple_address_regions (list, false) > 1)
    {
      /* More than one region attribute: warn, then keep only the
         first by truncating the chain.  */
      warning (0, "duplicate address region attribute %qE in declaration of %qE on line %d",
               TREE_PURPOSE (TREE_CHAIN (list)),
               DECL_NAME (decl),
               DECL_SOURCE_LINE (decl));
      TREE_CHAIN (list) = NULL_TREE;
    }

  while (list)
    {
      if (is_attribute_p ("based", TREE_PURPOSE (list)))
        return 'b';
      if (is_attribute_p ("tiny", TREE_PURPOSE (list)))
        return 't';
      if (is_attribute_p ("near", TREE_PURPOSE (list)))
        return 'n';
      if (is_attribute_p ("far", TREE_PURPOSE (list)))
        return 'f';
      if (is_attribute_p ("io", TREE_PURPOSE (list)))
        {
          /* 'i' when the io address is a constant within the 24-bit
             near space; 'I' otherwise.  */
          if (TREE_VALUE (list)
              && TREE_VALUE (TREE_VALUE (list))
              && TREE_CODE (TREE_VALUE (TREE_VALUE (list))) == INTEGER_CST)
            {
              int location = TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE(list)));
              if (location >= 0
                  && location <= 0x1000000)
                return 'i';
            }
          return 'I';
        }
      if (is_attribute_p ("cb", TREE_PURPOSE (list)))
        return 'c';
      list = TREE_CHAIN (list);
    }
  /* With -mtf, functions without an explicit section default to far.  */
  if (TARGET_TF
      && TREE_CODE (decl) == FUNCTION_DECL
      && DECL_SECTION_NAME (decl) == 0)
    return 'f';
  return 0;
}
4356
 
4357
static int
4358
mep_comp_type_attributes (const_tree t1, const_tree t2)
4359
{
4360
  int vliw1, vliw2;
4361
 
4362
  vliw1 = (lookup_attribute ("vliw", TYPE_ATTRIBUTES (t1)) != 0);
4363
  vliw2 = (lookup_attribute ("vliw", TYPE_ATTRIBUTES (t2)) != 0);
4364
 
4365
  if (vliw1 != vliw2)
4366
    return 0;
4367
 
4368
  return 1;
4369
}
4370
 
4371
/* Implement TARGET_INSERT_ATTRIBUTES.  Adds "disinterrupt" to functions
   named by #pragma disinterrupt, and gives static-storage variables a
   default address-region attribute (based/tiny/far) based on their
   size and the -mbased/-mtiny/-ml/-mconst options, unless an explicit
   region attribute is already present.  */

static void
mep_insert_attributes (tree decl, tree *attributes)
{
  int size;
  const char *secname = 0;
  tree attrib, attrlist;
  char encoding;

  if (TREE_CODE (decl) == FUNCTION_DECL)
    {
      const char *funcname = IDENTIFIER_POINTER (DECL_NAME (decl));

      /* Honor "#pragma disinterrupt <name>" by attaching the
         attribute here.  */
      if (mep_lookup_pragma_disinterrupt (funcname))
        {
          attrib = build_tree_list (get_identifier ("disinterrupt"), NULL_TREE);
          *attributes = chainon (*attributes, attrib);
        }
    }

  /* Default region attributes apply only to static-storage variables.  */
  if (TREE_CODE (decl) != VAR_DECL
      || ! (TREE_PUBLIC (decl) || TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
    return;

  if (TREE_READONLY (decl) && TARGET_DC)
    /* -mdc means that const variables default to the near section,
       regardless of the size cutoff.  */
    return;

  /* User specified an attribute, so override the default.
     Ignore storage attribute of pointed to variable. char __far * x;  */
  if (! (TREE_TYPE (decl) && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE))
    {
      if (TYPE_P (decl) && TYPE_ATTRIBUTES (decl) && *attributes)
        TYPE_ATTRIBUTES (decl) = NULL_TREE;
      else if (DECL_ATTRIBUTES (decl) && *attributes)
        DECL_ATTRIBUTES (decl) = NULL_TREE;
    }

  /* Look for an existing region encoding on the decl, then on its
     type.  */
  attrlist = *attributes ? *attributes : DECL_ATTRIBUTES (decl);
  encoding = mep_attrlist_to_encoding (attrlist, decl);
  if (!encoding && TYPE_P (TREE_TYPE (decl)))
    {
      attrlist = TYPE_ATTRIBUTES (TREE_TYPE (decl));
      encoding = mep_attrlist_to_encoding (attrlist, decl);
    }
  if (encoding)
    {
      /* This means that the declaration has a specific section
         attribute, so we should not apply the default rules.  */

      if (encoding == 'i' || encoding == 'I')
        {
          tree attr = lookup_attribute ("io", attrlist);
          if (attr
              && TREE_VALUE (attr)
              && TREE_VALUE (TREE_VALUE(attr)))
            {
              int location = TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE(attr)));
              static tree previous_value = 0;
              static int previous_location = 0;
              static tree previous_name = 0;

              /* We take advantage of the fact that gcc will reuse the
                 same tree pointer when applying an attribute to a
                 list of decls, but produce a new tree for attributes
                 on separate source lines, even when they're textually
                 identical.  This is the behavior we want.  */
              if (TREE_VALUE (attr) == previous_value
                  && location == previous_location)
                {
                  warning(0, "__io address 0x%x is the same for %qE and %qE",
                          location, previous_name, DECL_NAME (decl));
                }
              previous_name = DECL_NAME (decl);
              previous_location = location;
              previous_value = TREE_VALUE (attr);
            }
        }
      return;
    }


  /* Declarations of arrays can change size.  Don't trust them.  */
  if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
    size = 0;
  else
    size = int_size_in_bytes (TREE_TYPE (decl));

  if (TARGET_RAND_TPGP && size <= 4 && size > 0)
    {
      /* -mrand-tpgp: scatter small variables pseudo-randomly (by a
         checksum of the name) across the based/tiny/far regions.  */
      if (TREE_PUBLIC (decl)
          || DECL_EXTERNAL (decl)
          || TREE_STATIC (decl))
        {
          const char *name = IDENTIFIER_POINTER (DECL_NAME (decl));
          int key = 0;

          while (*name)
            key += *name++;

          switch (key & 3)
            {
            case 0:
              secname = "based";
              break;
            case 1:
              secname = "tiny";
              break;
            case 2:
              secname = "far";
              break;
            default:
              ;
            }
        }
    }
  else
    {
      /* Normal size-based defaults.  */
      if (size <= mep_based_cutoff && size > 0)
        secname = "based";
      else if (size <= mep_tiny_cutoff && size > 0)
        secname = "tiny";
      else if (TARGET_L)
        secname = "far";
    }

  /* -mconst=<region> overrides the above for read-only variables.  */
  if (mep_const_section && TREE_READONLY (decl))
    {
      if (strcmp (mep_const_section, "tiny") == 0)
        secname = "tiny";
      else if (strcmp (mep_const_section, "near") == 0)
        return;
      else if (strcmp (mep_const_section, "far") == 0)
        secname = "far";
    }

  if (!secname)
    return;

  if (!mep_multiple_address_regions (*attributes, true)
      && !mep_multiple_address_regions (DECL_ATTRIBUTES (decl), false))
    {
      attrib = build_tree_list (get_identifier (secname), NULL_TREE);

      /* Chain the attribute directly onto the variable's DECL_ATTRIBUTES
         in order to avoid the POINTER_TYPE bypasses in mep_validate_near_far
         and mep_validate_based_tiny.  */
      DECL_ATTRIBUTES (decl) = chainon (DECL_ATTRIBUTES (decl), attrib);
    }
}
4521
 
4522
/* Implement TARGET_ENCODE_SECTION_INFO.  When DECL carries an
   address-region attribute, rewrite its assembler symbol to carry an
   "@<code>." prefix (e.g. "@t.foo") that the section-selection and
   name-stripping code recognizes, and warn when the object cannot fit
   in the addressed region.  */

static void
mep_encode_section_info (tree decl, rtx rtl, int first)
{
  rtx rtlname;
  const char *oldname;
  const char *secname;
  char encoding;
  char *newname;
  tree idp;
  int maxsize;
  tree type;
  tree mep_attributes;

  /* Only encode on the first call for this decl.  */
  if (! first)
    return;

  if (TREE_CODE (decl) != VAR_DECL
      && TREE_CODE (decl) != FUNCTION_DECL)
    return;

  rtlname = XEXP (rtl, 0);
  if (GET_CODE (rtlname) == SYMBOL_REF)
    oldname = XSTR (rtlname, 0);
  else if (GET_CODE (rtlname) == MEM
           && GET_CODE (XEXP (rtlname, 0)) == SYMBOL_REF)
    oldname = XSTR (XEXP (rtlname, 0), 0);
  else
    gcc_unreachable ();

  type = TREE_TYPE (decl);
  if (type == error_mark_node)
    return;
  mep_attributes = MEP_ATTRIBUTES (decl);

  encoding = mep_attrlist_to_encoding (mep_attributes, decl);

  if (encoding)
    {
      /* "@<c>." + name + NUL is exactly strlen (oldname) + 4 bytes.  */
      newname = (char *) alloca (strlen (oldname) + 4);
      sprintf (newname, "@%c.%s", encoding, oldname);
      idp = get_identifier (newname);
      XEXP (rtl, 0) =
        gen_rtx_SYMBOL_REF (Pmode, IDENTIFIER_POINTER (idp));
      /* Preserve the weak flag and decl back-pointer on the new
         SYMBOL_REF.  */
      SYMBOL_REF_WEAK (XEXP (rtl, 0)) = DECL_WEAK (decl);
      SET_SYMBOL_REF_DECL (XEXP (rtl, 0), decl);

      /* Capacity of each addressable region, for the size warning.  */
      switch (encoding)
        {
        case 'b':
          maxsize = 128;
          secname = "based";
          break;
        case 't':
          maxsize = 65536;
          secname = "tiny";
          break;
        case 'n':
          maxsize = 0x1000000;
          secname = "near";
          break;
        default:
          maxsize = 0;
          secname = 0;
          break;
        }
      if (maxsize && int_size_in_bytes (TREE_TYPE (decl)) > maxsize)
        {
          warning (0, "variable %s (%ld bytes) is too large for the %s section (%d bytes)",
                   oldname,
                   (long) int_size_in_bytes (TREE_TYPE (decl)),
                   secname,
                   maxsize);
        }
    }
}
4597
 
4598
/* Strip this backend's name encodings from SYM: any number of leading
   '*' markers and "@<c>." region prefixes, in any order.  Returns a
   pointer into the original string.  */

const char *
mep_strip_name_encoding (const char *sym)
{
  for (;;)
    {
      if (sym[0] == '*')
        {
          sym += 1;
          continue;
        }
      if (sym[0] == '@' && sym[2] == '.')
        {
          sym += 3;
          continue;
        }
      return sym;
    }
}
4611
 
4612
/* Implement TARGET_ASM_SELECT_SECTION.  Choose an output section for
   DECL based on the "@<c>." encoding mep_encode_section_info put into
   its symbol name, its read-only-ness, and the vliw attribute.  */

static section *
mep_select_section (tree decl, int reloc ATTRIBUTE_UNUSED,
                    unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
{
  int readonly = 1;
  int encoding;

  /* Decide whether DECL can live in a read-only section.  */
  switch (TREE_CODE (decl))
    {
    case VAR_DECL:
      if (!TREE_READONLY (decl)
          || TREE_SIDE_EFFECTS (decl)
          || !DECL_INITIAL (decl)
          || (DECL_INITIAL (decl) != error_mark_node
              && !TREE_CONSTANT (DECL_INITIAL (decl))))
        readonly = 0;
      break;
    case CONSTRUCTOR:
      if (! TREE_CONSTANT (decl))
        readonly = 0;
      break;

    default:
      break;
    }

  if (TREE_CODE (decl) == FUNCTION_DECL)
    {
      const char *name = XSTR (XEXP (DECL_RTL (decl), 0), 0);

      /* Pick up the region encoding from the mangled symbol name.  */
      if (name[0] == '@' && name[2] == '.')
        encoding = name[1];
      else
        encoding = 0;

      /* NOTE(review): when -ffunction-sections or DECL_ONE_ONLY is in
         effect, this sets a unique section name and then falls through
         to the generic readonly/data return below — confirm that is
         the intended interaction with mep_unique_section.  */
      if (flag_function_sections || DECL_ONE_ONLY (decl))
        mep_unique_section (decl, 0);
      else if (lookup_attribute ("vliw", TYPE_ATTRIBUTES (TREE_TYPE (decl))))
        {
          if (encoding == 'f')
            return vftext_section;
          else
            return vtext_section;
        }
      else if (encoding == 'f')
        return ftext_section;
      else
        return text_section;
    }

  if (TREE_CODE (decl) == VAR_DECL)
    {
      const char *name = XSTR (XEXP (DECL_RTL (decl), 0), 0);

      if (name[0] == '@' && name[2] == '.')
        switch (name[1])
          {
          case 'b':
            return based_section;

          case 't':
            if (readonly)
              return srodata_section;
            if (DECL_INITIAL (decl))
              return sdata_section;
            return tinybss_section;

          case 'f':
            if (readonly)
              return frodata_section;
            return far_section;

          case 'i':
          case 'I':
            /* io/cb variables are fixed addresses; initialized data
               for them makes no sense.  */
            error_at (DECL_SOURCE_LOCATION (decl),
                      "variable %D of type %<io%> must be uninitialized", decl);
            return data_section;

          case 'c':
            error_at (DECL_SOURCE_LOCATION (decl),
                      "variable %D of type %<cb%> must be uninitialized", decl);
            return data_section;
          }
    }

  if (readonly)
    return readonly_data_section;

  return data_section;
}
4702
 
4703
/* Implement TARGET_ASM_UNIQUE_SECTION.  Build a per-decl section name
   of the form "<prefix><decl-name>", where the prefix is chosen from
   PREFIXES by section kind (column 0) or its link-once variant
   (column 1, when DECL_ONE_ONLY).  The "@<c>." symbol encoding, if
   present, refines the section kind and is stripped from the name.  */

static void
mep_unique_section (tree decl, int reloc)
{
  static const char *prefixes[][2] =
  {
    { ".text.",   ".gnu.linkonce.t." },
    { ".rodata.", ".gnu.linkonce.r." },
    { ".data.",   ".gnu.linkonce.d." },
    { ".based.",   ".gnu.linkonce.based." },
    { ".sdata.",   ".gnu.linkonce.s." },
    { ".far.",     ".gnu.linkonce.far." },
    { ".ftext.",   ".gnu.linkonce.ft." },
    { ".frodata.", ".gnu.linkonce.frd." },
    { ".srodata.", ".gnu.linkonce.srd." },
    { ".vtext.",   ".gnu.linkonce.v." },
    { ".vftext.",   ".gnu.linkonce.vf." }
  };
  int sec = 2; /* .data */
  int len;
  const char *name, *prefix;
  char *string;

  name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
  /* Prefer the (possibly encoded) symbol name over the decl name.  */
  if (DECL_RTL (decl))
    name = XSTR (XEXP (DECL_RTL (decl), 0), 0);

  if (TREE_CODE (decl) == FUNCTION_DECL)
    {
      if (lookup_attribute ("vliw", TYPE_ATTRIBUTES (TREE_TYPE (decl))))
        sec = 9; /* .vtext */
      else
        sec = 0; /* .text */
    }
  else if (decl_readonly_section (decl, reloc))
    sec = 1; /* .rodata */

  /* Refine the kind by the "@<c>." region encoding, then strip it.  */
  if (name[0] == '@' && name[2] == '.')
    {
      switch (name[1])
        {
        case 'b':
          sec = 3; /* .based */
          break;
        case 't':
          if (sec == 1)
            sec = 8; /* .srodata */
          else
            sec = 4; /* .sdata */
          break;
        case 'f':
          if (sec == 0)
            sec = 6; /* .ftext */
          else if (sec == 9)
            sec = 10; /* .vftext */
          else if (sec == 1)
            sec = 7; /* .frodata */
          else
            sec = 5; /* .far. */
          break;
        }
      name += 3;
    }

  prefix = prefixes[sec][DECL_ONE_ONLY(decl)];
  len    = strlen (name) + strlen (prefix);
  string = (char *) alloca (len + 1);

  sprintf (string, "%s%s", prefix, name);

  DECL_SECTION_NAME (decl) = build_string (len, string);
}
4774
 
4775
/* Given a decl, a section name, and whether the decl initializer
   has relocs, choose attributes for the section.  */

/* Machine-dependent section flag marking sections that hold VLIW
   code.  */
#define SECTION_MEP_VLIW        SECTION_MACH_DEP

/* Implement TARGET_SECTION_TYPE_FLAGS: like the default, but sections
   holding "vliw"-attributed functions get SECTION_MEP_VLIW.  */

static unsigned int
mep_section_type_flags (tree decl, const char *name, int reloc)
{
  unsigned int flags = default_section_type_flags (decl, name, reloc);

  if (decl && TREE_CODE (decl) == FUNCTION_DECL
      && lookup_attribute ("vliw", TYPE_ATTRIBUTES (TREE_TYPE (decl))))
    flags |= SECTION_MEP_VLIW;

  return flags;
}
4791
 
4792
/* Switch to an arbitrary section NAME with attributes as specified
   by FLAGS.  ALIGN specifies any known alignment requirements for
   the section; 0 if the default should be used.

   Differs from the standard ELF version only in support of VLIW mode.  */

static void
mep_asm_named_section (const char *name, unsigned int flags, tree decl ATTRIBUTE_UNUSED)
{
  /* At most 5 flag chars plus NUL ('a','w','x','s','v'), so 8 is
     ample.  */
  char flagchars[8], *f = flagchars;
  const char *type;

  if (!(flags & SECTION_DEBUG))
    *f++ = 'a';
  if (flags & SECTION_WRITE)
    *f++ = 'w';
  if (flags & SECTION_CODE)
    *f++ = 'x';
  if (flags & SECTION_SMALL)
    *f++ = 's';
  if (flags & SECTION_MEP_VLIW)
    *f++ = 'v';
  *f = '\0';

  if (flags & SECTION_BSS)
    type = "nobits";
  else
    type = "progbits";

  fprintf (asm_out_file, "\t.section\t%s,\"%s\",@%s\n",
           name, flagchars, type);

  /* Tell the assembler which instruction set the section's code
     uses.  */
  if (flags & SECTION_CODE)
    fputs ((flags & SECTION_MEP_VLIW ? "\t.vliw\n" : "\t.core\n"),
           asm_out_file);
}
4828
 
4829
void
4830
mep_output_aligned_common (FILE *stream, tree decl, const char *name,
4831
                           int size, int align, int global)
4832
{
4833
  /* We intentionally don't use mep_section_tag() here.  */
4834
  if (name[0] == '@'
4835
      && (name[1] == 'i' || name[1] == 'I' || name[1] == 'c')
4836
      && name[2] == '.')
4837
    {
4838
      int location = -1;
4839
      tree attr = lookup_attribute ((name[1] == 'c' ? "cb" : "io"),
4840
                                    DECL_ATTRIBUTES (decl));
4841
      if (attr
4842
          && TREE_VALUE (attr)
4843
          && TREE_VALUE (TREE_VALUE(attr)))
4844
        location = TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE(attr)));
4845
      if (location == -1)
4846
        return;
4847
      if (global)
4848
        {
4849
          fprintf (stream, "\t.globl\t");
4850
          assemble_name (stream, name);
4851
          fprintf (stream, "\n");
4852
        }
4853
      assemble_name (stream, name);
4854
      fprintf (stream, " = %d\n", location);
4855
      return;
4856
    }
4857
  if (name[0] == '@' && name[2] == '.')
4858
    {
4859
      const char *sec = 0;
4860
      switch (name[1])
4861
        {
4862
        case 'b':
4863
          switch_to_section (based_section);
4864
          sec = ".based";
4865
          break;
4866
        case 't':
4867
          switch_to_section (tinybss_section);
4868
          sec = ".sbss";
4869
          break;
4870
        case 'f':
4871
          switch_to_section (farbss_section);
4872
          sec = ".farbss";
4873
          break;
4874
        }
4875
      if (sec)
4876
        {
4877
          const char *name2;
4878
          int p2align = 0;
4879
 
4880
          while (align > BITS_PER_UNIT)
4881
            {
4882
              align /= 2;
4883
              p2align ++;
4884
            }
4885
          name2 = TARGET_STRIP_NAME_ENCODING (name);
4886
          if (global)
4887
            fprintf (stream, "\t.globl\t%s\n", name2);
4888
          fprintf (stream, "\t.p2align %d\n", p2align);
4889
          fprintf (stream, "\t.type\t%s,@object\n", name2);
4890
          fprintf (stream, "\t.size\t%s,%d\n", name2, size);
4891
          fprintf (stream, "%s:\n\t.zero\t%d\n", name2, size);
4892
          return;
4893
        }
4894
    }
4895
 
4896
  if (!global)
4897
    {
4898
      fprintf (stream, "\t.local\t");
4899
      assemble_name (stream, name);
4900
      fprintf (stream, "\n");
4901
    }
4902
  fprintf (stream, "\t.comm\t");
4903
  assemble_name (stream, name);
4904
  fprintf (stream, ",%u,%u\n", size, align / BITS_PER_UNIT);
4905
}
4906
 
4907
/* Trampolines.  */
4908
 
4909
static void
4910
mep_trampoline_init (rtx m_tramp, tree fndecl, rtx static_chain)
4911
{
4912
  rtx addr = XEXP (m_tramp, 0);
4913
  rtx fnaddr = XEXP (DECL_RTL (fndecl), 0);
4914
 
4915
  emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__mep_trampoline_helper"),
4916
                     LCT_NORMAL, VOIDmode, 3,
4917
                     addr, Pmode,
4918
                     fnaddr, Pmode,
4919
                     static_chain, Pmode);
4920
}
4921
 
4922
/* Experimental Reorg.  */
4923
 
4924
static bool
4925
mep_mentioned_p (rtx in,
4926
                 rtx reg, /* NULL for mem */
4927
                 int modes_too) /* if nonzero, modes must match also.  */
4928
{
4929
  const char *fmt;
4930
  int i;
4931
  enum rtx_code code;
4932
 
4933
  if (in == 0)
4934
    return false;
4935
  if (reg && GET_CODE (reg) != REG)
4936
    return false;
4937
 
4938
  if (GET_CODE (in) == LABEL_REF)
4939
    return (reg == 0);
4940
 
4941
  code = GET_CODE (in);
4942
 
4943
  switch (code)
4944
    {
4945
    case MEM:
4946
      if (reg)
4947
        return mep_mentioned_p (XEXP (in, 0), reg, modes_too);
4948
      return true;
4949
 
4950
    case REG:
4951
      if (!reg)
4952
        return false;
4953
      if (modes_too && (GET_MODE (in) != GET_MODE (reg)))
4954
        return false;
4955
      return (REGNO (in) == REGNO (reg));
4956
 
4957
    case SCRATCH:
4958
    case CC0:
4959
    case PC:
4960
    case CONST_INT:
4961
    case CONST_DOUBLE:
4962
      return false;
4963
 
4964
    default:
4965
      break;
4966
    }
4967
 
4968
  /* Set's source should be read-only.  */
4969
  if (code == SET && !reg)
4970
    return mep_mentioned_p (SET_DEST (in), reg, modes_too);
4971
 
4972
  fmt = GET_RTX_FORMAT (code);
4973
 
4974
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4975
    {
4976
      if (fmt[i] == 'E')
4977
        {
4978
          register int j;
4979
          for (j = XVECLEN (in, i) - 1; j >= 0; j--)
4980
            if (mep_mentioned_p (XVECEXP (in, i, j), reg, modes_too))
4981
              return true;
4982
        }
4983
      else if (fmt[i] == 'e'
4984
               && mep_mentioned_p (XEXP (in, i), reg, modes_too))
4985
        return true;
4986
    }
4987
  return false;
4988
}
4989
 
4990
#define EXPERIMENTAL_REGMOVE_REORG 1
4991
 
4992
#if EXPERIMENTAL_REGMOVE_REORG
4993
 
4994
/* Return nonzero if registers R1 and R2 live in the same class for
   the purposes of move elimination: both core general registers, or
   both coprocessor registers.  */

static int
mep_compatible_reg_class (int r1, int r2)
{
  return ((GR_REGNO_P (r1) && GR_REGNO_P (r2))
          || (CR_REGNO_P (r1) && CR_REGNO_P (r2)));
}
5003
 
5004
/* Remove superfluous register-to-register moves.  Given a copy
   (set r2 r1) in which r1 dies, find a later SET that uses r2 and in
   which r2 dies, substitute r1 into it, and delete the copy when the
   substituted insn still matches a pattern.  INSNS is the head of the
   current function's insn list.  */

static void
mep_reorg_regmove (rtx insns)
{
  rtx insn, next, pat, follow, *where;
  int count = 0, done = 0, replace, before = 0;

  /* BEFORE is used only for the dump-file statistics: the number of
     INSNs present before any deletion happens.  */
  if (dump_file)
    for (insn = insns; insn; insn = NEXT_INSN (insn))
      if (GET_CODE (insn) == INSN)
        before++;

  /* We're looking for (set r2 r1) moves where r1 dies, followed by a
     set that uses the r2 and r2 dies there.  We replace r2 with r1
     and see if it's still a valid insn.  If so, delete the first set.
     Copied from reorg.c.  */

  /* Iterate to a fixed point: deleting one move can expose another.  */
  while (!done)
    {
      done = 1;
      for (insn = insns; insn; insn = next)
        {
          next = NEXT_INSN (insn);
          if (GET_CODE (insn) != INSN)
            continue;
          pat = PATTERN (insn);

          replace = 0;

          /* Candidate copy: reg-to-reg SET whose source dies here,
             with both registers in compatible classes.  */
          if (GET_CODE (pat) == SET
              && GET_CODE (SET_SRC (pat)) == REG
              && GET_CODE (SET_DEST (pat)) == REG
              && find_regno_note (insn, REG_DEAD, REGNO (SET_SRC (pat)))
              && mep_compatible_reg_class (REGNO (SET_SRC (pat)), REGNO (SET_DEST (pat))))
            {
              follow = next_nonnote_insn (insn);
              if (dump_file)
                fprintf (dump_file, "superfluous moves: considering %d\n", INSN_UID (insn));

              /* Skip over intervening SETs that mention neither
                 register and do not set or kill the copy's source.  */
              while (follow && GET_CODE (follow) == INSN
                     && GET_CODE (PATTERN (follow)) == SET
                     && !dead_or_set_p (follow, SET_SRC (pat))
                     && !mep_mentioned_p (PATTERN (follow), SET_SRC (pat), 0)
                     && !mep_mentioned_p (PATTERN (follow), SET_DEST (pat), 0))
                {
                  if (dump_file)
                    fprintf (dump_file, "\tskipping %d\n", INSN_UID (follow));
                  follow = next_nonnote_insn (follow);
                }

              if (dump_file)
                fprintf (dump_file, "\tfollow is %d\n", INSN_UID (follow));

              /* FOLLOW must be a SET in which the copied register
                 dies; choose where to substitute.  */
              if (follow && GET_CODE (follow) == INSN
                  && GET_CODE (PATTERN (follow)) == SET
                  && find_regno_note (follow, REG_DEAD, REGNO (SET_DEST (pat))))
                {
                  if (GET_CODE (SET_DEST (PATTERN (follow))) == REG)
                    {
                      /* Register destination: substitute only into the
                         source (modes must match: third arg is 1).  */
                      if (mep_mentioned_p (SET_SRC (PATTERN (follow)), SET_DEST (pat), 1))
                        {
                          replace = 1;
                          where = & SET_SRC (PATTERN (follow));
                        }
                    }
                  else if (GET_CODE (SET_DEST (PATTERN (follow))) == MEM)
                    {
                      /* Memory destination: the register may appear
                         anywhere, e.g. inside the address.  */
                      if (mep_mentioned_p (PATTERN (follow), SET_DEST (pat), 1))
                        {
                          replace = 1;
                          where = & PATTERN (follow);
                        }
                    }
                }
            }

          /* If so, follow is the corresponding insn */
          if (replace)
            {
              if (dump_file)
                {
                  rtx x;

                  fprintf (dump_file, "----- Candidate for superfluous move deletion:\n\n");
                  for (x = insn; x ;x = NEXT_INSN (x))
                    {
                      print_rtl_single (dump_file, x);
                      if (x == follow)
                        break;
                      fprintf (dump_file, "\n");
                    }
                }

              /* Only commit (and delete the copy) if the substituted
                 insn still matches some pattern.  */
              if (validate_replace_rtx_subexp (SET_DEST (pat), SET_SRC (pat),
                                               follow, where))
                {
                  count ++;
                  next = delete_insn (insn);
                  if (dump_file)
                    {
                      fprintf (dump_file, "\n----- Success!  new insn:\n\n");
                      print_rtl_single (dump_file, follow);
                    }
                  done = 0;
                }
            }
        }
    }

  if (dump_file)
    {
      fprintf (dump_file, "\n%d insn%s deleted out of %d.\n\n", count, count == 1 ? "" : "s", before);
      fprintf (dump_file, "=====\n");
    }
}
5117
#endif
5118
 
5119
 
5120
/* Figure out where to put LABEL, which is the label for a repeat loop.
5121
   If INCLUDING, LAST_INSN is the last instruction in the loop, otherwise
5122
   the loop ends just before LAST_INSN.  If SHARED, insns other than the
5123
   "repeat" might use LABEL to jump to the loop's continuation point.
5124
 
5125
   Return the last instruction in the adjusted loop.  */
5126
 
5127
static rtx
mep_insert_repeat_label_last (rtx last_insn, rtx label, bool including,
                              bool shared)
{
  rtx next, prev;
  int count = 0, code, icode;

  if (dump_file)
    fprintf (dump_file, "considering end of repeat loop at insn %d\n",
             INSN_UID (last_insn));

  /* Set PREV to the last insn in the loop.  */
  prev = last_insn;
  if (!including)
    prev = PREV_INSN (prev);

  /* Set NEXT to the next insn after the repeat label.  */
  next = last_insn;
  if (!shared)
    while (prev != 0)
      {
        /* Scan backwards, trying to pull up to two ordinary insns
           into the repeat epilogue (the two slots after the label).
           Stop at anything that must not be moved there.  */
        code = GET_CODE (prev);
        if (code == CALL_INSN || code == CODE_LABEL || code == BARRIER)
          break;

        if (INSN_P (prev))
          {
            /* For a delay-slot SEQUENCE, examine its second element.  */
            if (GET_CODE (PATTERN (prev)) == SEQUENCE)
              prev = XVECEXP (PATTERN (prev), 0, 1);

            /* Other insns that should not be in the last two opcodes.  */
            icode = recog_memoized (prev);
            if (icode < 0
                || icode == CODE_FOR_repeat
                || icode == CODE_FOR_erepeat
                || get_attr_may_trap (prev) == MAY_TRAP_YES)
              break;

            /* That leaves JUMP_INSN and INSN.  It will have BImode if it
               is the second instruction in a VLIW bundle.  In that case,
               loop again: if the first instruction also satisfies the
               conditions above then we will reach here again and put
               both of them into the repeat epilogue.  Otherwise both
               should remain outside.  */
            if (GET_MODE (prev) != BImode)
              {
                count++;
                next = prev;
                if (dump_file)
                  print_rtl_single (dump_file, next);
                if (count == 2)
                  break;
              }
          }
        prev = PREV_INSN (prev);
      }

  /* See if we're adding the label immediately after the repeat insn.
     If so, we need to separate them with a nop.  */
  prev = prev_real_insn (next);
  if (prev)
    switch (recog_memoized (prev))
      {
      case CODE_FOR_repeat:
      case CODE_FOR_erepeat:
        if (dump_file)
          fprintf (dump_file, "Adding nop inside loop\n");
        emit_insn_before (gen_nop (), next);
        break;

      default:
        break;
      }

  /* Insert the label.  */
  emit_label_before (label, next);

  /* Insert the nops.  */
  if (dump_file && count < 2)
    fprintf (dump_file, "Adding %d nop%s\n\n",
             2 - count, count == 1 ? "" : "s");

  /* Pad the epilogue out to two insns with nops; emit them after
     LAST_INSN when the loop body includes it, before it otherwise.  */
  for (; count < 2; count++)
    if (including)
      last_insn = emit_insn_after (gen_nop (), last_insn);
    else
      emit_insn_before (gen_nop (), last_insn);

  return last_insn;
}
5217
 
5218
 
5219
void
5220
mep_emit_doloop (rtx *operands, int is_end)
5221
{
5222
  rtx tag;
5223
 
5224
  if (cfun->machine->doloop_tags == 0
5225
      || cfun->machine->doloop_tag_from_end == is_end)
5226
    {
5227
      cfun->machine->doloop_tags++;
5228
      cfun->machine->doloop_tag_from_end = is_end;
5229
    }
5230
 
5231
  tag = GEN_INT (cfun->machine->doloop_tags - 1);
5232
  if (is_end)
5233
    emit_jump_insn (gen_doloop_end_internal (operands[0], operands[4], tag));
5234
  else
5235
    emit_insn (gen_doloop_begin_internal (operands[0], operands[0], tag));
5236
}
5237
 
5238
 
5239
/* Code for converting doloop_begins and doloop_ends into valid
5240
   MeP instructions.  A doloop_begin is just a placeholder:
5241
 
5242
        $count = unspec ($count)
5243
 
5244
   where $count is initially the number of iterations - 1.
5245
   doloop_end has the form:
5246
 
5247
        if ($count-- == 0) goto label
5248
 
5249
   The counter variable is private to the doloop insns, nothing else
5250
   relies on its value.
5251
 
5252
   There are three cases, in decreasing order of preference:
5253
 
5254
      1. A loop has exactly one doloop_begin and one doloop_end.
5255
         The doloop_end branches to the first instruction after
5256
         the doloop_begin.
5257
 
5258
         In this case we can replace the doloop_begin with a repeat
5259
         instruction and remove the doloop_end.  I.e.:
5260
 
5261
                $count1 = unspec ($count1)
5262
            label:
5263
                ...
5264
                insn1
5265
                insn2
5266
                if ($count2-- == 0) goto label
5267
 
5268
          becomes:
5269
 
5270
                repeat $count1,repeat_label
5271
            label:
5272
                ...
5273
            repeat_label:
5274
                insn1
5275
                insn2
5276
                # end repeat
5277
 
5278
      2. As for (1), except there are several doloop_ends.  One of them
5279
         (call it X) falls through to a label L.  All the others fall
5280
         through to branches to L.
5281
 
5282
         In this case, we remove X and replace the other doloop_ends
5283
         with branches to the repeat label.  For example:
5284
 
5285
                $count1 = unspec ($count1)
5286
            start:
5287
                ...
5288
                if ($count2-- == 0) goto label
5289
            end:
5290
                ...
5291
                if ($count3-- == 0) goto label
5292
                goto end
5293
 
5294
         becomes:
5295
 
5296
                repeat $count1,repeat_label
5297
            start:
5298
                ...
5299
            repeat_label:
5300
                nop
5301
                nop
5302
                # end repeat
5303
            end:
5304
                ...
5305
                goto repeat_label
5306
 
5307
      3. The fallback case.  Replace doloop_begins with:
5308
 
5309
                $count = $count + 1
5310
 
5311
         Replace doloop_ends with the equivalent of:
5312
 
5313
                $count = $count - 1
5314
                if ($count == 0) goto label
5315
 
5316
         Note that this might need a scratch register if $count
5317
         is stored in memory.  */
5318
 
5319
/* A structure describing one doloop_begin.  Begins that share a loop
   tag are chained together; the chain head lives in struct mep_doloop.  */
struct mep_doloop_begin {
  /* The next doloop_begin with the same tag.  */
  struct mep_doloop_begin *next;

  /* The instruction itself.  */
  rtx insn;

  /* The initial counter value.  This is known to be a general register.  */
  rtx counter;
};
5330
 
5331
/* A structure describing a doloop_end.  Ends that share a loop tag are
   chained together (see struct mep_doloop).  */
struct mep_doloop_end {
  /* The next doloop_end with the same loop tag.  */
  struct mep_doloop_end *next;

  /* The instruction itself.  */
  rtx insn;

  /* The first instruction after INSN when the branch isn't taken.  */
  rtx fallthrough;

  /* The location of the counter value.  Since doloop_end_internal is a
     jump instruction, it has to allow the counter to be stored anywhere
     (any non-fixed register or memory location).  */
  rtx counter;

  /* The target label (the place where the insn branches when the counter
     isn't zero).  */
  rtx label;

  /* A scratch register.  Only available when COUNTER isn't stored
     in a general register.  */
  rtx scratch;
};
5355
 
5356
 
5357
/* One do-while loop: the begins and ends collected for a single loop
   tag by mep_reorg_repeat.  */
struct mep_doloop {
  /* All the doloop_begins for this loop (in no particular order).  */
  struct mep_doloop_begin *begin;

  /* All the doloop_ends.  When there is more than one, arrange things
     so that the first one is the most likely to be X in case (2) above.  */
  struct mep_doloop_end *end;
};
5366
 
5367
 
5368
/* Return true if LOOP can be converted into repeat/repeat_end form
5369
   (that is, if it matches cases (1) or (2) above).  */
5370
 
5371
static bool
5372
mep_repeat_loop_p (struct mep_doloop *loop)
5373
{
5374
  struct mep_doloop_end *end;
5375
  rtx fallthrough;
5376
 
5377
  /* There must be exactly one doloop_begin and at least one doloop_end.  */
5378
  if (loop->begin == 0 || loop->end == 0 || loop->begin->next != 0)
5379
    return false;
5380
 
5381
  /* The first doloop_end (X) must branch back to the insn after
5382
     the doloop_begin.  */
5383
  if (prev_real_insn (loop->end->label) != loop->begin->insn)
5384
    return false;
5385
 
5386
  /* All the other doloop_ends must branch to the same place as X.
5387
     When the branch isn't taken, they must jump to the instruction
5388
     after X.  */
5389
  fallthrough = loop->end->fallthrough;
5390
  for (end = loop->end->next; end != 0; end = end->next)
5391
    if (end->label != loop->end->label
5392
        || !simplejump_p (end->fallthrough)
5393
        || next_real_insn (JUMP_LABEL (end->fallthrough)) != fallthrough)
5394
      return false;
5395
 
5396
  return true;
5397
}
5398
 
5399
 
5400
/* The main repeat reorg function.  See comment above for details.  */
5401
 
5402
static void
mep_reorg_repeat (rtx insns)
{
  rtx insn;
  struct mep_doloop *loops, *loop;
  struct mep_doloop_begin *begin;
  struct mep_doloop_end *end;

  /* Quick exit if we haven't created any loops.  */
  if (cfun->machine->doloop_tags == 0)
    return;

  /* Create an array of mep_doloop structures.  All bookkeeping is
     alloca'd, so it disappears when this pass returns.  */
  loops = (struct mep_doloop *) alloca (sizeof (loops[0]) * cfun->machine->doloop_tags);
  memset (loops, 0, sizeof (loops[0]) * cfun->machine->doloop_tags);

  /* Search the function for do-while insns and group them by loop tag.
     operand[2] of both internal patterns is the loop tag.  */
  for (insn = insns; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      switch (recog_memoized (insn))
        {
        case CODE_FOR_doloop_begin_internal:
          insn_extract (insn);
          loop = &loops[INTVAL (recog_data.operand[2])];

          begin = (struct mep_doloop_begin *) alloca (sizeof (struct mep_doloop_begin));
          begin->next = loop->begin;
          begin->insn = insn;
          begin->counter = recog_data.operand[0];

          loop->begin = begin;
          break;

        case CODE_FOR_doloop_end_internal:
          insn_extract (insn);
          loop = &loops[INTVAL (recog_data.operand[2])];

          end = (struct mep_doloop_end *) alloca (sizeof (struct mep_doloop_end));
          end->insn = insn;
          end->fallthrough = next_real_insn (insn);
          end->counter = recog_data.operand[0];
          end->label = recog_data.operand[1];
          end->scratch = recog_data.operand[3];

          /* If this insn falls through to an unconditional jump,
             give it a lower priority than the others.  */
          if (loop->end != 0 && simplejump_p (end->fallthrough))
            {
              end->next = loop->end->next;
              loop->end->next = end;
            }
          else
            {
              end->next = loop->end;
              loop->end = end;
            }
          break;
        }

  /* Convert the insns for each loop in turn.  */
  for (loop = loops; loop < loops + cfun->machine->doloop_tags; loop++)
    if (mep_repeat_loop_p (loop))
      {
        /* Case (1) or (2).  */
        rtx repeat_label, label_ref;

        /* Create a new label for the repeat insn.  */
        repeat_label = gen_label_rtx ();

        /* Replace the doloop_begin with a repeat.  */
        label_ref = gen_rtx_LABEL_REF (VOIDmode, repeat_label);
        emit_insn_before (gen_repeat (loop->begin->counter, label_ref),
                          loop->begin->insn);
        delete_insn (loop->begin->insn);

        /* Insert the repeat label before the first doloop_end.
           Fill the gap with nops if there are other doloop_ends.  */
        mep_insert_repeat_label_last (loop->end->insn, repeat_label,
                                      false, loop->end->next != 0);

        /* Emit a repeat_end (to improve the readability of the output).  */
        emit_insn_before (gen_repeat_end (), loop->end->insn);

        /* Delete the first doloop_end.  */
        delete_insn (loop->end->insn);

        /* Replace the others with branches to REPEAT_LABEL.  */
        for (end = loop->end->next; end != 0; end = end->next)
          {
            emit_jump_insn_before (gen_jump (repeat_label), end->insn);
            delete_insn (end->insn);
            delete_insn (end->fallthrough);
          }
      }
    else
      {
        /* Case (3).  First replace all the doloop_begins with increment
           instructions.  */
        for (begin = loop->begin; begin != 0; begin = begin->next)
          {
            emit_insn_before (gen_add3_insn (copy_rtx (begin->counter),
                                             begin->counter, const1_rtx),
                              begin->insn);
            delete_insn (begin->insn);
          }

        /* Replace all the doloop_ends with decrement-and-branch sequences.  */
        for (end = loop->end; end != 0; end = end->next)
          {
            rtx reg;

            start_sequence ();

            /* Load the counter value into a general register.
               NOTE(review): the REGNO > 15 test presumably matches the
               range of MeP core general registers accepted by the add
               and branch patterns below -- confirm against mep.h.  */
            reg = end->counter;
            if (!REG_P (reg) || REGNO (reg) > 15)
              {
                reg = end->scratch;
                emit_move_insn (copy_rtx (reg), copy_rtx (end->counter));
              }

            /* Decrement the counter.  */
            emit_insn (gen_add3_insn (copy_rtx (reg), copy_rtx (reg),
                                      constm1_rtx));

            /* Copy it back to its original location.  */
            if (reg != end->counter)
              emit_move_insn (copy_rtx (end->counter), copy_rtx (reg));

            /* Jump back to the start label.  */
            insn = emit_jump_insn (gen_mep_bne_true (reg, const0_rtx,
                                                     end->label));
            JUMP_LABEL (insn) = end->label;
            LABEL_NUSES (end->label)++;

            /* Emit the whole sequence before the doloop_end.  */
            insn = get_insns ();
            end_sequence ();
            emit_insn_before (insn, end->insn);

            /* Delete the doloop_end.  */
            delete_insn (end->insn);
          }
      }
}
5547
 
5548
 
5549
static bool
5550
mep_invertable_branch_p (rtx insn)
5551
{
5552
  rtx cond, set;
5553
  enum rtx_code old_code;
5554
  int i;
5555
 
5556
  set = PATTERN (insn);
5557
  if (GET_CODE (set) != SET)
5558
    return false;
5559
  if (GET_CODE (XEXP (set, 1)) != IF_THEN_ELSE)
5560
    return false;
5561
  cond = XEXP (XEXP (set, 1), 0);
5562
  old_code = GET_CODE (cond);
5563
  switch (old_code)
5564
    {
5565
    case EQ:
5566
      PUT_CODE (cond, NE);
5567
      break;
5568
    case NE:
5569
      PUT_CODE (cond, EQ);
5570
      break;
5571
    case LT:
5572
      PUT_CODE (cond, GE);
5573
      break;
5574
    case GE:
5575
      PUT_CODE (cond, LT);
5576
      break;
5577
    default:
5578
      return false;
5579
    }
5580
  INSN_CODE (insn) = -1;
5581
  i = recog_memoized (insn);
5582
  PUT_CODE (cond, old_code);
5583
  INSN_CODE (insn) = -1;
5584
  return i >= 0;
5585
}
5586
 
5587
/* Invert the condition of branch INSN (EQ<->NE, LT<->GE) and retarget
   it to a fresh label emitted just after AFTER.  The old target label
   is deleted when this branch was its only user.  Asserts that the
   inverted insn is still recognized.  */

static void
mep_invert_branch (rtx insn, rtx after)
{
  rtx cond, set, label;
  int i;

  set = PATTERN (insn);

  gcc_assert (GET_CODE (set) == SET);
  gcc_assert (GET_CODE (XEXP (set, 1)) == IF_THEN_ELSE);

  /* Flip the comparison code in place.  */
  cond = XEXP (XEXP (set, 1), 0);
  switch (GET_CODE (cond))
    {
    case EQ:
      PUT_CODE (cond, NE);
      break;
    case NE:
      PUT_CODE (cond, EQ);
      break;
    case LT:
      PUT_CODE (cond, GE);
      break;
    case GE:
      PUT_CODE (cond, LT);
      break;
    default:
      /* Callers must have checked mep_invertable_branch_p first.  */
      gcc_unreachable ();
    }

  /* Emit the new target label after AFTER and point whichever arm of
     the IF_THEN_ELSE holds a LABEL_REF at it, dropping the old label
     if it becomes unused.  */
  label = gen_label_rtx ();
  emit_label_after (label, after);
  for (i=1; i<=2; i++)
    if (GET_CODE (XEXP (XEXP (set, 1), i)) == LABEL_REF)
      {
        rtx ref = XEXP (XEXP (set, 1), i);
        if (LABEL_NUSES (XEXP (ref, 0)) == 1)
          delete_insn (XEXP (ref, 0));
        XEXP (ref, 0) = label;
        LABEL_NUSES (label) ++;
        JUMP_LABEL (insn) = label;
      }

  /* Re-recognize the modified insn; it must still match a pattern.  */
  INSN_CODE (insn) = -1;
  i = recog_memoized (insn);
  gcc_assert (i >= 0);
}
5632
 
5633
/* Convert suitable backward-branching loops into erepeat form.  For
   each invertible conditional branch (or simple jump), scan backwards
   for its own target label; if the loop body is safe (no calls,
   barriers, or labels with unknown users), emit an erepeat insn after
   the loop-top label and close the loop with an erepeat_end.  */

static void
mep_reorg_erepeat (rtx insns)
{
  /* NOTE(review): label_before is assigned below but never read
     anywhere in this function -- apparently leftover bookkeeping.  */
  rtx insn, prev, label_before, l, x;
  int count;

  for (insn = insns; insn; insn = NEXT_INSN (insn))
    if (JUMP_P (insn)
        && ! JUMP_TABLE_DATA_P (insn)
        && mep_invertable_branch_p (insn))
      {
        if (dump_file)
          {
            fprintf (dump_file, "\n------------------------------\n");
            fprintf (dump_file, "erepeat: considering this jump:\n");
            print_rtl_single (dump_file, insn);
          }
        /* A conditional branch itself counts as one loop insn.  */
        count = simplejump_p (insn) ? 0 : 1;
        label_before = 0;
        /* Walk backwards from the branch looking for its target
           label, i.e. the top of the loop.  */
        for (prev = PREV_INSN (insn); prev; prev = PREV_INSN (prev))
          {
            /* Calls and barriers make the region unusable.  */
            if (GET_CODE (prev) == CALL_INSN
                || BARRIER_P (prev))
              break;

            if (prev == JUMP_LABEL (insn))
              {
                rtx newlast;
                if (dump_file)
                  fprintf (dump_file, "found loop top, %d insns\n", count);

                /* Check that nothing inside the loop also jumps to
                   this label.  */
                if (LABEL_NUSES (prev) == 1)
                  /* We're the only user, always safe */ ;
                else if (LABEL_NUSES (prev) == 2)
                  {
                    /* See if there's a barrier before this label.  If
                       so, we know nobody inside the loop uses it.
                       But we must be careful to put the erepeat
                       *after* the label.  */
                    rtx barrier;
                    for (barrier = PREV_INSN (prev);
                         barrier && GET_CODE (barrier) == NOTE;
                         barrier = PREV_INSN (barrier))
                      ;
                    if (barrier && GET_CODE (barrier) != BARRIER)
                      break;
                  }
                else
                  {
                    /* We don't know who else, within or without our loop, uses this */
                    if (dump_file)
                      fprintf (dump_file, "... but there are multiple users, too risky.\n");
                    break;
                  }

                /* Generate a label to be used by the erepeat insn.  */
                l = gen_label_rtx ();

                /* Insert the erepeat after INSN's target label.  */
                x = gen_erepeat (gen_rtx_LABEL_REF (VOIDmode, l));
                LABEL_NUSES (l)++;
                emit_insn_after (x, prev);

                /* Insert the erepeat label.  */
                newlast = (mep_insert_repeat_label_last
                           (insn, l, !simplejump_p (insn), false));
                if (simplejump_p (insn))
                  {
                    /* Unconditional loop: the jump itself is no
                       longer needed.  */
                    emit_insn_before (gen_erepeat_end (), insn);
                    delete_insn (insn);
                  }
                else
                  {
                    /* Conditional loop: invert the branch so it exits
                       the loop, and mark the repeat region's end.  */
                    mep_invert_branch (insn, newlast);
                    emit_insn_after (gen_erepeat_end (), newlast);
                  }
                break;
              }

            if (LABEL_P (prev))
              {
                /* A label is OK if there is exactly one user, and we
                   can find that user before the next label.  */
                rtx user = 0;
                int safe = 0;
                if (LABEL_NUSES (prev) == 1)
                  {
                    for (user = PREV_INSN (prev);
                         user && (INSN_P (user) || GET_CODE (user) == NOTE);
                         user = PREV_INSN (user))
                      if (GET_CODE (user) == JUMP_INSN
                          && JUMP_LABEL (user) == prev)
                        {
                          safe = INSN_UID (user);
                          break;
                        }
                  }
                if (!safe)
                  break;
                if (dump_file)
                  fprintf (dump_file, "... ignoring jump from insn %d to %d\n",
                           safe, INSN_UID (prev));
              }

            if (INSN_P (prev))
              {
                count ++;
                if (count == 2)
                  label_before = prev;
              }
          }
      }
  if (dump_file)
    fprintf (dump_file, "\n==============================\n");
}
5748
 
5749
/* Replace a jump to a return, with a copy of the return.  GCC doesn't
   always do this on its own.  */

static void
mep_jmp_return_reorg (rtx insns)
{
  rtx insn, label, ret;
  int ret_code;

  for (insn = insns; insn; insn = NEXT_INSN (insn))
    if (simplejump_p (insn))
    {
      /* Find the first real insn the jump jumps to.  */
      label = ret = JUMP_LABEL (insn);
      /* NOTE(review): PATTERN is evaluated on every candidate here;
         this presumably relies on no BARRIER appearing between the
         label and the first real insn -- confirm.  */
      while (ret
             && (GET_CODE (ret) == NOTE
                 || GET_CODE (ret) == CODE_LABEL
                 || GET_CODE (PATTERN (ret)) == USE))
        ret = NEXT_INSN (ret);

      if (ret)
        {
          /* Is it a return?  */
          ret_code = recog_memoized (ret);
          if (ret_code == CODE_FOR_return_internal
              || ret_code == CODE_FOR_eh_return_internal)
            {
              /* It is.  Replace the jump with a return.  The jump's
                 target label loses one user; delete it once nothing
                 else references it.  */
              LABEL_NUSES (label) --;
              if (LABEL_NUSES (label) == 0)
                delete_insn (label);
              /* Rewrite the jump in place and force re-recognition.  */
              PATTERN (insn) = copy_rtx (PATTERN (ret));
              INSN_CODE (insn) = -1;
            }
        }
    }
}
5786
 
5787
 
5788
/* Combine two adjacent "reg = reg + const" insns that target the same
   register into a single add of the summed constant, as long as the
   result still fits a 16-bit signed immediate.  The second insn is
   spliced out of the chain by hand rather than via delete_insn.  */
static void
mep_reorg_addcombine (rtx insns)
{
  rtx i, n;

  for (i = insns; i; i = NEXT_INSN (i))
    if (INSN_P (i)
        && INSN_CODE (i) == CODE_FOR_addsi3
        && GET_CODE (SET_DEST (PATTERN (i))) == REG
        && GET_CODE (XEXP (SET_SRC (PATTERN (i)), 0)) == REG
        && REGNO (SET_DEST (PATTERN (i))) == REGNO (XEXP (SET_SRC (PATTERN (i)), 0))
        && GET_CODE (XEXP (SET_SRC (PATTERN (i)), 1)) == CONST_INT)
      {
        /* I is "rX = rX + C1"; see whether the very next insn is
           "rY = rY + C2" of the same shape.  */
        n = NEXT_INSN (i);
        if (INSN_P (n)
            && INSN_CODE (n) == CODE_FOR_addsi3
            && GET_CODE (SET_DEST (PATTERN (n))) == REG
            && GET_CODE (XEXP (SET_SRC (PATTERN (n)), 0)) == REG
            && REGNO (SET_DEST (PATTERN (n))) == REGNO (XEXP (SET_SRC (PATTERN (n)), 0))
            && GET_CODE (XEXP (SET_SRC (PATTERN (n)), 1)) == CONST_INT)
          {
            int ic = INTVAL (XEXP (SET_SRC (PATTERN (i)), 1));
            int nc = INTVAL (XEXP (SET_SRC (PATTERN (n)), 1));
            /* Same destination register, and the combined constant
               stays strictly inside the 16-bit signed range.  */
            if (REGNO (SET_DEST (PATTERN (i))) == REGNO (SET_DEST (PATTERN (n)))
                && ic + nc < 32767
                && ic + nc > -32768)
              {
                /* Fold C2 into I and unlink N from the insn chain.  */
                XEXP (SET_SRC (PATTERN (i)), 1) = GEN_INT (ic + nc);
                NEXT_INSN (i) = NEXT_INSN (n);
                if (NEXT_INSN (i))
                  PREV_INSN (NEXT_INSN (i)) = i;
              }
          }
      }
}
5823
 
5824
/* If this insn adjusts the stack, return the adjustment, else return
5825
   zero.  */
5826
static int
5827
add_sp_insn_p (rtx insn)
5828
{
5829
  rtx pat;
5830
 
5831
  if (! single_set (insn))
5832
    return 0;
5833
  pat = PATTERN (insn);
5834
  if (GET_CODE (SET_DEST (pat)) != REG)
5835
    return 0;
5836
  if (REGNO (SET_DEST (pat)) != SP_REGNO)
5837
    return 0;
5838
  if (GET_CODE (SET_SRC (pat)) != PLUS)
5839
    return 0;
5840
  if (GET_CODE (XEXP (SET_SRC (pat), 0)) != REG)
5841
    return 0;
5842
  if (REGNO (XEXP (SET_SRC (pat), 0)) != SP_REGNO)
5843
    return 0;
5844
  if (GET_CODE (XEXP (SET_SRC (pat), 1)) != CONST_INT)
5845
    return 0;
5846
  return INTVAL (XEXP (SET_SRC (pat), 1));
5847
}
5848
 
5849
/* Check for trivial functions that set up an unneeded stack
   frame.  If the function allocates a frame at entry, frees the
   exact same amount at exit, and never otherwise touches $sp (and
   makes no calls), both adjustments are deleted.  */
static void
mep_reorg_noframe (rtx insns)
{
  rtx start_frame_insn;
  rtx end_frame_insn = 0;
  int sp_adjust, sp2;
  rtx sp;

  /* The first insn should be $sp = $sp + N */
  while (insns && ! INSN_P (insns))
    insns = NEXT_INSN (insns);
  if (!insns)
    return;

  /* Zero also means "not an sp adjustment", which is fine: an
     add of 0 would be pointless anyway.  */
  sp_adjust = add_sp_insn_p (insns);
  if (sp_adjust == 0)
    return;

  start_frame_insn = insns;
  sp = SET_DEST (PATTERN (start_frame_insn));

  insns = next_real_insn (insns);

  /* Scan forward; bail out on anything that makes the frame
     necessary or ambiguous.  */
  while (insns)
    {
      rtx next = next_real_insn (insns);
      if (!next)
        break;

      sp2 = add_sp_insn_p (insns);
      if (sp2)
        {
          /* A second sp adjustment after the candidate epilogue one,
             or a mismatched deallocation, disqualifies the function.  */
          if (end_frame_insn)
            return;
          end_frame_insn = insns;
          if (sp2 != -sp_adjust)
            return;
        }
      else if (mep_mentioned_p (insns, sp, 0))
        /* Any other use of $sp means the frame is live.  */
        return;
      else if (CALL_P (insns))
        /* Calls need the frame (outgoing args, return address).  */
        return;

      insns = next;
    }

  /* Matching allocate/deallocate pair with no intervening use:
     drop both.  */
  if (end_frame_insn)
    {
      delete_insn (start_frame_insn);
      delete_insn (end_frame_insn);
    }
}
5903
 
5904
/* Machine-dependent reorg pass entry point (presumably registered as
   TARGET_MACHINE_DEPENDENT_REORG -- confirm against the hook table
   elsewhere in this file).  Runs the MeP-specific late passes in a
   fixed, order-sensitive sequence.  */
static void
mep_reorg (void)
{
  rtx insns = get_insns ();

  /* We require accurate REG_DEAD notes.  */
  compute_bb_for_insn ();
  df_note_add_problem ();
  df_analyze ();

  mep_reorg_addcombine (insns);
#if EXPERIMENTAL_REGMOVE_REORG
  /* VLIW packing has been done already, so we can't just delete things.  */
  if (!mep_vliw_function_p (cfun->decl))
    mep_reorg_regmove (insns);
#endif
  mep_jmp_return_reorg (insns);
  mep_bundle_insns (insns);
  mep_reorg_repeat (insns);
  /* erepeat uses $rpb; in interrupt handlers it is only safe if that
     register is saved.  Profiling inserts code that would break the
     erepeat block structure.  */
  if (optimize
      && !profile_flag
      && !profile_arc_flag
      && TARGET_OPT_REPEAT
      && (!mep_interrupt_p () || mep_interrupt_saved_reg (RPB_REGNO)))
    mep_reorg_erepeat (insns);

  /* This may delete *insns so make sure it's last.  */
  mep_reorg_noframe (insns);

  df_finish_pass (false);
}
5935
 
5936
 
5937
 
5938
/*----------------------------------------------------------------------*/
5939
/* Builtins                                                             */
5940
/*----------------------------------------------------------------------*/
5941
 
5942
/* Element X gives the index into cgen_insns[] of the most general
   implementation of intrinsic X.  Unimplemented intrinsics are
   mapped to -1.  */
int mep_intrinsic_insn[ARRAY_SIZE (cgen_intrinsics)];

/* Element X gives the index of another instruction that is mapped to
   the same intrinsic as cgen_insns[X].  It is -1 when there is no other
   instruction.

   Things are set up so that mep_intrinsic_chain[X] < X.  */
static int mep_intrinsic_chain[ARRAY_SIZE (cgen_insns)];

/* The bitmask for the current ISA.  The ISA masks are declared
   in mep-intrin.h.  */
unsigned int mep_selected_isa;

/* One entry of the -mconfig= name -> ISA bitmask mapping.  */
struct mep_config {
  const char *config_name;
  unsigned int isa;
};

/* Table of known configurations, terminated by a null name.  The real
   entries come from the generated COPROC_SELECTION_TABLE macro when it
   is available.  */
static struct mep_config mep_configs[] = {
#ifdef COPROC_SELECTION_TABLE
  COPROC_SELECTION_TABLE,
#endif
  { 0, 0 }
};
5969
 
5970
/* Initialize the global intrinsics variables above.  */

static void
mep_init_intrinsics (void)
{
  size_t i;

  /* Set MEP_SELECTED_ISA to the ISA flag for this configuration.
     The first table entry is the default when -mconfig= is absent or
     does not match any known name.  */
  mep_selected_isa = mep_configs[0].isa;
  if (mep_config_string != 0)
    for (i = 0; mep_configs[i].config_name; i++)
      if (strcmp (mep_config_string, mep_configs[i].config_name) == 0)
        {
          mep_selected_isa = mep_configs[i].isa;
          break;
        }

  /* Assume all intrinsics are unavailable.  */
  for (i = 0; i < ARRAY_SIZE (mep_intrinsic_insn); i++)
    mep_intrinsic_insn[i] = -1;

  /* Build up the global intrinsic tables.  Later entries for the same
     intrinsic are chained through mep_intrinsic_chain, so the most
     recent (largest-index) entry becomes the head.  */
  for (i = 0; i < ARRAY_SIZE (cgen_insns); i++)
    if ((cgen_insns[i].isas & mep_selected_isa) != 0)
      {
        mep_intrinsic_chain[i] = mep_intrinsic_insn[cgen_insns[i].intrinsic];
        mep_intrinsic_insn[cgen_insns[i].intrinsic] = i;
      }
  /* See whether we can directly move values between one coprocessor
     register and another.  */
  for (i = 0; i < ARRAY_SIZE (mep_cmov_insns); i++)
    if (MEP_INTRINSIC_AVAILABLE_P (mep_cmov_insns[i]))
      mep_have_copro_copro_moves_p = true;

  /* See whether we can directly move values between core and
     coprocessor registers.  */
  mep_have_core_copro_moves_p = (MEP_INTRINSIC_AVAILABLE_P (mep_cmov1)
                                 && MEP_INTRINSIC_AVAILABLE_P (mep_cmov2));

  /* NOTE(review): this unconditionally overrides the value computed
     just above, forcing core<->copro moves on.  Looks like bring-up
     or debugging leftover -- confirm whether the computed value
     should stand.  */
  mep_have_core_copro_moves_p = 1;
}
6011
 
6012
/* Declare all available intrinsic functions.  Called once only.  */

/* Tree type nodes built by mep_init_builtins and used when declaring
   builtin parameter/return types (see mep_cgen_regnum_to_type).  */
static tree cp_data_bus_int_type_node;
static tree opaque_vector_type_node;
static tree v8qi_type_node;
static tree v4hi_type_node;
static tree v2si_type_node;
static tree v8uqi_type_node;
static tree v4uhi_type_node;
static tree v2usi_type_node;
6022
 
6023
/* Map a cgen regnum operand type CR onto the GCC tree type used to
   declare the corresponding builtin parameter or return value.
   Unrecognized types fall back to void.  */
static tree
mep_cgen_regnum_to_type (enum cgen_regnum_operand_type cr)
{
  switch (cr)
    {
    case cgen_regnum_operand_type_POINTER:      return ptr_type_node;
    case cgen_regnum_operand_type_LONG:         return long_integer_type_node;
    case cgen_regnum_operand_type_ULONG:        return long_unsigned_type_node;
    case cgen_regnum_operand_type_SHORT:        return short_integer_type_node;
    case cgen_regnum_operand_type_USHORT:       return short_unsigned_type_node;
    case cgen_regnum_operand_type_CHAR:         return char_type_node;
    case cgen_regnum_operand_type_UCHAR:        return unsigned_char_type_node;
    case cgen_regnum_operand_type_SI:           return intSI_type_node;
    case cgen_regnum_operand_type_DI:           return intDI_type_node;
    case cgen_regnum_operand_type_VECTOR:       return opaque_vector_type_node;
    case cgen_regnum_operand_type_V8QI:         return v8qi_type_node;
    case cgen_regnum_operand_type_V4HI:         return v4hi_type_node;
    case cgen_regnum_operand_type_V2SI:         return v2si_type_node;
    case cgen_regnum_operand_type_V8UQI:        return v8uqi_type_node;
    case cgen_regnum_operand_type_V4UHI:        return v4uhi_type_node;
    case cgen_regnum_operand_type_V2USI:        return v2usi_type_node;
    case cgen_regnum_operand_type_CP_DATA_BUS_INT: return cp_data_bus_int_type_node;
    default:
      return void_type_node;
    }
}
6049
 
6050
/* Build the MeP builtin types, expose them to the source language,
   and declare one builtin function per available intrinsic.
   Presumably registered as TARGET_INIT_BUILTINS -- confirm against
   the hook table elsewhere in this file.  */
static void
mep_init_builtins (void)
{
  size_t i;

  /* The __cop arithmetic type matches the coprocessor data bus
     width.  */
  if (TARGET_64BIT_CR_REGS)
    cp_data_bus_int_type_node = long_long_integer_type_node;
  else
    cp_data_bus_int_type_node = long_integer_type_node;

  opaque_vector_type_node = build_opaque_vector_type (intQI_type_node, 8);
  v8qi_type_node = build_vector_type (intQI_type_node, 8);
  v4hi_type_node = build_vector_type (intHI_type_node, 4);
  v2si_type_node = build_vector_type (intSI_type_node, 2);
  v8uqi_type_node = build_vector_type (unsigned_intQI_type_node, 8);
  v4uhi_type_node = build_vector_type (unsigned_intHI_type_node, 4);
  v2usi_type_node = build_vector_type (unsigned_intSI_type_node, 2);

  /* Make each type visible to user code under a fixed name.  */
  (*lang_hooks.decls.pushdecl)
    (build_decl (BUILTINS_LOCATION, TYPE_DECL, get_identifier ("cp_data_bus_int"),
                 cp_data_bus_int_type_node));

  (*lang_hooks.decls.pushdecl)
    (build_decl (BUILTINS_LOCATION, TYPE_DECL, get_identifier ("cp_vector"),
                 opaque_vector_type_node));

  (*lang_hooks.decls.pushdecl)
    (build_decl (BUILTINS_LOCATION, TYPE_DECL, get_identifier ("cp_v8qi"),
                 v8qi_type_node));
  (*lang_hooks.decls.pushdecl)
    (build_decl (BUILTINS_LOCATION, TYPE_DECL, get_identifier ("cp_v4hi"),
                 v4hi_type_node));
  (*lang_hooks.decls.pushdecl)
    (build_decl (BUILTINS_LOCATION, TYPE_DECL, get_identifier ("cp_v2si"),
                 v2si_type_node));

  (*lang_hooks.decls.pushdecl)
    (build_decl (BUILTINS_LOCATION, TYPE_DECL, get_identifier ("cp_v8uqi"),
                 v8uqi_type_node));
  (*lang_hooks.decls.pushdecl)
    (build_decl (BUILTINS_LOCATION, TYPE_DECL, get_identifier ("cp_v4uhi"),
                 v4uhi_type_node));
  (*lang_hooks.decls.pushdecl)
    (build_decl (BUILTINS_LOCATION, TYPE_DECL, get_identifier ("cp_v2usi"),
                 v2usi_type_node));

  /* Intrinsics like mep_cadd3 are implemented with two groups of
     instructions, one which uses UNSPECs and one which uses a specific
     rtl code such as PLUS.  Instructions in the latter group belong
     to GROUP_KNOWN_CODE.

     In such cases, the intrinsic will have two entries in the global
     tables above.  The unspec form is accessed using builtin functions
     while the specific form is accessed using the mep_* enum in
     mep-intrin.h.

     The idea is that __cop arithmetic and builtin functions have
     different optimization requirements.  If mep_cadd3() appears in
     the source code, the user will surely expect gcc to use cadd3
     rather than a work-alike such as add3.  However, if the user
     just writes "a + b", where a or b are __cop variables, it is
     reasonable for gcc to choose a core instruction rather than
     cadd3 if it believes that is more optimal.  */
  for (i = 0; i < ARRAY_SIZE (cgen_insns); i++)
    if ((cgen_insns[i].groups & GROUP_KNOWN_CODE) == 0
        && mep_intrinsic_insn[cgen_insns[i].intrinsic] >= 0)
      {
        tree ret_type = void_type_node;
        tree bi_type;

        /* Declare each intrinsic only once, even when consecutive
           table entries implement the same one.  */
        if (i > 0 && cgen_insns[i].intrinsic == cgen_insns[i-1].intrinsic)
          continue;

        if (cgen_insns[i].cret_p)
          ret_type = mep_cgen_regnum_to_type (cgen_insns[i].regnums[0].type);

        bi_type = build_function_type (ret_type, 0);
        add_builtin_function (cgen_intrinsics[cgen_insns[i].intrinsic],
                              bi_type,
                              cgen_insns[i].intrinsic, BUILT_IN_MD, NULL, NULL);
      }
}
6132
 
6133
/* Report the unavailablity of the given intrinsic.  */
6134
 
6135
#if 1
6136
static void
6137
mep_intrinsic_unavailable (int intrinsic)
6138
{
6139
  static int already_reported_p[ARRAY_SIZE (cgen_intrinsics)];
6140
 
6141
  if (already_reported_p[intrinsic])
6142
    return;
6143
 
6144
  if (mep_intrinsic_insn[intrinsic] < 0)
6145
    error ("coprocessor intrinsic %qs is not available in this configuration",
6146
           cgen_intrinsics[intrinsic]);
6147
  else if (CGEN_CURRENT_GROUP == GROUP_VLIW)
6148
    error ("%qs is not available in VLIW functions",
6149
           cgen_intrinsics[intrinsic]);
6150
  else
6151
    error ("%qs is not available in non-VLIW functions",
6152
           cgen_intrinsics[intrinsic]);
6153
 
6154
  already_reported_p[intrinsic] = 1;
6155
}
6156
#endif
6157
 
6158
 
6159
/* See if any implementation of INTRINSIC is available to the
6160
   current function.  If so, store the most general implementation
6161
   in *INSN_PTR and return true.  Return false otherwise.  */
6162
 
6163
static bool
6164
mep_get_intrinsic_insn (int intrinsic ATTRIBUTE_UNUSED, const struct cgen_insn **insn_ptr ATTRIBUTE_UNUSED)
6165
{
6166
  int i;
6167
 
6168
  i = mep_intrinsic_insn[intrinsic];
6169
  while (i >= 0 && !CGEN_ENABLE_INSN_P (i))
6170
    i = mep_intrinsic_chain[i];
6171
 
6172
  if (i >= 0)
6173
    {
6174
      *insn_ptr = &cgen_insns[i];
6175
      return true;
6176
    }
6177
  return false;
6178
}
6179
 
6180
 
6181
/* Like mep_get_intrinsic_insn, but with extra handling for moves.
6182
   If INTRINSIC is mep_cmov, but there is no pure CR <- CR move insn,
6183
   try using a work-alike instead.  In this case, the returned insn
6184
   may have three operands rather than two.  */
6185
 
6186
static bool
6187
mep_get_move_insn (int intrinsic, const struct cgen_insn **cgen_insn)
6188
{
6189
  size_t i;
6190
 
6191
  if (intrinsic == mep_cmov)
6192
    {
6193
      for (i = 0; i < ARRAY_SIZE (mep_cmov_insns); i++)
6194
        if (mep_get_intrinsic_insn (mep_cmov_insns[i], cgen_insn))
6195
          return true;
6196
      return false;
6197
    }
6198
  return mep_get_intrinsic_insn (intrinsic, cgen_insn);
6199
}
6200
 
6201
 
6202
/* If ARG is a register operand that is the same size as MODE, convert it
6203
   to MODE using a subreg.  Otherwise return ARG as-is.  */
6204
 
6205
static rtx
6206
mep_convert_arg (enum machine_mode mode, rtx arg)
6207
{
6208
  if (GET_MODE (arg) != mode
6209
      && register_operand (arg, VOIDmode)
6210
      && GET_MODE_SIZE (GET_MODE (arg)) == GET_MODE_SIZE (mode))
6211
    return simplify_gen_subreg (mode, arg, GET_MODE (arg), 0);
6212
  return arg;
6213
}
6214
 
6215
 
6216
/* Apply regnum conversions to ARG using the description given by REGNUM.
6217
   Return the new argument on success and null on failure.  */
6218
 
6219
static rtx
6220
mep_convert_regnum (const struct cgen_regnum_operand *regnum, rtx arg)
6221
{
6222
  if (regnum->count == 0)
6223
    return arg;
6224
 
6225
  if (GET_CODE (arg) != CONST_INT
6226
      || INTVAL (arg) < 0
6227
      || INTVAL (arg) >= regnum->count)
6228
    return 0;
6229
 
6230
  return gen_rtx_REG (SImode, INTVAL (arg) + regnum->base);
6231
}
6232
 
6233
 
6234
/* Try to make intrinsic argument ARG match the given operand.
   UNSIGNED_P is true if the argument has an unsigned type.
   Returns the (possibly converted) argument rtx, or 0 when ARG
   cannot be made to satisfy OPERAND's predicate.  */

static rtx
mep_legitimize_arg (const struct insn_operand_data *operand, rtx arg,
                    int unsigned_p)
{
  if (GET_CODE (arg) == CONST_INT)
    {
      /* CONST_INTs can only be bound to integer operands.  */
      if (GET_MODE_CLASS (operand->mode) != MODE_INT)
        return 0;
    }
  else if (GET_CODE (arg) == CONST_DOUBLE)
    /* These hold vector constants.  */;
  else if (GET_MODE_SIZE (GET_MODE (arg)) != GET_MODE_SIZE (operand->mode))
    {
      /* If the argument is a different size from what's expected, we must
         have a value in the right mode class in order to convert it.  */
      if (GET_MODE_CLASS (operand->mode) != GET_MODE_CLASS (GET_MODE (arg)))
        return 0;

      /* If the operand is an rvalue, promote or demote it to match the
         operand's size.  This might not need extra instructions when
         ARG is a register value.  */
      if (operand->constraint[0] != '=')
        arg = convert_to_mode (operand->mode, arg, unsigned_p);
    }

  /* If the operand is an lvalue, bind the operand to a new register.
     The caller will copy this value into ARG after the main
     instruction.  By doing this always, we produce slightly more
     optimal code.  */
  /* But not for control registers.  */
  if (operand->constraint[0] == '='
      && (! REG_P (arg)
          || ! (CONTROL_REGNO_P (REGNO (arg))
                || CCR_REGNO_P (REGNO (arg))
                || CR_REGNO_P (REGNO (arg)))
          ))
    return gen_reg_rtx (operand->mode);

  /* Try simple mode punning.  */
  arg = mep_convert_arg (operand->mode, arg);
  if (operand->predicate (arg, operand->mode))
    return arg;

  /* See if forcing the argument into a register will make it match.  */
  if (GET_CODE (arg) == CONST_INT || GET_CODE (arg) == CONST_DOUBLE)
    arg = force_reg (operand->mode, arg);
  else
    arg = mep_convert_arg (operand->mode, force_reg (GET_MODE (arg), arg));
  if (operand->predicate (arg, operand->mode))
    return arg;

  return 0;
}
6291
 
6292
 
6293
/* Report that ARG cannot be passed to argument ARGNUM of intrinsic
   function FNNAME.  OPERAND describes the operand to which ARGNUM
   is mapped.  */

static void
mep_incompatible_arg (const struct insn_operand_data *operand, rtx arg,
                      int argnum, tree fnname)
{
  size_t i;

  /* For integer constants, look up the immediate predicate that
     rejected the value so the diagnostic can name the exact range or
     alignment requirement rather than a generic type mismatch.  */
  if (GET_CODE (arg) == CONST_INT)
    for (i = 0; i < ARRAY_SIZE (cgen_immediate_predicates); i++)
      if (operand->predicate == cgen_immediate_predicates[i].predicate)
        {
          const struct cgen_immediate_predicate *predicate;
          HOST_WIDE_INT argval;

          predicate = &cgen_immediate_predicates[i];
          argval = INTVAL (arg);
          /* Out of range -> range error; in range but still rejected
             -> it must have failed the alignment check.  */
          if (argval < predicate->lower || argval >= predicate->upper)
            error ("argument %d of %qE must be in the range %d...%d",
                   argnum, fnname, predicate->lower, predicate->upper - 1);
          else
            error ("argument %d of %qE must be a multiple of %d",
                   argnum, fnname, predicate->align);
          return;
        }

  error ("incompatible type for argument %d of %qE", argnum, fnname);
}
6323
 
6324
/* Expand a call EXP to a MeP intrinsic builtin into rtl (the
   TARGET_EXPAND_BUILTIN-shaped entry point -- confirm against the
   hook table elsewhere in this file).  Returns TARGET, or NULL_RTX
   after emitting a diagnostic when the call cannot be expanded.  */
static rtx
mep_expand_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
                    rtx subtarget ATTRIBUTE_UNUSED,
                    enum machine_mode mode ATTRIBUTE_UNUSED,
                    int ignore ATTRIBUTE_UNUSED)
{
  rtx pat, op[10], arg[10];
  unsigned int a;
  int opindex, unsigned_p[10];
  tree fndecl, args;
  unsigned int n_args;
  tree fnname;
  const struct cgen_insn *cgen_insn;
  const struct insn_data *idata;
  unsigned int first_arg = 0;
  tree return_type = void_type_node;
  unsigned int builtin_n_args;

  fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
  fnname = DECL_NAME (fndecl);

  /* Find out which instruction we should emit.  Note that some coprocessor
     intrinsics may only be available in VLIW mode, or only in normal mode.  */
  if (!mep_get_intrinsic_insn (DECL_FUNCTION_CODE (fndecl), &cgen_insn))
    {
      mep_intrinsic_unavailable (DECL_FUNCTION_CODE (fndecl));
      return NULL_RTX;
    }
  idata = &insn_data[cgen_insn->icode];

  builtin_n_args = cgen_insn->num_args;

  /* When the insn returns a value through operand 0, that operand is
     not a source-level argument: reserve slot 0 for it and shrink the
     expected argument count accordingly.  */
  if (cgen_insn->cret_p)
    {
      if (cgen_insn->cret_p > 1)
        builtin_n_args ++;
      first_arg = 1;
      return_type = mep_cgen_regnum_to_type (cgen_insn->regnums[0].type);
      builtin_n_args --;
    }

  /* Evaluate each argument.  */
  n_args = call_expr_nargs (exp);

  if (n_args < builtin_n_args)
    {
      error ("too few arguments to %qE", fnname);
      return NULL_RTX;
    }
  if (n_args > builtin_n_args)
    {
      error ("too many arguments to %qE", fnname);
      return NULL_RTX;
    }

  for (a = first_arg; a < builtin_n_args + first_arg; a++)
    {
      tree value;

      args = CALL_EXPR_ARG (exp, a - first_arg);

      value = args;

#if 0
      if (cgen_insn->regnums[a].reference_p)
        {
          if (TREE_CODE (value) != ADDR_EXPR)
            {
              debug_tree(value);
              error ("argument %d of %qE must be an address", a+1, fnname);
              return NULL_RTX;
            }
          value = TREE_OPERAND (value, 0);
        }
#endif

      /* If the argument has been promoted to int, get the unpromoted
         value.  This is necessary when sub-int memory values are bound
         to reference parameters.  */
      if (TREE_CODE (value) == NOP_EXPR
          && TREE_TYPE (value) == integer_type_node
          && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (value, 0)))
          && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (value, 0)))
              < TYPE_PRECISION (TREE_TYPE (value))))
        value = TREE_OPERAND (value, 0);

      /* If the argument has been promoted to double, get the unpromoted
         SFmode value.  This is necessary for FMAX support, for example.  */
      if (TREE_CODE (value) == NOP_EXPR
          && SCALAR_FLOAT_TYPE_P (TREE_TYPE (value))
          && SCALAR_FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (value, 0)))
          && TYPE_MODE (TREE_TYPE (value)) == DFmode
          && TYPE_MODE (TREE_TYPE (TREE_OPERAND (value, 0))) == SFmode)
        value = TREE_OPERAND (value, 0);

      unsigned_p[a] = TYPE_UNSIGNED (TREE_TYPE (value));
      arg[a] = expand_expr (value, NULL, VOIDmode, EXPAND_NORMAL);
      /* Literal register indices become hard-register rtxes; out of
         range indices come back as 0 and are diagnosed below.  */
      arg[a] = mep_convert_regnum (&cgen_insn->regnums[a], arg[a]);
      if (cgen_insn->regnums[a].reference_p)
        {
          tree pointed_to = TREE_TYPE (TREE_TYPE (value));
          enum machine_mode pointed_mode = TYPE_MODE (pointed_to);

          arg[a] = gen_rtx_MEM (pointed_mode, arg[a]);
        }
      if (arg[a] == 0)
        {
          error ("argument %d of %qE must be in the range %d...%d",
                 a + 1, fnname, 0, cgen_insn->regnums[a].count - 1);
          return NULL_RTX;
        }
    }

  /* Allocate the return-value operand(s), reusing TARGET for slot 0
     when its mode already matches.  */
  for (a = 0; a < first_arg; a++)
    {
      if (a == 0 && target && GET_MODE (target) == idata->operand[0].mode)
        arg[a] = target;
      else
        arg[a] = gen_reg_rtx (idata->operand[0].mode);
    }

  /* Convert the arguments into a form suitable for the intrinsic.
     Report an error if this isn't possible.  */
  for (opindex = 0; opindex < idata->n_operands; opindex++)
    {
      a = cgen_insn->op_mapping[opindex];
      op[opindex] = mep_legitimize_arg (&idata->operand[opindex],
                                        arg[a], unsigned_p[a]);
      if (op[opindex] == 0)
        {
          mep_incompatible_arg (&idata->operand[opindex],
                                arg[a], a + 1 - first_arg, fnname);
          return NULL_RTX;
        }
    }

  /* Emit the instruction.  */
  pat = idata->genfun (op[0], op[1], op[2], op[3], op[4],
                       op[5], op[6], op[7], op[8], op[9]);

  /* Conditional-branch patterns must be emitted as jump insns so the
     CFG stays correct.  */
  if (GET_CODE (pat) == SET
      && GET_CODE (SET_DEST (pat)) == PC
      && GET_CODE (SET_SRC (pat)) == IF_THEN_ELSE)
    emit_jump_insn (pat);
  else
    emit_insn (pat);

  /* Copy lvalues back to their final locations.  */
  for (opindex = 0; opindex < idata->n_operands; opindex++)
    if (idata->operand[opindex].constraint[0] == '=')
      {
        a = cgen_insn->op_mapping[opindex];
        if (a >= first_arg)
          {
            if (GET_MODE_CLASS (GET_MODE (arg[a]))
                != GET_MODE_CLASS (GET_MODE (op[opindex])))
              emit_move_insn (arg[a], gen_lowpart (GET_MODE (arg[a]),
                                                   op[opindex]));
            else
              {
                /* First convert the operand to the right mode, then copy it
                   into the destination.  Doing the conversion as a separate
                   step (rather than using convert_move) means that we can
                   avoid creating no-op moves when ARG[A] and OP[OPINDEX]
                   refer to the same register.  */
                op[opindex] = convert_to_mode (GET_MODE (arg[a]),
                                               op[opindex], unsigned_p[a]);
                if (!rtx_equal_p (arg[a], op[opindex]))
                  emit_move_insn (arg[a], op[opindex]);
              }
          }
      }

  if (first_arg > 0 && target && target != op[0])
    {
      emit_move_insn (target, op[0]);
    }

  return target;
}
6504
 
6505
/* Report that no vector modes are natively supported.  Presumably
   registered as TARGET_VECTOR_MODE_SUPPORTED_P -- confirm against the
   hook table elsewhere in this file; vector operations are instead
   exposed through the coprocessor intrinsics above.  */
static bool
mep_vector_mode_supported_p (enum machine_mode mode ATTRIBUTE_UNUSED)
{
  return false;
}
6510
 
6511
/* A subroutine of global_reg_mentioned_p, returns 1 if *LOC mentions
   a global register.  Intended as a for_each_rtx callback; DATA is
   unused.  */

static int
global_reg_mentioned_p_1 (rtx *loc, void *data ATTRIBUTE_UNUSED)
{
  int regno;
  rtx x = *loc;

  if (! x)
    return 0;

  switch (GET_CODE (x))
    {
    case SUBREG:
      /* A subreg of a hard register is a global mention iff the
         underlying hard register (after subreg adjustment) is
         global.  Subregs of pseudos fall through to the default
         so their operands get walked.  */
      if (REG_P (SUBREG_REG (x)))
        {
          if (REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER
              && global_regs[subreg_regno (x)])
            return 1;
          return 0;
        }
      break;

    case REG:
      regno = REGNO (x);
      if (regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
        return 1;
      return 0;

    case SCRATCH:
    case PC:
    case CC0:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST:
    case LABEL_REF:
      /* These can never mention a register.  */
      return 0;

    case CALL:
      /* A non-constant call might use a global register.  */
      return 1;

    default:
      break;
    }

  return 0;
}
6560
 
6561
/* Returns nonzero if X mentions a global register.  */
6562
 
6563
static int
6564
global_reg_mentioned_p (rtx x)
6565
{
6566
  if (INSN_P (x))
6567
    {
6568
      if (CALL_P (x))
6569
        {
6570
          if (! RTL_CONST_OR_PURE_CALL_P (x))
6571
            return 1;
6572
          x = CALL_INSN_FUNCTION_USAGE (x);
6573
          if (x == 0)
6574
            return 0;
6575
        }
6576
      else
6577
        x = PATTERN (x);
6578
    }
6579
 
6580
  return for_each_rtx (&x, global_reg_mentioned_p_1, NULL);
6581
}
6582
/* Scheduling hooks for VLIW mode.
6583
 
6584
   Conceptually this is very simple: we have a two-pack architecture
6585
   that takes one core insn and one coprocessor insn to make up either
6586
   a 32- or 64-bit instruction word (depending on the option bit set in
6587
   the chip).  I.e. in VL32 mode, we can pack one 16-bit core insn and
6588
   one 16-bit cop insn; in VL64 mode we can pack one 16-bit core insn
6589
   and one 48-bit cop insn or two 32-bit core/cop insns.
6590
 
6591
   In practice, instruction selection will be a bear.  Consider in
6592
   VL64 mode the following insns
6593
 
6594
        add $1, 1
6595
        cmov $cr0, $0
6596
 
6597
   these cannot pack, since the add is a 16-bit core insn and cmov
6598
   is a 32-bit cop insn.  However,
6599
 
6600
        add3 $1, $1, 1
6601
        cmov $cr0, $0
6602
 
6603
   packs just fine.  For good VLIW code generation in VL64 mode, we
6604
   will have to have 32-bit alternatives for many of the common core
6605
   insns.  Not implemented.  */
6606
 
6607
/* Scheduler hook: adjust COST, the cost of the dependence LINK between
   INSN and DEP_INSN.  Returns the adjusted cost.  */

static int
mep_adjust_cost (rtx insn, rtx link, rtx dep_insn, int cost)
{
  int cost_specified;

  /* A nonzero REG_NOTE_KIND means an anti or output dependence.  */
  if (REG_NOTE_KIND (link) != 0)
    {
      /* See whether INSN and DEP_INSN are intrinsics that set the same
         hard register.  If so, it is more important to free up DEP_INSN
         than it is to free up INSN.

         Note that intrinsics like mep_mulr are handled differently from
         the equivalent mep.md patterns.  In mep.md, if we don't care
         about the value of $lo and $hi, the pattern will just clobber
         the registers, not set them.  Since clobbers don't count as
         output dependencies, it is often possible to reorder two mulrs,
         even after reload.

         In contrast, mep_mulr() sets both $lo and $hi to specific values,
         so any pair of mep_mulr()s will be inter-dependent.   We should
         therefore give the first mep_mulr() a higher priority.  */
      if (REG_NOTE_KIND (link) == REG_DEP_OUTPUT
          && global_reg_mentioned_p (PATTERN (insn))
          && global_reg_mentioned_p (PATTERN (dep_insn)))
        return 1;

      /* If the dependence is an anti or output dependence, assume it
         has no cost.  */
      return 0;
    }

  /* If we can't recognize the insns, we can't really do anything.  */
  if (recog_memoized (dep_insn) < 0)
    return cost;

  /* The latency attribute doesn't apply to MeP-h1: we use the stall
     attribute instead.  */
  if (!TARGET_H1)
    {
      cost_specified = get_attr_latency (dep_insn);
      if (cost_specified != 0)
        return cost_specified;
    }

  return cost;
}
6653
 
6654
/* ??? We don't properly compute the length of a load/store insn,
6655
   taking into account the addressing mode.  */
6656
 
6657
static int
6658
mep_issue_rate (void)
6659
{
6660
  return TARGET_IVC2 ? 3 : 2;
6661
}
6662
 
6663
/* Return true if function DECL was declared with the vliw attribute.  */
6664
 
6665
bool
6666
mep_vliw_function_p (tree decl)
6667
{
6668
  return lookup_attribute ("vliw", TYPE_ATTRIBUTES (TREE_TYPE (decl))) != 0;
6669
}
6670
 
6671
static rtx
6672
mep_find_ready_insn (rtx *ready, int nready, enum attr_slot slot, int length)
6673
{
6674
  int i;
6675
 
6676
  for (i = nready - 1; i >= 0; --i)
6677
    {
6678
      rtx insn = ready[i];
6679
      if (recog_memoized (insn) >= 0
6680
          && get_attr_slot (insn) == slot
6681
          && get_attr_length (insn) == length)
6682
        return insn;
6683
    }
6684
 
6685
  return NULL_RTX;
6686
}
6687
 
6688
static void
6689
mep_move_ready_insn (rtx *ready, int nready, rtx insn)
6690
{
6691
  int i;
6692
 
6693
  for (i = 0; i < nready; ++i)
6694
    if (ready[i] == insn)
6695
      {
6696
        for (; i < nready - 1; ++i)
6697
          ready[i] = ready[i + 1];
6698
        ready[i] = insn;
6699
        return;
6700
      }
6701
 
6702
  gcc_unreachable ();
6703
}
6704
 
6705
/* Write a one-line description of INSN to DUMP: its insn code, uid,
   pattern name, and slot(s) attribute.  Used for verbose scheduling
   dumps.  */

static void
mep_print_sched_insn (FILE *dump, rtx insn)
{
  const char *slots = "none";
  const char *name = NULL;
  int code;
  char buf[30];

  /* Only SET/PARALLEL patterns carry a meaningful slots attribute.  */
  if (GET_CODE (PATTERN (insn)) == SET
      || GET_CODE (PATTERN (insn)) == PARALLEL)
    {
      switch (get_attr_slots (insn))
        {
        case SLOTS_CORE: slots = "core"; break;
        case SLOTS_C3: slots = "c3"; break;
        case SLOTS_P0: slots = "p0"; break;
        case SLOTS_P0_P0S: slots = "p0,p0s"; break;
        case SLOTS_P0_P1: slots = "p0,p1"; break;
        case SLOTS_P0S: slots = "p0s"; break;
        case SLOTS_P0S_P1: slots = "p0s,p1"; break;
        case SLOTS_P1: slots = "p1"; break;
        default:
          /* Unknown combination: print the raw attribute value.  */
          sprintf(buf, "%d", get_attr_slots (insn));
          slots = buf;
          break;
        }
    }
  if (GET_CODE (PATTERN (insn)) == USE)
    slots = "use";

  code = INSN_CODE (insn);
  if (code >= 0)
    name = get_insn_name (code);
  if (!name)
    name = "{unknown}";

  fprintf (dump,
           "insn %4d %4d  %8s  %s\n",
           code,
           INSN_UID (insn),
           name,
           slots);
}
6748
 
6749
/* Scheduler hook: reorder the *PNREADY entries of the ready list READY
   and return the number of insns to issue this cycle.  For VLIW
   functions on non-IVC2 targets, try to move a core/coprocessor pair
   of compatible lengths to the head of the list so they can issue
   together.  */

static int
mep_sched_reorder (FILE *dump ATTRIBUTE_UNUSED,
                   int sched_verbose ATTRIBUTE_UNUSED, rtx *ready,
                   int *pnready, int clock ATTRIBUTE_UNUSED)
{
  int nready = *pnready;
  rtx core_insn, cop_insn;
  int i;

  if (dump && sched_verbose > 1)
    {
      fprintf (dump, "\nsched_reorder: clock %d nready %d\n", clock, nready);
      for (i=0; i<nready; i++)
        mep_print_sched_insn (dump, ready[i]);
      fprintf (dump, "\n");
    }

  /* Pairing only makes sense inside a VLIW function.  */
  if (!mep_vliw_function_p (cfun->decl))
    return 1;
  if (nready < 2)
    return 1;

  /* IVC2 uses a DFA to determine what's ready and what's not. */
  if (TARGET_IVC2)
    return nready;

  /* We can issue either a core or coprocessor instruction.
     Look for a matched pair of insns to reorder.  If we don't
     find any, don't second-guess the scheduler's priorities.  */

  /* First try a 16-bit core insn with a cop insn that fills the rest
     of the word (48 bits in VL64 mode, 16 bits in VL32 mode).  */
  if ((core_insn = mep_find_ready_insn (ready, nready, SLOT_CORE, 2))
      && (cop_insn = mep_find_ready_insn (ready, nready, SLOT_COP,
                                          TARGET_OPT_VL64 ? 6 : 2)))
    ;
  /* In VL64 mode a 32-bit core insn can also pair with a 32-bit cop.  */
  else if (TARGET_OPT_VL64
           && (core_insn = mep_find_ready_insn (ready, nready, SLOT_CORE, 4))
           && (cop_insn = mep_find_ready_insn (ready, nready, SLOT_COP, 4)))
    ;
  else
    /* We didn't find a pair.  Issue the single insn at the head
       of the ready list.  */
    return 1;

  /* Reorder the two insns first.  */
  mep_move_ready_insn (ready, nready, core_insn);
  mep_move_ready_insn (ready, nready - 1, cop_insn);
  return 2;
}
6797
 
6798
/* A for_each_rtx callback.  Return true if *X is a register that is
6799
   set by insn PREV.  */
6800
 
6801
static int
6802
mep_store_find_set (rtx *x, void *prev)
6803
{
6804
  return REG_P (*x) && reg_set_p (*x, (const_rtx) prev);
6805
}
6806
 
6807
/* Like mep_store_data_bypass_p, but takes a pattern as the second
   argument, not the containing insn.  */

static bool
mep_store_data_bypass_1 (rtx prev, rtx pat)
{
  /* Cope with intrinsics like swcpa.  */
  if (GET_CODE (pat) == PARALLEL)
    {
      int i;

      /* The bypass applies if any element of the PARALLEL qualifies.  */
      for (i = 0; i < XVECLEN (pat, 0); i++)
        if (mep_store_data_bypass_p (prev, XVECEXP (pat, 0, i)))
          return true;

      return false;
    }

  /* Check for some sort of store.  */
  if (GET_CODE (pat) != SET
      || GET_CODE (SET_DEST (pat)) != MEM)
    return false;

  /* Intrinsics use patterns of the form (set (mem (scratch)) (unspec ...)).
     The first operand to the unspec is the store data and the other operands
     are used to calculate the address.  */
  if (GET_CODE (SET_SRC (pat)) == UNSPEC)
    {
      rtx src;
      int i;

      src = SET_SRC (pat);
      /* Skip operand 0 (the store data); a true dependence on any of
         the address operands defeats the bypass.  */
      for (i = 1; i < XVECLEN (src, 0); i++)
        if (for_each_rtx (&XVECEXP (src, 0, i), mep_store_find_set, prev))
          return false;

      return true;
    }

  /* Otherwise just check that PREV doesn't modify any register mentioned
     in the memory destination.  */
  return !for_each_rtx (&SET_DEST (pat), mep_store_find_set, prev);
}
6850
 
6851
/* Return true if INSN is a store instruction and if the store address
6852
   has no true dependence on PREV.  */
6853
 
6854
bool
6855
mep_store_data_bypass_p (rtx prev, rtx insn)
6856
{
6857
  return INSN_P (insn) ? mep_store_data_bypass_1 (prev, PATTERN (insn)) : false;
6858
}
6859
 
6860
/* A for_each_rtx subroutine of mep_mul_hilo_bypass_p.  Return 1 if *X
6861
   is a register other than LO or HI and if PREV sets *X.  */
6862
 
6863
static int
6864
mep_mul_hilo_bypass_1 (rtx *x, void *prev)
6865
{
6866
  return (REG_P (*x)
6867
          && REGNO (*x) != LO_REGNO
6868
          && REGNO (*x) != HI_REGNO
6869
          && reg_set_p (*x, (const_rtx) prev));
6870
}
6871
 
6872
/* Return true if, apart from HI/LO, there are no true dependencies
   between multiplication instructions PREV and INSN.  */

bool
mep_mul_hilo_bypass_p (rtx prev, rtx insn)
{
  rtx pat;

  pat = PATTERN (insn);
  /* For a PARALLEL, examine only its first element.  */
  if (GET_CODE (pat) == PARALLEL)
    pat = XVECEXP (pat, 0, 0);
  /* The bypass applies only to SETs whose source has no register
     (other than HI/LO) set by PREV.  */
  return (GET_CODE (pat) == SET
          && !for_each_rtx (&SET_SRC (pat), mep_mul_hilo_bypass_1, prev));
}
6886
 
6887
/* Return true if INSN is an ldc instruction that issues to the
   MeP-h1 integer pipeline.  This is true for instructions that
   read from PSW, LP, SAR, HI and LO.  */

bool
mep_ipipe_ldc_p (rtx insn)
{
  rtx pat, src;

  pat = PATTERN (insn);

  /* Cope with intrinsics that set both a hard register and its shadow.
     The set of the hard register comes first.  */
  if (GET_CODE (pat) == PARALLEL)
    pat = XVECEXP (pat, 0, 0);

  if (GET_CODE (pat) == SET)
    {
      src = SET_SRC (pat);

      /* Cope with intrinsics.  The first operand to the unspec is
         the source register.  */
      if (GET_CODE (src) == UNSPEC || GET_CODE (src) == UNSPEC_VOLATILE)
        src = XVECEXP (src, 0, 0);

      if (REG_P (src))
        switch (REGNO (src))
          {
          case PSW_REGNO:
          case LP_REGNO:
          case SAR_REGNO:
          case HI_REGNO:
          case LO_REGNO:
            return true;
          }
    }
  return false;
}
6925
 
6926
/* Create a VLIW bundle from core instruction CORE and coprocessor
   instruction COP.  COP always satisfies INSN_P, but CORE can be
   either a new pattern or an existing instruction.

   Emit the bundle in place of COP and return it.  */

static rtx
mep_make_bundle (rtx core, rtx cop)
{
  rtx insn;

  /* If CORE is an existing instruction, remove it, otherwise put
     the new pattern in an INSN harness.  */
  if (INSN_P (core))
    remove_insn (core);
  else
    core = make_insn_raw (core);

  /* Generate the bundle sequence and replace COP with it.  */
  insn = gen_rtx_SEQUENCE (VOIDmode, gen_rtvec (2, core, cop));
  insn = emit_insn_after (insn, cop);
  remove_insn (cop);

  /* Set up the links of the insns inside the SEQUENCE.  */
  PREV_INSN (core) = PREV_INSN (insn);
  NEXT_INSN (core) = cop;
  PREV_INSN (cop) = core;
  NEXT_INSN (cop) = NEXT_INSN (insn);

  /* Set the VLIW flag for the coprocessor instruction.  */
  PUT_MODE (core, VOIDmode);
  PUT_MODE (cop, BImode);

  /* Derive a location for the bundle.  Individual instructions cannot
     have their own location because there can be no assembler labels
     between CORE and COP.  Prefer CORE's locator; fall back to COP's
     when CORE has none.  */
  INSN_LOCATOR (insn) = INSN_LOCATOR (INSN_LOCATOR (core) ? core : cop);
  INSN_LOCATOR (core) = 0;
  INSN_LOCATOR (cop) = 0;

  return insn;
}
6968
 
6969
/* A helper routine for mep_insn_dependent_p called through note_stores.
   X is a destination being stored to; *DATA is the pattern being
   tested for a dependence.  Clear *DATA when X is mentioned in it.  */

static void
mep_insn_dependent_p_1 (rtx x, const_rtx pat ATTRIBUTE_UNUSED, void *data)
{
  rtx * pinsn = (rtx *) data;

  if (*pinsn && reg_mentioned_p (x, *pinsn))
    *pinsn = NULL_RTX;
}
6979
 
6980
/* Return true if anything in insn X is (anti,output,true) dependent on
   anything in insn Y.  */

static int
mep_insn_dependent_p (rtx x, rtx y)
{
  rtx tmp;

  gcc_assert (INSN_P (x));
  gcc_assert (INSN_P (y));

  /* Does X store to anything mentioned in Y's pattern?  The callback
     clears TMP on a hit.  */
  tmp = PATTERN (y);
  note_stores (PATTERN (x), mep_insn_dependent_p_1, &tmp);
  if (tmp == NULL_RTX)
    return 1;

  /* And the converse: does Y store to anything X mentions?  */
  tmp = PATTERN (x);
  note_stores (PATTERN (y), mep_insn_dependent_p_1, &tmp);
  if (tmp == NULL_RTX)
    return 1;

  return 0;
}
7003
 
7004
static int
7005
core_insn_p (rtx insn)
7006
{
7007
  if (GET_CODE (PATTERN (insn)) == USE)
7008
    return 0;
7009
  if (get_attr_slot (insn) == SLOT_CORE)
7010
    return 1;
7011
  return 0;
7012
}
7013
 
7014
/* Mark coprocessor instructions that can be bundled together with
   the immediately preceding core instruction.  This is later used
   to emit the "+" that tells the assembler to create a VLIW insn.

   For unbundled insns, the assembler will automatically add coprocessor
   nops, and 16-bit core nops.  Due to an apparent oversight in the
   spec, the assembler will _not_ automatically add 32-bit core nops,
   so we have to emit those here.

   Called from mep_insn_reorg.  */

static void
mep_bundle_insns (rtx insns)
{
  rtx insn, last = NULL_RTX, first = NULL_RTX;
  int saw_scheduling = 0;

  /* Only do bundling if we're in vliw mode.  */
  if (!mep_vliw_function_p (cfun->decl))
    return;

  /* The first insn in a bundle is TImode, the remainder are
     VOIDmode.  After this function, the first has VOIDmode and the
     rest have BImode.  */

  /* Note: this doesn't appear to be true for JUMP_INSNs.  */

  /* First, move any NOTEs that are within a bundle, to the beginning
     of the bundle.  */
  for (insn = insns; insn ; insn = NEXT_INSN (insn))
    {
      if (NOTE_P (insn) && first)
        /* Don't clear FIRST.  */;

      else if (NONJUMP_INSN_P (insn) && GET_MODE (insn) == TImode)
        first = insn;

      else if (NONJUMP_INSN_P (insn) && GET_MODE (insn) == VOIDmode && first)
        {
          rtx note, prev;

          /* INSN is part of a bundle; FIRST is the first insn in that
             bundle.  Move all intervening notes out of the bundle.
             In addition, since the debug pass may insert a label
             whenever the current line changes, set the location info
             for INSN to match FIRST.  */

          INSN_LOCATOR (insn) = INSN_LOCATOR (first);

          /* Walk backwards from INSN to FIRST, unlinking each NOTE
             and relinking it immediately before FIRST.  */
          note = PREV_INSN (insn);
          while (note && note != first)
            {
              prev = PREV_INSN (note);

              if (NOTE_P (note))
                {
                  /* Remove NOTE from here... */
                  PREV_INSN (NEXT_INSN (note)) = PREV_INSN (note);
                  NEXT_INSN (PREV_INSN (note)) = NEXT_INSN (note);
                  /* ...and put it in here.  */
                  NEXT_INSN (note) = first;
                  PREV_INSN (note) = PREV_INSN (first);
                  NEXT_INSN (PREV_INSN (note)) = note;
                  PREV_INSN (NEXT_INSN (note)) = note;
                }

              note = prev;
            }
        }

      else if (!NONJUMP_INSN_P (insn))
        first = 0;
    }

  /* Now fix up the bundles.  */
  for (insn = insns; insn ; insn = NEXT_INSN (insn))
    {
      if (NOTE_P (insn))
        continue;

      if (!NONJUMP_INSN_P (insn))
        {
          last = 0;
          continue;
        }

      /* If we're not optimizing enough, there won't be scheduling
         info.  We detect that here.  */
      if (GET_MODE (insn) == TImode)
        saw_scheduling = 1;
      if (!saw_scheduling)
        continue;

      if (TARGET_IVC2)
        {
          rtx core_insn = NULL_RTX;

          /* IVC2 slots are scheduled by DFA, so we just accept
             whatever the scheduler gives us.  However, we must make
             sure the core insn (if any) is the first in the bundle.
             The IVC2 assembler can insert whatever NOPs are needed,
             and allows a COP insn to be first.  */

          if (NONJUMP_INSN_P (insn)
              && GET_CODE (PATTERN (insn)) != USE
              && GET_MODE (insn) == TImode)
            {
              /* Scan to the last insn of this bundle, remembering any
                 core insn seen along the way.  */
              for (last = insn;
                   NEXT_INSN (last)
                     && GET_MODE (NEXT_INSN (last)) == VOIDmode
                     && NONJUMP_INSN_P (NEXT_INSN (last));
                   last = NEXT_INSN (last))
                {
                  if (core_insn_p (last))
                    core_insn = last;
                }
              if (core_insn_p (last))
                core_insn = last;

              if (core_insn && core_insn != insn)
                {
                  /* Swap core insn to first in the bundle.  */

                  /* Remove core insn.  */
                  if (PREV_INSN (core_insn))
                    NEXT_INSN (PREV_INSN (core_insn)) = NEXT_INSN (core_insn);
                  if (NEXT_INSN (core_insn))
                    PREV_INSN (NEXT_INSN (core_insn)) = PREV_INSN (core_insn);

                  /* Re-insert core insn.  */
                  PREV_INSN (core_insn) = PREV_INSN (insn);
                  NEXT_INSN (core_insn) = insn;

                  if (PREV_INSN (core_insn))
                    NEXT_INSN (PREV_INSN (core_insn)) = core_insn;
                  PREV_INSN (insn) = core_insn;

                  PUT_MODE (core_insn, TImode);
                  PUT_MODE (insn, VOIDmode);
                }
            }

          /* The first insn has TImode, the rest have VOIDmode */
          if (GET_MODE (insn) == TImode)
            PUT_MODE (insn, VOIDmode);
          else
            PUT_MODE (insn, BImode);
          continue;
        }

      PUT_MODE (insn, VOIDmode);
      if (recog_memoized (insn) >= 0
          && get_attr_slot (insn) == SLOT_COP)
        {
          /* A cop insn that cannot pair with the previous core insn
             (wrong kind of LAST, mismatched lengths, or a dependence)
             must be padded out with an explicit core nop.  */
          if (GET_CODE (insn) == JUMP_INSN
              || ! last
              || recog_memoized (last) < 0
              || get_attr_slot (last) != SLOT_CORE
              || (get_attr_length (insn)
                  != (TARGET_OPT_VL64 ? 8 : 4) - get_attr_length (last))
              || mep_insn_dependent_p (insn, last))
            {
              switch (get_attr_length (insn))
                {
                case 8:
                  /* A 64-bit cop insn fills the whole word by itself.  */
                  break;
                case 6:
                  insn = mep_make_bundle (gen_nop (), insn);
                  break;
                case 4:
                  if (TARGET_OPT_VL64)
                    insn = mep_make_bundle (gen_nop32 (), insn);
                  break;
                case 2:
                  if (TARGET_OPT_VL64)
                    error ("2 byte cop instructions are"
                           " not allowed in 64-bit VLIW mode");
                  else
                    insn = mep_make_bundle (gen_nop (), insn);
                  break;
                default:
                  error ("unexpected %d byte cop instruction",
                         get_attr_length (insn));
                  break;
                }
            }
          else
            insn = mep_make_bundle (last, insn);
        }

      last = insn;
    }
}
7207
 
7208
 
7209
/* Try to instantiate INTRINSIC with the operands given in OPERANDS.
   Return true on success.  This function can fail if the intrinsic
   is unavailable or if the operands don't satisfy their predicates.  */

bool
mep_emit_intrinsic (int intrinsic, const rtx *operands)
{
  const struct cgen_insn *cgen_insn;
  const struct insn_data *idata;
  rtx newop[10];
  int i;

  /* Fail if INTRINSIC is not available in the current configuration.  */
  if (!mep_get_intrinsic_insn (intrinsic, &cgen_insn))
    return false;

  idata = &insn_data[cgen_insn->icode];
  /* Convert each operand to the mode the pattern expects and check it
     against the pattern's predicate.  */
  for (i = 0; i < idata->n_operands; i++)
    {
      newop[i] = mep_convert_arg (idata->operand[i].mode, operands[i]);
      if (!idata->operand[i].predicate (newop[i], idata->operand[i].mode))
        return false;
    }

  /* The generator ignores any trailing arguments beyond its operand
     count, so it is safe to pass all nine slots.  */
  emit_insn (idata->genfun (newop[0], newop[1], newop[2],
                            newop[3], newop[4], newop[5],
                            newop[6], newop[7], newop[8]));

  return true;
}
7238
 
7239
 
7240
/* Apply the given unary intrinsic to OPERANDS[1] and store it on
   OPERANDS[0].  Report an error if the instruction could not
   be synthesized.  OPERANDS[1] is a register_operand.  For sign
   and zero extensions, it may be smaller than SImode.

   Currently an unimplemented stub: always returns false, meaning no
   intrinsic expansion was produced.  */

bool
mep_expand_unary_intrinsic (int ATTRIBUTE_UNUSED intrinsic,
                            rtx * operands ATTRIBUTE_UNUSED)
{
  return false;
}
7251
 
7252
 
7253
/* Likewise, but apply a binary operation to OPERANDS[1] and
   OPERANDS[2].  OPERANDS[1] is a register_operand, OPERANDS[2]
   can be a general_operand.

   IMMEDIATE and IMMEDIATE3 are intrinsics that take an immediate
   third operand.  REG and REG3 take register operands only.

   Currently an unimplemented stub: always returns false, meaning no
   intrinsic expansion was produced.  */

bool
mep_expand_binary_intrinsic (int ATTRIBUTE_UNUSED immediate,
                             int ATTRIBUTE_UNUSED immediate3,
                             int ATTRIBUTE_UNUSED reg,
                             int ATTRIBUTE_UNUSED reg3,
                             rtx * operands ATTRIBUTE_UNUSED)
{
  return false;
}
7269
 
7270
static bool
7271
mep_rtx_cost (rtx x, int code, int outer_code ATTRIBUTE_UNUSED, int *total, bool ATTRIBUTE_UNUSED speed_t)
7272
{
7273
  switch (code)
7274
    {
7275
    case CONST_INT:
7276
      if (INTVAL (x) >= -128 && INTVAL (x) < 127)
7277
        *total = 0;
7278
      else if (INTVAL (x) >= -32768 && INTVAL (x) < 65536)
7279
        *total = 1;
7280
      else
7281
        *total = 3;
7282
      return true;
7283
 
7284
    case SYMBOL_REF:
7285
      *total = optimize_size ? COSTS_N_INSNS (0) : COSTS_N_INSNS (1);
7286
      return true;
7287
 
7288
    case MULT:
7289
      *total = (GET_CODE (XEXP (x, 1)) == CONST_INT
7290
                ? COSTS_N_INSNS (3)
7291
                : COSTS_N_INSNS (2));
7292
      return true;
7293
    }
7294
  return false;
7295
}
7296
 
7297
/* Address-cost hook: all addressing modes are considered equally
   cheap on MeP.  */

static int
mep_address_cost (rtx addr ATTRIBUTE_UNUSED, bool ATTRIBUTE_UNUSED speed_p)
{
  return 1;
}
7302
 
7303
static bool
7304
mep_handle_option (size_t code,
7305
                   const char *arg ATTRIBUTE_UNUSED,
7306
                   int value ATTRIBUTE_UNUSED)
7307
{
7308
  int i;
7309
 
7310
  switch (code)
7311
    {
7312
    case OPT_mall_opts:
7313
      target_flags |= MEP_ALL_OPTS;
7314
      break;
7315
 
7316
    case OPT_mno_opts:
7317
      target_flags &= ~ MEP_ALL_OPTS;
7318
      break;
7319
 
7320
    case OPT_mcop64:
7321
      target_flags |= MASK_COP;
7322
      target_flags |= MASK_64BIT_CR_REGS;
7323
      break;
7324
 
7325
    case OPT_mtiny_:
7326
      option_mtiny_specified = 1;
7327
 
7328
    case OPT_mivc2:
7329
      target_flags |= MASK_COP;
7330
      target_flags |= MASK_64BIT_CR_REGS;
7331
      target_flags |= MASK_VLIW;
7332
      target_flags |= MASK_OPT_VL64;
7333
      target_flags |= MASK_IVC2;
7334
 
7335
      for (i=0; i<32; i++)
7336
        fixed_regs[i+48] = 0;
7337
      for (i=0; i<32; i++)
7338
        call_used_regs[i+48] = 1;
7339
      for (i=6; i<8; i++)
7340
        call_used_regs[i+48] = 0;
7341
 
7342
#define RN(n,s) reg_names[FIRST_CCR_REGNO + n] = s
7343
      RN (0, "$csar0");
7344
      RN (1, "$cc");
7345
      RN (4, "$cofr0");
7346
      RN (5, "$cofr1");
7347
      RN (6, "$cofa0");
7348
      RN (7, "$cofa1");
7349
      RN (15, "$csar1");
7350
 
7351
      RN (16, "$acc0_0");
7352
      RN (17, "$acc0_1");
7353
      RN (18, "$acc0_2");
7354
      RN (19, "$acc0_3");
7355
      RN (20, "$acc0_4");
7356
      RN (21, "$acc0_5");
7357
      RN (22, "$acc0_6");
7358
      RN (23, "$acc0_7");
7359
 
7360
      RN (24, "$acc1_0");
7361
      RN (25, "$acc1_1");
7362
      RN (26, "$acc1_2");
7363
      RN (27, "$acc1_3");
7364
      RN (28, "$acc1_4");
7365
      RN (29, "$acc1_5");
7366
      RN (30, "$acc1_6");
7367
      RN (31, "$acc1_7");
7368
#undef RN
7369
 
7370
      break;
7371
 
7372
    default:
7373
      break;
7374
    }
7375
  return TRUE;
7376
}
7377
 
7378
/* Set up the MeP-specific output sections: based/tiny/far data
   variants and the VLIW text sections (the "v" flag and ".vliw"
   directive mark sections holding VLIW code; ".core" marks core-mode
   code).  */

static void
mep_asm_init_sections (void)
{
  based_section
    = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
                           "\t.section .based,\"aw\"");

  tinybss_section
    = get_unnamed_section (SECTION_WRITE | SECTION_BSS, output_section_asm_op,
                           "\t.section .sbss,\"aw\"");

  sdata_section
    = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
                           "\t.section .sdata,\"aw\",@progbits");

  far_section
    = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
                           "\t.section .far,\"aw\"");

  farbss_section
    = get_unnamed_section (SECTION_WRITE | SECTION_BSS, output_section_asm_op,
                           "\t.section .farbss,\"aw\"");

  frodata_section
    = get_unnamed_section (0, output_section_asm_op,
                           "\t.section .frodata,\"a\"");

  srodata_section
    = get_unnamed_section (0, output_section_asm_op,
                           "\t.section .srodata,\"a\"");

  vtext_section
    = get_unnamed_section (SECTION_CODE | SECTION_MEP_VLIW, output_section_asm_op,
                           "\t.section .vtext,\"axv\"\n\t.vliw");

  vftext_section
    = get_unnamed_section (SECTION_CODE | SECTION_MEP_VLIW, output_section_asm_op,
                           "\t.section .vftext,\"axv\"\n\t.vliw");

  ftext_section
    = get_unnamed_section (SECTION_CODE, output_section_asm_op,
                           "\t.section .ftext,\"ax\"\n\t.core");

}
7422
 
7423
#include "gt-mep.h"

powered by: WebSVN 2.1.0

© copyright 1999-2024 OpenCores.org, equivalent to Oliscience, all rights reserved. OpenCores®, registered trademark.