/* Subroutines for assembler code output on the TMS320C[34]x
   Copyright (C) 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2003,
   2004, 2005, 2007
   Free Software Foundation, Inc.

   Contributed by Michael Hayes (m.hayes@elec.canterbury.ac.nz)
              and Herman Ten Brugge (Haj.Ten.Brugge@net.HCC.nl).

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* Some output-actions in c4x.md need these.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tree.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "real.h"
#include "insn-config.h"
#include "insn-attr.h"
#include "conditions.h"
#include "output.h"
#include "function.h"
#include "expr.h"
#include "optabs.h"
#include "libfuncs.h"
#include "flags.h"
#include "recog.h"
#include "ggc.h"
#include "cpplib.h"
#include "toplev.h"
#include "tm_p.h"
#include "target.h"
#include "target-def.h"
#include "langhooks.h"

rtx smulhi3_libfunc;
rtx umulhi3_libfunc;
rtx fix_truncqfhi2_libfunc;
rtx fixuns_truncqfhi2_libfunc;
rtx fix_trunchfhi2_libfunc;
rtx fixuns_trunchfhi2_libfunc;
rtx floathiqf2_libfunc;
rtx floatunshiqf2_libfunc;
rtx floathihf2_libfunc;
rtx floatunshihf2_libfunc;

static int c4x_leaf_function;

static const char *const float_reg_names[] = FLOAT_REGISTER_NAMES;

/* Array of the smallest class containing reg number REGNO, indexed by
   REGNO.  Used by REGNO_REG_CLASS in c4x.h.  We assume that all these
   registers are available and set the class to NO_REGS for registers
   that the target switches say are unavailable.  */

enum reg_class c4x_regclass_map[FIRST_PSEUDO_REGISTER] =
{
                                /* Reg          Modes           Saved.  */
  R0R1_REGS,                    /* R0           QI, QF, HF      No.  */
  R0R1_REGS,                    /* R1           QI, QF, HF      No.  */
  R2R3_REGS,                    /* R2           QI, QF, HF      No.  */
  R2R3_REGS,                    /* R3           QI, QF, HF      No.  */
  EXT_LOW_REGS,                 /* R4           QI, QF, HF      QI.  */
  EXT_LOW_REGS,                 /* R5           QI, QF, HF      QI.  */
  EXT_LOW_REGS,                 /* R6           QI, QF, HF      QF.  */
  EXT_LOW_REGS,                 /* R7           QI, QF, HF      QF.  */
  ADDR_REGS,                    /* AR0          QI              No.  */
  ADDR_REGS,                    /* AR1          QI              No.  */
  ADDR_REGS,                    /* AR2          QI              No.  */
  ADDR_REGS,                    /* AR3          QI              QI.  */
  ADDR_REGS,                    /* AR4          QI              QI.  */
  ADDR_REGS,                    /* AR5          QI              QI.  */
  ADDR_REGS,                    /* AR6          QI              QI.  */
  ADDR_REGS,                    /* AR7          QI              QI.  */
  DP_REG,                       /* DP           QI              No.  */
  INDEX_REGS,                   /* IR0          QI              No.  */
  INDEX_REGS,                   /* IR1          QI              No.  */
  BK_REG,                       /* BK           QI              QI.  */
  SP_REG,                       /* SP           QI              No.  */
  ST_REG,                       /* ST           CC              No.  */
  NO_REGS,                      /* DIE/IE                       No.  */
  NO_REGS,                      /* IIE/IF                       No.  */
  NO_REGS,                      /* IIF/IOF                      No.  */
  INT_REGS,                     /* RS           QI              No.  */
  INT_REGS,                     /* RE           QI              No.  */
  RC_REG,                       /* RC           QI              No.  */
  EXT_REGS,                     /* R8           QI, QF, HF      QI.  */
  EXT_REGS,                     /* R9           QI, QF, HF      No.  */
  EXT_REGS,                     /* R10          QI, QF, HF      No.  */
  EXT_REGS,                     /* R11          QI, QF, HF      No.  */
};

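/* Mode in which each register must be saved around a call (an assumption:
   presumably consumed via HARD_REGNO_CALLER_SAVE_MODE in c4x.h); VOIDmode
   entries are never caller-saved.  */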
enum machine_mode c4x_caller_save_map[FIRST_PSEUDO_REGISTER] =
{
                                /* Reg          Modes           Saved.  */
  HFmode,                       /* R0           QI, QF, HF      No.  */
  HFmode,                       /* R1           QI, QF, HF      No.  */
  HFmode,                       /* R2           QI, QF, HF      No.  */
  HFmode,                       /* R3           QI, QF, HF      No.  */
  QFmode,                       /* R4           QI, QF, HF      QI.  */
  QFmode,                       /* R5           QI, QF, HF      QI.  */
  QImode,                       /* R6           QI, QF, HF      QF.  */
  QImode,                       /* R7           QI, QF, HF      QF.  */
  QImode,                       /* AR0          QI              No.  */
  QImode,                       /* AR1          QI              No.  */
  QImode,                       /* AR2          QI              No.  */
  QImode,                       /* AR3          QI              QI.  */
  QImode,                       /* AR4          QI              QI.  */
  QImode,                       /* AR5          QI              QI.  */
  QImode,                       /* AR6          QI              QI.  */
  QImode,                       /* AR7          QI              QI.  */
  VOIDmode,                     /* DP           QI              No.  */
  QImode,                       /* IR0          QI              No.  */
  QImode,                       /* IR1          QI              No.  */
  QImode,                       /* BK           QI              QI.  */
  VOIDmode,                     /* SP           QI              No.  */
  VOIDmode,                     /* ST           CC              No.  */
  VOIDmode,                     /* DIE/IE                       No.  */
  VOIDmode,                     /* IIE/IF                       No.  */
  VOIDmode,                     /* IIF/IOF                      No.  */
  QImode,                       /* RS           QI              No.  */
  QImode,                       /* RE           QI              No.  */
  VOIDmode,                     /* RC           QI              No.  */
  QFmode,                       /* R8           QI, QF, HF      QI.  */
  HFmode,                       /* R9           QI, QF, HF      No.  */
  HFmode,                       /* R10          QI, QF, HF      No.  */
  HFmode,                       /* R11          QI, QF, HF      No.  */
};


/* Test and compare insns in c4x.md store the information needed to
   generate branch and scc insns here.  */

rtx c4x_compare_op0;
rtx c4x_compare_op1;

int c4x_cpu_version = 40;       /* CPU version C30/31/32/33/40/44.  */

/* Pragma definitions.  */

tree code_tree = NULL_TREE;
tree data_tree = NULL_TREE;
tree pure_tree = NULL_TREE;
tree noreturn_tree = NULL_TREE;
tree interrupt_tree = NULL_TREE;
tree naked_tree = NULL_TREE;

/* Forward declarations */
static bool c4x_handle_option (size_t, const char *, int);
static int c4x_isr_reg_used_p (unsigned int);
static int c4x_leaf_function_p (void);
static int c4x_naked_function_p (void);
static int c4x_immed_int_constant (rtx);
static int c4x_immed_float_constant (rtx);
static int c4x_R_indirect (rtx);
static void c4x_S_address_parse (rtx , int *, int *, int *, int *);
static int c4x_valid_operands (enum rtx_code, rtx *, enum machine_mode, int);
static int c4x_arn_reg_operand (rtx, enum machine_mode, unsigned int);
static int c4x_arn_mem_operand (rtx, enum machine_mode, unsigned int);
static void c4x_file_start (void);
static void c4x_file_end (void);
static void c4x_check_attribute (const char *, tree, tree, tree *);
static int c4x_r11_set_p (rtx);
static int c4x_rptb_valid_p (rtx, rtx);
static void c4x_reorg (void);
static int c4x_label_ref_used_p (rtx, rtx);
static tree c4x_handle_fntype_attribute (tree *, tree, tree, int, bool *);
const struct attribute_spec c4x_attribute_table[];
static void c4x_insert_attributes (tree, tree *);
static void c4x_asm_named_section (const char *, unsigned int, tree);
static int c4x_adjust_cost (rtx, rtx, rtx, int);
static void c4x_globalize_label (FILE *, const char *);
static bool c4x_rtx_costs (rtx, int, int, int *);
static int c4x_address_cost (rtx);
static void c4x_init_libfuncs (void);
static void c4x_external_libcall (rtx);
static rtx c4x_struct_value_rtx (tree, int);
static tree c4x_gimplify_va_arg_expr (tree, tree, tree *, tree *);

/* Initialize the GCC target structure.  */
#undef TARGET_ASM_BYTE_OP
#define TARGET_ASM_BYTE_OP "\t.word\t"
#undef TARGET_ASM_ALIGNED_HI_OP
#define TARGET_ASM_ALIGNED_HI_OP NULL
#undef TARGET_ASM_ALIGNED_SI_OP
#define TARGET_ASM_ALIGNED_SI_OP NULL
#undef TARGET_ASM_FILE_START
#define TARGET_ASM_FILE_START c4x_file_start
#undef TARGET_ASM_FILE_START_FILE_DIRECTIVE
#define TARGET_ASM_FILE_START_FILE_DIRECTIVE true
#undef TARGET_ASM_FILE_END
#define TARGET_ASM_FILE_END c4x_file_end

#undef TARGET_ASM_EXTERNAL_LIBCALL
#define TARGET_ASM_EXTERNAL_LIBCALL c4x_external_libcall

/* Play safe, not the fastest code.  */
#undef TARGET_DEFAULT_TARGET_FLAGS
#define TARGET_DEFAULT_TARGET_FLAGS (MASK_ALIASES | MASK_PARALLEL \
                                     | MASK_PARALLEL_MPY | MASK_RPTB)
#undef TARGET_HANDLE_OPTION
#define TARGET_HANDLE_OPTION c4x_handle_option

#undef TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE c4x_attribute_table

#undef TARGET_INSERT_ATTRIBUTES
#define TARGET_INSERT_ATTRIBUTES c4x_insert_attributes

#undef TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS c4x_init_builtins

#undef TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN c4x_expand_builtin

#undef TARGET_SCHED_ADJUST_COST
#define TARGET_SCHED_ADJUST_COST c4x_adjust_cost

#undef TARGET_ASM_GLOBALIZE_LABEL
#define TARGET_ASM_GLOBALIZE_LABEL c4x_globalize_label

#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS c4x_rtx_costs
#undef TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST c4x_address_cost

#undef TARGET_MACHINE_DEPENDENT_REORG
#define TARGET_MACHINE_DEPENDENT_REORG c4x_reorg

#undef TARGET_INIT_LIBFUNCS
#define TARGET_INIT_LIBFUNCS c4x_init_libfuncs

#undef TARGET_STRUCT_VALUE_RTX
#define TARGET_STRUCT_VALUE_RTX c4x_struct_value_rtx

#undef TARGET_GIMPLIFY_VA_ARG_EXPR
#define TARGET_GIMPLIFY_VA_ARG_EXPR c4x_gimplify_va_arg_expr

struct gcc_target targetm = TARGET_INITIALIZER;

/* Implement TARGET_HANDLE_OPTION.  */

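/* Usage examples (illustrative, not from the original source): -m40 sets
   c4x_cpu_version to 40, as does -mcpu=40 or -mcpu=C40 via the OPT_mcpu_
   case; an unrecognized -mcpu= value makes the handler return false.  */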
static bool
c4x_handle_option (size_t code, const char *arg, int value)
{
  switch (code)
    {
    case OPT_m30: c4x_cpu_version = 30; return true;
    case OPT_m31: c4x_cpu_version = 31; return true;
    case OPT_m32: c4x_cpu_version = 32; return true;
    case OPT_m33: c4x_cpu_version = 33; return true;
    case OPT_m40: c4x_cpu_version = 40; return true;
    case OPT_m44: c4x_cpu_version = 44; return true;

    case OPT_mcpu_:
      if (arg[0] == 'c' || arg[0] == 'C')
        arg++;
      value = atoi (arg);
      switch (value)
        {
        case 30: case 31: case 32: case 33: case 40: case 44:
          c4x_cpu_version = value;
          return true;
        }
      return false;

    default:
      return true;
    }
}

/* Override command line options.
   Called once after all options have been parsed.
   Mostly we process the processor
   type and sometimes adjust other TARGET_ options.  */

void
c4x_override_options (void)
{
  /* Convert foo / 8.0 into foo * 0.125, etc.  */
  set_fast_math_flags (1);

  /* We should phase out the following at some stage.
     This provides compatibility with the old -mno-aliases option.  */
  if (! TARGET_ALIASES && ! flag_argument_noalias)
    flag_argument_noalias = 1;

  if (!TARGET_C3X)
    target_flags |= MASK_MPYI | MASK_DB;

  if (optimize < 2)
    target_flags &= ~(MASK_RPTB | MASK_PARALLEL);

  if (!TARGET_PARALLEL)
    target_flags &= ~MASK_PARALLEL_MPY;
}


/* This is called before c4x_override_options.  */

void
c4x_optimization_options (int level ATTRIBUTE_UNUSED,
                          int size ATTRIBUTE_UNUSED)
{
  /* Scheduling before register allocation can screw up global
     register allocation, especially for functions that use MPY||ADD
     instructions.  The benefit we gain by scheduling before
     register allocation is probably marginal anyhow.  */
  flag_schedule_insns = 0;
}


/* Write an ASCII string.  */

#define C4X_ASCII_LIMIT 40

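/* Illustrative example (not in the original source): for the three
   characters H, i, " this routine emits, with TARGET_TI,
       .byte   "Hi",34
   (TI assemblers get quote and backslash as raw decimal values), while
   other assemblers get
       .byte   "Hi\""
   with quote and backslash escaped inside the quoted string.  */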
void
c4x_output_ascii (FILE *stream, const char *ptr, int len)
{
  char sbuf[C4X_ASCII_LIMIT + 1];
  int s, l, special, first = 1, onlys;

  if (len)
      fprintf (stream, "\t.byte\t");

  for (s = l = 0; len > 0; --len, ++ptr)
    {
      onlys = 0;

      /* Escape " and \ with a \".  */
      special = *ptr == '\"' || *ptr == '\\';

      /* If printable, add to buffer.  */
      if ((! TARGET_TI || ! special) && *ptr >= 0x20 && *ptr < 0x7f)
        {
          if (special)
            sbuf[s++] = '\\';
          sbuf[s++] = *ptr;
          if (s < C4X_ASCII_LIMIT - 1)
            continue;
          onlys = 1;
        }
      if (s)
        {
          if (first)
            first = 0;
          else
            {
              fputc (',', stream);
              l++;
            }

          sbuf[s] = 0;
          fprintf (stream, "\"%s\"", sbuf);
          l += s + 2;
          if (TARGET_TI && l >= 80 && len > 1)
            {
              fprintf (stream, "\n\t.byte\t");
              first = 1;
              l = 0;
            }

          s = 0;
        }
      if (onlys)
        continue;

      if (first)
        first = 0;
      else
        {
          fputc (',', stream);
          l++;
        }

      fprintf (stream, "%d", *ptr);
      l += 3;
      if (TARGET_TI && l >= 80 && len > 1)
        {
          fprintf (stream, "\n\t.byte\t");
          first = 1;
          l = 0;
        }
    }
  if (s)
    {
      if (! first)
        fputc (',', stream);

      sbuf[s] = 0;
      fprintf (stream, "\"%s\"", sbuf);
      s = 0;
    }
  fputc ('\n', stream);
}


int
c4x_hard_regno_mode_ok (unsigned int regno, enum machine_mode mode)
{
  switch (mode)
    {
#if Pmode != QImode
    case Pmode:                 /* Pointer (24/32 bits).  */
#endif
    case QImode:                /* Integer (32 bits).  */
      return IS_INT_REGNO (regno);

    case QFmode:                /* Float, Double (32 bits).  */
    case HFmode:                /* Long Double (40 bits).  */
      return IS_EXT_REGNO (regno);

    case CCmode:                /* Condition Codes.  */
    case CC_NOOVmode:           /* Condition Codes.  */
      return IS_ST_REGNO (regno);

    case HImode:                /* Long Long (64 bits).  */
      /* We need two registers to store long longs.  Note that
         it is much easier to constrain the first register
         to start on an even boundary.  */
      return IS_INT_REGNO (regno)
        && IS_INT_REGNO (regno + 1)
        && (regno & 1) == 0;

    default:
      return 0;                  /* We don't support these modes.  */
    }

  return 0;
}

/* Return nonzero if REGNO1 can be renamed to REGNO2.  */
int
c4x_hard_regno_rename_ok (unsigned int regno1, unsigned int regno2)
{
  /* We cannot copy call saved registers from mode QI into QF or from
     mode QF into QI.  */
  if (IS_FLOAT_CALL_SAVED_REGNO (regno1) && IS_INT_CALL_SAVED_REGNO (regno2))
    return 0;
  if (IS_INT_CALL_SAVED_REGNO (regno1) && IS_FLOAT_CALL_SAVED_REGNO (regno2))
    return 0;
  /* We cannot copy from an extended (40 bit) register to a standard
     (32 bit) register because we only set the condition codes for
     extended registers.  */
  if (IS_EXT_REGNO (regno1) && ! IS_EXT_REGNO (regno2))
    return 0;
  if (IS_EXT_REGNO (regno2) && ! IS_EXT_REGNO (regno1))
    return 0;
  return 1;
}

/* The TI C3x C compiler register argument runtime model uses 6 registers,
   AR2, R2, R3, RC, RS, RE.

   The first two floating point arguments (float, double, long double)
   that are found scanning from left to right are assigned to R2 and R3.

   The remaining integer (char, short, int, long) or pointer arguments
   are assigned to the remaining registers in the order AR2, R2, R3,
   RC, RS, RE when scanning left to right, except for the last named
   argument prior to an ellipsis denoting variable number of
   arguments.  We don't have to worry about the latter condition since
   function.c treats the last named argument as anonymous (unnamed).

   All arguments that cannot be passed in registers are pushed onto
   the stack in reverse order (right to left).  GCC handles that for us.

   c4x_init_cumulative_args() is called at the start, so we can parse
   the args to see how many floating point arguments and how many
   integer (or pointer) arguments there are.  c4x_function_arg() is
   then called (sometimes repeatedly) for each argument (parsed left
   to right) to obtain the register to pass the argument in, or zero
   if the argument is to be passed on the stack.  Once the compiler is
   happy, c4x_function_arg_advance() is called.

   Don't use R0 to pass arguments in, since we use 0 to indicate a stack
   argument.  */

static const int c4x_int_reglist[3][6] =
{
  {AR2_REGNO, R2_REGNO, R3_REGNO, RC_REGNO, RS_REGNO, RE_REGNO},
  {AR2_REGNO, R3_REGNO, RC_REGNO, RS_REGNO, RE_REGNO, 0},
  {AR2_REGNO, RC_REGNO, RS_REGNO, RE_REGNO, 0, 0}
};

static const int c4x_fp_reglist[2] = {R2_REGNO, R3_REGNO};
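
/* Worked example (illustrative, not from the original source): for
       void f (int a, float b, int c, double d, int e)
   the floats are assigned b -> R2 and d -> R3, so maxfloats is 2 and row
   c4x_int_reglist[2] applies, giving a -> AR2, c -> RC, e -> RS.  */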


/* Initialize a variable CUM of type CUMULATIVE_ARGS for a call to a
   function whose data type is FNTYPE.
   For a library call, FNTYPE is 0.  */

void
c4x_init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype, rtx libname)
{
  tree param, next_param;

  cum->floats = cum->ints = 0;
  cum->init = 0;
  cum->var = 0;
  cum->args = 0;

  if (TARGET_DEBUG)
    {
      fprintf (stderr, "\nc4x_init_cumulative_args (");
      if (fntype)
        {
          tree ret_type = TREE_TYPE (fntype);

          fprintf (stderr, "fntype code = %s, ret code = %s",
                   tree_code_name[(int) TREE_CODE (fntype)],
                   tree_code_name[(int) TREE_CODE (ret_type)]);
        }
      else
        fprintf (stderr, "no fntype");

      if (libname)
        fprintf (stderr, ", libname = %s", XSTR (libname, 0));
    }

  cum->prototype = (fntype && TYPE_ARG_TYPES (fntype));

  for (param = fntype ? TYPE_ARG_TYPES (fntype) : 0;
       param; param = next_param)
    {
      tree type;

      next_param = TREE_CHAIN (param);

      type = TREE_VALUE (param);
      if (type && type != void_type_node)
        {
          enum machine_mode mode;

          /* If the last arg doesn't have void type then we have
             variable arguments.  */
          if (! next_param)
            cum->var = 1;

          if ((mode = TYPE_MODE (type)))
            {
              if (! targetm.calls.must_pass_in_stack (mode, type))
                {
                  /* Look for float, double, or long double argument.  */
                  if (mode == QFmode || mode == HFmode)
                    cum->floats++;
                  /* Look for integer, enumeral, boolean, char, or pointer
                     argument.  */
                  else if (mode == QImode || mode == Pmode)
                    cum->ints++;
                }
            }
          cum->args++;
        }
    }

  if (TARGET_DEBUG)
    fprintf (stderr, "%s%s, args = %d)\n",
             cum->prototype ? ", prototype" : "",
             cum->var ? ", variable args" : "",
             cum->args);
}


/* Update the data in CUM to advance over an argument
   of mode MODE and data type TYPE.
   (TYPE is null for libcalls where that information may not be available.)  */

void
c4x_function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode,
                          tree type, int named)
{
  if (TARGET_DEBUG)
    fprintf (stderr, "c4x_function_adv(mode=%s, named=%d)\n\n",
             GET_MODE_NAME (mode), named);
  if (! TARGET_MEMPARM
      && named
      && type
      && ! targetm.calls.must_pass_in_stack (mode, type))
    {
      /* Look for float, double, or long double argument.  */
      if (mode == QFmode || mode == HFmode)
        cum->floats++;
      /* Look for integer, enumeral, boolean, char, or pointer argument.  */
      else if (mode == QImode || mode == Pmode)
        cum->ints++;
    }
  else if (! TARGET_MEMPARM && ! type)
    {
      /* Handle libcall arguments.  */
      if (mode == QFmode || mode == HFmode)
        cum->floats++;
      else if (mode == QImode || mode == Pmode)
        cum->ints++;
    }
  return;
}


/* Define where to put the arguments to a function.  Value is zero to
   push the argument on the stack, or a hard register in which to
   store the argument.

   MODE is the argument's machine mode.
   TYPE is the data type of the argument (as a tree).
   This is null for libcalls where that information may
   not be available.
   CUM is a variable of type CUMULATIVE_ARGS which gives info about
   the preceding args and about the function being called.
   NAMED is nonzero if this argument is a named parameter
   (otherwise it is an extra parameter matching an ellipsis).  */

struct rtx_def *
c4x_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
                  tree type, int named)
{
  int reg = 0;                   /* Default to passing argument on stack.  */

  if (! cum->init)
    {
      /* We can handle at most 2 floats in R2, R3.  */
      cum->maxfloats = (cum->floats > 2) ? 2 : cum->floats;

      /* We can handle at most 6 integers minus number of floats passed
         in registers.  */
      cum->maxints = (cum->ints > 6 - cum->maxfloats) ?
        6 - cum->maxfloats : cum->ints;

      /* If there is no prototype, assume all the arguments are integers.  */
      if (! cum->prototype)
        cum->maxints = 6;

      cum->ints = cum->floats = 0;
      cum->init = 1;
    }

  /* This marks the last argument.  We don't need to pass this through
     to the call insn.  */
  if (type == void_type_node)
    return 0;

  if (! TARGET_MEMPARM
      && named
      && type
      && ! targetm.calls.must_pass_in_stack (mode, type))
    {
      /* Look for float, double, or long double argument.  */
      if (mode == QFmode || mode == HFmode)
        {
          if (cum->floats < cum->maxfloats)
            reg = c4x_fp_reglist[cum->floats];
        }
      /* Look for integer, enumeral, boolean, char, or pointer argument.  */
      else if (mode == QImode || mode == Pmode)
        {
          if (cum->ints < cum->maxints)
            reg = c4x_int_reglist[cum->maxfloats][cum->ints];
        }
    }
  else if (! TARGET_MEMPARM && ! type)
    {
      /* We could use a different argument calling model for libcalls,
         since we're only calling functions in libgcc.  Thus we could
         pass arguments for long longs in registers rather than on the
         stack.  In the meantime, use the odd TI format.  We make the
         assumption that we won't have more than two floating point
         args, six integer args, and that all the arguments are of the
         same mode.  */
      if (mode == QFmode || mode == HFmode)
        reg = c4x_fp_reglist[cum->floats];
      else if (mode == QImode || mode == Pmode)
        reg = c4x_int_reglist[0][cum->ints];
    }

  if (TARGET_DEBUG)
    {
      fprintf (stderr, "c4x_function_arg(mode=%s, named=%d",
               GET_MODE_NAME (mode), named);
      if (reg)
        fprintf (stderr, ", reg=%s", reg_names[reg]);
      else
        fprintf (stderr, ", stack");
      fprintf (stderr, ")\n");
    }
  if (reg)
    return gen_rtx_REG (mode, reg);
  else
    return NULL_RTX;
}

/* C[34]x arguments grow in weird ways (downwards) that the standard
   varargs stuff can't handle.  */

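/* A sketch of the tree built below (assuming TYPE is not passed by
   reference): the result is equivalent to
       *(TYPE *) (AP -= int_size_in_bytes (TYPE))
   i.e. the argument pointer is pre-decremented and the argument is then
   read from the new address, matching the downward growth noted above.  */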
static tree
c4x_gimplify_va_arg_expr (tree valist, tree type,
                          tree *pre_p ATTRIBUTE_UNUSED,
                          tree *post_p ATTRIBUTE_UNUSED)
{
  tree t;
  bool indirect;

  indirect = pass_by_reference (NULL, TYPE_MODE (type), type, false);
  if (indirect)
    type = build_pointer_type (type);

  t = build2 (PREDECREMENT_EXPR, TREE_TYPE (valist), valist,
              build_int_cst (NULL_TREE, int_size_in_bytes (type)));
  t = fold_convert (build_pointer_type (type), t);
  t = build_va_arg_indirect_ref (t);

  if (indirect)
    t = build_va_arg_indirect_ref (t);

  return t;
}


static int
c4x_isr_reg_used_p (unsigned int regno)
{
  /* Don't save/restore FP or ST, we handle them separately.  */
  if (regno == FRAME_POINTER_REGNUM
      || IS_ST_REGNO (regno))
    return 0;

  /* We could be a little smarter about saving/restoring DP.
     We'll only save it for the big memory model or if
     we're paranoid. ;-)  */
  if (IS_DP_REGNO (regno))
    return ! TARGET_SMALL || TARGET_PARANOID;

  /* In a leaf function, only save/restore the regs that are used.  */
  if (c4x_leaf_function)
    return regs_ever_live[regno] && fixed_regs[regno] == 0;

  /* Only save/restore regs that are used by the ISR and regs
     that are likely to be used by functions the ISR calls
     if they are not fixed.  */
  return IS_EXT_REGNO (regno)
    || ((regs_ever_live[regno] || call_used_regs[regno])
        && fixed_regs[regno] == 0);
}


static int
c4x_leaf_function_p (void)
{
  /* A leaf function makes no calls, so we only need
     to save/restore the registers we actually use.
     For the global variable leaf_function to be set, we need
     to define LEAF_REGISTERS and all that it entails.
     Let's check ourselves....  */

  if (lookup_attribute ("leaf_pretend",
                        TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl))))
    return 1;

  /* Use the leaf_pretend attribute at your own risk.  This is a hack
     to speed up ISRs that call a function infrequently where the
     overhead of saving and restoring the additional registers is not
     warranted.  You must save and restore the additional registers
     required by the called function.  Caveat emptor.  Here's enough
     rope...  */

  if (leaf_function_p ())
    return 1;

  return 0;
}


static int
c4x_naked_function_p (void)
{
  tree type;

  type = TREE_TYPE (current_function_decl);
  return lookup_attribute ("naked", TYPE_ATTRIBUTES (type)) != NULL;
}


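/* Note (illustrative): besides the interrupt attribute, the TI naming
   convention "c_int" followed by two digits (e.g. c_int01) also marks a
   function as an interrupt handler in the check below.  */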
int
c4x_interrupt_function_p (void)
{
  const char *cfun_name;
  if (lookup_attribute ("interrupt",
                        TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl))))
    return 1;

  /* Look for TI style c_intnn.  */
  cfun_name = current_function_name ();
  return cfun_name[0] == 'c'
    && cfun_name[1] == '_'
    && cfun_name[2] == 'i'
    && cfun_name[3] == 'n'
    && cfun_name[4] == 't'
    && ISDIGIT (cfun_name[5])
    && ISDIGIT (cfun_name[6]);
}

void
c4x_expand_prologue (void)
{
  unsigned int regno;
  int size = get_frame_size ();
  rtx insn;

  /* In functions where ar3 is not used but a frame pointer is still
     specified, the frame pointer is not adjusted (at -O2 or above) and
     this flag is used so we don't needlessly push the frame pointer.  */
  int dont_push_ar3;

  /* For a __naked__ function, don't build a prologue.  */
  if (c4x_naked_function_p ())
    {
      return;
    }

  /* For an __interrupt__ function, build a specific prologue.  */
  if (c4x_interrupt_function_p ())
    {
      c4x_leaf_function = c4x_leaf_function_p ();

      insn = emit_insn (gen_push_st ());
      RTX_FRAME_RELATED_P (insn) = 1;
      if (size)
        {
          insn = emit_insn (gen_pushqi ( gen_rtx_REG (QImode, AR3_REGNO)));
          RTX_FRAME_RELATED_P (insn) = 1;
          insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, AR3_REGNO),
                                       gen_rtx_REG (QImode, SP_REGNO)));
          RTX_FRAME_RELATED_P (insn) = 1;
          /* We require that an ISR uses fewer than 32768 words of
             local variables, otherwise we have to go to lots of
             effort to save a register, load it with the desired size,
             adjust the stack pointer, and then restore the modified
             register.  Frankly, I think it is a poor ISR that
             requires more than 32767 words of local temporary
             storage!  */
          if (size > 32767)
            error ("ISR %s requires %d words of local vars, max is 32767",
                   current_function_name (), size);

          insn = emit_insn (gen_addqi3 (gen_rtx_REG (QImode, SP_REGNO),
                                        gen_rtx_REG (QImode, SP_REGNO),
                                        GEN_INT (size)));
          RTX_FRAME_RELATED_P (insn) = 1;
        }
      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
        {
          if (c4x_isr_reg_used_p (regno))
            {
              if (regno == DP_REGNO)
                {
                  insn = emit_insn (gen_push_dp ());
                  RTX_FRAME_RELATED_P (insn) = 1;
                }
              else
                {
                  insn = emit_insn (gen_pushqi (gen_rtx_REG (QImode, regno)));
                  RTX_FRAME_RELATED_P (insn) = 1;
                  if (IS_EXT_REGNO (regno))
                    {
                      insn = emit_insn (gen_pushqf
                                        (gen_rtx_REG (QFmode, regno)));
                      RTX_FRAME_RELATED_P (insn) = 1;
                    }
                }
            }
        }
      /* We need to clear the repeat mode flag if the ISR is
         going to use a RPTB instruction or uses the RC, RS, or RE
         registers.  */
      if (regs_ever_live[RC_REGNO]
          || regs_ever_live[RS_REGNO]
          || regs_ever_live[RE_REGNO])
        {
          insn = emit_insn (gen_andn_st (GEN_INT(~0x100)));
          RTX_FRAME_RELATED_P (insn) = 1;
        }

      /* Reload DP reg if we are paranoid about some turkey
         violating small memory model rules.  */
      if (TARGET_SMALL && TARGET_PARANOID)
        {
          insn = emit_insn (gen_set_ldp_prologue
                            (gen_rtx_REG (QImode, DP_REGNO),
                             gen_rtx_SYMBOL_REF (QImode, "data_sec")));
          RTX_FRAME_RELATED_P (insn) = 1;
        }
    }
  else
    {
      if (frame_pointer_needed)
        {
          if ((size != 0)
              || (current_function_args_size != 0)
              || (optimize < 2))
            {
              insn = emit_insn (gen_pushqi ( gen_rtx_REG (QImode, AR3_REGNO)));
              RTX_FRAME_RELATED_P (insn) = 1;
              insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, AR3_REGNO),
                                           gen_rtx_REG (QImode, SP_REGNO)));
              RTX_FRAME_RELATED_P (insn) = 1;
              dont_push_ar3 = 1;
            }
          else
            {
              /* Since ar3 is not used, we don't need to push it.  */
              dont_push_ar3 = 1;
            }
        }
      else
        {
          /* If we use ar3, we need to push it.  */
          dont_push_ar3 = 0;
          if ((size != 0) || (current_function_args_size != 0))
            {
              /* If we are omitting the frame pointer, we still have
                 to make space for it so the offsets are correct
                 unless we don't use anything on the stack at all.  */
              size += 1;
            }
        }

      if (size > 32767)
        {
          /* Local vars are too big; it will take multiple operations
             to increment SP.  */
          if (TARGET_C3X)
            {
              insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, R1_REGNO),
                                           GEN_INT(size >> 16)));
              RTX_FRAME_RELATED_P (insn) = 1;
              insn = emit_insn (gen_lshrqi3 (gen_rtx_REG (QImode, R1_REGNO),
                                             gen_rtx_REG (QImode, R1_REGNO),
                                             GEN_INT(-16)));
              RTX_FRAME_RELATED_P (insn) = 1;
            }
          else
            {
              insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, R1_REGNO),
                                           GEN_INT(size & ~0xffff)));
              RTX_FRAME_RELATED_P (insn) = 1;
            }
          insn = emit_insn (gen_iorqi3 (gen_rtx_REG (QImode, R1_REGNO),
                                        gen_rtx_REG (QImode, R1_REGNO),
                                        GEN_INT(size & 0xffff)));
          RTX_FRAME_RELATED_P (insn) = 1;
          insn = emit_insn (gen_addqi3 (gen_rtx_REG (QImode, SP_REGNO),
                                        gen_rtx_REG (QImode, SP_REGNO),
                                        gen_rtx_REG (QImode, R1_REGNO)));
          RTX_FRAME_RELATED_P (insn) = 1;
        }
      else if (size != 0)
        {
          /* Local vars take up less than 32768 words, so we can directly
             add the number.  */
          insn = emit_insn (gen_addqi3 (gen_rtx_REG (QImode, SP_REGNO),
                                        gen_rtx_REG (QImode, SP_REGNO),
                                        GEN_INT (size)));
          RTX_FRAME_RELATED_P (insn) = 1;
        }

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
        {
          if (regs_ever_live[regno] && ! call_used_regs[regno])
            {
              if (IS_FLOAT_CALL_SAVED_REGNO (regno))
                {
                  if (TARGET_PRESERVE_FLOAT)
                    {
                      insn = emit_insn (gen_pushqi
                                        (gen_rtx_REG (QImode, regno)));
                      RTX_FRAME_RELATED_P (insn) = 1;
                    }
                  insn = emit_insn (gen_pushqf (gen_rtx_REG (QFmode, regno)));
                  RTX_FRAME_RELATED_P (insn) = 1;
                }
              else if ((! dont_push_ar3) || (regno != AR3_REGNO))
                {
                  insn = emit_insn (gen_pushqi ( gen_rtx_REG (QImode, regno)));
                  RTX_FRAME_RELATED_P (insn) = 1;
                }
            }
        }
    }
}


void
c4x_expand_epilogue (void)
{
  int regno;
  int jump = 0;
  int dont_pop_ar3;
  rtx insn;
  int size = get_frame_size ();

  /* For a __naked__ function, build no epilogue.  */
  if (c4x_naked_function_p ())
    {
      insn = emit_jump_insn (gen_return_from_epilogue ());
      RTX_FRAME_RELATED_P (insn) = 1;
      return;
    }

  /* For an __interrupt__ function, build a specific epilogue.  */
  if (c4x_interrupt_function_p ())
    {
      for (regno = FIRST_PSEUDO_REGISTER - 1; regno >= 0; --regno)
        {
          if (! c4x_isr_reg_used_p (regno))
            continue;
          if (regno == DP_REGNO)
            {
              insn = emit_insn (gen_pop_dp ());
              RTX_FRAME_RELATED_P (insn) = 1;
            }
          else
            {
              /* We have to use unspec because the compiler will delete insns
                 that are not call-saved.  */
              if (IS_EXT_REGNO (regno))
                {
                  insn = emit_insn (gen_popqf_unspec
                                    (gen_rtx_REG (QFmode, regno)));
                  RTX_FRAME_RELATED_P (insn) = 1;
                }
              insn = emit_insn (gen_popqi_unspec (gen_rtx_REG (QImode, regno)));
              RTX_FRAME_RELATED_P (insn) = 1;
            }
        }
      if (size)
        {
          insn = emit_insn (gen_subqi3 (gen_rtx_REG (QImode, SP_REGNO),
                                        gen_rtx_REG (QImode, SP_REGNO),
                                        GEN_INT(size)));
          RTX_FRAME_RELATED_P (insn) = 1;
          insn = emit_insn (gen_popqi
                            (gen_rtx_REG (QImode, AR3_REGNO)));
          RTX_FRAME_RELATED_P (insn) = 1;
        }
      insn = emit_insn (gen_pop_st ());
      RTX_FRAME_RELATED_P (insn) = 1;
      insn = emit_jump_insn (gen_return_from_interrupt_epilogue ());
      RTX_FRAME_RELATED_P (insn) = 1;
    }
  else
    {
      if (frame_pointer_needed)
        {
          if ((size != 0)
              || (current_function_args_size != 0)
              || (optimize < 2))
            {
              insn = emit_insn
                (gen_movqi (gen_rtx_REG (QImode, R2_REGNO),
                            gen_rtx_MEM (QImode,
                                         gen_rtx_PLUS
                                         (QImode, gen_rtx_REG (QImode,
                                                               AR3_REGNO),
                                          constm1_rtx))));
              RTX_FRAME_RELATED_P (insn) = 1;

              /* We already have the return value and the fp,
                 so we need to add those to the stack.  */
              size += 2;
              jump = 1;
              dont_pop_ar3 = 1;
            }
          else
            {
              /* Since ar3 is not used for anything, we don't need to
                 pop it.  */
              dont_pop_ar3 = 1;
            }
        }
      else
        {
          dont_pop_ar3 = 0;      /* If we use ar3, we need to pop it.  */
          if (size || current_function_args_size)
            {
              /* If we are omitting the frame pointer, we still have
                 to make space for it so the offsets are correct
                 unless we don't use anything on the stack at all.  */
              size += 1;
            }
        }

      /* Now restore the saved registers, putting in the delayed branch
         where required.  */
      for (regno = FIRST_PSEUDO_REGISTER - 1; regno >= 0; regno--)
        {
          if (regs_ever_live[regno] && ! call_used_regs[regno])
            {
              if (regno == AR3_REGNO && dont_pop_ar3)
                continue;

              if (IS_FLOAT_CALL_SAVED_REGNO (regno))
                {
                  insn = emit_insn (gen_popqf_unspec
                                    (gen_rtx_REG (QFmode, regno)));
                  RTX_FRAME_RELATED_P (insn) = 1;
                  if (TARGET_PRESERVE_FLOAT)
                    {
                      insn = emit_insn (gen_popqi_unspec
                                        (gen_rtx_REG (QImode, regno)));
                      RTX_FRAME_RELATED_P (insn) = 1;
                    }
                }
              else
                {
                  insn = emit_insn (gen_popqi (gen_rtx_REG (QImode, regno)));
                  RTX_FRAME_RELATED_P (insn) = 1;
                }
            }
        }

      if (frame_pointer_needed)
        {
          if ((size != 0)
              || (current_function_args_size != 0)
              || (optimize < 2))
            {
              /* Restore the old FP.  */
              insn = emit_insn
                (gen_movqi
                 (gen_rtx_REG (QImode, AR3_REGNO),
                  gen_rtx_MEM (QImode, gen_rtx_REG (QImode, AR3_REGNO))));

              RTX_FRAME_RELATED_P (insn) = 1;
            }
        }

      if (size > 32767)
        {
          /* Local vars are too big; it will take multiple operations
             to decrement SP.  */
          if (TARGET_C3X)
            {
              insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, R3_REGNO),
                                           GEN_INT(size >> 16)));
              RTX_FRAME_RELATED_P (insn) = 1;
              insn = emit_insn (gen_lshrqi3 (gen_rtx_REG (QImode, R3_REGNO),
                                             gen_rtx_REG (QImode, R3_REGNO),
                                             GEN_INT(-16)));
              RTX_FRAME_RELATED_P (insn) = 1;
            }
          else
            {
              insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, R3_REGNO),
                                           GEN_INT(size & ~0xffff)));
              RTX_FRAME_RELATED_P (insn) = 1;
            }
          insn = emit_insn (gen_iorqi3 (gen_rtx_REG (QImode, R3_REGNO),
                                        gen_rtx_REG (QImode, R3_REGNO),
                                        GEN_INT(size & 0xffff)));
          RTX_FRAME_RELATED_P (insn) = 1;
          insn = emit_insn (gen_subqi3 (gen_rtx_REG (QImode, SP_REGNO),
                                        gen_rtx_REG (QImode, SP_REGNO),
                                        gen_rtx_REG (QImode, R3_REGNO)));
          RTX_FRAME_RELATED_P (insn) = 1;
        }
      else if (size != 0)
        {
          /* Local vars take up less than 32768 words, so we can directly
             subtract the number.  */
          insn = emit_insn (gen_subqi3 (gen_rtx_REG (QImode, SP_REGNO),
                                        gen_rtx_REG (QImode, SP_REGNO),
                                        GEN_INT(size)));
          RTX_FRAME_RELATED_P (insn) = 1;
        }

      if (jump)
        {
          insn = emit_jump_insn (gen_return_indirect_internal
                                 (gen_rtx_REG (QImode, R2_REGNO)));
          RTX_FRAME_RELATED_P (insn) = 1;
        }
      else
        {
          insn = emit_jump_insn (gen_return_from_epilogue ());
          RTX_FRAME_RELATED_P (insn) = 1;
        }
    }
}


int
c4x_null_epilogue_p (void)
{
  int regno;

  if (reload_completed
      && ! c4x_naked_function_p ()
      && ! c4x_interrupt_function_p ()
      && ! current_function_calls_alloca
      && ! current_function_args_size
      && ! (optimize < 2)
      && ! get_frame_size ())
    {
      for (regno = FIRST_PSEUDO_REGISTER - 1; regno >= 0; regno--)
        if (regs_ever_live[regno] && ! call_used_regs[regno]
            && (regno != AR3_REGNO))
          return 1;
      return 0;
    }
  return 1;
}


int
c4x_emit_move_sequence (rtx *operands, enum machine_mode mode)
{
  rtx op0 = operands[0];
  rtx op1 = operands[1];

  if (! reload_in_progress
      && ! REG_P (op0)
      && ! REG_P (op1)
      && ! (stik_const_operand (op1, mode) && ! push_operand (op0, mode)))
    op1 = force_reg (mode, op1);

  if (GET_CODE (op1) == LO_SUM
      && GET_MODE (op1) == Pmode
      && dp_reg_operand (XEXP (op1, 0), mode))
    {
      /* expand_increment will sometimes create a LO_SUM immediate
         address.  Undo this silliness.  */
      op1 = XEXP (op1, 1);
    }

  if (symbolic_address_operand (op1, mode))
    {
      if (TARGET_LOAD_ADDRESS)
        {
          /* Alias analysis seems to do a better job if we force
             constant addresses to memory after reload.  */
          emit_insn (gen_load_immed_address (op0, op1));
          return 1;
        }
      else
        {
          /* Stick symbol or label address into the constant pool.  */
          op1 = force_const_mem (Pmode, op1);
        }
    }
  else if (mode == HFmode && CONSTANT_P (op1) && ! LEGITIMATE_CONSTANT_P (op1))
    {
      /* We could be a lot smarter about loading some of these
         constants...  */
      op1 = force_const_mem (mode, op1);
    }

  /* Convert (MEM (SYMREF)) to a (MEM (LO_SUM (REG) (SYMREF)))
     and emit associated (HIGH (SYMREF)) if large memory model.
     c4x_legitimize_address could be used to do this,
     perhaps by calling validize_address.  */
  if (TARGET_EXPOSE_LDP
      && ! (reload_in_progress || reload_completed)
      && GET_CODE (op1) == MEM
      && symbolic_address_operand (XEXP (op1, 0), Pmode))
    {
      rtx dp_reg = gen_rtx_REG (Pmode, DP_REGNO);
      if (! TARGET_SMALL)
        emit_insn (gen_set_ldp (dp_reg, XEXP (op1, 0)));
      op1 = change_address (op1, mode,
                            gen_rtx_LO_SUM (Pmode, dp_reg, XEXP (op1, 0)));
    }

  if (TARGET_EXPOSE_LDP
      && ! (reload_in_progress || reload_completed)
      && GET_CODE (op0) == MEM
      && symbolic_address_operand (XEXP (op0, 0), Pmode))
    {
      rtx dp_reg = gen_rtx_REG (Pmode, DP_REGNO);
      if (! TARGET_SMALL)
        emit_insn (gen_set_ldp (dp_reg, XEXP (op0, 0)));
      op0 = change_address (op0, mode,
                            gen_rtx_LO_SUM (Pmode, dp_reg, XEXP (op0, 0)));
    }

  if (GET_CODE (op0) == SUBREG
      && mixed_subreg_operand (op0, mode))
    {
      /* We should only generate these mixed mode patterns
         during RTL generation.  If we need to do it later on
         then we'll have to emit patterns that won't clobber CC.  */
      if (reload_in_progress || reload_completed)
        abort ();
      if (GET_MODE (SUBREG_REG (op0)) == QImode)
        op0 = SUBREG_REG (op0);
      else if (GET_MODE (SUBREG_REG (op0)) == HImode)
        {
          op0 = copy_rtx (op0);
          PUT_MODE (op0, QImode);
        }
      else
        abort ();

      if (mode == QFmode)
        emit_insn (gen_storeqf_int_clobber (op0, op1));
      else
        abort ();
      return 1;
    }

  if (GET_CODE (op1) == SUBREG
      && mixed_subreg_operand (op1, mode))
    {
      /* We should only generate these mixed mode patterns
         during RTL generation.  If we need to do it later on
         then we'll have to emit patterns that won't clobber CC.  */
      if (reload_in_progress || reload_completed)
        abort ();
      if (GET_MODE (SUBREG_REG (op1)) == QImode)
        op1 = SUBREG_REG (op1);
      else if (GET_MODE (SUBREG_REG (op1)) == HImode)
        {
          op1 = copy_rtx (op1);
          PUT_MODE (op1, QImode);
        }
      else
        abort ();

      if (mode == QFmode)
        emit_insn (gen_loadqf_int_clobber (op0, op1));
      else
        abort ();
      return 1;
    }

  if (mode == QImode
      && reg_operand (op0, mode)
      && const_int_operand (op1, mode)
      && ! IS_INT16_CONST (INTVAL (op1))
      && ! IS_HIGH_CONST (INTVAL (op1)))
    {
      emit_insn (gen_loadqi_big_constant (op0, op1));
      return 1;
    }

  if (mode == HImode
      && reg_operand (op0, mode)
      && const_int_operand (op1, mode))
    {
      emit_insn (gen_loadhi_big_constant (op0, op1));
      return 1;
    }

  /* Adjust operands in case we have modified them.  */
  operands[0] = op0;
  operands[1] = op1;

  /* Emit normal pattern.  */
  return 0;
}


void
c4x_emit_libcall (rtx libcall, enum rtx_code code,
                  enum machine_mode dmode, enum machine_mode smode,
                  int noperands, rtx *operands)
{
  rtx ret;
  rtx insns;
  rtx equiv;

  start_sequence ();
  switch (noperands)
    {
    case 2:
      ret = emit_library_call_value (libcall, NULL_RTX, 1, dmode, 1,
                                     operands[1], smode);
      equiv = gen_rtx_fmt_e (code, dmode, operands[1]);
      break;

    case 3:
      ret = emit_library_call_value (libcall, NULL_RTX, 1, dmode, 2,
                                     operands[1], smode, operands[2], smode);
      equiv = gen_rtx_fmt_ee (code, dmode, operands[1], operands[2]);
      break;

    default:
      abort ();
    }

  insns = get_insns ();
  end_sequence ();
  emit_libcall_block (insns, operands[0], ret, equiv);
}


void
c4x_emit_libcall3 (rtx libcall, enum rtx_code code,
                   enum machine_mode mode, rtx *operands)
{
  c4x_emit_libcall (libcall, code, mode, mode, 3, operands);
}


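/* A reading aid (inferred from the equiv RTL below): the mulhi libcall
   yields the high 32 bits of the 64-bit widened product of operands 1
   and 2; CODE is presumably SIGN_EXTEND or ZERO_EXTEND.  */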
void
c4x_emit_libcall_mulhi (rtx libcall, enum rtx_code code,
                        enum machine_mode mode, rtx *operands)
{
  rtx ret;
  rtx insns;
  rtx equiv;

  start_sequence ();
  ret = emit_library_call_value (libcall, NULL_RTX, 1, mode, 2,
                                 operands[1], mode, operands[2], mode);
  equiv = gen_rtx_TRUNCATE (mode,
                   gen_rtx_LSHIFTRT (HImode,
                            gen_rtx_MULT (HImode,
                                     gen_rtx_fmt_e (code, HImode, operands[1]),
                                     gen_rtx_fmt_e (code, HImode, operands[2])),
                                     GEN_INT (32)));
  insns = get_insns ();
  end_sequence ();
  emit_libcall_block (insns, operands[0], ret, equiv);
}


int
c4x_legitimate_address_p (enum machine_mode mode, rtx addr, int strict)
{
  rtx base = NULL_RTX;          /* Base register (AR0-AR7).  */
  rtx indx = NULL_RTX;          /* Index register (IR0,IR1).  */
  rtx disp = NULL_RTX;          /* Displacement.  */
  enum rtx_code code;

  code = GET_CODE (addr);
  switch (code)
    {
      /* Register indirect with auto increment/decrement.  We don't
         allow SP here---push_operand should recognize an operand
         being pushed on the stack.  */

    case PRE_DEC:
    case PRE_INC:
    case POST_DEC:
      if (mode != QImode && mode != QFmode)
        return 0;

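      /* Fall through: the base register check below is shared with
         POST_INC.  */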
    case POST_INC:
1466
      base = XEXP (addr, 0);
1467
      if (! REG_P (base))
1468
        return 0;
1469
      break;
1470
 
1471
    case PRE_MODIFY:
1472
    case POST_MODIFY:
1473
      {
1474
        rtx op0 = XEXP (addr, 0);
1475
        rtx op1 = XEXP (addr, 1);
1476
 
1477
        if (mode != QImode && mode != QFmode)
1478
          return 0;
1479
 
1480
        if (! REG_P (op0)
1481
            || (GET_CODE (op1) != PLUS && GET_CODE (op1) != MINUS))
1482
          return 0;
1483
        base = XEXP (op1, 0);
1484
        if (! REG_P (base))
1485
            return 0;
1486
        if (REGNO (base) != REGNO (op0))
1487
          return 0;
1488
        if (REG_P (XEXP (op1, 1)))
1489
          indx = XEXP (op1, 1);
1490
        else
1491
          disp = XEXP (op1, 1);
1492
      }
1493
      break;
1494
 
1495
      /* Register indirect.  */
1496
    case REG:
1497
      base = addr;
1498
      break;
1499
 
1500
      /* Register indirect with displacement or index.  */
1501
    case PLUS:
1502
      {
1503
        rtx op0 = XEXP (addr, 0);
1504
        rtx op1 = XEXP (addr, 1);
1505
        enum rtx_code code0 = GET_CODE (op0);
1506
 
1507
        switch (code0)
1508
          {
1509
          case REG:
1510
            if (REG_P (op1))
1511
              {
1512
                base = op0;     /* Base + index.  */
1513
                indx = op1;
1514
                if (IS_INDEX_REG (base) || IS_ADDR_REG (indx))
1515
                  {
1516
                    base = op1;
1517
                    indx = op0;
1518
                  }
1519
              }
1520
            else
1521
              {
1522
                base = op0;     /* Base + displacement.  */
1523
                disp = op1;
1524
              }
1525
            break;
1526
 
1527
          default:
1528
            return 0;
1529
          }
1530
      }
1531
      break;
1532
 
1533
      /* Direct addressing with DP register.  */
1534
    case LO_SUM:
1535
      {
1536
        rtx op0 = XEXP (addr, 0);
1537
        rtx op1 = XEXP (addr, 1);
1538
 
1539
        /* HImode and HFmode direct memory references aren't truly
1540
           offsettable (consider case at end of data page).  We
1541
           probably get better code by loading a pointer and using an
1542
           indirect memory reference.  */
1543
        if (mode == HImode || mode == HFmode)
1544
          return 0;
1545
 
1546
        if (!REG_P (op0) || REGNO (op0) != DP_REGNO)
1547
          return 0;
1548
 
1549
        if (GET_CODE (op1) == SYMBOL_REF || GET_CODE (op1) == LABEL_REF)
1550
          return 1;
1551
 
1552
        if (GET_CODE (op1) == CONST)
1553
          return 1;
1554
        return 0;
1555
      }
1556
      break;
1557
 
1558
      /* Direct addressing with some work for the assembler...  */
1559
    case CONST:
1560
      /* Direct addressing.  */
1561
    case LABEL_REF:
1562
    case SYMBOL_REF:
1563
      if (! TARGET_EXPOSE_LDP && ! strict && mode != HFmode && mode != HImode)
1564
        return 1;
1565
      /* These need to be converted to a LO_SUM (...).
1566
         LEGITIMIZE_RELOAD_ADDRESS will do this during reload.  */
1567
      return 0;
1568
 
1569
      /* Do not allow direct memory access to absolute addresses.
1570
         This is more pain than it's worth, especially for the
1571
         small memory model where we can't guarantee that
1572
         this address is within the data page---we don't want
1573
         to modify the DP register in the small memory model,
1574
         even temporarily, since an interrupt can sneak in....  */
1575
    case CONST_INT:
1576
      return 0;
1577
 
1578
      /* Indirect indirect addressing.  */
1579
    case MEM:
1580
      return 0;
1581
 
1582
    case CONST_DOUBLE:
1583
      fatal_insn ("using CONST_DOUBLE for address", addr);
1584
 
1585
    default:
1586
      return 0;
1587
    }
1588
 
1589
  /* Validate the base register.  */
1590
  if (base)
1591
    {
1592
      /* Check that the address is offsettable for HImode and HFmode.  */
1593
      if (indx && (mode == HImode || mode == HFmode))
1594
        return 0;
1595
 
1596
      /* Handle DP based stuff.  */
1597
      if (REGNO (base) == DP_REGNO)
1598
        return 1;
1599
      if (strict && ! REGNO_OK_FOR_BASE_P (REGNO (base)))
1600
        return 0;
1601
      else if (! strict && ! IS_ADDR_OR_PSEUDO_REG (base))
1602
        return 0;
1603
    }
1604
 
1605
  /* Now validate the index register.  */
1606
  if (indx)
1607
    {
1608
      if (GET_CODE (indx) != REG)
1609
        return 0;
1610
      if (strict && ! REGNO_OK_FOR_INDEX_P (REGNO (indx)))
1611
        return 0;
1612
      else if (! strict && ! IS_INDEX_OR_PSEUDO_REG (indx))
1613
        return 0;
1614
    }
1615
 
1616
  /* Validate displacement.  */
1617
  if (disp)
1618
    {
1619
      if (GET_CODE (disp) != CONST_INT)
1620
        return 0;
1621
      if (mode == HImode || mode == HFmode)
1622
        {
1623
          /* The offset displacement must be legitimate.  */
1624
          if (! IS_DISP8_OFF_CONST (INTVAL (disp)))
1625
            return 0;
1626
        }
1627
      else
1628
        {
1629
          if (! IS_DISP8_CONST (INTVAL (disp)))
1630
            return 0;
1631
        }
1632
      /* Can't add an index with a disp.  */
1633
      if (indx)
1634
        return 0;
1635
    }
1636
  return 1;
1637
}
1638
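/* Illustrative examples (QImode) of what this routine accepts/rejects:

     *AR0            (reg)                     accepted
     *+AR0(5)        (plus reg const_int)      accepted; 8-bit displacement
     *+AR0(IR1)      (plus reg reg)            accepted; rejected for HImode
     *AR0++          (post_inc)                accepted
     @sym            (lo_sum DP symbol_ref)    accepted; direct addressing
     42              (const_int)               rejected; absolute address
     **AR0           (mem)                     rejected; indirect indirect  */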
 
1639
 
1640
rtx
1641
c4x_legitimize_address (rtx orig ATTRIBUTE_UNUSED,
1642
                        enum machine_mode mode ATTRIBUTE_UNUSED)
1643
{
1644
  if (GET_CODE (orig) == SYMBOL_REF
1645
      || GET_CODE (orig) == LABEL_REF)
1646
    {
1647
      if (mode == HImode || mode == HFmode)
1648
        {
1649
          /* We need to force the address into
1650
             a register so that it is offsettable.  */
1651
          rtx addr_reg = gen_reg_rtx (Pmode);
1652
          emit_move_insn (addr_reg, orig);
1653
          return addr_reg;
1654
        }
1655
      else
1656
        {
1657
          rtx dp_reg = gen_rtx_REG (Pmode, DP_REGNO);
1658
 
1659
          if (! TARGET_SMALL)
1660
            emit_insn (gen_set_ldp (dp_reg, orig));
1661
 
1662
          return gen_rtx_LO_SUM (Pmode, dp_reg, orig);
1663
        }
1664
    }
1665
 
1666
  return NULL_RTX;
1667
}
1668
 
1669
 
1670
/* Provide the costs of an addressing mode that contains ADDR.
1671
   If ADDR is not a valid address, its cost is irrelevant.
1672
   This is used in cse and loop optimization to determine
1673
   if it is worthwhile storing a common address into a register.
1674
   Unfortunately, the C4x address cost depends on other operands.  */
1675
 
1676
static int
1677
c4x_address_cost (rtx addr)
1678
{
1679
  switch (GET_CODE (addr))
1680
    {
1681
    case REG:
1682
      return 1;
1683
 
1684
    case POST_INC:
1685
    case POST_DEC:
1686
    case PRE_INC:
1687
    case PRE_DEC:
1688
      return 1;
1689
 
1690
      /* These shouldn't be directly generated.  */
1691
    case SYMBOL_REF:
1692
    case LABEL_REF:
1693
    case CONST:
1694
      return 10;
1695
 
1696
    case LO_SUM:
1697
      {
1698
        rtx op1 = XEXP (addr, 1);
1699
 
1700
        if (GET_CODE (op1) == LABEL_REF || GET_CODE (op1) == SYMBOL_REF)
1701
          return TARGET_SMALL ? 3 : 4;
1702
 
1703
        if (GET_CODE (op1) == CONST)
1704
          {
1705
            rtx offset = const0_rtx;
1706
 
1707
            op1 = eliminate_constant_term (op1, &offset);
1708
 
1709
            /* ??? These costs need rethinking...  */
1710
            if (GET_CODE (op1) == LABEL_REF)
1711
              return 3;
1712
 
1713
            if (GET_CODE (op1) != SYMBOL_REF)
1714
              return 4;
1715
 
1716
            if (INTVAL (offset) == 0)
1717
              return 3;
1718
 
1719
            return 4;
1720
          }
1721
        fatal_insn ("c4x_address_cost: Invalid addressing mode", addr);
1722
      }
1723
      break;
1724
 
1725
    case PLUS:
1726
      {
1727
        register rtx op0 = XEXP (addr, 0);
1728
        register rtx op1 = XEXP (addr, 1);
1729
 
1730
        if (GET_CODE (op0) != REG)
1731
          break;
1732
 
1733
        switch (GET_CODE (op1))
1734
          {
1735
          default:
1736
            break;
1737
 
1738
          case REG:
1739
            /* This cost for REG+REG must be greater than the cost
1740
               for REG if we want autoincrement addressing modes.  */
1741
            return 2;
1742
 
1743
          case CONST_INT:
1744
            /* The following tries to improve GIV combination
1745
               in strength reduction but appears not to help.  */
1746
            if (TARGET_DEVEL && IS_UINT5_CONST (INTVAL (op1)))
1747
              return 1;
1748
 
1749
            if (IS_DISP1_CONST (INTVAL (op1)))
1750
              return 1;
1751
 
1752
            if (! TARGET_C3X && IS_UINT5_CONST (INTVAL (op1)))
1753
              return 2;
1754
 
1755
            return 3;
1756
          }
1757
      }
1758
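      /* Fall through.  */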
    default:
1759
      break;
1760
    }
1761
 
1762
  return 4;
1763
}
1764
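/* Rough summary of the cost model above (illustrative): register
   indirect and autoincrement/decrement modes cost 1; REG+REG costs 2
   so that autoincrement remains preferable; REG+disp costs 1 to 3
   depending on how small the displacement is; DP-relative (LO_SUM)
   symbolic addresses cost 3 or 4; everything else defaults to 4.  */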
 
1765
 
1766
rtx
1767
c4x_gen_compare_reg (enum rtx_code code, rtx x, rtx y)
1768
{
1769
  enum machine_mode mode = SELECT_CC_MODE (code, x, y);
1770
  rtx cc_reg;
1771
 
1772
  if (mode == CC_NOOVmode
1773
      && (code == LE || code == GE || code == LT || code == GT))
1774
    return NULL_RTX;
1775
 
1776
  cc_reg = gen_rtx_REG (mode, ST_REGNO);
1777
  emit_insn (gen_rtx_SET (VOIDmode, cc_reg,
1778
                          gen_rtx_COMPARE (mode, x, y)));
1779
  return cc_reg;
1780
}
1781
 
1782
char *
1783
c4x_output_cbranch (const char *form, rtx seq)
1784
{
1785
  int delayed = 0;
1786
  int annultrue = 0;
1787
  int annulfalse = 0;
1788
  rtx delay;
1789
  char *cp;
1790
  static char str[100];
1791
 
1792
  if (final_sequence)
1793
    {
1794
      delay = XVECEXP (final_sequence, 0, 1);
1795
      delayed = ! INSN_ANNULLED_BRANCH_P (seq);
1796
      annultrue = INSN_ANNULLED_BRANCH_P (seq) && ! INSN_FROM_TARGET_P (delay);
1797
      annulfalse = INSN_ANNULLED_BRANCH_P (seq) && INSN_FROM_TARGET_P (delay);
1798
    }
1799
  strcpy (str, form);
1800
  cp = &str [strlen (str)];
1801
  if (delayed)
1802
    {
1803
      *cp++ = '%';
1804
      *cp++ = '#';
1805
    }
1806
  if (annultrue)
1807
    {
1808
      *cp++ = 'a';
1809
      *cp++ = 't';
1810
    }
1811
  if (annulfalse)
1812
    {
1813
      *cp++ = 'a';
1814
      *cp++ = 'f';
1815
    }
1816
  *cp++ = '\t';
1817
  *cp++ = '%';
1818
  *cp++ = 'l';
1819
  *cp++ = '1';
1820
  *cp = 0;
1821
  return str;
1822
}
1823
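/* Illustrative example: with a FORM string such as "b%I" (hypothetical),
   a filled, non-annulled delay slot yields "b%I%#\t%l1", where %# makes
   c4x_print_operand below emit "d" for a delayed branch; an annulled
   branch whose delay insn comes from the branch target would instead
   yield "b%Iaf\t%l1".  */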
 
1824
void
1825
c4x_print_operand (FILE *file, rtx op, int letter)
1826
{
1827
  rtx op1;
1828
  enum rtx_code code;
1829
 
1830
  switch (letter)
1831
    {
1832
    case '#':                   /* Delayed.  */
1833
      if (final_sequence)
1834
        fprintf (file, "d");
1835
      return;
1836
    }
1837
 
1838
  code = GET_CODE (op);
1839
  switch (letter)
1840
    {
1841
    case 'A':                   /* Direct address.  */
1842
      if (code == CONST_INT || code == SYMBOL_REF || code == CONST)
1843
        fprintf (file, "@");
1844
      break;
1845
 
1846
    case 'H':                   /* Sethi.  */
1847
      output_addr_const (file, op);
1848
      return;
1849
 
1850
    case 'I':                   /* Reversed condition.  */
1851
      code = reverse_condition (code);
1852
      break;
1853
 
1854
    case 'L':                   /* Log 2 of constant.  */
1855
      if (code != CONST_INT)
1856
        fatal_insn ("c4x_print_operand: %%L inconsistency", op);
1857
      fprintf (file, "%d", exact_log2 (INTVAL (op)));
1858
      return;
1859
 
1860
    case 'N':                   /* Ones complement of small constant.  */
1861
      if (code != CONST_INT)
1862
        fatal_insn ("c4x_print_operand: %%N inconsistency", op);
1863
      fprintf (file, HOST_WIDE_INT_PRINT_DEC, ~INTVAL (op));
1864
      return;
1865
 
1866
    case 'K':                   /* Generate ldp(k) if direct address.  */
1867
      if (! TARGET_SMALL
1868
          && code == MEM
1869
          && GET_CODE (XEXP (op, 0)) == LO_SUM
1870
          && GET_CODE (XEXP (XEXP (op, 0), 0)) == REG
1871
          && REGNO (XEXP (XEXP (op, 0), 0)) == DP_REGNO)
1872
        {
1873
          op1 = XEXP (XEXP (op, 0), 1);
1874
          if (GET_CODE(op1) == CONST_INT || GET_CODE(op1) == SYMBOL_REF)
1875
            {
1876
              fprintf (file, "\t%s\t@", TARGET_C3X ? "ldp" : "ldpk");
1877
              output_address (XEXP (adjust_address (op, VOIDmode, 1), 0));
1878
              fprintf (file, "\n");
1879
            }
1880
        }
1881
      return;
1882
 
1883
    case 'M':                   /* Generate ldp(k) if direct address.  */
1884
      if (! TARGET_SMALL        /* Only used in asm statements.  */
1885
          && code == MEM
1886
          && (GET_CODE (XEXP (op, 0)) == CONST
1887
              || GET_CODE (XEXP (op, 0)) == SYMBOL_REF))
1888
        {
1889
          fprintf (file, "%s\t@", TARGET_C3X ? "ldp" : "ldpk");
1890
          output_address (XEXP (op, 0));
1891
          fprintf (file, "\n\t");
1892
        }
1893
      return;
1894
 
1895
    case 'O':                   /* Offset address.  */
1896
      if (code == MEM && c4x_autoinc_operand (op, Pmode))
1897
        break;
1898
      else if (code == MEM)
1899
        output_address (XEXP (adjust_address (op, VOIDmode, 1), 0));
1900
      else if (code == REG)
1901
        fprintf (file, "%s", reg_names[REGNO (op) + 1]);
1902
      else
1903
        fatal_insn ("c4x_print_operand: %%O inconsistency", op);
1904
      return;
1905
 
1906
    case 'C':                   /* Call.  */
1907
      break;
1908
 
1909
    case 'U':                   /* Call/callu.  */
1910
      if (code != SYMBOL_REF)
1911
        fprintf (file, "u");
1912
      return;
1913
 
1914
    default:
1915
      break;
1916
    }
1917
 
1918
  switch (code)
1919
    {
1920
    case REG:
1921
      if (GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT
1922
          && ! TARGET_TI)
1923
        fprintf (file, "%s", float_reg_names[REGNO (op)]);
1924
      else
1925
        fprintf (file, "%s", reg_names[REGNO (op)]);
1926
      break;
1927
 
1928
    case MEM:
1929
      output_address (XEXP (op, 0));
1930
      break;
1931
 
1932
    case CONST_DOUBLE:
1933
      {
1934
        char str[64];
1935
 
1936
        real_to_decimal (str, CONST_DOUBLE_REAL_VALUE (op),
1937
                         sizeof (str), 0, 1);
1938
        fprintf (file, "%s", str);
1939
      }
1940
      break;
1941
 
1942
    case CONST_INT:
1943
      fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (op));
1944
      break;
1945
 
1946
    case NE:
1947
      fprintf (file, "ne");
1948
      break;
1949
 
1950
    case EQ:
1951
      fprintf (file, "eq");
1952
      break;
1953
 
1954
    case GE:
1955
      fprintf (file, "ge");
1956
      break;
1957
 
1958
    case GT:
1959
      fprintf (file, "gt");
1960
      break;
1961
 
1962
    case LE:
1963
      fprintf (file, "le");
1964
      break;
1965
 
1966
    case LT:
1967
      fprintf (file, "lt");
1968
      break;
1969
 
1970
    case GEU:
1971
      fprintf (file, "hs");
1972
      break;
1973
 
1974
    case GTU:
1975
      fprintf (file, "hi");
1976
      break;
1977
 
1978
    case LEU:
1979
      fprintf (file, "ls");
1980
      break;
1981
 
1982
    case LTU:
1983
      fprintf (file, "lo");
1984
      break;
1985
 
1986
    case SYMBOL_REF:
1987
      output_addr_const (file, op);
1988
      break;
1989
 
1990
    case CONST:
1991
      output_addr_const (file, XEXP (op, 0));
1992
      break;
1993
 
1994
    case CODE_LABEL:
1995
      break;
1996
 
1997
    default:
1998
      fatal_insn ("c4x_print_operand: Bad operand case", op);
1999
      break;
2000
    }
2001
}
2002
 
2003
 
2004
void
2005
c4x_print_operand_address (FILE *file, rtx addr)
2006
{
2007
  switch (GET_CODE (addr))
2008
    {
2009
    case REG:
2010
      fprintf (file, "*%s", reg_names[REGNO (addr)]);
2011
      break;
2012
 
2013
    case PRE_DEC:
2014
      fprintf (file, "*--%s", reg_names[REGNO (XEXP (addr, 0))]);
2015
      break;
2016
 
2017
    case POST_INC:
2018
      fprintf (file, "*%s++", reg_names[REGNO (XEXP (addr, 0))]);
2019
      break;
2020
 
2021
    case POST_MODIFY:
2022
      {
2023
        rtx op0 = XEXP (XEXP (addr, 1), 0);
2024
        rtx op1 = XEXP (XEXP (addr, 1), 1);
2025
 
2026
        if (GET_CODE (XEXP (addr, 1)) == PLUS && REG_P (op1))
2027
          fprintf (file, "*%s++(%s)", reg_names[REGNO (op0)],
2028
                   reg_names[REGNO (op1)]);
2029
        else if (GET_CODE (XEXP (addr, 1)) == PLUS && INTVAL (op1) > 0)
2030
          fprintf (file, "*%s++(" HOST_WIDE_INT_PRINT_DEC ")",
2031
                   reg_names[REGNO (op0)], INTVAL (op1));
2032
        else if (GET_CODE (XEXP (addr, 1)) == PLUS && INTVAL (op1) < 0)
2033
          fprintf (file, "*%s--(" HOST_WIDE_INT_PRINT_DEC ")",
2034
                   reg_names[REGNO (op0)], -INTVAL (op1));
2035
        else if (GET_CODE (XEXP (addr, 1)) == MINUS && REG_P (op1))
2036
          fprintf (file, "*%s--(%s)", reg_names[REGNO (op0)],
2037
                   reg_names[REGNO (op1)]);
2038
        else
2039
          fatal_insn ("c4x_print_operand_address: Bad post_modify", addr);
2040
      }
2041
      break;
2042
 
2043
    case PRE_MODIFY:
2044
      {
2045
        rtx op0 = XEXP (XEXP (addr, 1), 0);
2046
        rtx op1 = XEXP (XEXP (addr, 1), 1);
2047
 
2048
        if (GET_CODE (XEXP (addr, 1)) == PLUS && REG_P (op1))
2049
          fprintf (file, "*++%s(%s)", reg_names[REGNO (op0)],
2050
                   reg_names[REGNO (op1)]);
2051
        else if (GET_CODE (XEXP (addr, 1)) == PLUS && INTVAL (op1) > 0)
2052
          fprintf (file, "*++%s(" HOST_WIDE_INT_PRINT_DEC ")",
2053
                   reg_names[REGNO (op0)], INTVAL (op1));
2054
        else if (GET_CODE (XEXP (addr, 1)) == PLUS && INTVAL (op1) < 0)
2055
          fprintf (file, "*--%s(" HOST_WIDE_INT_PRINT_DEC ")",
2056
                   reg_names[REGNO (op0)], -INTVAL (op1));
2057
        else if (GET_CODE (XEXP (addr, 1)) == MINUS && REG_P (op1))
2058
          fprintf (file, "*--%s(%s)", reg_names[REGNO (op0)],
2059
                   reg_names[REGNO (op1)]);
2060
        else
2061
          fatal_insn ("c4x_print_operand_address: Bad pre_modify", addr);
2062
      }
2063
      break;
2064
 
2065
    case PRE_INC:
2066
      fprintf (file, "*++%s", reg_names[REGNO (XEXP (addr, 0))]);
2067
      break;
2068
 
2069
    case POST_DEC:
2070
      fprintf (file, "*%s--", reg_names[REGNO (XEXP (addr, 0))]);
2071
      break;
2072
 
2073
    case PLUS:                  /* Indirect with displacement.  */
2074
      {
2075
        rtx op0 = XEXP (addr, 0);
2076
        rtx op1 = XEXP (addr, 1);
2077
 
2078
        if (REG_P (op0))
2079
          {
2080
            if (REG_P (op1))
2081
              {
2082
                if (IS_INDEX_REG (op0))
2083
                  {
2084
                    fprintf (file, "*+%s(%s)",
2085
                             reg_names[REGNO (op1)],
2086
                             reg_names[REGNO (op0)]);   /* Index + base.  */
2087
                  }
2088
                else
2089
                  {
2090
                    fprintf (file, "*+%s(%s)",
2091
                             reg_names[REGNO (op0)],
2092
                             reg_names[REGNO (op1)]);   /* Base + index.  */
2093
                  }
2094
              }
2095
            else if (INTVAL (op1) < 0)
2096
              {
2097
                fprintf (file, "*-%s(" HOST_WIDE_INT_PRINT_DEC ")",
2098
                         reg_names[REGNO (op0)],
2099
                         -INTVAL (op1));        /* Base - displacement.  */
2100
              }
2101
            else
2102
              {
2103
                fprintf (file, "*+%s(" HOST_WIDE_INT_PRINT_DEC ")",
2104
                         reg_names[REGNO (op0)],
2105
                         INTVAL (op1)); /* Base + displacement.  */
2106
              }
2107
          }
2108
        else
2109
          fatal_insn ("c4x_print_operand_address: Bad operand case", addr);
2110
      }
2111
      break;
2112
 
2113
    case LO_SUM:
2114
      {
2115
        rtx op0 = XEXP (addr, 0);
2116
        rtx op1 = XEXP (addr, 1);
2117
 
2118
        if (REG_P (op0) && REGNO (op0) == DP_REGNO)
2119
          c4x_print_operand_address (file, op1);
2120
        else
2121
          fatal_insn ("c4x_print_operand_address: Bad operand case", addr);
2122
      }
2123
      break;
2124
 
2125
    case CONST:
2126
    case SYMBOL_REF:
2127
    case LABEL_REF:
2128
      fprintf (file, "@");
2129
      output_addr_const (file, addr);
2130
      break;
2131
 
2132
      /* We shouldn't access CONST_INT addresses.  */
2133
    case CONST_INT:
2134
 
2135
    default:
2136
      fatal_insn ("c4x_print_operand_address: Bad operand case", addr);
2137
      break;
2138
    }
2139
}
2140
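/* Illustrative translations from RTL address to assembler syntax:
     (reg ar0)                       ->  *ar0
     (post_inc (reg ar0))            ->  *ar0++
     (plus (reg ar0) (const_int 4))  ->  *+ar0(4)
     (plus (reg ar0) (reg ir1))      ->  *+ar0(ir1)
     (lo_sum (reg dp) (symbol_ref))  ->  @sym  */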
 
2141
 
2142
/* Return nonzero if the floating point operand will fit
2143
   in the immediate field.  */
2144
 
2145
int
2146
c4x_immed_float_p (rtx op)
2147
{
2148
  long convval[2];
2149
  int exponent;
2150
  REAL_VALUE_TYPE r;
2151
 
2152
  REAL_VALUE_FROM_CONST_DOUBLE (r, op);
2153
  if (GET_MODE (op) == HFmode)
2154
    REAL_VALUE_TO_TARGET_DOUBLE (r, convval);
2155
  else
2156
    {
2157
      REAL_VALUE_TO_TARGET_SINGLE (r, convval[0]);
2158
      convval[1] = 0;
2159
    }
2160
 
2161
  /* Sign extend exponent.  */
2162
  exponent = (((convval[0] >> 24) & 0xff) ^ 0x80) - 0x80;
2163
  if (exponent == -128)
2164
    return 1;                   /* 0.0  */
2165
  if ((convval[0] & 0x00000fff) != 0 || convval[1] != 0)
2166
    return 0;                    /* Precision doesn't fit.  */
2167
  return (exponent <= 7)        /* Positive exp.  */
2168
    && (exponent >= -7);        /* Negative exp.  */
2169
}
2170
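/* Worked example (illustrative): 1.5 has a zero exponent and a mantissa
   with only high-order bits set, so the low 12 mantissa bits are zero
   and the exponent lies in [-7, 7]: it fits the 16-bit short immediate
   float format and we return 1.  512.0 (2^9) has a clean mantissa but
   an exponent of 9, outside [-7, 7], so we return 0.  */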
 
2171
 
2172
/* The last instruction in a repeat block cannot be a Bcond, DBcond,
2173
   CALL, CALLcond, TRAPcond, RETIcond, RETScond, IDLE, RPTB or RPTS.
2174
 
2175
   None of the last four instructions from the bottom of the block can
2176
   be a BcondD, BRD, DBcondD, RPTBD, LAJ, LAJcond, LATcond, BcondAF,
2177
   BcondAT or RETIcondD.
2178
 
2179
   This routine scans the four previous insns for a jump insn, and if
2180
   one is found, returns 1 so that we bung in a nop instruction.
2181
   This simple-minded strategy may add a nop when one is not
2182
   required, say when there is a JUMP_INSN near the end of the
2183
   block that doesn't get converted into a delayed branch.
2184
 
2185
   Note that we cannot have a call insn, since we don't generate
2186
   repeat loops with calls in them (although we could, there would
2187
   be no benefit).
2188
 
2189
   !!! FIXME.  The rptb_top insn may be sucked into a SEQUENCE.  */
2190
 
2191
int
2192
c4x_rptb_nop_p (rtx insn)
2193
{
2194
  rtx start_label;
2195
  int i;
2196
 
2197
  /* Extract the start label from the jump pattern (rptb_end).  */
2198
  start_label = XEXP (XEXP (SET_SRC (XVECEXP (PATTERN (insn), 0, 0)), 1), 0);
2199
 
2200
  /* If there is a label at the end of the loop we must insert
2201
     a NOP.  */
2202
  do {
2203
    insn = previous_insn (insn);
2204
  } while (GET_CODE (insn) == NOTE
2205
           || GET_CODE (insn) == USE
2206
           || GET_CODE (insn) == CLOBBER);
2207
  if (GET_CODE (insn) == CODE_LABEL)
2208
    return 1;
2209
 
2210
  for (i = 0; i < 4; i++)
2211
    {
2212
      /* Search back for prev non-note and non-label insn.  */
2213
      while (GET_CODE (insn) == NOTE || GET_CODE (insn) == CODE_LABEL
2214
             || GET_CODE (insn) == USE || GET_CODE (insn) == CLOBBER)
2215
        {
2216
          if (insn == start_label)
2217
            return i == 0;
2218
 
2219
          insn = previous_insn (insn);
2220
        }
2221
 
2222
      /* If we have a jump instruction we should insert a NOP. If we
2223
         hit repeat block top we should only insert a NOP if the loop
2224
         is empty.  */
2225
      if (GET_CODE (insn) == JUMP_INSN)
2226
        return 1;
2227
      insn = previous_insn (insn);
2228
    }
2229
  return 0;
2230
}
2231
 
2232
 
2233
/* The C4x looping instruction needs to be emitted at the top of the
2234
  loop.  Emitting the true RTL for a looping instruction at the top of
2235
  the loop can cause problems with flow analysis.  So instead, a dummy
2236
  doloop insn is emitted at the end of the loop.  This routine checks
2237
  for the presence of this doloop insn and then searches back to the
2238
  top of the loop, where it inserts the true looping insn (provided
2239
  there are no instructions in the loop which would cause problems).
2240
  Any additional labels can be emitted at this point.  In addition, if
2241
  the desired loop count register was not allocated, this routine does
2242
  nothing.
2243
 
2244
  Before we can create a repeat block looping instruction we have to
2245
  verify that there are no jumps out of the loop and that no jumps from
2246
  outside the loop jump into it.  This can happen after the basic block
2247
  reordering pass.  The C4x CPU cannot handle this.  */
2248
 
2249
static int
2250
c4x_label_ref_used_p (rtx x, rtx code_label)
2251
{
2252
  enum rtx_code code;
2253
  int i, j;
2254
  const char *fmt;
2255
 
2256
  if (x == 0)
2257
    return 0;
2258
 
2259
  code = GET_CODE (x);
2260
  if (code == LABEL_REF)
2261
    return INSN_UID (XEXP (x,0)) == INSN_UID (code_label);
2262
 
2263
  fmt = GET_RTX_FORMAT (code);
2264
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2265
    {
2266
      if (fmt[i] == 'e')
2267
        {
2268
          if (c4x_label_ref_used_p (XEXP (x, i), code_label))
2269
            return 1;
2270
        }
2271
      else if (fmt[i] == 'E')
2272
        for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2273
          if (c4x_label_ref_used_p (XVECEXP (x, i, j), code_label))
2274
            return 1;
2275
    }
2276
  return 0;
2277
}
2278
 
2279
 
2280
static int
2281
c4x_rptb_valid_p (rtx insn, rtx start_label)
2282
{
2283
  rtx end = insn;
2284
  rtx start;
2285
  rtx tmp;
2286
 
2287
  /* Find the start label.  */
2288
  for (; insn; insn = PREV_INSN (insn))
2289
    if (insn == start_label)
2290
      break;
2291
 
2292
  /* If not found then we cannot use a rptb or rpts.  The label was
2293
     probably moved by the basic block reorder pass.  */
2294
  if (! insn)
2295
    return 0;
2296
 
2297
  start = insn;
2298
  /* If any jump jumps inside this block then we must fail.  */
2299
  for (insn = PREV_INSN (start); insn; insn = PREV_INSN (insn))
2300
    {
2301
      if (GET_CODE (insn) == CODE_LABEL)
2302
        {
2303
          for (tmp = NEXT_INSN (start); tmp != end; tmp = NEXT_INSN(tmp))
2304
            if (GET_CODE (tmp) == JUMP_INSN
2305
                && c4x_label_ref_used_p (tmp, insn))
2306
              return 0;
2307
        }
2308
    }
2309
  for (insn = NEXT_INSN (end); insn; insn = NEXT_INSN (insn))
2310
    {
2311
      if (GET_CODE (insn) == CODE_LABEL)
2312
        {
2313
          for (tmp = NEXT_INSN (start); tmp != end; tmp = NEXT_INSN(tmp))
2314
            if (GET_CODE (tmp) == JUMP_INSN
2315
                && c4x_label_ref_used_p (tmp, insn))
2316
              return 0;
2317
        }
2318
    }
2319
  /* If any jump jumps outside this block then we must fail.  */
2320
  for (insn = NEXT_INSN (start); insn != end; insn = NEXT_INSN (insn))
2321
    {
2322
      if (GET_CODE (insn) == CODE_LABEL)
2323
        {
2324
          for (tmp = NEXT_INSN (end); tmp; tmp = NEXT_INSN(tmp))
2325
            if (GET_CODE (tmp) == JUMP_INSN
2326
                && c4x_label_ref_used_p (tmp, insn))
2327
              return 0;
2328
          for (tmp = PREV_INSN (start); tmp; tmp = PREV_INSN(tmp))
2329
            if (GET_CODE (tmp) == JUMP_INSN
2330
                && c4x_label_ref_used_p (tmp, insn))
2331
              return 0;
2332
        }
2333
    }
2334
 
2335
  /* All checks OK.  */
2336
  return 1;
2337
}
2338
 
2339
 
2340
void
2341
c4x_rptb_insert (rtx insn)
2342
{
2343
  rtx end_label;
2344
  rtx start_label;
2345
  rtx new_start_label;
2346
  rtx count_reg;
2347
 
2348
  /* If the count register has not been allocated to RC, say if
2349
     there is a movmem pattern in the loop, then do not insert a
2350
     RPTB instruction.  Instead we emit a decrement and branch
2351
     at the end of the loop.  */
2352
  count_reg = XEXP (XEXP (SET_SRC (XVECEXP (PATTERN (insn), 0, 0)), 0), 0);
2353
  if (REGNO (count_reg) != RC_REGNO)
2354
    return;
2355
 
2356
  /* Extract the start label from the jump pattern (rptb_end).  */
2357
  start_label = XEXP (XEXP (SET_SRC (XVECEXP (PATTERN (insn), 0, 0)), 1), 0);
2358
 
2359
  if (! c4x_rptb_valid_p (insn, start_label))
2360
    {
2361
      /* We cannot use the rptb insn.  Replace it so reorg can use
2362
         the delay slots of the jump insn.  */
2363
      emit_insn_before (gen_addqi3 (count_reg, count_reg, constm1_rtx), insn);
2364
      emit_insn_before (gen_cmpqi (count_reg, const0_rtx), insn);
2365
      emit_insn_before (gen_bge (start_label), insn);
2366
      LABEL_NUSES (start_label)++;
2367
      delete_insn (insn);
2368
      return;
2369
    }
2370
 
2371
  end_label = gen_label_rtx ();
2372
  LABEL_NUSES (end_label)++;
2373
  emit_label_after (end_label, insn);
2374
 
2375
  new_start_label = gen_label_rtx ();
2376
  LABEL_NUSES (new_start_label)++;
2377
 
2378
  for (; insn; insn = PREV_INSN (insn))
2379
    {
2380
      if (insn == start_label)
2381
         break;
2382
      if (GET_CODE (insn) == JUMP_INSN
2383
          && JUMP_LABEL (insn) == start_label)
2384
        redirect_jump (insn, new_start_label, 0);
2385
    }
2386
  if (! insn)
2387
    fatal_insn ("c4x_rptb_insert: Cannot find start label", start_label);
2388
 
2389
  emit_label_after (new_start_label, insn);
2390
 
2391
  if (TARGET_RPTS && c4x_rptb_rpts_p (PREV_INSN (insn), 0))
2392
    emit_insn_after (gen_rpts_top (new_start_label, end_label), insn);
2393
  else
2394
    emit_insn_after (gen_rptb_top (new_start_label, end_label), insn);
2395
  if (LABEL_NUSES (start_label) == 0)
2396
    delete_insn (start_label);
2397
}
2398
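/* Illustrative sketch of the rewrite performed above when RC holds the
   loop count:

       before:                      after:
         start:                       start:
           <body>                       rptb  new_start, end
           rptb_end -> start          new_start:
                                        <body>
                                        rptb_end
                                      end:

   Jumps to the old start label are redirected to new_start.  When RC
   was not allocated, an addi -1 / cmpi 0 / bge sequence is emitted
   before the rptb_end insn instead, and the rptb_end insn deleted.  */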
 
2399
 
2400
/* We need to use direct addressing for large constants and addresses
2401
   that cannot fit within an instruction.  We must check for these
2402
   after the final jump optimization pass, since this may
2403
   introduce a local_move insn for a SYMBOL_REF.  This pass
2404
   must come before delayed branch slot filling since it can generate
2405
   additional instructions.
2406
 
2407
   This function also fixes up RPTB style loops that didn't get RC
2408
   allocated as the loop counter.  */
2409
 
2410
static void
2411
c4x_reorg (void)
2412
{
2413
  rtx insn;
2414
 
2415
  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2416
    {
2417
      /* Look for insn.  */
2418
      if (INSN_P (insn))
2419
        {
2420
          int insn_code_number;
2421
          rtx old;
2422
 
2423
          insn_code_number = recog_memoized (insn);
2424
 
2425
          if (insn_code_number < 0)
2426
            continue;
2427
 
2428
          /* Insert the RTX for RPTB at the top of the loop
2429
             and a label at the end of the loop.  */
2430
          if (insn_code_number == CODE_FOR_rptb_end)
2431
            c4x_rptb_insert(insn);
2432
 
2433
          /* We need to split the insn here. Otherwise the calls to
2434
             force_const_mem will not work for load_immed_address.  */
2435
          old = insn;
2436
 
2437
          /* Don't split the insn if it has been deleted.  */
2438
          if (! INSN_DELETED_P (old))
2439
            insn = try_split (PATTERN(old), old, 1);
2440
 
2441
          /* When not optimizing, the old insn will still be left around
2442
             with only the 'deleted' bit set.  Transform it into a note
2443
             to avoid confusion of subsequent processing.  */
2444
          if (INSN_DELETED_P (old))
2445
            {
2446
              PUT_CODE (old, NOTE);
2447
              NOTE_LINE_NUMBER (old) = NOTE_INSN_DELETED;
2448
              NOTE_SOURCE_FILE (old) = 0;
2449
            }
2450
        }
2451
    }
2452
}
2453
 
2454
 
2455
int
2456
c4x_a_register (rtx op)
2457
{
2458
  return REG_P (op) && IS_ADDR_OR_PSEUDO_REG (op);
2459
}
2460
 
2461
 
2462
int
2463
c4x_x_register (rtx op)
2464
{
2465
  return REG_P (op) && IS_INDEX_OR_PSEUDO_REG (op);
2466
}
2467
 
2468
 
2469
static int
2470
c4x_immed_int_constant (rtx op)
2471
{
2472
  if (GET_CODE (op) != CONST_INT)
2473
    return 0;
2474
 
2475
  return GET_MODE (op) == VOIDmode
2476
    || GET_MODE_CLASS (GET_MODE (op)) == MODE_INT
2477
    || GET_MODE_CLASS (GET_MODE (op)) == MODE_PARTIAL_INT;
2478
}
2479
 
2480
 
2481
static int
2482
c4x_immed_float_constant (rtx op)
2483
{
2484
  if (GET_CODE (op) != CONST_DOUBLE)
2485
    return 0;
2486
 
2487
  /* Do not check if the CONST_DOUBLE is in memory. If there is a MEM
2488
     present this only means that a MEM rtx has been generated. It does
2489
     not mean the rtx is really in memory.  */
2490
 
2491
  return GET_MODE (op) == QFmode || GET_MODE (op) == HFmode;
2492
}
2493
 
2494
 
2495
int
2496
c4x_shiftable_constant (rtx op)
2497
{
2498
  int i;
2499
  int mask;
2500
  int val = INTVAL (op);
2501
 
2502
  for (i = 0; i < 16; i++)
2503
    {
2504
      if (val & (1 << i))
2505
        break;
2506
    }
2507
  mask = ((0xffff >> i) << 16) | 0xffff;
2508
  if (IS_INT16_CONST (val & (1 << 31) ? (val >> i) | ~mask
2509
                                      : (val >> i) & mask))
2510
    return i;
2511
  return -1;
2512
}
2513
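/* Worked example (illustrative): for OP == 0x50000 the lowest set bit
   is bit 16, so i == 16 and (val >> 16) == 5, which fits a signed
   16-bit immediate; we return 16, meaning the constant can be built
   with "ldi 5" followed by "lsh 16".  For OP == 0x12345 the lowest set
   bit is bit 0 and 0x12345 does not fit in 16 signed bits, so we
   return -1.  */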
 
2514
 
2515
int
2516
c4x_H_constant (rtx op)
2517
{
2518
  return c4x_immed_float_constant (op) && c4x_immed_float_p (op);
2519
}
2520
 
2521
 
2522
int
2523
c4x_I_constant (rtx op)
2524
{
2525
  return c4x_immed_int_constant (op) && IS_INT16_CONST (INTVAL (op));
2526
}
2527
 
2528
 
2529
int
2530
c4x_J_constant (rtx op)
2531
{
2532
  if (TARGET_C3X)
2533
    return 0;
2534
  return c4x_immed_int_constant (op) && IS_INT8_CONST (INTVAL (op));
2535
}
2536
 
2537
 
2538
int
2539
c4x_K_constant (rtx op)
2540
{
2541
  if (TARGET_C3X || ! c4x_immed_int_constant (op))
2542
    return 0;
2543
  return IS_INT5_CONST (INTVAL (op));
2544
}
2545
 
2546
 
2547
int
2548
c4x_L_constant (rtx op)
2549
{
2550
  return c4x_immed_int_constant (op) && IS_UINT16_CONST (INTVAL (op));
2551
}
2552
 
2553
 
2554
int
2555
c4x_N_constant (rtx op)
2556
{
2557
  return c4x_immed_int_constant (op) && IS_NOT_UINT16_CONST (INTVAL (op));
2558
}
2559
 
2560
 
2561
int
2562
c4x_O_constant (rtx op)
2563
{
2564
  return c4x_immed_int_constant (op) && IS_HIGH_CONST (INTVAL (op));
2565
}
2566
 
2567
 
2568
/* The constraints do not have to check the register class,
2569
   except when needed to discriminate between the constraints.
2570
   The operand has been checked by the predicates to be valid.  */
2571
 
2572
/* ARx + 9-bit signed const or IRn
2573
   *ARx, *+ARx(n), *-ARx(n), *+ARx(IRn), *-ARx(IRn) for -256 < n < 256
2574
   We don't include the pre/post inc/dec forms here since
2575
   they are handled by the <> constraints.  */
2576
 
2577
int
2578
c4x_Q_constraint (rtx op)
2579
{
2580
  enum machine_mode mode = GET_MODE (op);
2581
 
2582
  if (GET_CODE (op) != MEM)
2583
    return 0;
2584
  op = XEXP (op, 0);
2585
  switch (GET_CODE (op))
2586
    {
2587
    case REG:
2588
      return 1;
2589
 
2590
    case PLUS:
2591
      {
2592
        rtx op0 = XEXP (op, 0);
2593
        rtx op1 = XEXP (op, 1);
2594
 
2595
        if (! REG_P (op0))
2596
          return 0;
2597
 
2598
        if (REG_P (op1))
2599
          return 1;
2600
 
2601
        if (GET_CODE (op1) != CONST_INT)
2602
          return 0;
2603
 
2604
        /* HImode and HFmode must be offsettable.  */
2605
        if (mode == HImode || mode == HFmode)
2606
          return IS_DISP8_OFF_CONST (INTVAL (op1));
2607
 
2608
        return IS_DISP8_CONST (INTVAL (op1));
2609
      }
2610
      break;
2611
 
2612
    default:
2613
      break;
2614
    }
2615
  return 0;
2616
}
2617
 
2618
 
2619
/* ARx + 5-bit unsigned const
2620
   *ARx, *+ARx(n) for 0 <= n < 32.  */
2621
 
2622
int
2623
c4x_R_constraint (rtx op)
2624
{
2625
  enum machine_mode mode = GET_MODE (op);
2626
 
2627
  if (TARGET_C3X)
2628
    return 0;
2629
  if (GET_CODE (op) != MEM)
2630
    return 0;
2631
  op = XEXP (op, 0);
2632
  switch (GET_CODE (op))
2633
    {
2634
    case REG:
2635
      return 1;
2636
 
2637
    case PLUS:
2638
      {
2639
        rtx op0 = XEXP (op, 0);
2640
        rtx op1 = XEXP (op, 1);
2641
 
2642
        if (! REG_P (op0))
2643
          return 0;
2644
 
2645
        if (GET_CODE (op1) != CONST_INT)
2646
          return 0;
2647
 
2648
        /* HImode and HFmode must be offsettable.  */
2649
        if (mode == HImode || mode == HFmode)
2650
          return IS_UINT5_CONST (INTVAL (op1) + 1);
2651
 
2652
        return IS_UINT5_CONST (INTVAL (op1));
2653
      }
2654
      break;
2655
 
2656
    default:
2657
      break;
2658
    }
2659
  return 0;
2660
}
2661
 
2662
 
2663
static int
2664
c4x_R_indirect (rtx op)
2665
{
2666
  enum machine_mode mode = GET_MODE (op);
2667
 
2668
  if (TARGET_C3X || GET_CODE (op) != MEM)
2669
    return 0;
2670
 
2671
  op = XEXP (op, 0);
2672
  switch (GET_CODE (op))
2673
    {
2674
    case REG:
2675
      return IS_ADDR_OR_PSEUDO_REG (op);
2676
 
2677
    case PLUS:
2678
      {
2679
        rtx op0 = XEXP (op, 0);
2680
        rtx op1 = XEXP (op, 1);
2681
 
2682
        /* HImode and HFmode must be offsettable.  */
2683
        if (mode == HImode || mode == HFmode)
2684
          return IS_ADDR_OR_PSEUDO_REG (op0)
2685
            && GET_CODE (op1) == CONST_INT
2686
            && IS_UINT5_CONST (INTVAL (op1) + 1);
2687
 
2688
        return REG_P (op0)
2689
          && IS_ADDR_OR_PSEUDO_REG (op0)
2690
          && GET_CODE (op1) == CONST_INT
2691
          && IS_UINT5_CONST (INTVAL (op1));
2692
      }
2693
      break;
2694
 
2695
    default:
2696
      break;
2697
    }
2698
  return 0;
2699
}
2700
 
2701
 
2702
/* ARx + 1-bit unsigned const or IRn
2703
   *ARx, *+ARx(1), *-ARx(1), *+ARx(IRn), *-ARx(IRn)
2704
   We don't include the pre/post inc/dec forms here since
2705
   they are handled by the <> constraints.  */
2706
 
2707
int
2708
c4x_S_constraint (rtx op)
2709
{
2710
  enum machine_mode mode = GET_MODE (op);
2711
  if (GET_CODE (op) != MEM)
2712
    return 0;
2713
  op = XEXP (op, 0);
2714
  switch (GET_CODE (op))
2715
    {
2716
    case REG:
2717
      return 1;
2718
 
2719
    case PRE_MODIFY:
2720
    case POST_MODIFY:
2721
      {
2722
        rtx op0 = XEXP (op, 0);
2723
        rtx op1 = XEXP (op, 1);
2724
 
2725
        if ((GET_CODE (op1) != PLUS && GET_CODE (op1) != MINUS)
2726
            || (op0 != XEXP (op1, 0)))
2727
          return 0;
2728
 
2729
        op0 = XEXP (op1, 0);
2730
        op1 = XEXP (op1, 1);
2731
        return REG_P (op0) && REG_P (op1);
2732
        /* Pre or post_modify with a displacement of 0 or 1
2733
           should not be generated.  */
2734
      }
2735
      break;
2736
 
2737
    case PLUS:
2738
      {
2739
        rtx op0 = XEXP (op, 0);
2740
        rtx op1 = XEXP (op, 1);
2741
 
2742
        if (!REG_P (op0))
2743
          return 0;
2744
 
2745
        if (REG_P (op1))
2746
          return 1;
2747
 
2748
        if (GET_CODE (op1) != CONST_INT)
2749
          return 0;
2750
 
2751
        /* HImode and HFmode must be offsettable.  */
2752
        if (mode == HImode || mode == HFmode)
2753
          return IS_DISP1_OFF_CONST (INTVAL (op1));
2754
 
2755
        return IS_DISP1_CONST (INTVAL (op1));
2756
      }
2757
      break;
2758
 
2759
    default:
2760
      break;
2761
    }
2762
  return 0;
2763
}
2764
 
2765
 
2766
int
2767
c4x_S_indirect (rtx op)
2768
{
2769
  enum machine_mode mode = GET_MODE (op);
2770
  if (GET_CODE (op) != MEM)
2771
    return 0;
2772
 
2773
  op = XEXP (op, 0);
2774
  switch (GET_CODE (op))
2775
    {
2776
    case PRE_DEC:
2777
    case POST_DEC:
2778
      if (mode != QImode && mode != QFmode)
2779
        return 0;
2780
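      /* Fall through.  */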
    case PRE_INC:
2781
    case POST_INC:
2782
      op = XEXP (op, 0);
2783
 
2784
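      /* Fall through.  */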
    case REG:
2785
      return IS_ADDR_OR_PSEUDO_REG (op);
2786
 
2787
    case PRE_MODIFY:
2788
    case POST_MODIFY:
2789
      {
2790
        rtx op0 = XEXP (op, 0);
2791
        rtx op1 = XEXP (op, 1);
2792
 
2793
        if (mode != QImode && mode != QFmode)
2794
          return 0;
2795
 
2796
        if ((GET_CODE (op1) != PLUS && GET_CODE (op1) != MINUS)
2797
            || (op0 != XEXP (op1, 0)))
2798
          return 0;
2799
 
2800
        op0 = XEXP (op1, 0);
2801
        op1 = XEXP (op1, 1);
2802
        return REG_P (op0) && IS_ADDR_OR_PSEUDO_REG (op0)
2803
          && REG_P (op1) && IS_INDEX_OR_PSEUDO_REG (op1);
2804
        /* Pre or post_modify with a displacement of 0 or 1
2805
           should not be generated.  */
2806
      }
2807
 
2808
    case PLUS:
2809
      {
2810
        rtx op0 = XEXP (op, 0);
2811
        rtx op1 = XEXP (op, 1);
2812
 
2813
        if (REG_P (op0))
2814
          {
2815
            /* HImode and HFmode must be offsettable.  */
2816
            if (mode == HImode || mode == HFmode)
2817
              return IS_ADDR_OR_PSEUDO_REG (op0)
2818
                && GET_CODE (op1) == CONST_INT
2819
                && IS_DISP1_OFF_CONST (INTVAL (op1));
2820
 
2821
            if (REG_P (op1))
2822
              return (IS_INDEX_OR_PSEUDO_REG (op1)
2823
                      && IS_ADDR_OR_PSEUDO_REG (op0))
2824
                || (IS_ADDR_OR_PSEUDO_REG (op1)
2825
                    && IS_INDEX_OR_PSEUDO_REG (op0));
2826
 
2827
            return IS_ADDR_OR_PSEUDO_REG (op0)
2828
              && GET_CODE (op1) == CONST_INT
2829
              && IS_DISP1_CONST (INTVAL (op1));
2830
          }
2831
      }
2832
      break;
2833
 
2834
    default:
2835
      break;
2836
    }
2837
  return 0;
2838
}
2839
 
2840
 
2841
/* Direct memory operand.  */
2842
 
2843
int
2844
c4x_T_constraint (rtx op)
2845
{
2846
  if (GET_CODE (op) != MEM)
2847
    return 0;
2848
  op = XEXP (op, 0);
2849
 
2850
  if (GET_CODE (op) != LO_SUM)
2851
    {
2852
      /* Allow call operands.  */
2853
      return GET_CODE (op) == SYMBOL_REF
2854
        && GET_MODE (op) == Pmode
2855
        && SYMBOL_REF_FUNCTION_P (op);
2856
    }
2857
 
2858
  /* HImode and HFmode are not offsettable.  */
2859
  if (GET_MODE (op) == HImode || GET_MODE (op) == HFmode)
2860
    return 0;
2861
 
2862
  if ((GET_CODE (XEXP (op, 0)) == REG)
2863
      && (REGNO (XEXP (op, 0)) == DP_REGNO))
2864
    return c4x_U_constraint (XEXP (op, 1));
2865
 
2866
  return 0;
2867
}
2868
 
2869
 
2870
/* Symbolic operand.  */
2871
 
2872
int
2873
c4x_U_constraint (rtx op)
2874
{
2875
  /* Don't allow direct addressing to an arbitrary constant.  */
2876
  return GET_CODE (op) == CONST
2877
         || GET_CODE (op) == SYMBOL_REF
2878
         || GET_CODE (op) == LABEL_REF;
2879
}
2880
 
2881
 
2882
int
2883
c4x_autoinc_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2884
{
2885
  if (GET_CODE (op) == MEM)
2886
    {
2887
      enum rtx_code code = GET_CODE (XEXP (op, 0));
2888
 
2889
      if (code == PRE_INC
2890
          || code == PRE_DEC
2891
          || code == POST_INC
2892
          || code == POST_DEC
2893
          || code == PRE_MODIFY
2894
          || code == POST_MODIFY
2895
          )
2896
        return 1;
2897
    }
2898
  return 0;
2899
}
2900
 
2901
 
2902
int
2903
mixed_subreg_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2904
{
2905
  /* Allow (subreg:HF (reg:HI)) that can be generated for a union of an
2906
     int and a long double.  */
2907
  if (GET_CODE (op) == SUBREG
2908
      && (GET_MODE (op) == QFmode)
2909
      && (GET_MODE (SUBREG_REG (op)) == QImode
2910
          || GET_MODE (SUBREG_REG (op)) == HImode))
2911
    return 1;
2912
  return 0;
2913
}
2914
 
2915
 
2916
int
2917
reg_imm_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2918
{
2919
  if (REG_P (op) || CONSTANT_P (op))
2920
    return 1;
2921
  return 0;
2922
}
2923
 
2924
 
2925
int
2926
not_modify_reg (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2927
{
2928
  if (REG_P (op) || CONSTANT_P (op))
2929
    return 1;
2930
  if (GET_CODE (op) != MEM)
2931
    return 0;
2932
  op = XEXP (op, 0);
2933
  switch (GET_CODE (op))
2934
    {
2935
    case REG:
2936
      return 1;
2937
 
2938
    case PLUS:
2939
      {
2940
        rtx op0 = XEXP (op, 0);
2941
        rtx op1 = XEXP (op, 1);
2942
 
2943
        if (! REG_P (op0))
2944
          return 0;
2945
 
2946
        if (REG_P (op1) || GET_CODE (op1) == CONST_INT)
2947
          return 1;
2948
      }
2949
 
2950
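      /* Fall through.  */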
    case LO_SUM:
2951
      {
2952
        rtx op0 = XEXP (op, 0);
2953
 
2954
        if (REG_P (op0) && REGNO (op0) == DP_REGNO)
2955
          return 1;
2956
      }
2957
      break;
2958
 
2959
    case CONST:
2960
    case SYMBOL_REF:
2961
    case LABEL_REF:
2962
      return 1;
2963
 
2964
    default:
2965
      break;
2966
    }
2967
  return 0;
2968
}
2969
 
2970
 
2971
int
2972
not_rc_reg (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2973
{
2974
  if (REG_P (op) && REGNO (op) == RC_REGNO)
2975
    return 0;
2976
  return 1;
2977
}
2978
 
2979
 
2980
static void
2981
c4x_S_address_parse (rtx op, int *base, int *incdec, int *index, int *disp)
2982
{
2983
  *base = 0;
2984
  *incdec = 0;
2985
  *index = 0;
2986
  *disp = 0;
2987
 
2988
  if (GET_CODE (op) != MEM)
2989
    fatal_insn ("invalid indirect memory address", op);
2990
 
2991
  op = XEXP (op, 0);
2992
  switch (GET_CODE (op))
2993
    {
2994
    case PRE_DEC:
2995
      *base = REGNO (XEXP (op, 0));
2996
      *incdec = 1;
2997
      *disp = -1;
2998
      return;
2999
 
3000
    case POST_DEC:
3001
      *base = REGNO (XEXP (op, 0));
3002
      *incdec = 1;
3003
      *disp = 0;
3004
      return;
3005
 
3006
    case PRE_INC:
3007
      *base = REGNO (XEXP (op, 0));
3008
      *incdec = 1;
3009
      *disp = 1;
3010
      return;
3011
 
3012
    case POST_INC:
3013
      *base = REGNO (XEXP (op, 0));
3014
      *incdec = 1;
3015
      *disp = 0;
3016
      return;
3017
 
3018
    case POST_MODIFY:
3019
      *base = REGNO (XEXP (op, 0));
3020
      if (REG_P (XEXP (XEXP (op, 1), 1)))
3021
        {
3022
          *index = REGNO (XEXP (XEXP (op, 1), 1));
3023
          *disp = 0;             /* ??? */
3024
        }
3025
      else
3026
          *disp = INTVAL (XEXP (XEXP (op, 1), 1));
3027
      *incdec = 1;
3028
      return;
3029
 
3030
    case PRE_MODIFY:
3031
      *base = REGNO (XEXP (op, 0));
3032
      if (REG_P (XEXP (XEXP (op, 1), 1)))
3033
        {
3034
          *index = REGNO (XEXP (XEXP (op, 1), 1));
3035
          *disp = 1;            /* ??? */
3036
        }
3037
      else
3038
          *disp = INTVAL (XEXP (XEXP (op, 1), 1));
3039
      *incdec = 1;
3040
 
3041
      return;
3042
 
3043
    case REG:
3044
      *base = REGNO (op);
3045
      return;
3046
 
3047
    case PLUS:
3048
      {
3049
        rtx op0 = XEXP (op, 0);
3050
        rtx op1 = XEXP (op, 1);
3051
 
3052
        if (c4x_a_register (op0))
3053
          {
3054
            if (c4x_x_register (op1))
3055
              {
3056
                *base = REGNO (op0);
3057
                *index = REGNO (op1);
3058
                return;
3059
              }
3060
            else if (GET_CODE (op1) == CONST_INT
3061
                     && IS_DISP1_CONST (INTVAL (op1)))
3062
              {
3063
                *base = REGNO (op0);
3064
                *disp = INTVAL (op1);
3065
                return;
3066
              }
3067
          }
3068
        else if (c4x_x_register (op0) && c4x_a_register (op1))
3069
          {
3070
            *base = REGNO (op1);
3071
            *index = REGNO (op0);
3072
            return;
3073
          }
3074
      }
3075
      /* Fall through.  */
3076
 
3077
    default:
3078
      fatal_insn ("invalid indirect (S) memory address", op);
3079
    }
3080
}
3081
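/* For example (illustrative), parsing *+AR3(1) sets *base to AR3's
   register number and *disp to 1, leaving *index and *incdec zero;
   *AR3++ (post_inc) sets *incdec to 1 with a *disp of 0.  */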
 
3082
 
3083
int
3084
c4x_address_conflict (rtx op0, rtx op1, int store0, int store1)
3085
{
3086
  int base0;
3087
  int base1;
3088
  int incdec0;
3089
  int incdec1;
3090
  int index0;
3091
  int index1;
3092
  int disp0;
3093
  int disp1;
3094
 
3095
  if (MEM_VOLATILE_P (op0) && MEM_VOLATILE_P (op1))
3096
    return 1;
3097
 
3098
  c4x_S_address_parse (op0, &base0, &incdec0, &index0, &disp0);
3099
  c4x_S_address_parse (op1, &base1, &incdec1, &index1, &disp1);
3100
 
3101
  if (store0 && store1)
3102
    {
3103
      /* If we have two stores in parallel to the same address, then
3104
         the C4x only executes one of the stores.  This is unlikely to
3105
         cause problems except when writing to a hardware device such
3106
         as a FIFO since the second write will be lost.  The user
3107
         should flag the hardware location as being volatile so that
3108
         we don't do this optimization.  While it is unlikely that we
3109
         have an aliased address if both locations are not marked
3110
         volatile, it is probably safer to flag a potential conflict
3111
         if either location is volatile.  */
3112
      if (! flag_argument_noalias)
3113
        {
3114
          if (MEM_VOLATILE_P (op0) || MEM_VOLATILE_P (op1))
3115
            return 1;
3116
        }
3117
    }
3118
 
3119
  /* If we have a parallel load and a store to the same address, the load
3120
     is performed first, so there is no conflict.  Similarly, there is
3121
     no conflict if we have parallel loads from the same address.  */
3122
 
3123
  /* Cannot use auto increment or auto decrement twice for same
3124
     base register.  */
3125
  if (base0 == base1 && incdec0 && incdec1)
3126
    return 1;
3127
 
3128
  /* It might be too confusing for GCC if we use a base register
3129
     with a side effect and a memory reference using the same register
3130
     in parallel.  */
3131
  if (! TARGET_DEVEL && base0 == base1 && (incdec0 || incdec1))
3132
    return 1;
3133
 
3134
  /* We cannot optimize the case where op0 and op1 refer to the same
3135
     address.  */
3136
  if (base0 == base1 && disp0 == disp1 && index0 == index1)
3137
    return 1;
3138
 
3139
  /* No conflict.  */
3140
  return 0;
3141
}
3142
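/* Illustrative examples: *AR3++ in parallel with *+AR3(1) is flagged
   as a conflict (same base register with a side effect); *+AR3(1) in
   parallel with *+AR5(1) is not, since the base registers differ.  */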
 
3143
 
3144
/* Check for while loop inside a decrement and branch loop.  */
3145
 
3146
int
3147
c4x_label_conflict (rtx insn, rtx jump, rtx db)
3148
{
3149
  while (insn)
3150
    {
3151
      if (GET_CODE (insn) == CODE_LABEL)
3152
        {
3153
          if (CODE_LABEL_NUMBER (jump) == CODE_LABEL_NUMBER (insn))
3154
            return 1;
3155
          if (CODE_LABEL_NUMBER (db) == CODE_LABEL_NUMBER (insn))
3156
            return 0;
3157
        }
3158
      insn = PREV_INSN (insn);
3159
    }
3160
  return 1;
3161
}
3162
 
3163
 
3164
/* Validate combination of operands for parallel load/store instructions.  */
3165
 
3166
int
3167
valid_parallel_load_store (rtx *operands,
3168
                           enum machine_mode mode ATTRIBUTE_UNUSED)
3169
{
3170
  rtx op0 = operands[0];
3171
  rtx op1 = operands[1];
3172
  rtx op2 = operands[2];
3173
  rtx op3 = operands[3];
3174
 
3175
  if (GET_CODE (op0) == SUBREG)
3176
    op0 = SUBREG_REG (op0);
3177
  if (GET_CODE (op1) == SUBREG)
3178
    op1 = SUBREG_REG (op1);
3179
  if (GET_CODE (op2) == SUBREG)
3180
    op2 = SUBREG_REG (op2);
3181
  if (GET_CODE (op3) == SUBREG)
3182
    op3 = SUBREG_REG (op3);
3183
 
3184
  /* The patterns should only allow ext_low_reg_operand() or
3185
     par_ind_operand() operands.  Thus of the 4 operands, only 2
3186
     should be REGs and the other 2 should be MEMs.  */
3187
 
3188
  /* This test prevents the multipack pass from using this pattern if
3189
     op0 is used as an index or base register in op2 or op3, since
3190
     this combination will require reloading.  */
3191
  if (GET_CODE (op0) == REG
3192
      && ((GET_CODE (op2) == MEM && reg_mentioned_p (op0, XEXP (op2, 0)))
3193
          || (GET_CODE (op3) == MEM && reg_mentioned_p (op0, XEXP (op3, 0)))))
3194
    return 0;
3195
 
3196
  /* LDI||LDI.  */
3197
  if (GET_CODE (op0) == REG && GET_CODE (op2) == REG)
3198
    return (REGNO (op0) != REGNO (op2))
3199
      && GET_CODE (op1) == MEM && GET_CODE (op3) == MEM
3200
      && ! c4x_address_conflict (op1, op3, 0, 0);
3201
 
3202
  /* STI||STI.  */
3203
  if (GET_CODE (op1) == REG && GET_CODE (op3) == REG)
3204
    return GET_CODE (op0) == MEM && GET_CODE (op2) == MEM
3205
      && ! c4x_address_conflict (op0, op2, 1, 1);
3206
 
3207
  /* LDI||STI.  */
3208
  if (GET_CODE (op0) == REG && GET_CODE (op3) == REG)
3209
    return GET_CODE (op1) == MEM && GET_CODE (op2) == MEM
3210
      && ! c4x_address_conflict (op1, op2, 0, 1);
3211
 
3212
  /* STI||LDI.  */
3213
  if (GET_CODE (op1) == REG && GET_CODE (op2) == REG)
3214
    return GET_CODE (op0) == MEM && GET_CODE (op3) == MEM
3215
      && ! c4x_address_conflict (op0, op3, 1, 0);
3216
 
3217
  return 0;
3218
}
3219
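/* For example (illustrative), "ldi *ar0, r0 || ldi *ar1, r1" is a
   valid pairing: two loads with distinct destination registers and no
   address conflict.  "sti r0, *ar2 || sti r1, *ar2" is rejected, since
   parallel stores to the same address are a conflict.  */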
 
3220
 
3221
int
3222
valid_parallel_operands_4 (rtx *operands,
3223
                           enum machine_mode mode ATTRIBUTE_UNUSED)
3224
{
3225
  rtx op0 = operands[0];
3226
  rtx op2 = operands[2];
3227
 
3228
  if (GET_CODE (op0) == SUBREG)
3229
    op0 = SUBREG_REG (op0);
3230
  if (GET_CODE (op2) == SUBREG)
3231
    op2 = SUBREG_REG (op2);
3232
 
3233
  /* This test prevents the multipack pass from using this pattern if
3234
     op0 is used as an index or base register in op2, since this combination
3235
     will require reloading.  */
3236
  if (GET_CODE (op0) == REG
3237
      && GET_CODE (op2) == MEM
3238
      && reg_mentioned_p (op0, XEXP (op2, 0)))
3239
    return 0;
3240
 
3241
  return 1;
3242
}
3243
 
3244
 
3245
int
3246
valid_parallel_operands_5 (rtx *operands,
3247
                           enum machine_mode mode ATTRIBUTE_UNUSED)
3248
{
3249
  int regs = 0;
3250
  rtx op0 = operands[0];
3251
  rtx op1 = operands[1];
3252
  rtx op2 = operands[2];
3253
  rtx op3 = operands[3];
3254
 
3255
  if (GET_CODE (op0) == SUBREG)
3256
    op0 = SUBREG_REG (op0);
3257
  if (GET_CODE (op1) == SUBREG)
3258
    op1 = SUBREG_REG (op1);
3259
  if (GET_CODE (op2) == SUBREG)
3260
    op2 = SUBREG_REG (op2);
3261
 
3262
  /* The patterns should only allow ext_low_reg_operand() or
3263
     par_ind_operand() operands.  Operands 1 and 2 may be commutative
3264
     but only one of them can be a register.  */
3265
  if (GET_CODE (op1) == REG)
3266
    regs++;
3267
  if (GET_CODE (op2) == REG)
3268
    regs++;
3269
 
3270
  if (regs != 1)
3271
    return 0;
3272
 
3273
  /* This test prevents the multipack pass from using this pattern if
3274
     op0 is used as an index or base register in op3, since this combination
3275
     will require reloading.  */
3276
  if (GET_CODE (op0) == REG
3277
      && GET_CODE (op3) == MEM
3278
      && reg_mentioned_p (op0, XEXP (op3, 0)))
3279
    return 0;
3280
 
3281
  return 1;
3282
}
3283
 
3284
 
3285
int
3286
valid_parallel_operands_6 (rtx *operands,
3287
                           enum machine_mode mode ATTRIBUTE_UNUSED)
3288
{
3289
  int regs = 0;
3290
  rtx op0 = operands[0];
3291
  rtx op1 = operands[1];
3292
  rtx op2 = operands[2];
3293
  rtx op4 = operands[4];
3294
  rtx op5 = operands[5];
3295
 
3296
  if (GET_CODE (op1) == SUBREG)
3297
    op1 = SUBREG_REG (op1);
3298
  if (GET_CODE (op2) == SUBREG)
3299
    op2 = SUBREG_REG (op2);
3300
  if (GET_CODE (op4) == SUBREG)
3301
    op4 = SUBREG_REG (op4);
3302
  if (GET_CODE (op5) == SUBREG)
3303
    op5 = SUBREG_REG (op5);
3304
 
3305
  /* The patterns should only allow ext_low_reg_operand() or
3306
     par_ind_operand() operands.  Thus of the 4 input operands, only 2
3307
     should be REGs and the other 2 should be MEMs.  */
3308
 
3309
  if (GET_CODE (op1) == REG)
3310
    regs++;
3311
  if (GET_CODE (op2) == REG)
3312
    regs++;
3313
  if (GET_CODE (op4) == REG)
3314
    regs++;
3315
  if (GET_CODE (op5) == REG)
3316
    regs++;
3317
 
3318
  /* The new C30/C40 silicon dies allow 3 regs of the 4 input operands.
3319
     Perhaps we should count the MEMs as well?  */
3320
  if (regs != 2)
3321
    return 0;
3322
 
3323
  /* This test prevents the multipack pass from using this pattern if
3324
     op0 is used as an index or base register in op4 or op5, since
3325
     this combination will require reloading.  */
3326
  if (GET_CODE (op0) == REG
3327
      && ((GET_CODE (op4) == MEM && reg_mentioned_p (op0, XEXP (op4, 0)))
3328
          || (GET_CODE (op5) == MEM && reg_mentioned_p (op0, XEXP (op5, 0)))))
3329
    return 0;
3330
 
3331
  return 1;
3332
}


/* Validate combination of src operands.  Note that the operands have
   been screened by the src_operand predicate.  We just have to check
   that the combination of operands is valid.  If FORCE is set, ensure
   that the destination regno is valid if we have a 2 operand insn.  */

static int
c4x_valid_operands (enum rtx_code code, rtx *operands,
                    enum machine_mode mode ATTRIBUTE_UNUSED,
                    int force)
{
  rtx op0;
  rtx op1;
  rtx op2;
  enum rtx_code code1;
  enum rtx_code code2;


  /* FIXME, why can't we tighten the operands for IF_THEN_ELSE?  The
     check below is disabled: the leading "1 ||" makes the return value
     always true.  */
  if (code == IF_THEN_ELSE)
      return 1 || (operands[0] == operands[2] || operands[0] == operands[3]);

  if (code == COMPARE)
    {
      op1 = operands[0];
      op2 = operands[1];
    }
  else
    {
      op1 = operands[1];
      op2 = operands[2];
    }

  op0 = operands[0];

  if (GET_CODE (op0) == SUBREG)
    op0 = SUBREG_REG (op0);
  if (GET_CODE (op1) == SUBREG)
    op1 = SUBREG_REG (op1);
  if (GET_CODE (op2) == SUBREG)
    op2 = SUBREG_REG (op2);

  code1 = GET_CODE (op1);
  code2 = GET_CODE (op2);


  if (code1 == REG && code2 == REG)
    return 1;

  if (code1 == MEM && code2 == MEM)
    {
      if (c4x_S_indirect (op1) && c4x_S_indirect (op2))
        return 1;
      return c4x_R_indirect (op1) && c4x_R_indirect (op2);
    }

  /* We cannot handle two operands with the same code apart from the
     two MEMs dealt with above, e.g., two CONSTs.  */
  if (code1 == code2)
    return 0;

  if (code1 == REG)
    {
      switch (code2)
        {
        case CONST_INT:
          if (c4x_J_constant (op2) && c4x_R_indirect (op1))
            return 1;
          break;

        case CONST_DOUBLE:
          if (! c4x_H_constant (op2))
            return 0;
          break;

          /* Any valid memory operand screened by src_operand is OK.  */
        case MEM:
          break;

        default:
          fatal_insn ("c4x_valid_operands: Internal error", op2);
          break;
        }

      if (GET_CODE (op0) == SCRATCH)
          return 1;

      if (!REG_P (op0))
          return 0;

      /* Check that we have a valid destination register for a two operand
         instruction.  */
      return ! force || code == COMPARE || REGNO (op1) == REGNO (op0);
    }


  /* Check non-commutative operators.  */
  if (code == ASHIFTRT || code == LSHIFTRT
      || code == ASHIFT || code == COMPARE)
    return code2 == REG
      && (c4x_S_indirect (op1) || c4x_R_indirect (op1));


  /* Assume MINUS is commutative since the subtract patterns
     also support the reverse subtract instructions.  Since op1
     is not a register, and op2 is a register, op1 can only
     be a restricted memory operand for a shift instruction.  */
  if (code2 == REG)
    {
      switch (code1)
        {
        case CONST_INT:
          break;

        case CONST_DOUBLE:
          if (! c4x_H_constant (op1))
            return 0;
          break;

          /* Any valid memory operand screened by src_operand is OK.  */
        case MEM:
          break;

        default:
          abort ();
          break;
        }

      if (GET_CODE (op0) == SCRATCH)
          return 1;

      if (!REG_P (op0))
          return 0;

      /* Check that we have a valid destination register for a two operand
         instruction.  */
      return ! force || REGNO (op1) == REGNO (op0);
    }

  if (c4x_J_constant (op1) && c4x_R_indirect (op2))
    return 1;

  return 0;
}
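/* An informal summary of the combinations accepted above (a reading
   aid only; the function itself is authoritative): two REGs are always
   OK; two MEMs only if both are S-indirect or both R-indirect; a REG
   paired with a CONST_INT, CONST_DOUBLE, or MEM is screened by the
   constraint checks above, with the FORCE flag additionally requiring
   that a two-operand insn's destination matches op1.  */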


int
valid_operands (enum rtx_code code, rtx *operands, enum machine_mode mode)
{

  /* If we are not optimizing then we have to let anything go and let
     reload fix things up.  instantiate_decl in function.c can produce
     invalid insns by changing the offset of a memory operand from a
     valid one into an invalid one, when the second operand is also a
     memory operand.  The alternative is not to allow two memory
     operands for an insn when not optimizing.  The problem only rarely
     occurs, for example with the C-torture program DFcmp.c.  */

  return ! optimize || c4x_valid_operands (code, operands, mode, 0);
}


int
legitimize_operands (enum rtx_code code, rtx *operands, enum machine_mode mode)
{
  /* Compare only has 2 operands.  */
  if (code == COMPARE)
    {
      /* During RTL generation, force constants into pseudos so that
         they can get hoisted out of loops.  This will tie up an extra
         register but can save an extra cycle.  Only do this if loop
         optimization is enabled.  (We cannot pull this trick for add
         and sub instructions since the flow pass won't find
         autoincrements etc.)  This allows us to generate compare
         instructions like CMPI R0, *AR0++ where R0 = 42, say, instead
         of LDI *AR0++, R0; CMPI 42, R0.

         Note that expand_binops will try to load an expensive constant
         into a register if it is used within a loop.  Unfortunately,
         the cost mechanism doesn't allow us to look at the other
         operand to decide whether the constant is expensive.  */

      if (! reload_in_progress
          && TARGET_HOIST
          && optimize > 0
          && GET_CODE (operands[1]) == CONST_INT
          && rtx_cost (operands[1], code) > 1)
        operands[1] = force_reg (mode, operands[1]);

      if (! reload_in_progress
          && ! c4x_valid_operands (code, operands, mode, 0))
        operands[0] = force_reg (mode, operands[0]);
      return 1;
    }

  /* We cannot do this for ADDI/SUBI insns since we will
     defeat the flow pass from finding autoincrement addressing
     opportunities.  */
  if (! reload_in_progress
      && ! ((code == PLUS || code == MINUS) && mode == Pmode)
      && TARGET_HOIST
      && optimize > 1
      && GET_CODE (operands[2]) == CONST_INT
      && rtx_cost (operands[2], code) > 1)
    operands[2] = force_reg (mode, operands[2]);

  /* We can get better code on a C30 if we force constant shift counts
     into a register.  This way they can get hoisted out of loops,
     tying up a register but saving an instruction.  The downside is
     that they may get allocated to an address or index register, and
     thus we will get a pipeline conflict if there is a nearby
     indirect address using an address register.

     Note that expand_binops will not try to load an expensive constant
     into a register if it is used within a loop for a shift insn.  */

  if (! reload_in_progress
      && ! c4x_valid_operands (code, operands, mode, TARGET_FORCE))
    {
      /* If the operand combination is invalid, we force operand1 into a
         register, preventing reload from having to do this at a later
         stage.  */
      operands[1] = force_reg (mode, operands[1]);
      if (TARGET_FORCE)
        {
          emit_move_insn (operands[0], operands[1]);
          operands[1] = copy_rtx (operands[0]);
        }
      else
        {
          /* Just in case...  */
          if (! c4x_valid_operands (code, operands, mode, 0))
            operands[2] = force_reg (mode, operands[2]);
        }
    }

  /* Right shifts require a negative shift count, but GCC expects
     a positive count, so we emit a NEG.  */
  if ((code == ASHIFTRT || code == LSHIFTRT)
      && (GET_CODE (operands[2]) != CONST_INT))
    operands[2] = gen_rtx_NEG (mode, negate_rtx (mode, operands[2]));


  /* When the shift count is 32 or more, the result can be
     implementation dependent.  We truncate the count to fit in
     5 bits so that we do not emit invalid code when
     optimizing---such as trying to generate lhu2 with 20021124-1.c.  */
  if (((code == ASHIFTRT || code == LSHIFTRT || code == ASHIFT)
      && (GET_CODE (operands[2]) == CONST_INT))
      && INTVAL (operands[2]) > (GET_MODE_BITSIZE (mode) - 1))
      operands[2]
          = GEN_INT (INTVAL (operands[2]) & (GET_MODE_BITSIZE (mode) - 1));

  return 1;
}
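/* Illustrative sketch (not compiled): the masking above applied to a
   32-bit mode, where GET_MODE_BITSIZE (mode) - 1 == 31.  */
#if 0
static int
example_truncate_shift_count (int count)
{
  /* An out-of-range count such as 40 becomes 40 & 31 == 8, so
     "x << 40" is emitted as "x << 8" rather than invalid code.  */
  return count & 31;
}
#endif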


/* The following predicates are used for instruction scheduling.  */

int
group1_reg_operand (rtx op, enum machine_mode mode)
{
  if (mode != VOIDmode && mode != GET_MODE (op))
    return 0;
  if (GET_CODE (op) == SUBREG)
    op = SUBREG_REG (op);
  return REG_P (op) && (! reload_completed || IS_GROUP1_REG (op));
}


int
group1_mem_operand (rtx op, enum machine_mode mode)
{
  if (mode != VOIDmode && mode != GET_MODE (op))
    return 0;

  if (GET_CODE (op) == MEM)
    {
      op = XEXP (op, 0);
      if (GET_CODE (op) == PLUS)
        {
          rtx op0 = XEXP (op, 0);
          rtx op1 = XEXP (op, 1);

          if ((REG_P (op0) && (! reload_completed || IS_GROUP1_REG (op0)))
              || (REG_P (op1) && (! reload_completed || IS_GROUP1_REG (op1))))
            return 1;
        }
      else if ((REG_P (op)) && (! reload_completed || IS_GROUP1_REG (op)))
        return 1;
    }

  return 0;
}


/* Return true if OP is one of the address registers.  */

int
arx_reg_operand (rtx op, enum machine_mode mode)
{
  if (mode != VOIDmode && mode != GET_MODE (op))
    return 0;
  if (GET_CODE (op) == SUBREG)
    op = SUBREG_REG (op);
  return REG_P (op) && (! reload_completed || IS_ADDR_REG (op));
}


static int
c4x_arn_reg_operand (rtx op, enum machine_mode mode, unsigned int regno)
{
  if (mode != VOIDmode && mode != GET_MODE (op))
    return 0;
  if (GET_CODE (op) == SUBREG)
    op = SUBREG_REG (op);
  return REG_P (op) && (! reload_completed || (REGNO (op) == regno));
}


static int
c4x_arn_mem_operand (rtx op, enum machine_mode mode, unsigned int regno)
{
  if (mode != VOIDmode && mode != GET_MODE (op))
    return 0;

  if (GET_CODE (op) == MEM)
    {
      op = XEXP (op, 0);
      switch (GET_CODE (op))
        {
        case PRE_DEC:
        case POST_DEC:
        case PRE_INC:
        case POST_INC:
          op = XEXP (op, 0);
          /* Fall through to check the register being modified.  */

        case REG:
          return REG_P (op) && (! reload_completed || (REGNO (op) == regno));

        case PRE_MODIFY:
        case POST_MODIFY:
          if (REG_P (XEXP (op, 0)) && (! reload_completed
                                       || (REGNO (XEXP (op, 0)) == regno)))
            return 1;
          if (REG_P (XEXP (XEXP (op, 1), 1))
              && (! reload_completed
                  || (REGNO (XEXP (XEXP (op, 1), 1)) == regno)))
            return 1;
          break;

        case PLUS:
          {
            rtx op0 = XEXP (op, 0);
            rtx op1 = XEXP (op, 1);

            if ((REG_P (op0) && (! reload_completed
                                 || (REGNO (op0) == regno)))
                || (REG_P (op1) && (! reload_completed
                                    || (REGNO (op1) == regno))))
              return 1;
          }
          break;

        default:
          break;
        }
    }
  return 0;
}
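/* For orientation (illustrative, not exhaustive): for regno == AR3, the
   predicate above matches addresses such as *AR3 (REG), autoincrement
   forms like *AR3++ (POST_INC et al.), displacement forms like
   *+AR3(5) (PLUS), and index-modify forms like *AR3++(IR0)
   (PRE_MODIFY/POST_MODIFY, which also match when the register appears
   as the modification source).  */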


int
ar0_reg_operand (rtx op, enum machine_mode mode)
{
  return c4x_arn_reg_operand (op, mode, AR0_REGNO);
}


int
ar0_mem_operand (rtx op, enum machine_mode mode)
{
  return c4x_arn_mem_operand (op, mode, AR0_REGNO);
}


int
ar1_reg_operand (rtx op, enum machine_mode mode)
{
  return c4x_arn_reg_operand (op, mode, AR1_REGNO);
}


int
ar1_mem_operand (rtx op, enum machine_mode mode)
{
  return c4x_arn_mem_operand (op, mode, AR1_REGNO);
}


int
ar2_reg_operand (rtx op, enum machine_mode mode)
{
  return c4x_arn_reg_operand (op, mode, AR2_REGNO);
}


int
ar2_mem_operand (rtx op, enum machine_mode mode)
{
  return c4x_arn_mem_operand (op, mode, AR2_REGNO);
}


int
ar3_reg_operand (rtx op, enum machine_mode mode)
{
  return c4x_arn_reg_operand (op, mode, AR3_REGNO);
}


int
ar3_mem_operand (rtx op, enum machine_mode mode)
{
  return c4x_arn_mem_operand (op, mode, AR3_REGNO);
}


int
ar4_reg_operand (rtx op, enum machine_mode mode)
{
  return c4x_arn_reg_operand (op, mode, AR4_REGNO);
}


int
ar4_mem_operand (rtx op, enum machine_mode mode)
{
  return c4x_arn_mem_operand (op, mode, AR4_REGNO);
}


int
ar5_reg_operand (rtx op, enum machine_mode mode)
{
  return c4x_arn_reg_operand (op, mode, AR5_REGNO);
}


int
ar5_mem_operand (rtx op, enum machine_mode mode)
{
  return c4x_arn_mem_operand (op, mode, AR5_REGNO);
}


int
ar6_reg_operand (rtx op, enum machine_mode mode)
{
  return c4x_arn_reg_operand (op, mode, AR6_REGNO);
}


int
ar6_mem_operand (rtx op, enum machine_mode mode)
{
  return c4x_arn_mem_operand (op, mode, AR6_REGNO);
}


int
ar7_reg_operand (rtx op, enum machine_mode mode)
{
  return c4x_arn_reg_operand (op, mode, AR7_REGNO);
}


int
ar7_mem_operand (rtx op, enum machine_mode mode)
{
  return c4x_arn_mem_operand (op, mode, AR7_REGNO);
}


int
ir0_reg_operand (rtx op, enum machine_mode mode)
{
  return c4x_arn_reg_operand (op, mode, IR0_REGNO);
}


int
ir0_mem_operand (rtx op, enum machine_mode mode)
{
  return c4x_arn_mem_operand (op, mode, IR0_REGNO);
}


int
ir1_reg_operand (rtx op, enum machine_mode mode)
{
  return c4x_arn_reg_operand (op, mode, IR1_REGNO);
}


int
ir1_mem_operand (rtx op, enum machine_mode mode)
{
  return c4x_arn_mem_operand (op, mode, IR1_REGNO);
}


/* This is similar to operand_subword but allows autoincrement
   addressing.  */

rtx
c4x_operand_subword (rtx op, int i, int validate_address,
                     enum machine_mode mode)
{
  if (mode != HImode && mode != HFmode)
    fatal_insn ("c4x_operand_subword: invalid mode", op);

  if (mode == HFmode && REG_P (op))
    fatal_insn ("c4x_operand_subword: invalid operand", op);

  if (GET_CODE (op) == MEM)
    {
      enum rtx_code code = GET_CODE (XEXP (op, 0));
      enum machine_mode mode = GET_MODE (XEXP (op, 0));
      enum machine_mode submode;

      submode = mode;
      if (mode == HImode)
        submode = QImode;
      else if (mode == HFmode)
        submode = QFmode;

      switch (code)
        {
        case POST_INC:
        case PRE_INC:
          return gen_rtx_MEM (submode, XEXP (op, 0));

        case POST_DEC:
        case PRE_DEC:
        case PRE_MODIFY:
        case POST_MODIFY:
          /* We could handle these with some difficulty.
             e.g., *p-- => *(p-=2); *(p+1).  */
          fatal_insn ("c4x_operand_subword: invalid autoincrement", op);

        case SYMBOL_REF:
        case LABEL_REF:
        case CONST:
        case CONST_INT:
          fatal_insn ("c4x_operand_subword: invalid address", op);

          /* Even though offsettable_address_p considers (MEM
             (LO_SUM)) to be offsettable, it is not safe if the
             address is at the end of the data page since we also have
             to fix up the associated HIGH part.  In this case where
             we are trying to split a HImode or HFmode memory
             reference, we would have to emit another insn to reload a
             new HIGH value.  It's easier to disable LO_SUM memory references
             in HImode or HFmode and we probably get better code.  */
        case LO_SUM:
          fatal_insn ("c4x_operand_subword: address not offsettable", op);

        default:
          break;
        }
    }

  return operand_subword (op, i, validate_address, mode);
}
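/* Host-side sketch (not compiled) of the value-level split the routine
   above performs at the RTL level: an HImode quantity (two 32-bit
   words on the C4x) is accessed one QImode word at a time.  */
#if 0
static unsigned long
example_subword_value (unsigned long long v, int i)
{
  /* Word 0 is the low 32 bits, word 1 the high 32 bits.  */
  return (unsigned long) (v >> (i * 32)) & 0xffffffffUL;
}
#endif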

struct name_list
{
  struct name_list *next;
  const char *name;
};

static struct name_list *global_head;
static struct name_list *extern_head;


/* Add NAME to the list of global symbols and remove it from the
   external list if present there.  */

void
c4x_global_label (const char *name)
{
  struct name_list *p, *last;

  /* Do not insert duplicate names; linearly search the list of
     existing names first.  */
  p = global_head;
  while (p)
    {
      if (strcmp (p->name, name) == 0)
        return;
      p = p->next;
    }
  p = (struct name_list *) xmalloc (sizeof *p);
  p->next = global_head;
  p->name = name;
  global_head = p;

  /* Remove this name from the ref list if present.  */
  last = NULL;
  p = extern_head;
  while (p)
    {
      if (strcmp (p->name, name) == 0)
        {
          if (last)
            last->next = p->next;
          else
            extern_head = p->next;
          break;
        }
      last = p;
      p = p->next;
    }
}


/* Add NAME to the list of external symbols.  */

void
c4x_external_ref (const char *name)
{
  struct name_list *p;

  /* Do not insert duplicate names.  */
  p = extern_head;
  while (p)
    {
      if (strcmp (p->name, name) == 0)
        return;
      p = p->next;
    }

  /* Do not insert a ref if the name is already on the global list.  */
  p = global_head;
  while (p)
    {
      if (strcmp (p->name, name) == 0)
        return;
      p = p->next;
    }
  p = (struct name_list *) xmalloc (sizeof *p);
  p->next = extern_head;
  p->name = name;
  extern_head = p;
}
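/* Illustrative call sequence (hypothetical symbol names, not compiled):
   only "_sin" survives on the extern list, so c4x_file_end below would
   emit a single ".ref _sin" directive.  */
#if 0
static void
example_ref_bookkeeping (void)
{
  c4x_external_ref ("_sin");   /* Added to the extern list.  */
  c4x_external_ref ("_sin");   /* Duplicate; ignored.  */
  c4x_global_label ("_main");  /* Added to the global list.  */
  c4x_external_ref ("_main");  /* Already global; ignored.  */
}
#endif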

/* We need to have a data section we can identify so that we can set
   the DP register back to a data pointer in the small memory model.
   This is only required for ISRs if we are paranoid that someone
   may have changed this register on the sly.  */
static void
c4x_file_start (void)
{
  default_file_start ();
  fprintf (asm_out_file, "\t.version\t%d\n", c4x_cpu_version);
  fputs ("\n\t.data\ndata_sec:\n", asm_out_file);
}


static void
c4x_file_end (void)
{
  struct name_list *p;

  /* Output all external names that are not global.  */
  p = extern_head;
  while (p)
    {
      fprintf (asm_out_file, "\t.ref\t");
      assemble_name (asm_out_file, p->name);
      fprintf (asm_out_file, "\n");
      p = p->next;
    }
  fprintf (asm_out_file, "\t.end\n");
}


/* If DECL's name is on LIST, prepend the attribute ATTRIB with the
   matching list entry's value to *ATTRIBUTES.  */

static void
c4x_check_attribute (const char *attrib, tree list, tree decl, tree *attributes)
{
  while (list != NULL_TREE
         && IDENTIFIER_POINTER (TREE_PURPOSE (list))
         != IDENTIFIER_POINTER (DECL_NAME (decl)))
    list = TREE_CHAIN (list);
  if (list)
    *attributes = tree_cons (get_identifier (attrib), TREE_VALUE (list),
                             *attributes);
}


static void
c4x_insert_attributes (tree decl, tree *attributes)
{
  switch (TREE_CODE (decl))
    {
    case FUNCTION_DECL:
      c4x_check_attribute ("section", code_tree, decl, attributes);
      c4x_check_attribute ("const", pure_tree, decl, attributes);
      c4x_check_attribute ("noreturn", noreturn_tree, decl, attributes);
      c4x_check_attribute ("interrupt", interrupt_tree, decl, attributes);
      c4x_check_attribute ("naked", naked_tree, decl, attributes);
      break;

    case VAR_DECL:
      c4x_check_attribute ("section", data_tree, decl, attributes);
      break;

    default:
      break;
    }
}

/* Table of valid machine attributes.  */
const struct attribute_spec c4x_attribute_table[] =
{
  /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
  { "interrupt",    0, 0, false, true,  true,  c4x_handle_fntype_attribute },
  { "naked",        0, 0, false, true,  true,  c4x_handle_fntype_attribute },
  { "leaf_pretend", 0, 0, false, true,  true,  c4x_handle_fntype_attribute },
  { NULL,           0, 0, false, false, false, NULL }
};

/* Handle an attribute requiring a FUNCTION_TYPE;
   arguments as in struct attribute_spec.handler.  */
static tree
c4x_handle_fntype_attribute (tree *node, tree name,
                             tree args ATTRIBUTE_UNUSED,
                             int flags ATTRIBUTE_UNUSED,
                             bool *no_add_attrs)
{
  if (TREE_CODE (*node) != FUNCTION_TYPE)
    {
      warning (OPT_Wattributes, "%qs attribute only applies to functions",
               IDENTIFIER_POINTER (name));
      *no_add_attrs = true;
    }

  return NULL_TREE;
}


/* !!! FIXME: emit RPTS correctly.  */

int
c4x_rptb_rpts_p (rtx insn, rtx op)
{
  /* The next insn should be our label marking where the
     repeat block starts.  */
  insn = NEXT_INSN (insn);
  if (GET_CODE (insn) != CODE_LABEL)
    {
      /* Some insns may have been shifted between the RPTB insn
         and the top label... They were probably destined to
         be moved out of the loop.  For now, let's leave them
         where they are and refuse to use RPTS for this block
         (aborting via fatal_insn when debugging).  We should
         probably move these insns before the repeat block insn.  */
      if (TARGET_DEBUG)
        fatal_insn ("c4x_rptb_rpts_p: Repeat block top label moved",
                    insn);
      return 0;
    }

  /* Skip any notes.  */
  insn = next_nonnote_insn (insn);

  /* This should be our first insn in the loop.  */
  if (! INSN_P (insn))
    return 0;

  /* Skip any notes.  */
  insn = next_nonnote_insn (insn);

  if (! INSN_P (insn))
    return 0;

  if (recog_memoized (insn) != CODE_FOR_rptb_end)
    return 0;

  if (TARGET_RPTS)
    return 1;

  return (GET_CODE (op) == CONST_INT) && TARGET_RPTS_CYCLES (INTVAL (op));
}


/* Check if register r11 is used as the destination of an insn.  */

static int
c4x_r11_set_p (rtx x)
{
  rtx set;
  int i, j;
  const char *fmt;

  if (x == 0)
    return 0;

  if (INSN_P (x) && GET_CODE (PATTERN (x)) == SEQUENCE)
    x = XVECEXP (PATTERN (x), 0, XVECLEN (PATTERN (x), 0) - 1);

  if (INSN_P (x) && (set = single_set (x)))
    x = SET_DEST (set);

  if (GET_CODE (x) == REG && REGNO (x) == R11_REGNO)
    return 1;

  /* Recursively scan the sub-rtxes of X for a set of r11.  */
  fmt = GET_RTX_FORMAT (GET_CODE (x));
  for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        {
          if (c4x_r11_set_p (XEXP (x, i)))
            return 1;
        }
      else if (fmt[i] == 'E')
        for (j = XVECLEN (x, i) - 1; j >= 0; j--)
          if (c4x_r11_set_p (XVECEXP (x, i, j)))
            return 1;
    }
  return 0;
}


/* The c4x sometimes has a problem when the insn before the laj insn
   sets the r11 register.  Check for this situation.  */

int
c4x_check_laj_p (rtx insn)
{
  insn = prev_nonnote_insn (insn);

  /* If this is the start of the function, no nop is needed.  */
  if (insn == 0)
    return 0;

  /* If the previous insn is a code label, we have to insert a nop,
     since the label could be the target of a jump or a table jump.
     We can find the normal jumps by scanning the function, but this
     will not find table jumps.  */
  if (GET_CODE (insn) == CODE_LABEL)
    return 1;

  /* If the previous insn sets register r11, we have to insert a nop.  */
  if (c4x_r11_set_p (insn))
    return 1;

  /* No nop needed.  */
  return 0;
}
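/* For orientation (illustrative assembler, hypothetical labels): LAJ
   deposits its return address in R11, so a sequence such as

       LDI  @val, R11    ; previous insn writes R11
       LAJ  _func        ; link-and-jump also uses R11

   needs a NOP between the two insns, which is what the check above
   requests.  */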
4191
 
4192
 
4193
/* Adjust the cost of a scheduling dependency.  Return the new cost of
4194
   a dependency LINK or INSN on DEP_INSN.  COST is the current cost.
4195
   A set of an address register followed by a use occurs a 2 cycle
4196
   stall (reduced to a single cycle on the c40 using LDA), while
4197
   a read of an address register followed by a use occurs a single cycle.  */
4198
 
4199
#define SET_USE_COST    3
4200
#define SETLDA_USE_COST 2
4201
#define READ_USE_COST   2
4202
 
4203
static int
4204
c4x_adjust_cost (rtx insn, rtx link, rtx dep_insn, int cost)
4205
{
4206
  /* Don't worry about this until we know what registers have been
4207
     assigned.  */
4208
  if (flag_schedule_insns == 0 && ! reload_completed)
4209
    return 0;
4210
 
4211
  /* How do we handle dependencies where a read followed by another
4212
     read causes a pipeline stall?  For example, a read of ar0 followed
4213
     by the use of ar0 for a memory reference.  It looks like we
4214
     need to extend the scheduler to handle this case.  */
4215
 
4216
  /* Reload sometimes generates a CLOBBER of a stack slot, e.g.,
4217
     (clobber (mem:QI (plus:QI (reg:QI 11 ar3) (const_int 261)))),
4218
     so only deal with insns we know about.  */
4219
  if (recog_memoized (dep_insn) < 0)
4220
    return 0;
4221
 
4222
  if (REG_NOTE_KIND (link) == 0)
4223
    {
4224
      int max = 0;
4225
 
4226
      /* Data dependency; DEP_INSN writes a register that INSN reads some
4227
         cycles later.  */
4228
      if (TARGET_C3X)
4229
        {
4230
          if (get_attr_setgroup1 (dep_insn) && get_attr_usegroup1 (insn))
4231
            max = SET_USE_COST > max ? SET_USE_COST : max;
4232
          if (get_attr_readarx (dep_insn) && get_attr_usegroup1 (insn))
4233
            max = READ_USE_COST > max ? READ_USE_COST : max;
4234
        }
4235
      else
4236
        {
4237
          /* This could be significantly optimized. We should look
4238
             to see if dep_insn sets ar0-ar7 or ir0-ir1 and if
4239
             insn uses ar0-ar7.  We then test if the same register
4240
             is used.  The tricky bit is that some operands will
4241
             use several registers...  */
4242
          if (get_attr_setar0 (dep_insn) && get_attr_usear0 (insn))
4243
            max = SET_USE_COST > max ? SET_USE_COST : max;
4244
          if (get_attr_setlda_ar0 (dep_insn) && get_attr_usear0 (insn))
4245
            max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
4246
          if (get_attr_readar0 (dep_insn) && get_attr_usear0 (insn))
4247
            max = READ_USE_COST > max ? READ_USE_COST : max;
4248
 
4249
          if (get_attr_setar1 (dep_insn) && get_attr_usear1 (insn))
4250
            max = SET_USE_COST > max ? SET_USE_COST : max;
4251
          if (get_attr_setlda_ar1 (dep_insn) && get_attr_usear1 (insn))
4252
            max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
4253
          if (get_attr_readar1 (dep_insn) && get_attr_usear1 (insn))
4254
            max = READ_USE_COST > max ? READ_USE_COST : max;
4255
 
4256
          if (get_attr_setar2 (dep_insn) && get_attr_usear2 (insn))
4257
            max = SET_USE_COST > max ? SET_USE_COST : max;
4258
          if (get_attr_setlda_ar2 (dep_insn) && get_attr_usear2 (insn))
4259
            max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
4260
          if (get_attr_readar2 (dep_insn) && get_attr_usear2 (insn))
4261
            max = READ_USE_COST > max ? READ_USE_COST : max;
4262
 
4263
          if (get_attr_setar3 (dep_insn) && get_attr_usear3 (insn))
4264
            max = SET_USE_COST > max ? SET_USE_COST : max;
4265
          if (get_attr_setlda_ar3 (dep_insn) && get_attr_usear3 (insn))
4266
            max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
4267
          if (get_attr_readar3 (dep_insn) && get_attr_usear3 (insn))
4268
            max = READ_USE_COST > max ? READ_USE_COST : max;
4269
 
4270
          if (get_attr_setar4 (dep_insn) && get_attr_usear4 (insn))
4271
            max = SET_USE_COST > max ? SET_USE_COST : max;
4272
          if (get_attr_setlda_ar4 (dep_insn) && get_attr_usear4 (insn))
4273
            max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
4274
          if (get_attr_readar4 (dep_insn) && get_attr_usear4 (insn))
4275
            max = READ_USE_COST > max ? READ_USE_COST : max;
4276
 
4277
          if (get_attr_setar5 (dep_insn) && get_attr_usear5 (insn))
4278
            max = SET_USE_COST > max ? SET_USE_COST : max;
4279
          if (get_attr_setlda_ar5 (dep_insn) && get_attr_usear5 (insn))
4280
            max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
4281
          if (get_attr_readar5 (dep_insn) && get_attr_usear5 (insn))
4282
            max = READ_USE_COST > max ? READ_USE_COST : max;
4283
 
4284
          if (get_attr_setar6 (dep_insn) && get_attr_usear6 (insn))
4285
            max = SET_USE_COST > max ? SET_USE_COST : max;
4286
          if (get_attr_setlda_ar6 (dep_insn) && get_attr_usear6 (insn))
4287
            max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
4288
          if (get_attr_readar6 (dep_insn) && get_attr_usear6 (insn))
4289
            max = READ_USE_COST > max ? READ_USE_COST : max;
4290
 
4291
          if (get_attr_setar7 (dep_insn) && get_attr_usear7 (insn))
4292
            max = SET_USE_COST > max ? SET_USE_COST : max;
4293
          if (get_attr_setlda_ar7 (dep_insn) && get_attr_usear7 (insn))
4294
            max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
4295
          if (get_attr_readar7 (dep_insn) && get_attr_usear7 (insn))
4296
            max = READ_USE_COST > max ? READ_USE_COST : max;
4297
 
4298
          if (get_attr_setir0 (dep_insn) && get_attr_useir0 (insn))
4299
            max = SET_USE_COST > max ? SET_USE_COST : max;
4300
          if (get_attr_setlda_ir0 (dep_insn) && get_attr_useir0 (insn))
4301
            max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
4302
 
4303
          if (get_attr_setir1 (dep_insn) && get_attr_useir1 (insn))
4304
            max = SET_USE_COST > max ? SET_USE_COST : max;
4305
          if (get_attr_setlda_ir1 (dep_insn) && get_attr_useir1 (insn))
4306
            max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
4307
        }
4308
 
4309
      if (max)
4310
        cost = max;
4311
 
4312
      /* For other data dependencies, the default cost specified in the
4313
         md is correct.  */
4314
      return cost;
4315
    }
4316
  else if (REG_NOTE_KIND (link) == REG_DEP_ANTI)
4317
    {
4318
      /* Anti dependency; DEP_INSN reads a register that INSN writes some
4319
         cycles later.  */
4320
 
4321
      /* For c4x anti dependencies, the cost is 0.  */
4322
      return 0;
4323
    }
4324
  else if (REG_NOTE_KIND (link) == REG_DEP_OUTPUT)
4325
    {
4326
      /* Output dependency; DEP_INSN writes a register that INSN writes some
4327
         cycles later.  */
4328
 
4329
      /* For c4x output dependencies, the cost is 0.  */
4330
      return 0;
4331
    }
4332
  else
4333
    abort ();
4334
}
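/* Reduced sketch (not compiled) of the selection logic above for a
   single address register: apply the largest stall penalty whose
   set/use pattern matches, otherwise keep the cost from the md file.  */
#if 0
static int
example_adjust (int set_then_use, int lda_then_use, int read_then_use,
                int md_cost)
{
  int max = 0;
  if (set_then_use)
    max = SET_USE_COST;      /* Plain write of an AR, then a use.  */
  else if (lda_then_use)
    max = SETLDA_USE_COST;   /* Write via LDA, then a use (C40).  */
  else if (read_then_use)
    max = READ_USE_COST;     /* Read of an AR, then a use.  */
  return max ? max : md_cost;
}
#endif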

/* Register the machine-specific builtin functions (fast_ftoi,
   ansi_ftoi, etc.).  */

void
c4x_init_builtins (void)
{
  tree endlink = void_list_node;

  lang_hooks.builtin_function ("fast_ftoi",
                               build_function_type
                               (integer_type_node,
                                tree_cons (NULL_TREE, double_type_node,
                                           endlink)),
                               C4X_BUILTIN_FIX, BUILT_IN_MD, NULL, NULL_TREE);
  lang_hooks.builtin_function ("ansi_ftoi",
                               build_function_type
                               (integer_type_node,
                                tree_cons (NULL_TREE, double_type_node,
                                           endlink)),
                               C4X_BUILTIN_FIX_ANSI, BUILT_IN_MD, NULL,
                               NULL_TREE);
  if (TARGET_C3X)
    lang_hooks.builtin_function ("fast_imult",
                                 build_function_type
                                 (integer_type_node,
                                  tree_cons (NULL_TREE, integer_type_node,
                                             tree_cons (NULL_TREE,
                                                        integer_type_node,
                                                        endlink))),
                                 C4X_BUILTIN_MPYI, BUILT_IN_MD, NULL,
                                 NULL_TREE);
  else
    {
      lang_hooks.builtin_function ("toieee",
                                   build_function_type
                                   (double_type_node,
                                    tree_cons (NULL_TREE, double_type_node,
                                               endlink)),
                                   C4X_BUILTIN_TOIEEE, BUILT_IN_MD, NULL,
                                   NULL_TREE);
      lang_hooks.builtin_function ("frieee",
                                   build_function_type
                                   (double_type_node,
                                    tree_cons (NULL_TREE, double_type_node,
                                               endlink)),
                                   C4X_BUILTIN_FRIEEE, BUILT_IN_MD, NULL,
                                   NULL_TREE);
      lang_hooks.builtin_function ("fast_invf",
                                   build_function_type
                                   (double_type_node,
                                    tree_cons (NULL_TREE, double_type_node,
                                               endlink)),
                                   C4X_BUILTIN_RCPF, BUILT_IN_MD, NULL,
                                   NULL_TREE);
    }
}


/* Expand a call to one of the machine-specific builtins registered
   above into RTL, emitting the corresponding insn directly.  */

rtx
c4x_expand_builtin (tree exp, rtx target,
                    rtx subtarget ATTRIBUTE_UNUSED,
                    enum machine_mode mode ATTRIBUTE_UNUSED,
                    int ignore ATTRIBUTE_UNUSED)
{
  tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
  unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
  tree arglist = TREE_OPERAND (exp, 1);
  tree arg0, arg1;
  rtx r0, r1;

  switch (fcode)
    {
    case C4X_BUILTIN_FIX:
      arg0 = TREE_VALUE (arglist);
      r0 = expand_expr (arg0, NULL_RTX, QFmode, 0);
      if (! target || ! register_operand (target, QImode))
        target = gen_reg_rtx (QImode);
      emit_insn (gen_fixqfqi_clobber (target, r0));
      return target;

    case C4X_BUILTIN_FIX_ANSI:
      arg0 = TREE_VALUE (arglist);
      r0 = expand_expr (arg0, NULL_RTX, QFmode, 0);
      if (! target || ! register_operand (target, QImode))
        target = gen_reg_rtx (QImode);
      emit_insn (gen_fix_truncqfqi2 (target, r0));
      return target;

    case C4X_BUILTIN_MPYI:
      if (! TARGET_C3X)
        break;
      arg0 = TREE_VALUE (arglist);
      arg1 = TREE_VALUE (TREE_CHAIN (arglist));
      r0 = expand_expr (arg0, NULL_RTX, QImode, 0);
      r1 = expand_expr (arg1, NULL_RTX, QImode, 0);
      if (! target || ! register_operand (target, QImode))
        target = gen_reg_rtx (QImode);
      emit_insn (gen_mulqi3_24_clobber (target, r0, r1));
      return target;

    case C4X_BUILTIN_TOIEEE:
      if (TARGET_C3X)
        break;
      arg0 = TREE_VALUE (arglist);
      r0 = expand_expr (arg0, NULL_RTX, QFmode, 0);
      if (! target || ! register_operand (target, QFmode))
        target = gen_reg_rtx (QFmode);
      emit_insn (gen_toieee (target, r0));
      return target;

    case C4X_BUILTIN_FRIEEE:
      if (TARGET_C3X)
        break;
      arg0 = TREE_VALUE (arglist);
      r0 = expand_expr (arg0, NULL_RTX, QFmode, 0);
      if (register_operand (r0, QFmode))
        {
          r1 = assign_stack_local (QFmode, GET_MODE_SIZE (QFmode), 0);
          emit_move_insn (r1, r0);
          r0 = r1;
        }
      if (! target || ! register_operand (target, QFmode))
        target = gen_reg_rtx (QFmode);
      emit_insn (gen_frieee (target, r0));
      return target;

    case C4X_BUILTIN_RCPF:
      if (TARGET_C3X)
        break;
      arg0 = TREE_VALUE (arglist);
      r0 = expand_expr (arg0, NULL_RTX, QFmode, 0);
      if (! target || ! register_operand (target, QFmode))
        target = gen_reg_rtx (QFmode);
      emit_insn (gen_rcpfqf_clobber (target, r0));
      return target;
    }
  return NULL_RTX;
}

static void
c4x_init_libfuncs (void)
{
  set_optab_libfunc (smul_optab, QImode, "__mulqi3");
  set_optab_libfunc (sdiv_optab, QImode, "__divqi3");
  set_optab_libfunc (udiv_optab, QImode, "__udivqi3");
  set_optab_libfunc (smod_optab, QImode, "__modqi3");
  set_optab_libfunc (umod_optab, QImode, "__umodqi3");
  set_optab_libfunc (sdiv_optab, QFmode, "__divqf3");
  set_optab_libfunc (smul_optab, HFmode, "__mulhf3");
  set_optab_libfunc (sdiv_optab, HFmode, "__divhf3");
  set_optab_libfunc (smul_optab, HImode, "__mulhi3");
  set_optab_libfunc (sdiv_optab, HImode, "__divhi3");
  set_optab_libfunc (udiv_optab, HImode, "__udivhi3");
  set_optab_libfunc (smod_optab, HImode, "__modhi3");
  set_optab_libfunc (umod_optab, HImode, "__umodhi3");
  set_optab_libfunc (ffs_optab,  QImode, "__ffs");
  smulhi3_libfunc           = init_one_libfunc ("__smulhi3_high");
  umulhi3_libfunc           = init_one_libfunc ("__umulhi3_high");
  fix_truncqfhi2_libfunc    = init_one_libfunc ("__fix_truncqfhi2");
  fixuns_truncqfhi2_libfunc = init_one_libfunc ("__ufix_truncqfhi2");
  fix_trunchfhi2_libfunc    = init_one_libfunc ("__fix_trunchfhi2");
  fixuns_trunchfhi2_libfunc = init_one_libfunc ("__ufix_trunchfhi2");
  floathiqf2_libfunc        = init_one_libfunc ("__floathiqf2");
  floatunshiqf2_libfunc     = init_one_libfunc ("__ufloathiqf2");
  floathihf2_libfunc        = init_one_libfunc ("__floathihf2");
  floatunshihf2_libfunc     = init_one_libfunc ("__ufloathihf2");
}

static void
c4x_asm_named_section (const char *name, unsigned int flags ATTRIBUTE_UNUSED,
                       tree decl ATTRIBUTE_UNUSED)
{
  fprintf (asm_out_file, "\t.sect\t\"%s\"\n", name);
}

static void
c4x_globalize_label (FILE *stream, const char *name)
{
  default_globalize_label (stream, name);
  c4x_global_label (name);
}

#define SHIFT_CODE_P(C) \
  ((C) == ASHIFT || (C) == ASHIFTRT || (C) == LSHIFTRT)
#define LOGICAL_CODE_P(C) \
  ((C) == NOT || (C) == AND || (C) == IOR || (C) == XOR)

/* Compute a (partial) cost for rtx X.  Return true if the complete
   cost has been computed, and false if subexpressions should be
   scanned.  In either case, *TOTAL contains the cost result.  */

static bool
c4x_rtx_costs (rtx x, int code, int outer_code, int *total)
{
  HOST_WIDE_INT val;

  switch (code)
    {
      /* Some small integers are effectively free for the C40.  We should
         also consider if we are using the small memory model.  With
         the big memory model we require an extra insn for a constant
         loaded from memory.  */

    case CONST_INT:
      val = INTVAL (x);
      if (c4x_J_constant (x))
        *total = 0;
      else if (! TARGET_C3X
               && outer_code == AND
               && (val == 255 || val == 65535))
        *total = 0;
      else if (! TARGET_C3X
               && (outer_code == ASHIFTRT || outer_code == LSHIFTRT)
               && (val == 16 || val == 24))
        *total = 0;
      else if (TARGET_C3X && SHIFT_CODE_P (outer_code))
        *total = 3;
      else if (LOGICAL_CODE_P (outer_code)
               ? c4x_L_constant (x) : c4x_I_constant (x))
        *total = 2;
      else
        *total = 4;
      return true;

    case CONST:
    case LABEL_REF:
    case SYMBOL_REF:
      *total = 4;
      return true;

    case CONST_DOUBLE:
      if (c4x_H_constant (x))
        *total = 2;
      else if (GET_MODE (x) == QFmode)
        *total = 4;
      else
        *total = 8;
      return true;

    /* ??? Note that we return true, rather than false, so that rtx_cost
       doesn't include the constant costs.  Otherwise expand_mult will
       think that it is cheaper to synthesize a multiply rather than to
       use a multiply instruction.  I think this is because the algorithm
       synth_mult doesn't take into account the loading of the operands,
       whereas the calculation of mult_cost does.  */
    case PLUS:
    case MINUS:
    case AND:
    case IOR:
    case XOR:
    case ASHIFT:
    case ASHIFTRT:
    case LSHIFTRT:
      *total = COSTS_N_INSNS (1);
      return true;

    case MULT:
      *total = COSTS_N_INSNS (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT
                              || TARGET_MPYI ? 1 : 14);
      return true;

    case DIV:
    case UDIV:
    case MOD:
    case UMOD:
      *total = COSTS_N_INSNS (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT
                              ? 15 : 50);
      return true;

    default:
      return false;
    }
}
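/* Reduced sketch (not compiled) of the CONST_INT bucketing above for a
   C40.  The bounds for c4x_J_constant are assumed here to be 16-bit
   signed purely for illustration; the real predicate is authoritative.  */
#if 0
static int
example_const_int_cost (long val, int outer_is_and)
{
  if (val >= -32768 && val <= 32767)
    return 0;                   /* Short immediate: free.  */
  if (outer_is_and && (val == 255 || val == 65535))
    return 0;                   /* AND with 0xff/0xffff: free (C40).  */
  return 4;                     /* General constant: must be loaded.  */
}
#endif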

/* Worker function for TARGET_ASM_EXTERNAL_LIBCALL.  */

static void
c4x_external_libcall (rtx fun)
{
  /* This is only needed to keep asm30 happy for ___divqf3 etc.  */
  c4x_external_ref (XSTR (fun, 0));
}

/* Worker function for TARGET_STRUCT_VALUE_RTX.  */

static rtx
c4x_struct_value_rtx (tree fntype ATTRIBUTE_UNUSED,
                      int incoming ATTRIBUTE_UNUSED)
{
  return gen_rtx_REG (Pmode, AR0_REGNO);
}
