/* Subroutines used for code generation on the Argonaut ARC cpu.
   Copyright (C) 1994, 1995, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004,
   2005
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to
the Free Software Foundation, 51 Franklin Street, Fifth Floor,
Boston, MA 02110-1301, USA.  */

/* ??? This is an old port, and is undoubtedly suffering from bit rot.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "real.h"
#include "insn-config.h"
#include "conditions.h"
#include "output.h"
#include "insn-attr.h"
#include "flags.h"
#include "function.h"
#include "expr.h"
#include "recog.h"
#include "toplev.h"
#include "tm_p.h"
#include "target.h"
#include "target-def.h"

/* Which cpu we're compiling for.  */
int arc_cpu_type;

/* Name of mangle string to add to symbols to separate code compiled for each
   cpu (or NULL).  */
const char *arc_mangle_cpu;

/* Save the operands last given to a compare for use when we
   generate a scc or bcc insn.  */
rtx arc_compare_op0, arc_compare_op1;

/* Name of text, data, and rodata sections used in varasm.c.  */
const char *arc_text_section;
const char *arc_data_section;
const char *arc_rodata_section;

/* Array of valid operand punctuation characters.  */
char arc_punct_chars[256];

/* Variables used by arc_final_prescan_insn to implement conditional
   execution.  */
static int arc_ccfsm_state;
static int arc_ccfsm_current_cc;
static rtx arc_ccfsm_target_insn;
static int arc_ccfsm_target_label;

/* The maximum number of insns skipped which will be conditionalised if
   possible.  */
#define MAX_INSNS_SKIPPED 3

/* A nop is needed between a 4 byte insn that sets the condition codes and
   a branch that uses them (the same isn't true for an 8 byte insn that sets
   the condition codes).  Set by arc_final_prescan_insn.  Used by
   arc_print_operand.  */
static int last_insn_set_cc_p;
static int current_insn_set_cc_p;
static bool arc_handle_option (size_t, const char *, int);
static void record_cc_ref (rtx);
static void arc_init_reg_tables (void);
static int get_arc_condition_code (rtx);
const struct attribute_spec arc_attribute_table[];
static tree arc_handle_interrupt_attribute (tree *, tree, tree, int, bool *);
static bool arc_assemble_integer (rtx, unsigned int, int);
static void arc_output_function_prologue (FILE *, HOST_WIDE_INT);
static void arc_output_function_epilogue (FILE *, HOST_WIDE_INT);
static void arc_file_start (void);
static void arc_internal_label (FILE *, const char *, unsigned long);
static void arc_setup_incoming_varargs (CUMULATIVE_ARGS *, enum machine_mode,
                                        tree, int *, int);
static bool arc_rtx_costs (rtx, int, int, int *);
static int arc_address_cost (rtx);
static void arc_external_libcall (rtx);
static bool arc_return_in_memory (tree, tree);
static bool arc_pass_by_reference (CUMULATIVE_ARGS *, enum machine_mode,
                                   tree, bool);

/* Initialize the GCC target structure.  */
#undef TARGET_ASM_ALIGNED_HI_OP
#define TARGET_ASM_ALIGNED_HI_OP "\t.hword\t"
#undef TARGET_ASM_ALIGNED_SI_OP
#define TARGET_ASM_ALIGNED_SI_OP "\t.word\t"
#undef TARGET_ASM_INTEGER
#define TARGET_ASM_INTEGER arc_assemble_integer

#undef TARGET_ASM_FUNCTION_PROLOGUE
#define TARGET_ASM_FUNCTION_PROLOGUE arc_output_function_prologue
#undef TARGET_ASM_FUNCTION_EPILOGUE
#define TARGET_ASM_FUNCTION_EPILOGUE arc_output_function_epilogue
#undef TARGET_ASM_FILE_START
#define TARGET_ASM_FILE_START arc_file_start
#undef TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE arc_attribute_table
#undef TARGET_ASM_INTERNAL_LABEL
#define TARGET_ASM_INTERNAL_LABEL arc_internal_label
#undef TARGET_ASM_EXTERNAL_LIBCALL
#define TARGET_ASM_EXTERNAL_LIBCALL arc_external_libcall

#undef TARGET_HANDLE_OPTION
#define TARGET_HANDLE_OPTION arc_handle_option

#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS arc_rtx_costs
#undef TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST arc_address_cost

#undef TARGET_PROMOTE_FUNCTION_ARGS
#define TARGET_PROMOTE_FUNCTION_ARGS hook_bool_tree_true
#undef TARGET_PROMOTE_FUNCTION_RETURN
#define TARGET_PROMOTE_FUNCTION_RETURN hook_bool_tree_true
#undef TARGET_PROMOTE_PROTOTYPES
#define TARGET_PROMOTE_PROTOTYPES hook_bool_tree_true

#undef TARGET_RETURN_IN_MEMORY
#define TARGET_RETURN_IN_MEMORY arc_return_in_memory
#undef TARGET_PASS_BY_REFERENCE
#define TARGET_PASS_BY_REFERENCE arc_pass_by_reference
#undef TARGET_CALLEE_COPIES
#define TARGET_CALLEE_COPIES hook_bool_CUMULATIVE_ARGS_mode_tree_bool_true

#undef TARGET_SETUP_INCOMING_VARARGS
#define TARGET_SETUP_INCOMING_VARARGS arc_setup_incoming_varargs

struct gcc_target targetm = TARGET_INITIALIZER;

/* Implement TARGET_HANDLE_OPTION.  */

static bool
arc_handle_option (size_t code, const char *arg, int value ATTRIBUTE_UNUSED)
{
  switch (code)
    {
    case OPT_mcpu_:
      return strcmp (arg, "base") == 0 || ARC_EXTENSION_CPU (arg);

    default:
      return true;
    }
}

/* Called by OVERRIDE_OPTIONS to initialize various things.  */

void
arc_init (void)
{
  char *tmp;

  /* Set the pseudo-ops for the various standard sections.  */
  arc_text_section = tmp = xmalloc (strlen (arc_text_string) + sizeof (ARC_SECTION_FORMAT) + 1);
  sprintf (tmp, ARC_SECTION_FORMAT, arc_text_string);
  arc_data_section = tmp = xmalloc (strlen (arc_data_string) + sizeof (ARC_SECTION_FORMAT) + 1);
  sprintf (tmp, ARC_SECTION_FORMAT, arc_data_string);
  arc_rodata_section = tmp = xmalloc (strlen (arc_rodata_string) + sizeof (ARC_SECTION_FORMAT) + 1);
  sprintf (tmp, ARC_SECTION_FORMAT, arc_rodata_string);

  arc_init_reg_tables ();

  /* Initialize array for PRINT_OPERAND_PUNCT_VALID_P.  */
  memset (arc_punct_chars, 0, sizeof (arc_punct_chars));
  arc_punct_chars['#'] = 1;
  arc_punct_chars['*'] = 1;
  arc_punct_chars['?'] = 1;
  arc_punct_chars['!'] = 1;
  arc_punct_chars['~'] = 1;
}

/* The condition codes of the ARC, and the inverse function.  */
static const char *const arc_condition_codes[] =
{
  "al", 0, "eq", "ne", "p", "n", "c", "nc", "v", "nv",
  "gt", "le", "ge", "lt", "hi", "ls", "pnz", 0
};

#define ARC_INVERSE_CONDITION_CODE(X)  ((X) ^ 1)
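/* Illustration: the inversion macro works because the table above pairs
   each condition with its inverse at adjacent even/odd indices, so
   flipping the low bit maps "eq" (2) <-> "ne" (3), "gt" (10) <-> "le" (11)
   and "c" (6) <-> "nc" (7).  */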

/* Returns the index of the ARC condition code string in
   `arc_condition_codes'.  COMPARISON should be an rtx like
   `(eq (...) (...))'.  */

static int
get_arc_condition_code (rtx comparison)
{
  switch (GET_CODE (comparison))
    {
    case EQ : return 2;
    case NE : return 3;
    case GT : return 10;
    case LE : return 11;
    case GE : return 12;
    case LT : return 13;
    case GTU : return 14;
    case LEU : return 15;
    case LTU : return 6;
    case GEU : return 7;
    default : gcc_unreachable ();
    }
  /*NOTREACHED*/
  return (42);
}

/* Given a comparison code (EQ, NE, etc.) and the first operand of a COMPARE,
   return the mode to be used for the comparison.  */

enum machine_mode
arc_select_cc_mode (enum rtx_code op,
                    rtx x ATTRIBUTE_UNUSED,
                    rtx y ATTRIBUTE_UNUSED)
{
  switch (op)
    {
    case EQ :
    case NE :
      return CCZNmode;
    default :
      switch (GET_CODE (x))
        {
        case AND :
        case IOR :
        case XOR :
        case SIGN_EXTEND :
        case ZERO_EXTEND :
          return CCZNmode;
        case ASHIFT :
        case ASHIFTRT :
        case LSHIFTRT :
          return CCZNCmode;
        default:
          break;
        }
    }
  return CCmode;
}

/* Vectors to keep interesting information about registers where it can easily
260
   be got.  We use to use the actual mode value as the bit number, but there
261
   is (or may be) more than 32 modes now.  Instead we use two tables: one
262
   indexed by hard register number, and one indexed by mode.  */
263
 
264
/* The purpose of arc_mode_class is to shrink the range of modes so that
265
   they all fit (as bit numbers) in a 32 bit word (again).  Each real mode is
266
   mapped into one arc_mode_class mode.  */
267
 
268
enum arc_mode_class {
269
  C_MODE,
270
  S_MODE, D_MODE, T_MODE, O_MODE,
271
  SF_MODE, DF_MODE, TF_MODE, OF_MODE
272
};
273
 
274
/* Modes for condition codes.  */
275
#define C_MODES (1 << (int) C_MODE)
276
 
277
/* Modes for single-word and smaller quantities.  */
278
#define S_MODES ((1 << (int) S_MODE) | (1 << (int) SF_MODE))
279
 
280
/* Modes for double-word and smaller quantities.  */
281
#define D_MODES (S_MODES | (1 << (int) D_MODE) | (1 << DF_MODE))
282
 
283
/* Modes for quad-word and smaller quantities.  */
284
#define T_MODES (D_MODES | (1 << (int) T_MODE) | (1 << (int) TF_MODE))
285
 
286
/* Value is 1 if register/mode pair is acceptable on arc.  */
287
 
288
const unsigned int arc_hard_regno_mode_ok[] = {
289
  T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES,
290
  T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES,
291
  T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, D_MODES,
292
  D_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES,
293
 
294
  /* ??? Leave these as S_MODES for now.  */
295
  S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES,
296
  S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES,
297
  S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES,
298
  S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, C_MODES
299
};
300
 
301
unsigned int arc_mode_class [NUM_MACHINE_MODES];
302
 
303
enum reg_class arc_regno_reg_class[FIRST_PSEUDO_REGISTER];
304
 
305
static void
306
arc_init_reg_tables (void)
307
{
308
  int i;
309
 
310
  for (i = 0; i < NUM_MACHINE_MODES; i++)
311
    {
312
      switch (GET_MODE_CLASS (i))
313
        {
314
        case MODE_INT:
315
        case MODE_PARTIAL_INT:
316
        case MODE_COMPLEX_INT:
317
          if (GET_MODE_SIZE (i) <= 4)
318
            arc_mode_class[i] = 1 << (int) S_MODE;
319
          else if (GET_MODE_SIZE (i) == 8)
320
            arc_mode_class[i] = 1 << (int) D_MODE;
321
          else if (GET_MODE_SIZE (i) == 16)
322
            arc_mode_class[i] = 1 << (int) T_MODE;
323
          else if (GET_MODE_SIZE (i) == 32)
324
            arc_mode_class[i] = 1 << (int) O_MODE;
325
          else
326
            arc_mode_class[i] = 0;
327
          break;
328
        case MODE_FLOAT:
329
        case MODE_COMPLEX_FLOAT:
330
          if (GET_MODE_SIZE (i) <= 4)
331
            arc_mode_class[i] = 1 << (int) SF_MODE;
332
          else if (GET_MODE_SIZE (i) == 8)
333
            arc_mode_class[i] = 1 << (int) DF_MODE;
334
          else if (GET_MODE_SIZE (i) == 16)
335
            arc_mode_class[i] = 1 << (int) TF_MODE;
336
          else if (GET_MODE_SIZE (i) == 32)
337
            arc_mode_class[i] = 1 << (int) OF_MODE;
338
          else
339
            arc_mode_class[i] = 0;
340
          break;
341
        case MODE_CC:
342
          arc_mode_class[i] = 1 << (int) C_MODE;
343
          break;
344
        default:
345
          arc_mode_class[i] = 0;
346
          break;
347
        }
348
    }
349
 
350
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
351
    {
352
      if (i < 60)
353
        arc_regno_reg_class[i] = GENERAL_REGS;
354
      else if (i == 60)
355
        arc_regno_reg_class[i] = LPCOUNT_REG;
356
      else if (i == 61)
357
        arc_regno_reg_class[i] = NO_REGS /* CC_REG: must be NO_REGS */;
358
      else
359
        arc_regno_reg_class[i] = NO_REGS;
360
    }
361
}
362
 
363
/* ARC specific attribute support.
364
 
365
   The ARC has these attributes:
366
   interrupt - for interrupt functions
367
*/
368
 
369
const struct attribute_spec arc_attribute_table[] =
370
{
371
  /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
372
  { "interrupt", 1, 1, true,  false, false, arc_handle_interrupt_attribute },
373
  { NULL,        0, 0, false, false, false, NULL }
374
};
375
 
376
/* Handle an "interrupt" attribute; arguments as in
377
   struct attribute_spec.handler.  */
378
static tree
379
arc_handle_interrupt_attribute (tree *node ATTRIBUTE_UNUSED,
380
                                tree name,
381
                                tree args,
382
                                int flags ATTRIBUTE_UNUSED,
383
                                bool *no_add_attrs)
384
{
385
  tree value = TREE_VALUE (args);
386
 
387
  if (TREE_CODE (value) != STRING_CST)
388
    {
389
      warning (OPT_Wattributes,
390
               "argument of %qs attribute is not a string constant",
391
               IDENTIFIER_POINTER (name));
392
      *no_add_attrs = true;
393
    }
394
  else if (strcmp (TREE_STRING_POINTER (value), "ilink1")
395
           && strcmp (TREE_STRING_POINTER (value), "ilink2"))
396
    {
397
      warning (OPT_Wattributes,
398
               "argument of %qs attribute is not \"ilink1\" or \"ilink2\"",
399
               IDENTIFIER_POINTER (name));
400
      *no_add_attrs = true;
401
    }
402
 
403
  return NULL_TREE;
404
}
405
 
406
 
407
/* Acceptable arguments to the call insn.  */
408
 
409
int
410
call_address_operand (rtx op, enum machine_mode mode)
411
{
412
  return (symbolic_operand (op, mode)
413
          || (GET_CODE (op) == CONST_INT && LEGITIMATE_CONSTANT_P (op))
414
          || (GET_CODE (op) == REG));
415
}
416
 
417
int
418
call_operand (rtx op, enum machine_mode mode)
419
{
420
  if (GET_CODE (op) != MEM)
421
    return 0;
422
  op = XEXP (op, 0);
423
  return call_address_operand (op, mode);
424
}
425
 
426
/* Returns 1 if OP is a symbol reference.  */
427
 
428
int
429
symbolic_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
430
{
431
  switch (GET_CODE (op))
432
    {
433
    case SYMBOL_REF:
434
    case LABEL_REF:
435
    case CONST :
436
      return 1;
437
    default:
438
      return 0;
439
    }
440
}
441
 
442
/* Return truth value of statement that OP is a symbolic memory
443
   operand of mode MODE.  */
444
 
445
int
446
symbolic_memory_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
447
{
448
  if (GET_CODE (op) == SUBREG)
449
    op = SUBREG_REG (op);
450
  if (GET_CODE (op) != MEM)
451
    return 0;
452
  op = XEXP (op, 0);
453
  return (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == CONST
454
          || GET_CODE (op) == LABEL_REF);
455
}
456
 
457
/* Return true if OP is a short immediate (shimm) value.  */
458
 
459
int
460
short_immediate_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
461
{
462
  if (GET_CODE (op) != CONST_INT)
463
    return 0;
464
  return SMALL_INT (INTVAL (op));
465
}
466
 
467
/* Return true if OP will require a long immediate (limm) value.
468
   This is currently only used when calculating length attributes.  */
469
 
470
int
471
long_immediate_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
472
{
473
  switch (GET_CODE (op))
474
    {
475
    case SYMBOL_REF :
476
    case LABEL_REF :
477
    case CONST :
478
      return 1;
479
    case CONST_INT :
480
      return !SMALL_INT (INTVAL (op));
481
    case CONST_DOUBLE :
482
      /* These can happen because large unsigned 32 bit constants are
483
         represented this way (the multiplication patterns can cause these
484
         to be generated).  They also occur for SFmode values.  */
485
      return 1;
486
    default:
487
      break;
488
    }
489
  return 0;
490
}
491
 
492
/* Return true if OP is a MEM that when used as a load or store address will
493
   require an 8 byte insn.
494
   Load and store instructions don't allow the same possibilities but they're
495
   similar enough that this one function will do.
496
   This is currently only used when calculating length attributes.  */
497
 
498
int
499
long_immediate_loadstore_operand (rtx op,
500
                                  enum machine_mode mode ATTRIBUTE_UNUSED)
501
{
502
  if (GET_CODE (op) != MEM)
503
    return 0;
504
 
505
  op = XEXP (op, 0);
506
  switch (GET_CODE (op))
507
    {
508
    case SYMBOL_REF :
509
    case LABEL_REF :
510
    case CONST :
511
      return 1;
512
    case CONST_INT :
513
      /* This must be handled as "st c,[limm]".  Ditto for load.
514
         Technically, the assembler could translate some possibilities to
515
         "st c,[limm/2 + limm/2]" if limm/2 will fit in a shimm, but we don't
516
         assume that it does.  */
517
      return 1;
518
    case CONST_DOUBLE :
519
      /* These can happen because large unsigned 32 bit constants are
520
         represented this way (the multiplication patterns can cause these
521
         to be generated).  They also occur for SFmode values.  */
522
      return 1;
523
    case REG :
524
      return 0;
525
    case PLUS :
526
      if (GET_CODE (XEXP (op, 1)) == CONST_INT
527
          && !SMALL_INT (INTVAL (XEXP (op, 1))))
528
        return 1;
529
      return 0;
530
    default:
531
      break;
532
    }
533
  return 0;
534
}
535
 
536
/* Return true if OP is an acceptable argument for a single word
537
   move source.  */
538
 
539
int
540
move_src_operand (rtx op, enum machine_mode mode)
541
{
542
  switch (GET_CODE (op))
543
    {
544
    case SYMBOL_REF :
545
    case LABEL_REF :
546
    case CONST :
547
      return 1;
548
    case CONST_INT :
549
      return (LARGE_INT (INTVAL (op)));
550
    case CONST_DOUBLE :
551
      /* We can handle DImode integer constants in SImode if the value
552
         (signed or unsigned) will fit in 32 bits.  This is needed because
553
         large unsigned 32 bit constants are represented as CONST_DOUBLEs.  */
554
      if (mode == SImode)
555
        return arc_double_limm_p (op);
556
      /* We can handle 32 bit floating point constants.  */
557
      if (mode == SFmode)
558
        return GET_MODE (op) == SFmode;
559
      return 0;
560
    case REG :
561
      return register_operand (op, mode);
562
    case SUBREG :
563
      /* (subreg (mem ...) ...) can occur here if the inner part was once a
564
         pseudo-reg and is now a stack slot.  */
565
      if (GET_CODE (SUBREG_REG (op)) == MEM)
566
        return address_operand (XEXP (SUBREG_REG (op), 0), mode);
567
      else
568
        return register_operand (op, mode);
569
    case MEM :
570
      return address_operand (XEXP (op, 0), mode);
571
    default :
572
      return 0;
573
    }
574
}
575
 
576
/* Return true if OP is an acceptable argument for a double word
577
   move source.  */
578
 
579
int
580
move_double_src_operand (rtx op, enum machine_mode mode)
581
{
582
  switch (GET_CODE (op))
583
    {
584
    case REG :
585
      return register_operand (op, mode);
586
    case SUBREG :
587
      /* (subreg (mem ...) ...) can occur here if the inner part was once a
588
         pseudo-reg and is now a stack slot.  */
589
      if (GET_CODE (SUBREG_REG (op)) == MEM)
590
        return move_double_src_operand (SUBREG_REG (op), mode);
591
      else
592
        return register_operand (op, mode);
593
    case MEM :
594
      /* Disallow auto inc/dec for now.  */
595
      if (GET_CODE (XEXP (op, 0)) == PRE_DEC
596
          || GET_CODE (XEXP (op, 0)) == PRE_INC)
597
        return 0;
598
      return address_operand (XEXP (op, 0), mode);
599
    case CONST_INT :
600
    case CONST_DOUBLE :
601
      return 1;
602
    default :
603
      return 0;
604
    }
605
}
606
 
607
/* Return true if OP is an acceptable argument for a move destination.  */
608
 
609
int
610
move_dest_operand (rtx op, enum machine_mode mode)
611
{
612
  switch (GET_CODE (op))
613
    {
614
    case REG :
615
      return register_operand (op, mode);
616
    case SUBREG :
617
      /* (subreg (mem ...) ...) can occur here if the inner part was once a
618
         pseudo-reg and is now a stack slot.  */
619
      if (GET_CODE (SUBREG_REG (op)) == MEM)
620
        return address_operand (XEXP (SUBREG_REG (op), 0), mode);
621
      else
622
        return register_operand (op, mode);
623
    case MEM :
624
      return address_operand (XEXP (op, 0), mode);
625
    default :
626
      return 0;
627
    }
628
}
629
 
630
/* Return true if OP is valid load with update operand.  */
631
 
632
int
633
load_update_operand (rtx op, enum machine_mode mode)
634
{
635
  if (GET_CODE (op) != MEM
636
      || GET_MODE (op) != mode)
637
    return 0;
638
  op = XEXP (op, 0);
639
  if (GET_CODE (op) != PLUS
640
      || GET_MODE (op) != Pmode
641
      || !register_operand (XEXP (op, 0), Pmode)
642
      || !nonmemory_operand (XEXP (op, 1), Pmode))
643
    return 0;
644
  return 1;
645
}
646
 
647
/* Return true if OP is valid store with update operand.  */
648
 
649
int
650
store_update_operand (rtx op, enum machine_mode mode)
651
{
652
  if (GET_CODE (op) != MEM
653
      || GET_MODE (op) != mode)
654
    return 0;
655
  op = XEXP (op, 0);
656
  if (GET_CODE (op) != PLUS
657
      || GET_MODE (op) != Pmode
658
      || !register_operand (XEXP (op, 0), Pmode)
659
      || !(GET_CODE (XEXP (op, 1)) == CONST_INT
660
           && SMALL_INT (INTVAL (XEXP (op, 1)))))
661
    return 0;
662
  return 1;
663
}
664
 
665
/* Return true if OP is a non-volatile non-immediate operand.
666
   Volatile memory refs require a special "cache-bypass" instruction
667
   and only the standard movXX patterns are set up to handle them.  */
668
 
669
int
670
nonvol_nonimm_operand (rtx op, enum machine_mode mode)
671
{
672
  if (GET_CODE (op) == MEM && MEM_VOLATILE_P (op))
673
    return 0;
674
  return nonimmediate_operand (op, mode);
675
}
676
 
677
/* Accept integer operands in the range -0x80000000..0x7fffffff.  We have
678
   to check the range carefully since this predicate is used in DImode
679
   contexts.  */
680
 
681
int
682
const_sint32_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
683
{
684
  /* All allowed constants will fit a CONST_INT.  */
685
  return (GET_CODE (op) == CONST_INT
686
          && (INTVAL (op) >= (-0x7fffffff - 1) && INTVAL (op) <= 0x7fffffff));
687
}
688
 
689
/* Accept integer operands in the range 0..0xffffffff.  We have to check the
690
   range carefully since this predicate is used in DImode contexts.  Also, we
691
   need some extra crud to make it work when hosted on 64-bit machines.  */
692
 
693
int
694
const_uint32_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
695
{
696
#if HOST_BITS_PER_WIDE_INT > 32
697
  /* All allowed constants will fit a CONST_INT.  */
698
  return (GET_CODE (op) == CONST_INT
699
          && (INTVAL (op) >= 0 && INTVAL (op) <= 0xffffffffL));
700
#else
701
  return ((GET_CODE (op) == CONST_INT && INTVAL (op) >= 0)
702
          || (GET_CODE (op) == CONST_DOUBLE && CONST_DOUBLE_HIGH (op) == 0));
703
#endif
704
}
705
 
706
/* Return 1 if OP is a comparison operator valid for the mode of CC.
707
   This allows the use of MATCH_OPERATOR to recognize all the branch insns.
708
 
709
   Some insns only set a few bits in the condition code.  So only allow those
710
   comparisons that use the bits that are valid.  */
711
 
712
int
713
proper_comparison_operator (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
714
{
715
  enum rtx_code code;
716
  if (!COMPARISON_P (op))
717
    return 0;
718
 
719
  code = GET_CODE (op);
720
  if (GET_MODE (XEXP (op, 0)) == CCZNmode)
721
    return (code == EQ || code == NE);
722
  if (GET_MODE (XEXP (op, 0)) == CCZNCmode)
723
    return (code == EQ || code == NE
724
            || code == LTU || code == GEU || code == GTU || code == LEU);
725
  return 1;
726
}
727
 
728
/* Misc. utilities.  */
729
 
730
/* X and Y are two things to compare using CODE.  Emit the compare insn and
731
   return the rtx for the cc reg in the proper mode.  */
732
 
733
rtx
734
gen_compare_reg (enum rtx_code code, rtx x, rtx y)
735
{
736
  enum machine_mode mode = SELECT_CC_MODE (code, x, y);
737
  rtx cc_reg;
738
 
739
  cc_reg = gen_rtx_REG (mode, 61);
740
 
741
  emit_insn (gen_rtx_SET (VOIDmode, cc_reg,
742
                          gen_rtx_COMPARE (mode, x, y)));
743
 
744
  return cc_reg;
745
}
746
 
747
/* Return 1 if VALUE, a const_double, will fit in a limm (4 byte number).
748
   We assume the value can be either signed or unsigned.  */
749
 
750
int
751
arc_double_limm_p (rtx value)
752
{
753
  HOST_WIDE_INT low, high;
754
 
755
  gcc_assert (GET_CODE (value) == CONST_DOUBLE);
756
 
757
  low = CONST_DOUBLE_LOW (value);
758
  high = CONST_DOUBLE_HIGH (value);
759
 
760
  if (low & 0x80000000)
761
    {
762
      return (((unsigned HOST_WIDE_INT) low <= 0xffffffff && high == 0)
763
              || (((low & - (unsigned HOST_WIDE_INT) 0x80000000)
764
                   == - (unsigned HOST_WIDE_INT) 0x80000000)
765
                  && high == -1));
766
    }
767
  else
768
    {
769
      return (unsigned HOST_WIDE_INT) low <= 0x7fffffff && high == 0;
770
    }
771
}
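/* Illustrative values: with high == 0 and low == 0xffffffff the constant
   fits as an unsigned 32 bit limm and we return 1; with high == -1 and a
   sign-extended negative low it fits as a signed 32 bit limm; with
   high == 1 and low == 0x80000000 it needs more than 32 bits and we
   return 0.  */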

/* Do any needed setup for a variadic function.  For the ARC, we must
   create a register parameter block, and then copy any anonymous arguments
   in registers to memory.

   CUM has not been updated for the last named argument which has type TYPE
   and mode MODE, and we rely on this fact.

   We do things a little weird here.  We're supposed to only allocate space
   for the anonymous arguments.  However we need to keep the stack eight byte
   aligned.  So we round the space up if necessary, and leave it to va_start
   to compensate.  */

static void
arc_setup_incoming_varargs (CUMULATIVE_ARGS *cum,
                            enum machine_mode mode,
                            tree type ATTRIBUTE_UNUSED,
                            int *pretend_size,
                            int no_rtl)
{
  int first_anon_arg;

  /* All BLKmode values are passed by reference.  */
  gcc_assert (mode != BLKmode);

  first_anon_arg = *cum + ((GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1)
                           / UNITS_PER_WORD);

  if (first_anon_arg < MAX_ARC_PARM_REGS && !no_rtl)
    {
      /* Note that first_reg_offset < MAX_ARC_PARM_REGS.  */
      int first_reg_offset = first_anon_arg;
      /* Size in words to "pretend" allocate.  */
      int size = MAX_ARC_PARM_REGS - first_reg_offset;
      /* Extra slop to keep stack eight byte aligned.  */
      int align_slop = size & 1;
      rtx regblock;

      regblock = gen_rtx_MEM (BLKmode,
                              plus_constant (arg_pointer_rtx,
                                             FIRST_PARM_OFFSET (0)
                                             + align_slop * UNITS_PER_WORD));
      set_mem_alias_set (regblock, get_varargs_alias_set ());
      set_mem_align (regblock, BITS_PER_WORD);
      move_block_from_reg (first_reg_offset, regblock,
                           MAX_ARC_PARM_REGS - first_reg_offset);

      *pretend_size = ((MAX_ARC_PARM_REGS - first_reg_offset + align_slop)
                       * UNITS_PER_WORD);
    }
}
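/* Worked example (assuming MAX_ARC_PARM_REGS is 8 and UNITS_PER_WORD is 4):
   if the named arguments use up r0-r2, first_anon_arg is 3, so the 5
   remaining argument registers (r3-r7) are dumped to the register
   parameter block; align_slop is 5 & 1 = 1 to keep the block 8 byte
   aligned, and *pretend_size becomes (8 - 3 + 1) * 4 = 24 bytes.  */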

/* Cost functions.  */

/* Compute a (partial) cost for rtx X.  Return true if the complete
   cost has been computed, and false if subexpressions should be
   scanned.  In either case, *TOTAL contains the cost result.  */

static bool
arc_rtx_costs (rtx x, int code, int outer_code ATTRIBUTE_UNUSED, int *total)
{
  switch (code)
    {
      /* Small integers are as cheap as registers.  4 byte values can
         be fetched as immediate constants - let's give that the cost
         of an extra insn.  */
    case CONST_INT:
      if (SMALL_INT (INTVAL (x)))
        {
          *total = 0;
          return true;
        }
      /* FALLTHRU */

    case CONST:
    case LABEL_REF:
    case SYMBOL_REF:
      *total = COSTS_N_INSNS (1);
      return true;

    case CONST_DOUBLE:
      {
        rtx high, low;
        split_double (x, &high, &low);
        *total = COSTS_N_INSNS (!SMALL_INT (INTVAL (high))
                                + !SMALL_INT (INTVAL (low)));
        return true;
      }

    /* Encourage synth_mult to find a synthetic multiply when reasonable.
       If we need more than 12 insns to do a multiply, then go out-of-line,
       since the call overhead will be < 10% of the cost of the multiply.  */
    case ASHIFT:
    case ASHIFTRT:
    case LSHIFTRT:
      if (TARGET_SHIFTER)
        *total = COSTS_N_INSNS (1);
      else if (GET_CODE (XEXP (x, 1)) != CONST_INT)
        *total = COSTS_N_INSNS (16);
      else
        *total = COSTS_N_INSNS (INTVAL (XEXP ((x), 1)));
      return false;

    default:
      return false;
    }
}


/* Provide the costs of an addressing mode that contains ADDR.
   If ADDR is not a valid address, its cost is irrelevant.  */

static int
arc_address_cost (rtx addr)
{
  switch (GET_CODE (addr))
    {
    case REG :
      return 1;

    case LABEL_REF :
    case SYMBOL_REF :
    case CONST :
      return 2;

    case PLUS :
      {
        register rtx plus0 = XEXP (addr, 0);
        register rtx plus1 = XEXP (addr, 1);

        if (GET_CODE (plus0) != REG)
          break;

        switch (GET_CODE (plus1))
          {
          case CONST_INT :
            return SMALL_INT (plus1) ? 1 : 2;
          case CONST :
          case SYMBOL_REF :
          case LABEL_REF :
            return 2;
          default:
            break;
          }
        break;
      }
    default:
      break;
    }

  return 4;
}

/* Function prologue/epilogue handlers.  */

/* ARC stack frames look like:

             Before call                       After call
        +-----------------------+       +-----------------------+
        |                       |       |                       |
   high |  local variables,     |       |  local variables,     |
   mem  |  reg save area, etc.  |       |  reg save area, etc.  |
        |                       |       |                       |
        +-----------------------+       +-----------------------+
        |                       |       |                       |
        |  arguments on stack.  |       |  arguments on stack.  |
        |                       |       |                       |
 SP+16->+-----------------------+FP+48->+-----------------------+
        | 4 word save area for  |       |  reg parm save area,  |
        | return addr, prev %fp |       |  only created for     |
  SP+0->+-----------------------+       |  variable argument    |
                                        |  functions            |
                                 FP+16->+-----------------------+
                                        | 4 word save area for  |
                                        | return addr, prev %fp |
                                  FP+0->+-----------------------+
                                        |                       |
                                        |  local variables      |
                                        |                       |
                                        +-----------------------+
                                        |                       |
                                        |  register save area   |
                                        |                       |
                                        +-----------------------+
                                        |                       |
                                        |  alloca allocations   |
                                        |                       |
                                        +-----------------------+
                                        |                       |
                                        |  arguments on stack   |
                                        |                       |
                                 SP+16->+-----------------------+
   low                                  | 4 word save area for  |
   memory                               | return addr, prev %fp |
                                  SP+0->+-----------------------+

Notes:
1) The "reg parm save area" does not exist for non variable argument fns.
   The "reg parm save area" can be eliminated completely if we created our
   own va-arc.h, but that has tradeoffs as well (so it's not done).  */

/* Structure to be filled in by arc_compute_frame_size with register
   save masks, and offsets for the current function.  */
struct arc_frame_info
{
  unsigned int total_size;      /* # bytes that the entire frame takes up.  */
  unsigned int extra_size;      /* # bytes of extra stuff.  */
  unsigned int pretend_size;    /* # bytes we push and pretend caller did.  */
  unsigned int args_size;       /* # bytes that outgoing arguments take up.  */
  unsigned int reg_size;        /* # bytes needed to store regs.  */
  unsigned int var_size;        /* # bytes that variables take up.  */
  unsigned int reg_offset;      /* Offset from new sp to store regs.  */
  unsigned int gmask;           /* Mask of saved gp registers.  */
  int          initialized;     /* Nonzero if frame size already calculated.  */
};

/* Current frame information calculated by arc_compute_frame_size.  */
static struct arc_frame_info current_frame_info;

/* Zero structure to initialize current_frame_info.  */
static struct arc_frame_info zero_frame_info;

/* Type of function DECL.

   The result is cached.  To reset the cache at the end of a function,
   call with DECL = NULL_TREE.  */

enum arc_function_type
arc_compute_function_type (tree decl)
{
  tree a;
  /* Cached value.  */
  static enum arc_function_type fn_type = ARC_FUNCTION_UNKNOWN;
  /* Last function we were called for.  */
  static tree last_fn = NULL_TREE;

  /* Resetting the cached value?  */
  if (decl == NULL_TREE)
    {
      fn_type = ARC_FUNCTION_UNKNOWN;
      last_fn = NULL_TREE;
      return fn_type;
    }

  if (decl == last_fn && fn_type != ARC_FUNCTION_UNKNOWN)
    return fn_type;

  /* Assume we have a normal function (not an interrupt handler).  */
  fn_type = ARC_FUNCTION_NORMAL;

  /* Now see if this is an interrupt handler.  */
  for (a = DECL_ATTRIBUTES (current_function_decl);
       a;
       a = TREE_CHAIN (a))
    {
      tree name = TREE_PURPOSE (a), args = TREE_VALUE (a);

      if (name == get_identifier ("__interrupt__")
          && list_length (args) == 1
          && TREE_CODE (TREE_VALUE (args)) == STRING_CST)
        {
          tree value = TREE_VALUE (args);

          if (!strcmp (TREE_STRING_POINTER (value), "ilink1"))
            fn_type = ARC_FUNCTION_ILINK1;
          else if (!strcmp (TREE_STRING_POINTER (value), "ilink2"))
            fn_type = ARC_FUNCTION_ILINK2;
          else
            gcc_unreachable ();
          break;
        }
    }

  last_fn = decl;
  return fn_type;
}

#define ILINK1_REGNUM 29
#define ILINK2_REGNUM 30
#define RETURN_ADDR_REGNUM 31
#define FRAME_POINTER_MASK (1 << (FRAME_POINTER_REGNUM))
#define RETURN_ADDR_MASK (1 << (RETURN_ADDR_REGNUM))

/* Tell prologue and epilogue if register REGNO should be saved / restored.
   The return address and frame pointer are treated separately.
   Don't consider them here.  */
#define MUST_SAVE_REGISTER(regno, interrupt_p) \
((regno) != RETURN_ADDR_REGNUM && (regno) != FRAME_POINTER_REGNUM \
 && (regs_ever_live[regno] && (!call_used_regs[regno] || interrupt_p)))

#define MUST_SAVE_RETURN_ADDR (regs_ever_live[RETURN_ADDR_REGNUM])

/* Return the bytes needed to compute the frame pointer from the current
   stack pointer.

   SIZE is the size needed for local variables.  */

unsigned int
arc_compute_frame_size (int size /* # of var. bytes allocated.  */)
{
  int regno;
  unsigned int total_size, var_size, args_size, pretend_size, extra_size;
  unsigned int reg_size, reg_offset;
  unsigned int gmask;
  enum arc_function_type fn_type;
  int interrupt_p;

  var_size      = size;
  args_size     = current_function_outgoing_args_size;
  pretend_size  = current_function_pretend_args_size;
  extra_size    = FIRST_PARM_OFFSET (0);
  total_size    = extra_size + pretend_size + args_size + var_size;
  reg_offset    = FIRST_PARM_OFFSET(0) + current_function_outgoing_args_size;
  reg_size      = 0;
  gmask         = 0;

  /* See if this is an interrupt handler.  Call used registers must be saved
     for them too.  */
  fn_type = arc_compute_function_type (current_function_decl);
  interrupt_p = ARC_INTERRUPT_P (fn_type);

  /* Calculate space needed for registers.
     ??? We ignore the extension registers for now.  */

  for (regno = 0; regno <= 31; regno++)
    {
      if (MUST_SAVE_REGISTER (regno, interrupt_p))
        {
          reg_size += UNITS_PER_WORD;
          gmask |= 1 << regno;
        }
    }

  total_size += reg_size;

  /* If the only space to allocate is the fp/blink save area this is an
     empty frame.  However, if we'll be making a function call we need to
     allocate a stack frame for our callee's fp/blink save area.  */
  if (total_size == extra_size
      && !MUST_SAVE_RETURN_ADDR)
    total_size = extra_size = 0;

  total_size = ARC_STACK_ALIGN (total_size);

  /* Save computed information.  */
  current_frame_info.total_size   = total_size;
  current_frame_info.extra_size   = extra_size;
  current_frame_info.pretend_size = pretend_size;
  current_frame_info.var_size     = var_size;
  current_frame_info.args_size    = args_size;
  current_frame_info.reg_size     = reg_size;
  current_frame_info.reg_offset   = reg_offset;
  current_frame_info.gmask        = gmask;
  current_frame_info.initialized  = reload_completed;

  /* Ok, we're done.  */
  return total_size;
}
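/* Worked example (assuming FIRST_PARM_OFFSET (0) is the 16 byte fp/blink
   save area shown in the frame diagram and ARC_STACK_ALIGN rounds up to
   8 bytes): a function with 20 bytes of locals, no outgoing or pretend
   args, and two call-saved registers live has reg_size = 8 and
   total_size = 16 + 20 + 8 = 44, rounded up to 48; reg_offset stays 16.  */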

/* Common code to save/restore registers.  */

void
arc_save_restore (FILE *file,
                  const char *base_reg,
                  unsigned int offset,
                  unsigned int gmask,
                  const char *op)
{
  int regno;

  if (gmask == 0)
    return;

  for (regno = 0; regno <= 31; regno++)
    {
      if ((gmask & (1L << regno)) != 0)
        {
          fprintf (file, "\t%s %s,[%s,%d]\n",
                     op, reg_names[regno], base_reg, offset);
          offset += UNITS_PER_WORD;
        }
    }
}
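/* For example, with OP "st", BASE_REG "sp", OFFSET 16 and a GMASK covering
   r13 and r14, this emits (assuming UNITS_PER_WORD is 4):
        st r13,[sp,16]
        st r14,[sp,20]  */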

/* Target hook to assemble an integer object.  The ARC version needs to
   emit a special directive for references to labels and function
   symbols.  */

static bool
arc_assemble_integer (rtx x, unsigned int size, int aligned_p)
{
  if (size == UNITS_PER_WORD && aligned_p
      && ((GET_CODE (x) == SYMBOL_REF && SYMBOL_REF_FUNCTION_P (x))
          || GET_CODE (x) == LABEL_REF))
    {
      fputs ("\t.word\t%st(", asm_out_file);
      output_addr_const (asm_out_file, x);
      fputs (")\n", asm_out_file);
      return true;
    }
  return default_assemble_integer (x, size, aligned_p);
}

/* Set up the stack and frame pointer (if desired) for the function.  */

static void
arc_output_function_prologue (FILE *file, HOST_WIDE_INT size)
{
  const char *sp_str = reg_names[STACK_POINTER_REGNUM];
  const char *fp_str = reg_names[FRAME_POINTER_REGNUM];
  unsigned int gmask = current_frame_info.gmask;
  enum arc_function_type fn_type = arc_compute_function_type (current_function_decl);

  /* If this is an interrupt handler, set up our stack frame.
     ??? Optimize later.  */
  if (ARC_INTERRUPT_P (fn_type))
    {
      fprintf (file, "\t%s interrupt handler\n",
               ASM_COMMENT_START);
      fprintf (file, "\tsub %s,%s,16\n", sp_str, sp_str);
    }

  /* This is only for the human reader.  */
  fprintf (file, "\t%s BEGIN PROLOGUE %s vars= %d, regs= %d, args= %d, extra= %d\n",
           ASM_COMMENT_START, ASM_COMMENT_START,
           current_frame_info.var_size,
           current_frame_info.reg_size / 4,
           current_frame_info.args_size,
           current_frame_info.extra_size);

  size = ARC_STACK_ALIGN (size);
  size = (! current_frame_info.initialized
           ? arc_compute_frame_size (size)
           : current_frame_info.total_size);

  /* These cases shouldn't happen.  Catch them now.  */
  gcc_assert (size || !gmask);

  /* Allocate space for register arguments if this is a variadic function.  */
  if (current_frame_info.pretend_size != 0)
    fprintf (file, "\tsub %s,%s,%d\n",
             sp_str, sp_str, current_frame_info.pretend_size);

  /* The home-grown ABI says link register is saved first.  */
  if (MUST_SAVE_RETURN_ADDR)
    fprintf (file, "\tst %s,[%s,%d]\n",
             reg_names[RETURN_ADDR_REGNUM], sp_str, UNITS_PER_WORD);

  /* Set up the previous frame pointer next (if we need to).  */
  if (frame_pointer_needed)
    {
      fprintf (file, "\tst %s,[%s]\n", fp_str, sp_str);
      fprintf (file, "\tmov %s,%s\n", fp_str, sp_str);
    }

  /* ??? We don't handle the case where the saved regs are more than 252
     bytes away from sp.  This can be handled by decrementing sp once, saving
     the regs, and then decrementing it again.  The epilogue doesn't have this
     problem as the `ld' insn takes reg+limm values (though it would be more
     efficient to avoid reg+limm).  */

  /* Allocate the stack frame.  */
  if (size - current_frame_info.pretend_size > 0)
    fprintf (file, "\tsub %s,%s," HOST_WIDE_INT_PRINT_DEC "\n",
             sp_str, sp_str, size - current_frame_info.pretend_size);

  /* Save any needed call-saved regs (and call-used if this is an
     interrupt handler).  */
  arc_save_restore (file, sp_str, current_frame_info.reg_offset,
                    /* The zeroing of these two bits is unnecessary,
                       but leave this in for clarity.  */
                    gmask & ~(FRAME_POINTER_MASK | RETURN_ADDR_MASK),
                    "st");

  fprintf (file, "\t%s END PROLOGUE\n", ASM_COMMENT_START);
}

/* Do any necessary cleanup after a function to restore stack, frame,
   and regs.  */

static void
arc_output_function_epilogue (FILE *file, HOST_WIDE_INT size)
{
  rtx epilogue_delay = current_function_epilogue_delay_list;
  int noepilogue = FALSE;
  enum arc_function_type fn_type = arc_compute_function_type (current_function_decl);

  /* This is only for the human reader.  */
  fprintf (file, "\t%s EPILOGUE\n", ASM_COMMENT_START);

  size = ARC_STACK_ALIGN (size);
  size = (!current_frame_info.initialized
           ? arc_compute_frame_size (size)
           : current_frame_info.total_size);

  if (size == 0 && epilogue_delay == 0)
    {
      rtx insn = get_last_insn ();

      /* If the last insn was a BARRIER, we don't have to write any code
         because a jump (aka return) was put there.  */
      if (GET_CODE (insn) == NOTE)
        insn = prev_nonnote_insn (insn);
      if (insn && GET_CODE (insn) == BARRIER)
        noepilogue = TRUE;
    }

  if (!noepilogue)
    {
      unsigned int pretend_size = current_frame_info.pretend_size;
      unsigned int frame_size = size - pretend_size;
      int restored, fp_restored_p;
      int can_trust_sp_p = !current_function_calls_alloca;
      const char *sp_str = reg_names[STACK_POINTER_REGNUM];
      const char *fp_str = reg_names[FRAME_POINTER_REGNUM];

      /* ??? There are lots of optimizations that can be done here.
         EG: Use fp to restore regs if it's closer.
         Maybe in time we'll do them all.  For now, always restore regs from
         sp, but don't restore sp if we don't have to.  */

      if (!can_trust_sp_p)
        {
          gcc_assert (frame_pointer_needed);
          fprintf (file,"\tsub %s,%s,%d\t\t%s sp not trusted here\n",
                   sp_str, fp_str, frame_size, ASM_COMMENT_START);
        }

      /* Restore any saved registers.  */
      arc_save_restore (file, sp_str, current_frame_info.reg_offset,
                        /* The zeroing of these two bits is unnecessary,
                           but leave this in for clarity.  */
                        current_frame_info.gmask & ~(FRAME_POINTER_MASK | RETURN_ADDR_MASK),
                        "ld");

      if (MUST_SAVE_RETURN_ADDR)
        fprintf (file, "\tld %s,[%s,%d]\n",
                 reg_names[RETURN_ADDR_REGNUM],
                 frame_pointer_needed ? fp_str : sp_str,
                 UNITS_PER_WORD + (frame_pointer_needed ? 0 : frame_size));

      /* Keep track of how much of the stack pointer we've restored.
         It makes the following a lot more readable.  */
      restored = 0;
      fp_restored_p = 0;

      /* We try to emit the epilogue delay slot insn right after the load
         of the return address register so that it can execute with the
         stack intact.  Secondly, loads are delayed.  */
      /* ??? If stack intactness is important, always emit now.  */
      if (MUST_SAVE_RETURN_ADDR && epilogue_delay != NULL_RTX)
        {
          final_scan_insn (XEXP (epilogue_delay, 0), file, 1, 1, NULL);
          epilogue_delay = NULL_RTX;
        }

      if (frame_pointer_needed)
        {
          /* Try to restore the frame pointer in the delay slot.  We can't,
             however, if any of these is true.  */
          if (epilogue_delay != NULL_RTX
              || !SMALL_INT (frame_size)
              || pretend_size
              || ARC_INTERRUPT_P (fn_type))
            {
              /* Note that we restore fp and sp here!  */
              fprintf (file, "\tld.a %s,[%s,%d]\n", fp_str, sp_str, frame_size);
              restored += frame_size;
              fp_restored_p = 1;
            }
        }
      else if (!SMALL_INT (size /* frame_size + pretend_size */)
               || ARC_INTERRUPT_P (fn_type))
        {
          fprintf (file, "\tadd %s,%s,%d\n", sp_str, sp_str, frame_size);
          restored += frame_size;
        }

      /* These must be done before the return insn because the delay slot
         does the final stack restore.  */
      if (ARC_INTERRUPT_P (fn_type))
        {
          if (epilogue_delay)
            {
              final_scan_insn (XEXP (epilogue_delay, 0), file, 1, 1, NULL);
            }
        }

      /* Emit the return instruction.  */
      {
        static const int regs[4] = {
          0, RETURN_ADDR_REGNUM, ILINK1_REGNUM, ILINK2_REGNUM
        };

        /* Update the flags, if returning from an interrupt handler. */
        if (ARC_INTERRUPT_P (fn_type))
          fprintf (file, "\tj.d.f %s\n", reg_names[regs[fn_type]]);
        else
          fprintf (file, "\tj.d %s\n", reg_names[regs[fn_type]]);
        }

      /* If the only register saved is the return address, we need a
         nop, unless we have an instruction to put into it.  Otherwise
         we don't since reloading multiple registers doesn't reference
         the register being loaded.  */

      if (ARC_INTERRUPT_P (fn_type))
        fprintf (file, "\tadd %s,%s,16\n", sp_str, sp_str);
      else if (epilogue_delay != NULL_RTX)
        {
          gcc_assert (!frame_pointer_needed || fp_restored_p);
          gcc_assert (restored >= size);
          final_scan_insn (XEXP (epilogue_delay, 0), file, 1, 1, NULL);
        }
      else if (frame_pointer_needed && !fp_restored_p)
        {
          gcc_assert (SMALL_INT (frame_size));
          /* Note that we restore fp and sp here!  */
          fprintf (file, "\tld.a %s,[%s,%d]\n", fp_str, sp_str, frame_size);
        }
      else if (restored < size)
        {
          gcc_assert (SMALL_INT (size - restored));
          fprintf (file, "\tadd %s,%s," HOST_WIDE_INT_PRINT_DEC "\n",
                   sp_str, sp_str, size - restored);
        }
      else
        fprintf (file, "\tnop\n");
    }

  /* Reset state info for each function.  */
  current_frame_info = zero_frame_info;
  arc_compute_function_type (NULL_TREE);
}

/* Define the number of delay slots needed for the function epilogue.

   Interrupt handlers can't have any epilogue delay slots (it's always needed
   for something else, I think).  For normal functions, we have to worry about
   using call-saved regs as they'll be restored before the delay slot insn.
   Functions with non-empty frames already have enough choices for the epilogue
   delay slot so for now we only consider functions with empty frames.  */

int
arc_delay_slots_for_epilogue (void)
{
  if (arc_compute_function_type (current_function_decl) != ARC_FUNCTION_NORMAL)
    return 0;
  if (!current_frame_info.initialized)
    (void) arc_compute_frame_size (get_frame_size ());
  if (current_frame_info.total_size == 0)
    return 1;
  return 0;
}

/* Return true if TRIAL is a valid insn for the epilogue delay slot.
   Any single length instruction which doesn't reference the stack or frame
   pointer or any call-saved register is OK.  SLOT will always be 0.  */

int
arc_eligible_for_epilogue_delay (rtx trial, int slot)
{
  gcc_assert (!slot);

  if (get_attr_length (trial) == 1
      /* If registers were saved, presumably there's more than enough
1438
         possibilities for the delay slot.  The alternative is something
1439
         more complicated (of course, if we expanded the epilogue as rtl
1440
         this problem would go away).  */
1441
      /* ??? Note that this will always be true since only functions with
1442
         empty frames have epilogue delay slots.  See
1443
         arc_delay_slots_for_epilogue.  */
1444
      && current_frame_info.gmask == 0
1445
      && ! reg_mentioned_p (stack_pointer_rtx, PATTERN (trial))
1446
      && ! reg_mentioned_p (frame_pointer_rtx, PATTERN (trial)))
1447
    return 1;
1448
  return 0;
1449
}
1450
 
1451
/* Return true if OP is a shift operator.  */
1452
 
1453
int
1454
shift_operator (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1455
{
1456
  switch (GET_CODE (op))
1457
    {
1458
    case ASHIFTRT:
1459
    case LSHIFTRT:
1460
    case ASHIFT:
1461
      return 1;
1462
    default:
1463
      return 0;
1464
    }
1465
}
1466
 
1467
/* Output the assembler code for doing a shift.
1468
   We go to a bit of trouble to generate efficient code as the ARC only has
1469
   single bit shifts.  This is taken from the h8300 port.  We only have one
1470
   mode of shifting and can't access individual bytes like the h8300 can, so
1471
   this is greatly simplified (at the expense of not generating hyper-
1472
   efficient code).
1473
 
1474
   This function is not used if the variable shift insns are present.  */
1475
 
1476
/* ??? We assume the output operand is the same as operand 1.
1477
   This can be optimized (deleted) in the case of 1 bit shifts.  */
1478
/* ??? We use the loop register here.  We don't use it elsewhere (yet) and
1479
   using it here will give us a chance to play with it.  */
1480
 
const char *
output_shift (rtx *operands)
{
  rtx shift = operands[3];
  enum machine_mode mode = GET_MODE (shift);
  enum rtx_code code = GET_CODE (shift);
  const char *shift_one;

  gcc_assert (mode == SImode);

  switch (code)
    {
    case ASHIFT:   shift_one = "asl %0,%0"; break;
    case ASHIFTRT: shift_one = "asr %0,%0"; break;
    case LSHIFTRT: shift_one = "lsr %0,%0"; break;
    default:       gcc_unreachable ();
    }

  if (GET_CODE (operands[2]) != CONST_INT)
    {
      if (optimize)
        {
          output_asm_insn ("sub.f 0,%2,0", operands);
          output_asm_insn ("mov lp_count,%2", operands);
          output_asm_insn ("bz 2f", operands);
        }
      else
        output_asm_insn ("mov %4,%2", operands);
      goto shiftloop;
    }
  else
    {
      int n = INTVAL (operands[2]);

      /* If the count is negative, make it 0.  */
      if (n < 0)
        n = 0;
      /* If the count is too big, truncate it.
         ANSI says shifts of GET_MODE_BITSIZE are undefined - we choose to
         do the intuitive thing.  */
      else if (n > GET_MODE_BITSIZE (mode))
        n = GET_MODE_BITSIZE (mode);

      /* First see if we can do them inline.  */
      if (n <= 8)
        {
          while (--n >= 0)
            output_asm_insn (shift_one, operands);
        }
      /* See if we can use a rotate/and.  */
      else if (n == BITS_PER_WORD - 1)
        {
          switch (code)
            {
            case ASHIFT :
              output_asm_insn ("and %0,%0,1\n\tror %0,%0", operands);
              break;
            case ASHIFTRT :
              /* The ARC doesn't have a rol insn.  Use something else.  */
              output_asm_insn ("asl.f 0,%0\n\tsbc %0,0,0", operands);
              break;
            case LSHIFTRT :
              /* The ARC doesn't have a rol insn.  Use something else.  */
              output_asm_insn ("asl.f 0,%0\n\tadc %0,0,0", operands);
              break;
            default:
              break;
            }
        }
      /* Must loop.  */
      else
        {
          char buf[100];

          if (optimize)
            output_asm_insn ("mov lp_count,%c2", operands);
          else
            output_asm_insn ("mov %4,%c2", operands);
        shiftloop:
          if (optimize)
            {
              if (flag_pic)
                sprintf (buf, "lr %%4,[status]\n\tadd %%4,%%4,6\t%s single insn loop start",
                         ASM_COMMENT_START);
              else
                sprintf (buf, "mov %%4,%%%%st(1f)\t%s (single insn loop start) >> 2",
                         ASM_COMMENT_START);
              output_asm_insn (buf, operands);
              output_asm_insn ("sr %4,[lp_start]", operands);
              output_asm_insn ("add %4,%4,1", operands);
              output_asm_insn ("sr %4,[lp_end]", operands);
              output_asm_insn ("nop\n\tnop", operands);
              if (flag_pic)
                fprintf (asm_out_file, "\t%s single insn loop\n",
                         ASM_COMMENT_START);
              else
                fprintf (asm_out_file, "1:\t%s single insn loop\n",
                         ASM_COMMENT_START);
              output_asm_insn (shift_one, operands);
              fprintf (asm_out_file, "2:\t%s end single insn loop\n",
                       ASM_COMMENT_START);
            }
          else
            {
              fprintf (asm_out_file, "1:\t%s begin shift loop\n",
                       ASM_COMMENT_START);
              output_asm_insn ("sub.f %4,%4,1", operands);
              output_asm_insn ("nop", operands);
              output_asm_insn ("bn.nd 2f", operands);
              output_asm_insn (shift_one, operands);
              output_asm_insn ("b.nd 1b", operands);
              fprintf (asm_out_file, "2:\t%s end shift loop\n",
                       ASM_COMMENT_START);
            }
        }
    }

  return "";
}
 
/* Nested function support.  */

/* Emit RTL insns to initialize the variable parts of a trampoline.
   FNADDR is an RTX for the address of the function's pure code.
   CXT is an RTX for the static chain value for the function.  */

void
arc_initialize_trampoline (rtx tramp ATTRIBUTE_UNUSED,
                           rtx fnaddr ATTRIBUTE_UNUSED,
                           rtx cxt ATTRIBUTE_UNUSED)
{
}

/* Set the cpu type and print out other fancy things,
   at the top of the file.  */

static void
arc_file_start (void)
{
  default_file_start ();
  fprintf (asm_out_file, "\t.cpu %s\n", arc_cpu_string);
}

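/* Illustrative example (the exact cpu name depends on the -mcpu selection
   recorded in arc_cpu_string): after the usual default_file_start output,
   arc_file_start adds one directive near the top of each assembly file,
   e.g.

        .cpu base
 */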
/* Print operand X (an rtx) in assembler syntax to file FILE.
   CODE is a letter or dot (`z' in `%z0') or 0 if no letter was specified.
   For `%' followed by punctuation, CODE is the punctuation and X is null.  */

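/* A quick reference for the ARC-specific codes, summarized from the case
   arms below:
   '#'/'*'  delay slot suffix on conditional/unconditional branches
   '?'/'!'  conditional execution suffix, with/without a leading '.'
   '~'      separating nop between a cc setter and a conditional branch
   'd'/'D'  condition code of X, and its inverse
   'R'      second word of a DImode/DFmode register or memory operand
   'S'      %st() wrapper around a function symbol or label address
   'H'/'L'  most/least significant word of a register pair or constant
   'A'      floating point constant printed in decimal
   'U'      ".a" update suffix for pre-increment/decrement addresses
   'V'      ".di" cache bypass suffix for volatile memory references  */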
void
arc_print_operand (FILE *file, rtx x, int code)
{
  switch (code)
    {
    case '#' :
      /* Conditional branches.  For now these are equivalent.  */
    case '*' :
      /* Unconditional branches.  Output the appropriate delay slot suffix.  */
      if (!final_sequence || XVECLEN (final_sequence, 0) == 1)
        {
          /* There's nothing in the delay slot.  */
          fputs (".nd", file);
        }
      else
        {
          rtx jump = XVECEXP (final_sequence, 0, 0);
          rtx delay = XVECEXP (final_sequence, 0, 1);
          if (INSN_ANNULLED_BRANCH_P (jump))
            fputs (INSN_FROM_TARGET_P (delay) ? ".jd" : ".nd", file);
          else
            fputs (".d", file);
        }
      return;
    case '?' : /* with leading "." */
    case '!' : /* without leading "." */
      /* This insn can be conditionally executed.  See if the ccfsm machinery
         says it should be conditionalized.  */
      if (arc_ccfsm_state == 3 || arc_ccfsm_state == 4)
        {
          /* Is this insn in a delay slot?  */
          if (final_sequence && XVECLEN (final_sequence, 0) == 2)
            {
              rtx insn = XVECEXP (final_sequence, 0, 1);

              /* If the insn is annulled and is from the target path, we need
                 to invert the condition test.  */
              if (INSN_ANNULLED_BRANCH_P (insn))
                {
                  if (INSN_FROM_TARGET_P (insn))
                    fprintf (file, "%s%s",
                             code == '?' ? "." : "",
                             arc_condition_codes[ARC_INVERSE_CONDITION_CODE (arc_ccfsm_current_cc)]);
                  else
                    fprintf (file, "%s%s",
                             code == '?' ? "." : "",
                             arc_condition_codes[arc_ccfsm_current_cc]);
                }
              else
                {
                  /* This insn is executed for either path, so don't
                     conditionalize it at all.  */
                  ; /* nothing to do */
                }
            }
          else
            {
              /* This insn isn't in a delay slot.  */
              fprintf (file, "%s%s",
                       code == '?' ? "." : "",
                       arc_condition_codes[arc_ccfsm_current_cc]);
            }
        }
      return;
    case '~' :
      /* Output a nop if we're between a set of the condition codes,
         and a conditional branch.  */
      if (last_insn_set_cc_p)
        fputs ("nop\n\t", file);
      return;
    case 'd' :
      fputs (arc_condition_codes[get_arc_condition_code (x)], file);
      return;
    case 'D' :
      fputs (arc_condition_codes[ARC_INVERSE_CONDITION_CODE
                                 (get_arc_condition_code (x))],
             file);
      return;
    case 'R' :
      /* Write second word of DImode or DFmode reference,
         register or memory.  */
      if (GET_CODE (x) == REG)
        fputs (reg_names[REGNO (x)+1], file);
      else if (GET_CODE (x) == MEM)
        {
          fputc ('[', file);
          /* Handle possible auto-increment.  Since it is pre-increment and
             we have already done it, we can just use an offset of four.  */
          /* ??? This is taken from rs6000.c I think.  I don't think it is
             currently necessary, but keep it around.  */
          if (GET_CODE (XEXP (x, 0)) == PRE_INC
              || GET_CODE (XEXP (x, 0)) == PRE_DEC)
            output_address (plus_constant (XEXP (XEXP (x, 0), 0), 4));
          else
            output_address (plus_constant (XEXP (x, 0), 4));
          fputc (']', file);
        }
      else
        output_operand_lossage ("invalid operand to %%R code");
      return;
    case 'S' :
      if ((GET_CODE (x) == SYMBOL_REF && SYMBOL_REF_FUNCTION_P (x))
          || GET_CODE (x) == LABEL_REF)
        {
          fprintf (file, "%%st(");
          output_addr_const (file, x);
          fprintf (file, ")");
          return;
        }
      break;
    case 'H' :
    case 'L' :
      if (GET_CODE (x) == REG)
        {
          /* L = least significant word, H = most significant word */
          if ((TARGET_BIG_ENDIAN != 0) ^ (code == 'L'))
            fputs (reg_names[REGNO (x)], file);
          else
            fputs (reg_names[REGNO (x)+1], file);
        }
      else if (GET_CODE (x) == CONST_INT
               || GET_CODE (x) == CONST_DOUBLE)
        {
          rtx first, second;

          split_double (x, &first, &second);
          fprintf (file, "0x%08lx",
                   (long)(code == 'L' ? INTVAL (first) : INTVAL (second)));
        }
      else
        output_operand_lossage ("invalid operand to %%H/%%L code");
      return;
    case 'A' :
      {
        char str[30];

        gcc_assert (GET_CODE (x) == CONST_DOUBLE
                    && GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT);

        real_to_decimal (str, CONST_DOUBLE_REAL_VALUE (x), sizeof (str), 0, 1);
        fprintf (file, "%s", str);
        return;
      }
    case 'U' :
      /* Output a load/store with update indicator if appropriate.  */
      if (GET_CODE (x) == MEM)
        {
          if (GET_CODE (XEXP (x, 0)) == PRE_INC
              || GET_CODE (XEXP (x, 0)) == PRE_DEC)
            fputs (".a", file);
        }
      else
        output_operand_lossage ("invalid operand to %%U code");
      return;
    case 'V' :
      /* Output cache bypass indicator for a load/store insn.  Volatile memory
         refs are defined to use the cache bypass mechanism.  */
      if (GET_CODE (x) == MEM)
        {
          if (MEM_VOLATILE_P (x))
            fputs (".di", file);
        }
      else
        output_operand_lossage ("invalid operand to %%V code");
      return;
    case 0 :
      /* Do nothing special.  */
      break;
    default :
      /* Unknown flag.  */
      output_operand_lossage ("invalid operand output code");
    }

  switch (GET_CODE (x))
    {
    case REG :
      fputs (reg_names[REGNO (x)], file);
      break;
    case MEM :
      fputc ('[', file);
      if (GET_CODE (XEXP (x, 0)) == PRE_INC)
        output_address (plus_constant (XEXP (XEXP (x, 0), 0),
                                       GET_MODE_SIZE (GET_MODE (x))));
      else if (GET_CODE (XEXP (x, 0)) == PRE_DEC)
        output_address (plus_constant (XEXP (XEXP (x, 0), 0),
                                       - GET_MODE_SIZE (GET_MODE (x))));
      else
        output_address (XEXP (x, 0));
      fputc (']', file);
      break;
    case CONST_DOUBLE :
      /* We handle SFmode constants here as output_addr_const doesn't.  */
      if (GET_MODE (x) == SFmode)
        {
          REAL_VALUE_TYPE d;
          long l;

          REAL_VALUE_FROM_CONST_DOUBLE (d, x);
          REAL_VALUE_TO_TARGET_SINGLE (d, l);
          fprintf (file, "0x%08lx", l);
          break;
        }
      /* Fall through.  Let output_addr_const deal with it.  */
    default :
      output_addr_const (file, x);
      break;
    }
}

/* Print a memory address as an operand to reference that memory location.  */

void
arc_print_operand_address (FILE *file, rtx addr)
{
  register rtx base, index = 0;
  int offset = 0;

  switch (GET_CODE (addr))
    {
    case REG :
      fputs (reg_names[REGNO (addr)], file);
      break;
    case SYMBOL_REF :
      if (/*???*/ 0 && SYMBOL_REF_FUNCTION_P (addr))
        {
          fprintf (file, "%%st(");
          output_addr_const (file, addr);
          fprintf (file, ")");
        }
      else
        output_addr_const (file, addr);
      break;
    case PLUS :
      if (GET_CODE (XEXP (addr, 0)) == CONST_INT)
        offset = INTVAL (XEXP (addr, 0)), base = XEXP (addr, 1);
      else if (GET_CODE (XEXP (addr, 1)) == CONST_INT)
        offset = INTVAL (XEXP (addr, 1)), base = XEXP (addr, 0);
      else
        base = XEXP (addr, 0), index = XEXP (addr, 1);
      gcc_assert (GET_CODE (base) == REG);
      fputs (reg_names[REGNO (base)], file);
      if (index == 0)
        {
          if (offset != 0)
            fprintf (file, ",%d", offset);
        }
      else
        {
          switch (GET_CODE (index))
            {
            case REG:
              fprintf (file, ",%s", reg_names[REGNO (index)]);
              break;
            case SYMBOL_REF:
              fputc (',', file), output_addr_const (file, index);
              break;
            default:
              gcc_unreachable ();
            }
        }
      break;
    case PRE_INC :
    case PRE_DEC :
      /* We shouldn't get here as we've lost the mode of the memory object
         (which says how much to inc/dec by).  */
      gcc_unreachable ();
      break;
    default :
      output_addr_const (file, addr);
      break;
    }
}

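/* Summarized from the cases above, as an illustration: a
   (plus (reg) (const_int)) address prints as "rN,imm", a reg+reg address
   as "rN,rM", and a bare REG or SYMBOL_REF prints as itself; the enclosing
   '[' ']' brackets are added by the callers, e.g. the MEM case of
   arc_print_operand.  */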
/* Update compare/branch separation marker.  */

static void
record_cc_ref (rtx insn)
{
  last_insn_set_cc_p = current_insn_set_cc_p;

  switch (get_attr_cond (insn))
    {
    case COND_SET :
    case COND_SET_ZN :
    case COND_SET_ZNC :
      if (get_attr_length (insn) == 1)
        current_insn_set_cc_p = 1;
      else
        current_insn_set_cc_p = 0;
      break;
    default :
      current_insn_set_cc_p = 0;
      break;
    }
}

/* Conditional execution support.

   This is based on the ARM port but for now is much simpler.

   A finite state machine takes care of noticing whether or not instructions
   can be conditionally executed, and thus decreases execution time and code
   size by deleting branch instructions.  The fsm is controlled by
   final_prescan_insn, and controls the actions of PRINT_OPERAND.  The patterns
   in the .md file for the branch insns also have a hand in this.  */

/* The states of the fsm controlling condition codes are:
   0: normal, do nothing special
   1: don't output this insn
   2: don't output this insn
   3: make insns conditional
   4: make insns conditional

   State transitions (state->state by whom, under what condition):


   1 -> 3 branch patterns, after having not output the conditional branch
   2 -> 4 branch patterns, after having not output the conditional branch
   3 -> 0 (*targetm.asm_out.internal_label), if the `target' label is reached
          (the target label has CODE_LABEL_NUMBER equal to
          arc_ccfsm_target_label).
   4 -> 0 final_prescan_insn, if `target' unconditional branch is reached

   If the jump clobbers the conditions then we use states 2 and 4.

   A similar thing can be done with conditional return insns.

   We also handle separating branches from sets of the condition code.
   This is done here because knowledge of the ccfsm state is required;
   we may not be outputting the branch.  */

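/* A worked example (illustrative only; the exact mnemonics depend on the
   branch and move patterns in arc.md): a conditional branch around a
   couple of insns,

        bne .L1
        mov r0,r1
        add r2,r2,1
   .L1:

   can be rewritten by this machinery as

        mov.eq r0,r1
        add.eq r2,r2,1

   The branch pattern asks arc_ccfsm_branch_deleted_p whether to drop the
   branch, and the skipped insns pick up the ".eq" suffix through the '?'
   operand code handled in arc_print_operand above.  */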
void
arc_final_prescan_insn (rtx insn,
                        rtx *opvec ATTRIBUTE_UNUSED,
                        int noperands ATTRIBUTE_UNUSED)
{
  /* BODY will hold the body of INSN.  */
  register rtx body = PATTERN (insn);

  /* This will be 1 if trying to repeat the trick (i.e.: do the `else' part of
     an if/then/else), and things need to be reversed.  */
  int reverse = 0;

  /* If we start with a return insn, we only succeed if we find another one.  */
  int seeking_return = 0;

  /* START_INSN will hold the insn from where we start looking.  This is the
     first insn after the following code_label if REVERSE is true.  */
  rtx start_insn = insn;

  /* Update compare/branch separation marker.  */
  record_cc_ref (insn);

  /* Allow -mdebug-ccfsm to turn this off so we can see how well it does.
     We can't do this in macro FINAL_PRESCAN_INSN because it's called from
     final_scan_insn which has `optimize' as a local.  */
  if (optimize < 2 || TARGET_NO_COND_EXEC)
    return;

  /* If in state 4, check if the target branch is reached, in order to
     change back to state 0.  */
  if (arc_ccfsm_state == 4)
    {
      if (insn == arc_ccfsm_target_insn)
        {
          arc_ccfsm_target_insn = NULL;
          arc_ccfsm_state = 0;
        }
      return;
    }

  /* If in state 3, it is possible to repeat the trick, if this insn is an
     unconditional branch to a label, and immediately following this branch
     is the previous target label which is only used once, and the label this
     branch jumps to is not too far off.  Or in other words "we've done the
     `then' part, see if we can do the `else' part."  */
  if (arc_ccfsm_state == 3)
    {
      if (simplejump_p (insn))
        {
          start_insn = next_nonnote_insn (start_insn);
          if (GET_CODE (start_insn) == BARRIER)
            {
              /* ??? Isn't this always a barrier?  */
              start_insn = next_nonnote_insn (start_insn);
            }
          if (GET_CODE (start_insn) == CODE_LABEL
              && CODE_LABEL_NUMBER (start_insn) == arc_ccfsm_target_label
              && LABEL_NUSES (start_insn) == 1)
            reverse = TRUE;
          else
            return;
        }
      else if (GET_CODE (body) == RETURN)
        {
          start_insn = next_nonnote_insn (start_insn);
          if (GET_CODE (start_insn) == BARRIER)
            start_insn = next_nonnote_insn (start_insn);
          if (GET_CODE (start_insn) == CODE_LABEL
              && CODE_LABEL_NUMBER (start_insn) == arc_ccfsm_target_label
              && LABEL_NUSES (start_insn) == 1)
            {
              reverse = TRUE;
              seeking_return = 1;
            }
          else
            return;
        }
      else
        return;
    }

  if (GET_CODE (insn) != JUMP_INSN)
    return;

  /* This jump might be paralleled with a clobber of the condition codes;
     the jump should always come first.  */
  if (GET_CODE (body) == PARALLEL && XVECLEN (body, 0) > 0)
    body = XVECEXP (body, 0, 0);

  if (reverse
      || (GET_CODE (body) == SET && GET_CODE (SET_DEST (body)) == PC
          && GET_CODE (SET_SRC (body)) == IF_THEN_ELSE))
    {
      int insns_skipped = 0, fail = FALSE, succeed = FALSE;
      /* Flag which part of the IF_THEN_ELSE is the LABEL_REF.  */
      int then_not_else = TRUE;
      /* Nonzero if next insn must be the target label.  */
      int next_must_be_target_label_p;
      rtx this_insn = start_insn, label = 0;

      /* Register the insn jumped to.  */
      if (reverse)
        {
          if (!seeking_return)
            label = XEXP (SET_SRC (body), 0);
        }
      else if (GET_CODE (XEXP (SET_SRC (body), 1)) == LABEL_REF)
        label = XEXP (XEXP (SET_SRC (body), 1), 0);
      else if (GET_CODE (XEXP (SET_SRC (body), 2)) == LABEL_REF)
        {
          label = XEXP (XEXP (SET_SRC (body), 2), 0);
          then_not_else = FALSE;
        }
      else if (GET_CODE (XEXP (SET_SRC (body), 1)) == RETURN)
        seeking_return = 1;
      else if (GET_CODE (XEXP (SET_SRC (body), 2)) == RETURN)
        {
          seeking_return = 1;
          then_not_else = FALSE;
        }
      else
        gcc_unreachable ();

      /* See how many insns this branch skips, and what kind of insns.  If all
         insns are okay, and the label or unconditional branch to the same
         label is not too far away, succeed.  */
      for (insns_skipped = 0, next_must_be_target_label_p = FALSE;
           !fail && !succeed && insns_skipped < MAX_INSNS_SKIPPED;
           insns_skipped++)
        {
          rtx scanbody;

          this_insn = next_nonnote_insn (this_insn);
          if (!this_insn)
            break;

          if (next_must_be_target_label_p)
            {
              if (GET_CODE (this_insn) == BARRIER)
                continue;
              if (GET_CODE (this_insn) == CODE_LABEL
                  && this_insn == label)
                {
                  arc_ccfsm_state = 1;
                  succeed = TRUE;
                }
              else
                fail = TRUE;
              break;
            }

          scanbody = PATTERN (this_insn);

          switch (GET_CODE (this_insn))
            {
            case CODE_LABEL:
              /* Succeed if it is the target label, otherwise fail since
                 control falls in from somewhere else.  */
              if (this_insn == label)
                {
                  arc_ccfsm_state = 1;
                  succeed = TRUE;
                }
              else
                fail = TRUE;
              break;

            case BARRIER:
              /* Succeed if the following insn is the target label.
                 Otherwise fail.
                 If return insns are used then the last insn in a function
                 will be a barrier.  */
              next_must_be_target_label_p = TRUE;
              break;

            case CALL_INSN:
              /* Can handle a call insn if there are no insns after it.
                 IE: The next "insn" is the target label.  We don't have to
                 worry about delay slots as such insns are SEQUENCE's inside
                 INSN's.  ??? It is possible to handle such insns though.  */
              if (get_attr_cond (this_insn) == COND_CANUSE)
                next_must_be_target_label_p = TRUE;
              else
                fail = TRUE;
              break;

            case JUMP_INSN:
              /* If this is an unconditional branch to the same label, succeed.
                 If it is to another label, do nothing.  If it is conditional,
                 fail.  */
              /* ??? Probably, the tests for the SET and the PC are unnecessary.  */

              if (GET_CODE (scanbody) == SET
                  && GET_CODE (SET_DEST (scanbody)) == PC)
                {
                  if (GET_CODE (SET_SRC (scanbody)) == LABEL_REF
                      && XEXP (SET_SRC (scanbody), 0) == label && !reverse)
                    {
                      arc_ccfsm_state = 2;
                      succeed = TRUE;
                    }
                  else if (GET_CODE (SET_SRC (scanbody)) == IF_THEN_ELSE)
                    fail = TRUE;
                }
              else if (GET_CODE (scanbody) == RETURN
                       && seeking_return)
                {
                  arc_ccfsm_state = 2;
                  succeed = TRUE;
                }
              else if (GET_CODE (scanbody) == PARALLEL)
                {
                  if (get_attr_cond (this_insn) != COND_CANUSE)
                    fail = TRUE;
                }
              break;

            case INSN:
              /* We can only do this with insns that can use the condition
                 codes (and don't set them).  */
              if (GET_CODE (scanbody) == SET
                  || GET_CODE (scanbody) == PARALLEL)
                {
                  if (get_attr_cond (this_insn) != COND_CANUSE)
                    fail = TRUE;
                }
              /* We can't handle other insns like sequences.  */
              else
                fail = TRUE;
              break;

            default:
              break;
            }
        }

      if (succeed)
        {
          if ((!seeking_return) && (arc_ccfsm_state == 1 || reverse))
            arc_ccfsm_target_label = CODE_LABEL_NUMBER (label);
          else
            {
              gcc_assert (seeking_return || arc_ccfsm_state == 2);
              while (this_insn && GET_CODE (PATTERN (this_insn)) == USE)
                {
                  this_insn = next_nonnote_insn (this_insn);
                  gcc_assert (!this_insn
                              || (GET_CODE (this_insn) != BARRIER
                                  && GET_CODE (this_insn) != CODE_LABEL));
                }
              if (!this_insn)
                {
                  /* Oh dear!  We ran off the end, give up.  */
                  extract_insn_cached (insn);
                  arc_ccfsm_state = 0;
                  arc_ccfsm_target_insn = NULL;
                  return;
                }
              arc_ccfsm_target_insn = this_insn;
            }

          /* If REVERSE is true, arc_ccfsm_current_cc needs to be inverted from
             what it was.  */
          if (!reverse)
            arc_ccfsm_current_cc = get_arc_condition_code (XEXP (SET_SRC (body),
                                                                 0));

          if (reverse || then_not_else)
            arc_ccfsm_current_cc = ARC_INVERSE_CONDITION_CODE (arc_ccfsm_current_cc);
        }

      /* Restore recog_data.  Getting the attributes of other insns can
         destroy this array, but final.c assumes that it remains intact
         across this call.  */
      extract_insn_cached (insn);
    }
}
 
/* Record that we are currently outputting label NUM with prefix PREFIX.
   If it's the label we're looking for, reset the ccfsm machinery.

   Called from (*targetm.asm_out.internal_label).  */

void
arc_ccfsm_at_label (const char *prefix, int num)
{
  if (arc_ccfsm_state == 3 && arc_ccfsm_target_label == num
      && !strcmp (prefix, "L"))
    {
      arc_ccfsm_state = 0;
      arc_ccfsm_target_insn = NULL_RTX;
    }
}

/* See if the current insn, which is a conditional branch, is to be
   deleted.  */

int
arc_ccfsm_branch_deleted_p (void)
{
  if (arc_ccfsm_state == 1 || arc_ccfsm_state == 2)
    return 1;
  return 0;
}

/* Record a branch isn't output because subsequent insns can be
   conditionalized.  */

void
arc_ccfsm_record_branch_deleted (void)
{
  /* Indicate we're conditionalizing insns now.  */
  arc_ccfsm_state += 2;

  /* If the next insn is a subroutine call, we still need a nop between the
     cc setter and user.  We need to undo the effect of calling record_cc_ref
     for the just deleted branch.  */
  current_insn_set_cc_p = last_insn_set_cc_p;
}

void
arc_va_start (tree valist, rtx nextarg)
{
  /* See arc_setup_incoming_varargs for reasons for this oddity.  */
  if (current_function_args_info < 8
      && (current_function_args_info & 1))
    nextarg = plus_constant (nextarg, UNITS_PER_WORD);

  std_expand_builtin_va_start (valist, nextarg);
}

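/* A guess at the rationale, based only on the test above (see
   arc_setup_incoming_varargs for the authoritative explanation): when an
   odd number of the eight argument registers has been consumed, the saved
   anonymous-argument registers start one word further on, so va_start
   skips that padding word.  */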
/* This is how to output a definition of an internal numbered label where
   PREFIX is the class of label and NUM is the number within the class.  */

static void
arc_internal_label (FILE *stream, const char *prefix, unsigned long labelno)
{
  arc_ccfsm_at_label (prefix, labelno);
  default_internal_label (stream, prefix, labelno);
}

/* Worker function for TARGET_ASM_EXTERNAL_LIBCALL.  */

static void
arc_external_libcall (rtx fun ATTRIBUTE_UNUSED)
{
#if 0
/* On the ARC we want to have libgcc's for multiple cpus in one binary.
   We can't use `assemble_name' here as that will call ASM_OUTPUT_LABELREF
   and we'll get another suffix added on if -mmangle-cpu.  */
  if (TARGET_MANGLE_CPU_LIBGCC)
    {
      fprintf (FILE, "\t.rename\t_%s, _%s%s\n",
               XSTR (SYMREF, 0), XSTR (SYMREF, 0),
               arc_mangle_suffix);
    }
#endif
}

/* Worker function for TARGET_RETURN_IN_MEMORY.  */

static bool
arc_return_in_memory (tree type, tree fntype ATTRIBUTE_UNUSED)
{
  if (AGGREGATE_TYPE_P (type))
    return true;
  else
    {
      HOST_WIDE_INT size = int_size_in_bytes (type);
      return (size == -1 || size > 8);
    }
}

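/* Illustrative consequences of the rule above: any aggregate, any value of
   variable size (int_size_in_bytes returning -1) and any scalar wider than
   8 bytes is returned in memory, while an 8-byte long long or double still
   comes back in registers.  */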
/* For ARC, all aggregates and arguments greater than 8 bytes are
   passed by reference.  */

static bool
arc_pass_by_reference (CUMULATIVE_ARGS *ca ATTRIBUTE_UNUSED,
                       enum machine_mode mode, tree type,
                       bool named ATTRIBUTE_UNUSED)
{
  unsigned HOST_WIDE_INT size;

  if (type)
    {
      if (AGGREGATE_TYPE_P (type))
        return true;
      size = int_size_in_bytes (type);
    }
  else
    size = GET_MODE_SIZE (mode);

  return size > 8;
}
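
/* Illustrative consequences of the rule above: every aggregate argument is
   passed by reference regardless of size, as is any non-aggregate argument
   wider than 8 bytes; an int, a pointer or a double is passed by value.  */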
