OpenCores
URL https://opencores.org/ocsvn/scarts/scarts/trunk

Subversion Repositories scarts

[/] [scarts/] [trunk/] [toolchain/] [scarts-gcc/] [gcc-4.1.1/] [gcc/] [config/] [bfin/] [bfin.c] - Blame information for rev 12

Details | Compare with Previous | View Log

Line No. Rev Author Line
1 12 jlechner
/* The Blackfin code generation auxiliary output file.
2
   Copyright (C) 2005  Free Software Foundation, Inc.
3
   Contributed by Analog Devices.
4
 
5
   This file is part of GCC.
6
 
7
   GCC is free software; you can redistribute it and/or modify it
8
   under the terms of the GNU General Public License as published
9
   by the Free Software Foundation; either version 2, or (at your
10
   option) any later version.
11
 
12
   GCC is distributed in the hope that it will be useful, but WITHOUT
13
   ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
14
   or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public
15
   License for more details.
16
 
17
   You should have received a copy of the GNU General Public License
18
   along with GCC; see the file COPYING.  If not, write to
19
   the Free Software Foundation, 51 Franklin Street, Fifth Floor,
20
   Boston, MA 02110-1301, USA.  */
21
 
22
#include "config.h"
23
#include "system.h"
24
#include "coretypes.h"
25
#include "tm.h"
26
#include "rtl.h"
27
#include "regs.h"
28
#include "hard-reg-set.h"
29
#include "real.h"
30
#include "insn-config.h"
31
#include "insn-codes.h"
32
#include "conditions.h"
33
#include "insn-flags.h"
34
#include "output.h"
35
#include "insn-attr.h"
36
#include "tree.h"
37
#include "flags.h"
38
#include "except.h"
39
#include "function.h"
40
#include "input.h"
41
#include "target.h"
42
#include "target-def.h"
43
#include "expr.h"
44
#include "toplev.h"
45
#include "recog.h"
46
#include "ggc.h"
47
#include "integrate.h"
48
#include "cgraph.h"
49
#include "langhooks.h"
50
#include "bfin-protos.h"
51
#include "tm-preds.h"
52
#include "gt-bfin.h"
53
 
54
/* Test and compare insns in bfin.md store the information needed to
   generate branch and scc insns here.  */
rtx bfin_compare_op0, bfin_compare_op1;

/* RTX for condition code flag register and RETS register.  */
extern GTY(()) rtx bfin_cc_rtx;
extern GTY(()) rtx bfin_rets_rtx;
rtx bfin_cc_rtx, bfin_rets_rtx;

/* Number of argument-passing registers; computed in output_file_start
   by counting the entries of arg_regs.  */
int max_arg_registers = 0;

/* Arrays used when emitting register names.  */
const char *short_reg_names[]  =  SHORT_REGISTER_NAMES;
const char *high_reg_names[]   =  HIGH_REGISTER_NAMES;
const char *dregs_pair_names[] =  DREGS_PAIR_NAMES;
const char *byte_reg_names[]   =  BYTE_REGISTER_NAMES;

/* Registers used to pass function arguments; scanned up to a negative
   terminator in output_file_start.  */
static int arg_regs[] = FUNCTION_ARG_REGISTERS;

/* Nonzero if -mshared-library-id was given.  */
static int bfin_lib_id_given;
75
 
76
/* Emit the Blackfin assembler directive ".global NAME;" on STREAM to
   make NAME visible outside the current object file.  */

static void
bfin_globalize_label (FILE *stream, const char *name)
{
  fputs (".global ", stream);
  assemble_name (stream, name);
  fputs (";\n", stream);
}
84
 
85
/* Emit per-file assembler output (the .file directive) and compute
   max_arg_registers from the arg_regs table.  */

static void
output_file_start (void)
{
  FILE *file = asm_out_file;
  int i;

  fprintf (file, ".file \"%s\";\n", input_filename);

  /* arg_regs is terminated by a negative entry; count the valid ones.  */
  for (i = 0; arg_regs[i] >= 0; i++)
    ;
  max_arg_registers = i;        /* how many arg reg used  */
}
97
 
98
/* Called early in the compilation to conditionally modify
   fixed_regs/call_used_regs.  Also used here to create the RTXes for
   the CC and RETS hard registers, which are needed before expansion.  */

void
conditional_register_usage (void)
{
  /* initialize condition code flag register rtx */
  bfin_cc_rtx = gen_rtx_REG (BImode, REG_CC);
  bfin_rets_rtx = gen_rtx_REG (Pmode, REG_RETS);
}
108
 
109
/* Examine machine-dependent attributes of function type FUNTYPE and return its
   type.  See the definition of E_FUNKIND.  A function with none of the
   handler attributes is an ordinary SUBROUTINE.  */

static e_funkind funkind (tree funtype)
{
  tree attrs = TYPE_ATTRIBUTES (funtype);
  if (lookup_attribute ("interrupt_handler", attrs))
    return INTERRUPT_HANDLER;
  else if (lookup_attribute ("exception_handler", attrs))
    return EXCPT_HANDLER;
  else if (lookup_attribute ("nmi_handler", attrs))
    return NMI_HANDLER;
  else
    return SUBROUTINE;
}
124
 
125
/* Legitimize PIC addresses.  If the address is already position-independent,
   we return ORIG.  Newly generated position-independent addresses go into a
   reg.  This is REG if nonzero, otherwise we allocate register(s) as
   necessary.  PICREG is the register holding the pointer to the PIC offset
   table.  */

rtx
legitimize_pic_address (rtx orig, rtx reg, rtx picreg)
{
  rtx addr = orig;
  rtx new = orig;

  if (GET_CODE (addr) == SYMBOL_REF || GET_CODE (addr) == LABEL_REF)
    {
      /* Constant-pool references need no GOT indirection.  */
      if (GET_CODE (addr) == SYMBOL_REF && CONSTANT_POOL_ADDRESS_P (addr))
        reg = new = orig;
      else
        {
          if (reg == 0)
            {
              /* We may not create new pseudos during reload.  */
              gcc_assert (!no_new_pseudos);
              reg = gen_reg_rtx (Pmode);
            }

          if (flag_pic == 2)
            {
              /* -fPIC: materialize the full GOT offset with a
                 high/low pair, add the PIC register, then load the
                 address through the GOT slot.  */
              emit_insn (gen_movsi_high_pic (reg, addr));
              emit_insn (gen_movsi_low_pic (reg, reg, addr));
              emit_insn (gen_addsi3 (reg, reg, picreg));
              new = gen_const_mem (Pmode, reg);
            }
          else
            {
              /* -fpic: the GOT offset fits in a load displacement.  */
              rtx tmp = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr),
                                        UNSPEC_MOVE_PIC);
              new = gen_const_mem (Pmode,
                                   gen_rtx_PLUS (Pmode, picreg, tmp));
            }
          emit_move_insn (reg, new);
        }
      if (picreg == pic_offset_table_rtx)
        current_function_uses_pic_offset_table = 1;
      return reg;
    }

  else if (GET_CODE (addr) == CONST || GET_CODE (addr) == PLUS)
    {
      rtx base;

      if (GET_CODE (addr) == CONST)
        {
          addr = XEXP (addr, 0);
          gcc_assert (GET_CODE (addr) == PLUS);
        }

      /* Already PIC-relative: nothing to do.  */
      if (XEXP (addr, 0) == picreg)
        return orig;

      if (reg == 0)
        {
          gcc_assert (!no_new_pseudos);
          reg = gen_reg_rtx (Pmode);
        }

      /* Recursively legitimize both operands of the PLUS, reusing REG
         for the second operand only if the first didn't take it.  */
      base = legitimize_pic_address (XEXP (addr, 0), reg, picreg);
      addr = legitimize_pic_address (XEXP (addr, 1),
                                     base == reg ? NULL_RTX : reg,
                                     picreg);

      if (GET_CODE (addr) == CONST_INT)
        {
          gcc_assert (! reload_in_progress && ! reload_completed);
          addr = force_reg (Pmode, addr);
        }

      /* Fold a trailing constant into the base so the result is a
         simple base + constant form.  */
      if (GET_CODE (addr) == PLUS && CONSTANT_P (XEXP (addr, 1)))
        {
          base = gen_rtx_PLUS (Pmode, base, XEXP (addr, 0));
          addr = XEXP (addr, 1);
        }

      return gen_rtx_PLUS (Pmode, base, addr);
    }

  return new;
}
211
 
212
/* Stack frame layout. */
213
 
214
/* Compute the number of DREGS to save with a push_multiple operation.
215
   This could include registers that aren't modified in the function,
216
   since push_multiple only takes a range of registers.
217
   If IS_INTHANDLER, then everything that is live must be saved, even
218
   if normally call-clobbered.  */
219
 
220
static int
221
n_dregs_to_save (bool is_inthandler)
222
{
223
  unsigned i;
224
 
225
  for (i = REG_R0; i <= REG_R7; i++)
226
    {
227
      if (regs_ever_live[i] && (is_inthandler || ! call_used_regs[i]))
228
        return REG_R7 - i + 1;
229
 
230
      if (current_function_calls_eh_return)
231
        {
232
          unsigned j;
233
          for (j = 0; ; j++)
234
            {
235
              unsigned test = EH_RETURN_DATA_REGNO (j);
236
              if (test == INVALID_REGNUM)
237
                break;
238
              if (test == i)
239
                return REG_R7 - i + 1;
240
            }
241
        }
242
 
243
    }
244
  return 0;
245
}
246
 
247
/* Like n_dregs_to_save, but compute number of PREGS to save.  The PIC
   register (a PREG) must also be saved if the function uses the PIC
   offset table, or for non-leaf ID-shared-library code.  */

static int
n_pregs_to_save (bool is_inthandler)
{
  unsigned i;

  /* Find the lowest-numbered P register that must be saved; the saved
     range then covers it through P5.  */
  for (i = REG_P0; i <= REG_P5; i++)
    if ((regs_ever_live[i] && (is_inthandler || ! call_used_regs[i]))
        || (i == PIC_OFFSET_TABLE_REGNUM
            && (current_function_uses_pic_offset_table
                || (TARGET_ID_SHARED_LIBRARY && ! current_function_is_leaf))))
      return REG_P5 - i + 1;
  return 0;
}
262
 
263
/* Determine if we are going to save the frame pointer in the prologue.  */
264
 
265
static bool
266
must_save_fp_p (void)
267
{
268
  return frame_pointer_needed || regs_ever_live[REG_FP];
269
}
270
 
271
static bool
272
stack_frame_needed_p (void)
273
{
274
  /* EH return puts a new return address into the frame using an
275
     address relative to the frame pointer.  */
276
  if (current_function_calls_eh_return)
277
    return true;
278
  return frame_pointer_needed;
279
}
280
 
281
/* Emit code to save registers in the prologue.  SAVEALL is nonzero if we
282
   must save all registers; this is used for interrupt handlers.
283
   SPREG contains (reg:SI REG_SP).  IS_INTHANDLER is true if we're doing
284
   this for an interrupt (or exception) handler.  */
285
 
286
static void
287
expand_prologue_reg_save (rtx spreg, int saveall, bool is_inthandler)
288
{
289
  int ndregs = saveall ? 8 : n_dregs_to_save (is_inthandler);
290
  int npregs = saveall ? 6 : n_pregs_to_save (is_inthandler);
291
  int dregno = REG_R7 + 1 - ndregs;
292
  int pregno = REG_P5 + 1 - npregs;
293
  int total = ndregs + npregs;
294
  int i;
295
  rtx pat, insn, val;
296
 
297
  if (total == 0)
298
    return;
299
 
300
  val = GEN_INT (-total * 4);
301
  pat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (total + 2));
302
  XVECEXP (pat, 0, 0) = gen_rtx_UNSPEC (VOIDmode, gen_rtvec (1, val),
303
                                        UNSPEC_PUSH_MULTIPLE);
304
  XVECEXP (pat, 0, total + 1) = gen_rtx_SET (VOIDmode, spreg,
305
                                             gen_rtx_PLUS (Pmode, spreg,
306
                                                           val));
307
  RTX_FRAME_RELATED_P (XVECEXP (pat, 0, total + 1)) = 1;
308
  for (i = 0; i < total; i++)
309
    {
310
      rtx memref = gen_rtx_MEM (word_mode,
311
                                gen_rtx_PLUS (Pmode, spreg,
312
                                              GEN_INT (- i * 4 - 4)));
313
      rtx subpat;
314
      if (ndregs > 0)
315
        {
316
          subpat = gen_rtx_SET (VOIDmode, memref, gen_rtx_REG (word_mode,
317
                                                               dregno++));
318
          ndregs--;
319
        }
320
      else
321
        {
322
          subpat = gen_rtx_SET (VOIDmode, memref, gen_rtx_REG (word_mode,
323
                                                               pregno++));
324
          npregs++;
325
        }
326
      XVECEXP (pat, 0, i + 1) = subpat;
327
      RTX_FRAME_RELATED_P (subpat) = 1;
328
    }
329
  insn = emit_insn (pat);
330
  RTX_FRAME_RELATED_P (insn) = 1;
331
}
332
 
333
/* Emit code to restore registers in the epilogue.  SAVEALL is nonzero if we
   must save all registers; this is used for interrupt handlers.
   SPREG contains (reg:SI REG_SP).  IS_INTHANDLER is true if we're doing
   this for an interrupt (or exception) handler.

   Builds a single pop_multiple PARALLEL: element 0 adjusts SP upward,
   elements 1..total reload the registers (P regs first, then D regs,
   in decreasing register number).  */

static void
expand_epilogue_reg_restore (rtx spreg, bool saveall, bool is_inthandler)
{
  int ndregs = saveall ? 8 : n_dregs_to_save (is_inthandler);
  int npregs = saveall ? 6 : n_pregs_to_save (is_inthandler);
  int total = ndregs + npregs;
  int i, regno;
  rtx pat, insn;

  if (total == 0)
    return;

  pat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (total + 1));
  /* First element: SP += total * 4.  */
  XVECEXP (pat, 0, 0) = gen_rtx_SET (VOIDmode, spreg,
                                     gen_rtx_PLUS (Pmode, spreg,
                                                   GEN_INT (total * 4)));

  /* Start with the P registers if any were saved, otherwise D regs;
     regno is pre-incremented and counted down inside the loop.  */
  if (npregs > 0)
    regno = REG_P5 + 1;
  else
    regno = REG_R7 + 1;

  for (i = 0; i < total; i++)
    {
      /* Slot 0 is at SP itself; later slots at increasing offsets.  */
      rtx addr = (i > 0
                  ? gen_rtx_PLUS (Pmode, spreg, GEN_INT (i * 4))
                  : spreg);
      rtx memref = gen_rtx_MEM (word_mode, addr);

      regno--;
      XVECEXP (pat, 0, i + 1)
        = gen_rtx_SET (VOIDmode, gen_rtx_REG (word_mode, regno), memref);

      /* After the last P register, switch to the D register range.  */
      if (npregs > 0)
        {
          if (--npregs == 0)
            regno = REG_R7 + 1;
        }
    }

  insn = emit_insn (pat);
  RTX_FRAME_RELATED_P (insn) = 1;
}
381
 
382
/* Perform any needed actions needed for a function that is receiving a
   variable number of arguments.

   CUM is as above.

   MODE and TYPE are the mode and type of the current parameter.

   PRETEND_SIZE is a variable that should be set to the amount of stack
   that must be pushed by the prolog to pretend that our caller pushed
   it.

   Normally, this macro will push all remaining incoming registers on the
   stack and set PRETEND_SIZE to the length of the registers pushed.

   Blackfin specific :
   - VDSP C compiler manual (our ABI) says that a variable args function
     should save the R0, R1 and R2 registers in the stack.
   - The caller will always leave space on the stack for the
     arguments that are passed in registers, so we dont have
     to leave any extra space.
   - now, the vastart pointer can access all arguments from the stack.  */

static void
setup_incoming_varargs (CUMULATIVE_ARGS *cum,
                        enum machine_mode mode ATTRIBUTE_UNUSED,
                        tree type ATTRIBUTE_UNUSED, int *pretend_size,
                        int no_rtl)
{
  rtx mem;
  int i;

  /* When NO_RTL is set we must only report sizes, not emit insns.  */
  if (no_rtl)
    return;

  /* The move for named arguments will be generated automatically by the
     compiler.  We need to generate the move rtx for the unnamed arguments
     if they are in the first 3 words.  We assume at least 1 named argument
     exists, so we never generate [ARGP] = R0 here.  */

  for (i = cum->words + 1; i < max_arg_registers; i++)
    {
      mem = gen_rtx_MEM (Pmode,
                         plus_constant (arg_pointer_rtx, (i * UNITS_PER_WORD)));
      emit_move_insn (mem, gen_rtx_REG (Pmode, i));
    }

  /* The caller already reserved the space, so no pretend args.  */
  *pretend_size = 0;
}
430
 
431
/* Value should be nonzero if functions must have frame pointers.
432
   Zero means the frame pointer need not be set up (and parms may
433
   be accessed via the stack pointer) in functions that seem suitable.  */
434
 
435
int
436
bfin_frame_pointer_required (void)
437
{
438
  e_funkind fkind = funkind (TREE_TYPE (current_function_decl));
439
 
440
  if (fkind != SUBROUTINE)
441
    return 1;
442
 
443
  /* We turn on -fomit-frame-pointer if -momit-leaf-frame-pointer is used,
444
     so we have to override it for non-leaf functions.  */
445
  if (TARGET_OMIT_LEAF_FRAME_POINTER && ! current_function_is_leaf)
446
    return 1;
447
 
448
  return 0;
449
}
450
 
451
/* Return the number of registers pushed during the prologue.  Counts
   the D/P register range, FP and RETS (or the LINK pair), and for
   handlers also ASTAT, the nesting return register, and the remaining
   machine registers (accumulators count as two words each).  */

static int
n_regs_saved_by_prologue (void)
{
  e_funkind fkind = funkind (TREE_TYPE (current_function_decl));
  bool is_inthandler = fkind != SUBROUTINE;
  tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl));
  /* "saveall" attribute or a non-leaf handler forces saving everything;
     mirrors the logic in expand_interrupt_handler_prologue.  */
  bool all = (lookup_attribute ("saveall", attrs) != NULL_TREE
              || (is_inthandler && !current_function_is_leaf));
  int ndregs = all ? 8 : n_dregs_to_save (is_inthandler);
  int npregs = all ? 6 : n_pregs_to_save (is_inthandler);
  int n = ndregs + npregs;

  if (all || stack_frame_needed_p ())
    /* We use a LINK instruction in this case.  */
    n += 2;
  else
    {
      if (must_save_fp_p ())
        n++;
      if (! current_function_is_leaf)
        n++;
    }

  if (fkind != SUBROUTINE)
    {
      int i;

      /* Increment once for ASTAT.  */
      n++;

      /* RETE/X/N.  */
      if (lookup_attribute ("nesting", attrs))
        n++;

      /* Remaining registers saved by the handler prologue loop;
         A0/A1 occupy two words (PDImode).  */
      for (i = REG_P7 + 1; i < REG_CC; i++)
        if (all
            || regs_ever_live[i]
            || (!leaf_function_p () && call_used_regs[i]))
          n += i == REG_A0 || i == REG_A1 ? 2 : 1;
    }
  return n;
}
495
 
496
/* Return the offset between two registers, one to be eliminated, and the other
   its replacement, at the start of a routine.  */

HOST_WIDE_INT
bfin_initial_elimination_offset (int from, int to)
{
  HOST_WIDE_INT offset = 0;

  /* ARG_POINTER sits above everything the prologue pushed.  */
  if (from == ARG_POINTER_REGNUM)
    offset = n_regs_saved_by_prologue () * 4;

  if (to == STACK_POINTER_REGNUM)
    {
      /* Outgoing-argument area is at least FIXED_STACK_AREA bytes
         whenever any outgoing arguments exist (cf. arg_area_size).  */
      if (current_function_outgoing_args_size >= FIXED_STACK_AREA)
        offset += current_function_outgoing_args_size;
      else if (current_function_outgoing_args_size)
        offset += FIXED_STACK_AREA;

      offset += get_frame_size ();
    }

  return offset;
}
519
 
520
/* Emit code to load a constant CONSTANT into register REG; setting
   RTX_FRAME_RELATED_P on all insns we generate if RELATED is true.
   Make sure that the insns we generate need not be split.  */

static void
frame_related_constant_load (rtx reg, HOST_WIDE_INT constant, bool related)
{
  rtx insn;
  rtx cst = GEN_INT (constant);

  /* Constants in [-32768, 65536) fit a single move.  */
  if (constant >= -32768 && constant < 65536)
    insn = emit_move_insn (reg, cst);
  else
    {
      /* We don't call split_load_immediate here, since dwarf2out.c can get
         confused about some of the more clever sequences it can generate.  */
      insn = emit_insn (gen_movsi_high (reg, cst));
      if (related)
        RTX_FRAME_RELATED_P (insn) = 1;
      insn = emit_insn (gen_movsi_low (reg, reg, cst));
    }
  /* Mark the (last) insn frame-related if requested.  */
  if (related)
    RTX_FRAME_RELATED_P (insn) = 1;
}
544
 
545
/* Generate efficient code to add a value to the frame pointer.  We
   can use P1 as a scratch register.  Set RTX_FRAME_RELATED_P on the
   generated insns if FRAME is nonzero.  */

static void
add_to_sp (rtx spreg, HOST_WIDE_INT value, int frame)
{
  if (value == 0)
    return;

  /* Choose whether to use a sequence using a temporary register, or
     a sequence with multiple adds.  We can add a signed 7 bit value
     in one instruction.  */
  if (value > 120 || value < -120)
    {
      /* Large adjustment: load VALUE into scratch P1, then add.  */
      rtx tmpreg = gen_rtx_REG (SImode, REG_P1);
      rtx insn;

      if (frame)
        frame_related_constant_load (tmpreg, value, TRUE);
      else
        {
          insn = emit_move_insn (tmpreg, GEN_INT (value));
          if (frame)
            RTX_FRAME_RELATED_P (insn) = 1;
        }

      insn = emit_insn (gen_addsi3 (spreg, spreg, tmpreg));
      if (frame)
        RTX_FRAME_RELATED_P (insn) = 1;
    }
  else
    /* Small adjustment: emit one or more immediate adds, each a
       multiple of 4 so the stack stays aligned.  */
    do
      {
        int size = value;
        rtx insn;

        if (size > 60)
          size = 60;
        else if (size < -60)
          /* We could use -62, but that would leave the stack unaligned, so
             it's no good.  */
          size = -60;

        insn = emit_insn (gen_addsi3 (spreg, spreg, GEN_INT (size)));
        if (frame)
          RTX_FRAME_RELATED_P (insn) = 1;
        value -= size;
      }
    while (value != 0);
}
596
 
597
/* Generate a LINK insn for a frame sized FRAME_SIZE.  If this constant
   is too large, generate a sequence of insns that has the same effect.
   SPREG contains (reg:SI REG_SP).  */

static void
emit_link_insn (rtx spreg, HOST_WIDE_INT frame_size)
{
  HOST_WIDE_INT link_size = frame_size;
  rtx insn;
  int i;

  /* Cap the LINK constant at its maximum encodable value.  */
  if (link_size > 262140)
    link_size = 262140;

  /* Use a LINK insn with as big a constant as possible, then subtract
     any remaining size from the SP.  */
  insn = emit_insn (gen_link (GEN_INT (-8 - link_size)));
  RTX_FRAME_RELATED_P (insn) = 1;

  /* Mark every SET inside the LINK parallel as frame-related so
     dwarf2out records all of its effects.  */
  for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
    {
      rtx set = XVECEXP (PATTERN (insn), 0, i);
      gcc_assert (GET_CODE (set) == SET);
      RTX_FRAME_RELATED_P (set) = 1;
    }

  frame_size -= link_size;

  if (frame_size > 0)
    {
      /* Must use a call-clobbered PREG that isn't the static chain.  */
      rtx tmpreg = gen_rtx_REG (Pmode, REG_P1);

      frame_related_constant_load (tmpreg, -frame_size, TRUE);
      insn = emit_insn (gen_addsi3 (spreg, spreg, tmpreg));
      RTX_FRAME_RELATED_P (insn) = 1;
    }
}
635
 
636
/* Return the number of bytes we must reserve for outgoing arguments
637
   in the current function's stack frame.  */
638
 
639
static HOST_WIDE_INT
640
arg_area_size (void)
641
{
642
  if (current_function_outgoing_args_size)
643
    {
644
      if (current_function_outgoing_args_size >= FIXED_STACK_AREA)
645
        return current_function_outgoing_args_size;
646
      else
647
        return FIXED_STACK_AREA;
648
    }
649
  return 0;
650
}
651
 
652
/* Save RETS and FP, and allocate a stack frame.  ALL is true if the
   function must save all its registers (true only for certain interrupt
   handlers).  */

static void
do_link (rtx spreg, HOST_WIDE_INT frame_size, bool all)
{
  frame_size += arg_area_size ();

  /* A full frame needs a LINK insn; otherwise push RETS/FP manually
     and just adjust SP.  */
  if (all || stack_frame_needed_p ()
      || (must_save_fp_p () && ! current_function_is_leaf))
    emit_link_insn (spreg, frame_size);
  else
    {
      /* Non-leaf: push the return address (RETS).  */
      if (! current_function_is_leaf)
        {
          rtx pat = gen_movsi (gen_rtx_MEM (Pmode,
                                            gen_rtx_PRE_DEC (Pmode, spreg)),
                               bfin_rets_rtx);
          rtx insn = emit_insn (pat);
          RTX_FRAME_RELATED_P (insn) = 1;
        }
      /* Push FP if it is live in this function.  */
      if (must_save_fp_p ())
        {
          rtx pat = gen_movsi (gen_rtx_MEM (Pmode,
                                            gen_rtx_PRE_DEC (Pmode, spreg)),
                               gen_rtx_REG (Pmode, REG_FP));
          rtx insn = emit_insn (pat);
          RTX_FRAME_RELATED_P (insn) = 1;
        }
      add_to_sp (spreg, -frame_size, 1);
    }
}
685
 
686
/* Like do_link, but used for epilogues to deallocate the stack frame.
   Pops FP and RETS in the reverse order do_link pushed them.  */

static void
do_unlink (rtx spreg, HOST_WIDE_INT frame_size, bool all)
{
  frame_size += arg_area_size ();

  if (all || stack_frame_needed_p ())
    emit_insn (gen_unlink ());
  else
    {
      rtx postinc = gen_rtx_MEM (Pmode, gen_rtx_POST_INC (Pmode, spreg));

      add_to_sp (spreg, frame_size, 0);
      if (must_save_fp_p ())
        {
          rtx fpreg = gen_rtx_REG (Pmode, REG_FP);
          emit_move_insn (fpreg, postinc);
          /* USE keeps the restore from being deleted as dead.  */
          emit_insn (gen_rtx_USE (VOIDmode, fpreg));
        }
      if (! current_function_is_leaf)
        {
          emit_move_insn (bfin_rets_rtx, postinc);
          emit_insn (gen_rtx_USE (VOIDmode, bfin_rets_rtx));
        }
    }
}
713
 
714
/* Generate a prologue suitable for a function of kind FKIND.  This is
   called for interrupt and exception handler prologues.
   SPREG contains (reg:SI REG_SP).  */

static void
expand_interrupt_handler_prologue (rtx spreg, e_funkind fkind)
{
  int i;
  HOST_WIDE_INT frame_size = get_frame_size ();
  rtx predec1 = gen_rtx_PRE_DEC (SImode, spreg);
  rtx predec = gen_rtx_MEM (SImode, predec1);
  rtx insn;
  tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl));
  bool all = lookup_attribute ("saveall", attrs) != NULL_TREE;
  tree kspisusp = lookup_attribute ("kspisusp", attrs);

  /* "kspisusp": the kernel stack pointer is saved in USP; load SP
     from there first.  */
  if (kspisusp)
    {
      insn = emit_move_insn (spreg, gen_rtx_REG (Pmode, REG_USP));
      RTX_FRAME_RELATED_P (insn) = 1;
    }

  /* We need space on the stack in case we need to save the argument
     registers.  */
  if (fkind == EXCPT_HANDLER)
    {
      insn = emit_insn (gen_addsi3 (spreg, spreg, GEN_INT (-12)));
      RTX_FRAME_RELATED_P (insn) = 1;
    }

  /* Push ASTAT.  */
  insn = emit_move_insn (predec, gen_rtx_REG (SImode, REG_ASTAT));
  RTX_FRAME_RELATED_P (insn) = 1;

  /* If we're calling other functions, they won't save their call-clobbered
     registers, so we must save everything here.  */
  if (!current_function_is_leaf)
    all = true;
  expand_prologue_reg_save (spreg, all, true);

  /* Push the remaining machine registers; A0/A1 are pushed in PDImode
     (two words each).  */
  for (i = REG_P7 + 1; i < REG_CC; i++)
    if (all
        || regs_ever_live[i]
        || (!leaf_function_p () && call_used_regs[i]))
      {
        if (i == REG_A0 || i == REG_A1)
          insn = emit_move_insn (gen_rtx_MEM (PDImode, predec1),
                                 gen_rtx_REG (PDImode, i));
        else
          insn = emit_move_insn (predec, gen_rtx_REG (SImode, i));
        RTX_FRAME_RELATED_P (insn) = 1;
      }

  /* "nesting": save the appropriate return register (RETX/RETN/RETI)
     so nested interrupts can be re-enabled.  */
  if (lookup_attribute ("nesting", attrs))
    {
      rtx srcreg = gen_rtx_REG (Pmode, (fkind == EXCPT_HANDLER ? REG_RETX
                                        : fkind == NMI_HANDLER ? REG_RETN
                                        : REG_RETI));
      insn = emit_move_insn (predec, srcreg);
      RTX_FRAME_RELATED_P (insn) = 1;
    }

  do_link (spreg, frame_size, all);

  if (fkind == EXCPT_HANDLER)
    {
      /* Set up R0..R2 with SEQSTAT, SP and FP+8 for the exception
         handler body; each insn is marked REG_MAYBE_DEAD since the
         body may not use them.  */
      rtx r0reg = gen_rtx_REG (SImode, REG_R0);
      rtx r1reg = gen_rtx_REG (SImode, REG_R1);
      rtx r2reg = gen_rtx_REG (SImode, REG_R2);
      rtx insn;

      insn = emit_move_insn (r0reg, gen_rtx_REG (SImode, REG_SEQSTAT));
      REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx,
                                            NULL_RTX);
      /* Clear the low 26 bits of SEQSTAT via shift right then left.  */
      insn = emit_insn (gen_ashrsi3 (r0reg, r0reg, GEN_INT (26)));
      REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx,
                                            NULL_RTX);
      insn = emit_insn (gen_ashlsi3 (r0reg, r0reg, GEN_INT (26)));
      REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx,
                                            NULL_RTX);
      insn = emit_move_insn (r1reg, spreg);
      REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx,
                                            NULL_RTX);
      insn = emit_move_insn (r2reg, gen_rtx_REG (Pmode, REG_FP));
      REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx,
                                            NULL_RTX);
      insn = emit_insn (gen_addsi3 (r2reg, r2reg, GEN_INT (8)));
      REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx,
                                            NULL_RTX);
    }
}
804
 
805
/* Generate an epilogue suitable for a function of kind FKIND.  This is
   called for interrupt and exception handler epilogues.
   SPREG contains (reg:SI REG_SP).  Restores everything the matching
   prologue pushed, in reverse order.  */

static void
expand_interrupt_handler_epilogue (rtx spreg, e_funkind fkind)
{
  int i;
  rtx postinc1 = gen_rtx_POST_INC (SImode, spreg);
  rtx postinc = gen_rtx_MEM (SImode, postinc1);
  tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl));
  bool all = lookup_attribute ("saveall", attrs) != NULL_TREE;

  /* A slightly crude technique to stop flow from trying to delete "dead"
     insns.  */
  MEM_VOLATILE_P (postinc) = 1;

  do_unlink (spreg, get_frame_size (), all);

  /* "nesting": restore the saved return register (RETX/RETN/RETI).  */
  if (lookup_attribute ("nesting", attrs))
    {
      rtx srcreg = gen_rtx_REG (Pmode, (fkind == EXCPT_HANDLER ? REG_RETX
                                        : fkind == NMI_HANDLER ? REG_RETN
                                        : REG_RETI));
      emit_move_insn (srcreg, postinc);
    }

  /* If we're calling other functions, they won't save their call-clobbered
     registers, so we must save (and restore) everything here.  */
  if (!current_function_is_leaf)
    all = true;

  /* Pop the machine registers in the reverse order of the prologue
     loop; A0/A1 are popped in PDImode (two words each).  */
  for (i = REG_CC - 1; i > REG_P7; i--)
    if (all
        || regs_ever_live[i]
        || (!leaf_function_p () && call_used_regs[i]))
      {
        if (i == REG_A0 || i == REG_A1)
          {
            rtx mem = gen_rtx_MEM (PDImode, postinc1);
            MEM_VOLATILE_P (mem) = 1;
            emit_move_insn (gen_rtx_REG (PDImode, i), mem);
          }
        else
          emit_move_insn (gen_rtx_REG (SImode, i), postinc);
      }

  expand_epilogue_reg_restore (spreg, all, true);

  /* Pop ASTAT.  */
  emit_move_insn (gen_rtx_REG (SImode, REG_ASTAT), postinc);

  /* Deallocate any space we left on the stack in case we needed to save the
     argument registers.  */
  if (fkind == EXCPT_HANDLER)
    emit_insn (gen_addsi3 (spreg, spreg, GEN_INT (12)));

  emit_jump_insn (gen_return_internal (GEN_INT (fkind)));
}
863
 
864
/* Used while emitting the prologue to generate code to load the correct value
865
   into the PIC register, which is passed in DEST.  */
866
 
867
static rtx
bfin_load_pic_reg (rtx dest)
{
  struct cgraph_local_info *i = NULL;
  rtx addr, insn;

  /* Cgraph-local info is only available when compiling the whole unit
     at once.  */
  if (flag_unit_at_a_time)
    i = cgraph_local_info (current_function_decl);

  /* Functions local to the translation unit don't need to reload the
     pic reg, since the caller always passes a usable one.  */
  if (i && i->local)
    return pic_offset_table_rtx;

  /* Compute the address of the GOT slot holding the library offset:
     either a fixed slot derived from -mshared-library-id, or the
     UNSPEC_LIBRARY_OFFSET marker resolved later.  */
  if (bfin_lib_id_given)
    addr = plus_constant (pic_offset_table_rtx, -4 - bfin_library_id * 4);
  else
    addr = gen_rtx_PLUS (Pmode, pic_offset_table_rtx,
                         gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx),
                                         UNSPEC_LIBRARY_OFFSET));
  insn = emit_insn (gen_movsi (dest, gen_rtx_MEM (Pmode, addr)));
  /* Let flow delete the load if the value turns out to be unused.  */
  REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx, NULL);
  return dest;
}
891
 
892
/* Generate RTL for the prologue of the current function.  */
893
 
894
void
895
bfin_expand_prologue (void)
896
{
897
  rtx insn;
898
  HOST_WIDE_INT frame_size = get_frame_size ();
899
  rtx spreg = gen_rtx_REG (Pmode, REG_SP);
900
  e_funkind fkind = funkind (TREE_TYPE (current_function_decl));
901
  rtx pic_reg_loaded = NULL_RTX;
902
 
903
  if (fkind != SUBROUTINE)
904
    {
905
      expand_interrupt_handler_prologue (spreg, fkind);
906
      return;
907
    }
908
 
909
  if (current_function_limit_stack)
910
    {
911
      HOST_WIDE_INT offset
912
        = bfin_initial_elimination_offset (ARG_POINTER_REGNUM,
913
                                           STACK_POINTER_REGNUM);
914
      rtx lim = stack_limit_rtx;
915
 
916
      if (GET_CODE (lim) == SYMBOL_REF)
917
        {
918
          rtx p2reg = gen_rtx_REG (Pmode, REG_P2);
919
          if (TARGET_ID_SHARED_LIBRARY)
920
            {
921
              rtx p1reg = gen_rtx_REG (Pmode, REG_P1);
922
              rtx val;
923
              pic_reg_loaded = bfin_load_pic_reg (p2reg);
924
              val = legitimize_pic_address (stack_limit_rtx, p1reg,
925
                                            pic_reg_loaded);
926
              emit_move_insn (p1reg, val);
927
              frame_related_constant_load (p2reg, offset, FALSE);
928
              emit_insn (gen_addsi3 (p2reg, p2reg, p1reg));
929
              lim = p2reg;
930
            }
931
          else
932
            {
933
              rtx limit = plus_constant (stack_limit_rtx, offset);
934
              emit_move_insn (p2reg, limit);
935
              lim = p2reg;
936
            }
937
        }
938
      emit_insn (gen_compare_lt (bfin_cc_rtx, spreg, lim));
939
      emit_insn (gen_trapifcc ());
940
    }
941
  expand_prologue_reg_save (spreg, 0, false);
942
 
943
  do_link (spreg, frame_size, false);
944
 
945
  if (TARGET_ID_SHARED_LIBRARY
946
      && (current_function_uses_pic_offset_table
947
          || !current_function_is_leaf))
948
    bfin_load_pic_reg (pic_offset_table_rtx);
949
}
950
 
951
/* Generate RTL for the epilogue of the current function.  NEED_RETURN is zero
952
   if this is for a sibcall.  EH_RETURN is nonzero if we're expanding an
953
   eh_return pattern.  */
954
 
955
void
bfin_expand_epilogue (int need_return, int eh_return)
{
  rtx spreg = gen_rtx_REG (Pmode, REG_SP);
  e_funkind fkind = funkind (TREE_TYPE (current_function_decl));

  /* Interrupt, exception and NMI handlers use their own epilogue.  */
  if (fkind != SUBROUTINE)
    {
      expand_interrupt_handler_epilogue (spreg, fkind);
      return;
    }

  /* Tear down the frame (UNLINK) and restore saved registers.  */
  do_unlink (spreg, get_frame_size (), false);

  expand_epilogue_reg_restore (spreg, false, false);

  /* Omit the return insn if this is for a sibcall.  */
  if (! need_return)
    return;

  /* For eh_return, P2 holds the stack adjustment computed by the
     EH machinery; apply it before returning.  */
  if (eh_return)
    emit_insn (gen_addsi3 (spreg, spreg, gen_rtx_REG (Pmode, REG_P2)));

  emit_jump_insn (gen_return_internal (GEN_INT (SUBROUTINE)));
}
980
 
981
/* Return nonzero if register OLD_REG can be renamed to register NEW_REG.  */
982
 
983
int
984
bfin_hard_regno_rename_ok (unsigned int old_reg ATTRIBUTE_UNUSED,
985
                           unsigned int new_reg)
986
{
987
  /* Interrupt functions can only use registers that have already been
988
     saved by the prologue, even if they would normally be
989
     call-clobbered.  */
990
 
991
  if (funkind (TREE_TYPE (current_function_decl)) != SUBROUTINE
992
      && !regs_ever_live[new_reg])
993
    return 0;
994
 
995
  return 1;
996
}
997
 
998
/* Return the value of the return address for the frame COUNT steps up
999
   from the current frame, after the prologue.
1000
   We punt for everything but the current frame by returning const0_rtx.  */
1001
 
1002
rtx
1003
bfin_return_addr_rtx (int count)
1004
{
1005
  if (count != 0)
1006
    return const0_rtx;
1007
 
1008
  return get_hard_reg_initial_val (Pmode, REG_RETS);
1009
}
1010
 
1011
/* Try machine-dependent ways of modifying an illegitimate address X
1012
   to be legitimate.  If we find one, return the new, valid address,
1013
   otherwise return NULL_RTX.
1014
 
1015
   OLDX is the address as it was before break_out_memory_refs was called.
1016
   In some cases it is useful to look at this to decide what needs to be done.
1017
 
1018
   MODE is the mode of the memory reference.  */
1019
 
1020
rtx
legitimize_address (rtx x ATTRIBUTE_UNUSED, rtx oldx ATTRIBUTE_UNUSED,
                    enum machine_mode mode ATTRIBUTE_UNUSED)
{
  /* The Blackfin has no machine-specific address legitimization; always
     fall back to the generic handling.  */
  return NULL_RTX;
}
1026
 
1027
/* This predicate is used to compute the length of a load/store insn.
1028
   OP is a MEM rtx, we return nonzero if its addressing mode requires a
1029
   32 bit instruction.  */
1030
 
1031
int
effective_address_32bit_p (rtx op, enum machine_mode mode)
{
  HOST_WIDE_INT offset;

  /* NOTE(review): the incoming MODE argument is immediately discarded in
     favor of the MEM's own mode — confirm callers never rely on passing a
     different mode.  */
  mode = GET_MODE (op);
  op = XEXP (op, 0);

  /* Addresses without an offset (plain register, auto inc/dec) always fit
     in the 16 bit form.  */
  if (GET_CODE (op) != PLUS)
    {
      gcc_assert (REG_P (op) || GET_CODE (op) == POST_INC
                  || GET_CODE (op) == PRE_DEC || GET_CODE (op) == POST_DEC);
      return 0;
    }

  offset = INTVAL (XEXP (op, 1));

  /* All byte loads use a 16 bit offset.  */
  if (GET_MODE_SIZE (mode) == 1)
    return 1;

  if (GET_MODE_SIZE (mode) == 4)
    {
      /* Frame pointer relative loads can use a negative offset, all others
         are restricted to a small positive one.  */
      if (XEXP (op, 0) == frame_pointer_rtx)
        return offset < -128 || offset > 60;
      return offset < 0 || offset > 60;
    }

  /* Must be HImode now.  */
  return offset < 0 || offset > 30;
}
1064
 
1065
/* Return cost of the memory address ADDR.
1066
   All addressing modes are equally cheap on the Blackfin.  */
1067
 
1068
static int
bfin_address_cost (rtx addr ATTRIBUTE_UNUSED)
{
  /* All addressing modes cost the same on the Blackfin.  */
  return 1;
}
1073
 
1074
/* Subroutine of print_operand; used to print a memory reference X to FILE.  */
1075
 
1076
void
print_address_operand (FILE *file, rtx x)
{
  /* Emit the assembler syntax for one address: base+offset, and the
     pre/post increment/decrement forms.  */
  switch (GET_CODE (x))
    {
    case PLUS:
      output_address (XEXP (x, 0));
      fprintf (file, "+");
      output_address (XEXP (x, 1));
      break;

    case PRE_DEC:
      fprintf (file, "--");
      output_address (XEXP (x, 0));
      break;
    case POST_INC:
      output_address (XEXP (x, 0));
      fprintf (file, "++");
      break;
    case POST_DEC:
      output_address (XEXP (x, 0));
      fprintf (file, "--");
      break;

    default:
      /* A nested MEM is never a valid address component here.  */
      gcc_assert (GET_CODE (x) != MEM);
      print_operand (file, x, 0);
      break;
    }
}
1106
 
1107
/* Adding intp DImode support by Tony
1108
 * -- Q: (low  word)
1109
 * -- R: (high word)
1110
 */
1111
 
1112
void
print_operand (FILE *file, rtx x, char code)
{
  enum machine_mode mode = GET_MODE (x);

  switch (code)
    {
    /* %j: print the condition suffix for a comparison code.
       NOTE(review): unsigned comparisons (GTU/LTU/GEU/LEU) print the same
       suffixes as the signed ones — confirm this is intended for the
       patterns that use %j.  */
    case 'j':
      switch (GET_CODE (x))
        {
        case EQ:
          fprintf (file, "e");
          break;
        case NE:
          fprintf (file, "ne");
          break;
        case GT:
          fprintf (file, "g");
          break;
        case LT:
          fprintf (file, "l");
          break;
        case GE:
          fprintf (file, "ge");
          break;
        case LE:
          fprintf (file, "le");
          break;
        case GTU:
          fprintf (file, "g");
          break;
        case LTU:
          fprintf (file, "l");
          break;
        case GEU:
          fprintf (file, "ge");
          break;
        case LEU:
          fprintf (file, "le");
          break;
        default:
          output_operand_lossage ("invalid %%j value");
        }
      break;

    /* %J: like %j but with the condition inverted.  */
    case 'J':                                    /* reverse logic */
      switch (GET_CODE(x))
        {
        case EQ:
          fprintf (file, "ne");
          break;
        case NE:
          fprintf (file, "e");
          break;
        case GT:
          fprintf (file, "le");
          break;
        case LT:
          fprintf (file, "ge");
          break;
        case GE:
          fprintf (file, "l");
          break;
        case LE:
          fprintf (file, "g");
          break;
        case GTU:
          fprintf (file, "le");
          break;
        case LTU:
          fprintf (file, "ge");
          break;
        case GEU:
          fprintf (file, "l");
          break;
        case LEU:
          fprintf (file, "g");
          break;
        default:
          output_operand_lossage ("invalid %%J value");
        }
      break;

    default:
      switch (GET_CODE (x))
        {
        case REG:
          /* Register modifiers: h = low half, d = high half, w/x = .w/.x
             parts of an accumulator, D = dreg pair, H = second word of a
             64 bit value, T = byte register.  */
          if (code == 'h')
            {
              gcc_assert (REGNO (x) < 32);
              fprintf (file, "%s", short_reg_names[REGNO (x)]);
              /*fprintf (file, "\n%d\n ", REGNO (x));*/
              break;
            }
          else if (code == 'd')
            {
              gcc_assert (REGNO (x) < 32);
              fprintf (file, "%s", high_reg_names[REGNO (x)]);
              break;
            }
          else if (code == 'w')
            {
              gcc_assert (REGNO (x) == REG_A0 || REGNO (x) == REG_A1);
              fprintf (file, "%s.w", reg_names[REGNO (x)]);
            }
          else if (code == 'x')
            {
              gcc_assert (REGNO (x) == REG_A0 || REGNO (x) == REG_A1);
              fprintf (file, "%s.x", reg_names[REGNO (x)]);
            }
          else if (code == 'D')
            {
              fprintf (file, "%s", dregs_pair_names[REGNO (x)]);
            }
          else if (code == 'H')
            {
              gcc_assert (mode == DImode || mode == DFmode);
              gcc_assert (REG_P (x));
              fprintf (file, "%s", reg_names[REGNO (x) + 1]);
            }
          else if (code == 'T')
            {
              gcc_assert (D_REGNO_P (REGNO (x)));
              fprintf (file, "%s", byte_reg_names[REGNO (x)]);
            }
          else
            fprintf (file, "%s", reg_names[REGNO (x)]);
          break;

        case MEM:
          fputc ('[', file);
          x = XEXP (x,0);
          print_address_operand (file, x);
          fputc (']', file);
          break;

        case CONST_INT:
          /* Moves to half registers with d or h modifiers always use unsigned
             constants.  */
          if (code == 'd')
            x = GEN_INT ((INTVAL (x) >> 16) & 0xffff);
          else if (code == 'h')
            x = GEN_INT (INTVAL (x) & 0xffff);
          else if (code == 'X')
            /* %X/%Y print log2 of the (complemented) constant, for
               single-bit set/clear instructions.  */
            x = GEN_INT (exact_log2 (0xffffffff & INTVAL (x)));
          else if (code == 'Y')
            x = GEN_INT (exact_log2 (0xffffffff & ~INTVAL (x)));
          else if (code == 'Z')
            /* Used for LINK insns.  */
            x = GEN_INT (-8 - INTVAL (x));

          /* fall through */

        case SYMBOL_REF:
          output_addr_const (file, x);
          break;

        case CONST_DOUBLE:
          output_operand_lossage ("invalid const_double operand");
          break;

        case UNSPEC:
          switch (XINT (x, 1))
            {
            case UNSPEC_MOVE_PIC:
              output_addr_const (file, XVECEXP (x, 0, 0));
              fprintf (file, "@GOT");
              break;

            case UNSPEC_LIBRARY_OFFSET:
              fprintf (file, "_current_shared_library_p5_offset_");
              break;

            default:
              gcc_unreachable ();
            }
          break;

        default:
          output_addr_const (file, x);
        }
    }
}
1295
 
1296
/* Argument support functions.  */
1297
 
1298
/* Initialize a variable CUM of type CUMULATIVE_ARGS
1299
   for a call to a function whose data type is FNTYPE.
1300
   For a library call, FNTYPE is 0.
1301
   VDSP C Compiler manual, our ABI says that
1302
   first 3 words of arguments will use R0, R1 and R2.
1303
*/
1304
 
1305
void
1306
init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype,
1307
                      rtx libname ATTRIBUTE_UNUSED)
1308
{
1309
  static CUMULATIVE_ARGS zero_cum;
1310
 
1311
  *cum = zero_cum;
1312
 
1313
  /* Set up the number of registers to use for passing arguments.  */
1314
 
1315
  cum->nregs = max_arg_registers;
1316
  cum->arg_regs = arg_regs;
1317
 
1318
  cum->call_cookie = CALL_NORMAL;
1319
  /* Check for a longcall attribute.  */
1320
  if (fntype && lookup_attribute ("shortcall", TYPE_ATTRIBUTES (fntype)))
1321
    cum->call_cookie |= CALL_SHORT;
1322
  else if (fntype && lookup_attribute ("longcall", TYPE_ATTRIBUTES (fntype)))
1323
    cum->call_cookie |= CALL_LONG;
1324
 
1325
  return;
1326
}
1327
 
1328
/* Update the data in CUM to advance over an argument
1329
   of mode MODE and data type TYPE.
1330
   (TYPE is null for libcalls where that information may not be available.)  */
1331
 
1332
void
1333
function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type,
1334
                      int named ATTRIBUTE_UNUSED)
1335
{
1336
  int count, bytes, words;
1337
 
1338
  bytes = (mode == BLKmode) ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
1339
  words = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
1340
 
1341
  cum->words += words;
1342
  cum->nregs -= words;
1343
 
1344
  if (cum->nregs <= 0)
1345
    {
1346
      cum->nregs = 0;
1347
      cum->arg_regs = NULL;
1348
    }
1349
  else
1350
    {
1351
      for (count = 1; count <= words; count++)
1352
        cum->arg_regs++;
1353
    }
1354
 
1355
  return;
1356
}
1357
 
1358
/* Define where to put the arguments to a function.
1359
   Value is zero to push the argument on the stack,
1360
   or a hard register in which to store the argument.
1361
 
1362
   MODE is the argument's machine mode.
1363
   TYPE is the data type of the argument (as a tree).
1364
    This is null for libcalls where that information may
1365
    not be available.
1366
   CUM is a variable of type CUMULATIVE_ARGS which gives info about
1367
    the preceding args and about the function being called.
1368
   NAMED is nonzero if this argument is a named parameter
1369
    (otherwise it is an extra parameter matching an ellipsis).  */
1370
 
1371
struct rtx_def *
function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type,
              int named ATTRIBUTE_UNUSED)
{
  int bytes
    = (mode == BLKmode) ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);

  if (mode == VOIDmode)
    /* Compute operand 2 of the call insn.  */
    return GEN_INT (cum->call_cookie);

  /* Variable-sized arguments (int_size_in_bytes == -1) go on the stack.  */
  if (bytes == -1)
    return NULL_RTX;

  /* While registers remain, pass the argument in the next one recorded
     in cum->arg_regs; otherwise push it on the stack.  */
  if (cum->nregs)
    return gen_rtx_REG (mode, *(cum->arg_regs));

  return NULL_RTX;
}
1390
 
1391
/* For an arg passed partly in registers and partly in memory,
1392
   this is the number of bytes passed in registers.
1393
   For args passed entirely in registers or entirely in memory, zero.
1394
 
1395
   Refer VDSP C Compiler manual, our ABI.
1396
   First 3 words are in registers. So, if a an argument is larger
1397
   than the registers available, it will span the register and
1398
   stack.   */
1399
 
1400
static int
1401
bfin_arg_partial_bytes (CUMULATIVE_ARGS *cum, enum machine_mode mode,
1402
                        tree type ATTRIBUTE_UNUSED,
1403
                        bool named ATTRIBUTE_UNUSED)
1404
{
1405
  int bytes
1406
    = (mode == BLKmode) ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
1407
  int bytes_left = cum->nregs * UNITS_PER_WORD;
1408
 
1409
  if (bytes == -1)
1410
    return 0;
1411
 
1412
  if (bytes_left == 0)
1413
    return 0;
1414
  if (bytes > bytes_left)
1415
    return bytes_left;
1416
  return 0;
1417
}
1418
 
1419
/* Variable sized types are passed by reference.  */
1420
 
1421
static bool
1422
bfin_pass_by_reference (CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED,
1423
                        enum machine_mode mode ATTRIBUTE_UNUSED,
1424
                        tree type, bool named ATTRIBUTE_UNUSED)
1425
{
1426
  return type && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST;
1427
}
1428
 
1429
/* Decide whether a type should be returned in memory (true)
1430
   or in a register (false).  This is called by the macro
1431
   RETURN_IN_MEMORY.  */
1432
 
1433
int
1434
bfin_return_in_memory (tree type)
1435
{
1436
  int size = int_size_in_bytes (type);
1437
  return size > 2 * UNITS_PER_WORD || size == -1;
1438
}
1439
 
1440
/* Register in which address to store a structure value
1441
   is passed to a function.  */
1442
static rtx
bfin_struct_value_rtx (tree fntype ATTRIBUTE_UNUSED,
                      int incoming ATTRIBUTE_UNUSED)
{
  /* The address of an aggregate return value is always passed in P0.  */
  return gen_rtx_REG (Pmode, REG_P0);
}
1448
 
1449
/* Return true when register may be used to pass function parameters.  */
1450
 
1451
bool
1452
function_arg_regno_p (int n)
1453
{
1454
  int i;
1455
  for (i = 0; arg_regs[i] != -1; i++)
1456
    if (n == arg_regs[i])
1457
      return true;
1458
  return false;
1459
}
1460
 
1461
/* Returns 1 if OP contains a symbol reference */
1462
 
1463
int
1464
symbolic_reference_mentioned_p (rtx op)
1465
{
1466
  register const char *fmt;
1467
  register int i;
1468
 
1469
  if (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF)
1470
    return 1;
1471
 
1472
  fmt = GET_RTX_FORMAT (GET_CODE (op));
1473
  for (i = GET_RTX_LENGTH (GET_CODE (op)) - 1; i >= 0; i--)
1474
    {
1475
      if (fmt[i] == 'E')
1476
        {
1477
          register int j;
1478
 
1479
          for (j = XVECLEN (op, i) - 1; j >= 0; j--)
1480
            if (symbolic_reference_mentioned_p (XVECEXP (op, i, j)))
1481
              return 1;
1482
        }
1483
 
1484
      else if (fmt[i] == 'e' && symbolic_reference_mentioned_p (XEXP (op, i)))
1485
        return 1;
1486
    }
1487
 
1488
  return 0;
1489
}
1490
 
1491
/* Decide whether we can make a sibling call to a function.  DECL is the
1492
   declaration of the function being targeted by the call and EXP is the
1493
   CALL_EXPR representing the call.  */
1494
 
1495
static bool
1496
bfin_function_ok_for_sibcall (tree decl ATTRIBUTE_UNUSED,
1497
                              tree exp ATTRIBUTE_UNUSED)
1498
{
1499
  e_funkind fkind = funkind (TREE_TYPE (current_function_decl));
1500
  return fkind == SUBROUTINE;
1501
}
1502
 
1503
/* Emit RTL insns to initialize the variable parts of a trampoline at
1504
   TRAMP. FNADDR is an RTX for the address of the function's pure
1505
   code.  CXT is an RTX for the static chain value for the function.  */
1506
 
1507
void
1508
initialize_trampoline (tramp, fnaddr, cxt)
1509
     rtx tramp, fnaddr, cxt;
1510
{
1511
  rtx t1 = copy_to_reg (fnaddr);
1512
  rtx t2 = copy_to_reg (cxt);
1513
  rtx addr;
1514
 
1515
  addr = memory_address (Pmode, plus_constant (tramp, 2));
1516
  emit_move_insn (gen_rtx_MEM (HImode, addr), gen_lowpart (HImode, t1));
1517
  emit_insn (gen_ashrsi3 (t1, t1, GEN_INT (16)));
1518
  addr = memory_address (Pmode, plus_constant (tramp, 6));
1519
  emit_move_insn (gen_rtx_MEM (HImode, addr), gen_lowpart (HImode, t1));
1520
 
1521
  addr = memory_address (Pmode, plus_constant (tramp, 10));
1522
  emit_move_insn (gen_rtx_MEM (HImode, addr), gen_lowpart (HImode, t2));
1523
  emit_insn (gen_ashrsi3 (t2, t2, GEN_INT (16)));
1524
  addr = memory_address (Pmode, plus_constant (tramp, 14));
1525
  emit_move_insn (gen_rtx_MEM (HImode, addr), gen_lowpart (HImode, t2));
1526
}
1527
 
1528
/* Emit insns to move operands[1] into operands[0].  */
1529
 
1530
void
1531
emit_pic_move (rtx *operands, enum machine_mode mode ATTRIBUTE_UNUSED)
1532
{
1533
  rtx temp = reload_in_progress ? operands[0] : gen_reg_rtx (Pmode);
1534
 
1535
  if (GET_CODE (operands[0]) == MEM && SYMBOLIC_CONST (operands[1]))
1536
    operands[1] = force_reg (SImode, operands[1]);
1537
  else
1538
    operands[1] = legitimize_pic_address (operands[1], temp,
1539
                                          pic_offset_table_rtx);
1540
}
1541
 
1542
/* Expand a move operation in mode MODE.  The operands are in OPERANDS.  */
1543
 
1544
void
expand_move (rtx *operands, enum machine_mode mode)
{
  /* Symbolic constants need to be wrapped in PIC references when
     compiling position-independent code.  */
  if (flag_pic && SYMBOLIC_CONST (operands[1]))
    emit_pic_move (operands, mode);

  /* Don't generate memory->memory or constant->memory moves, go through a
     register */
  else if ((reload_in_progress | reload_completed) == 0
           && GET_CODE (operands[0]) == MEM
           && GET_CODE (operands[1]) != REG)
    operands[1] = force_reg (mode, operands[1]);
}
1557
 
1558
/* Split one or more DImode RTL references into pairs of SImode
1559
   references.  The RTL can be REG, offsettable MEM, integer constant, or
1560
   CONST_DOUBLE.  "operands" is a pointer to an array of DImode RTL to
1561
   split and "num" is its length.  lo_half and hi_half are output arrays
1562
   that parallel "operands".  */
1563
 
1564
void
split_di (rtx operands[], int num, rtx lo_half[], rtx hi_half[])
{
  while (num--)
    {
      rtx op = operands[num];

      /* simplify_subreg refuse to split volatile memory addresses,
         but we still have to handle it.  */
      if (GET_CODE (op) == MEM)
        {
          /* Low word at offset 0, high word at offset 4.  */
          lo_half[num] = adjust_address (op, SImode, 0);
          hi_half[num] = adjust_address (op, SImode, 4);
        }
      else
        {
          /* Constants may have VOIDmode; treat those as DImode so the
             subreg machinery picks the right word.  */
          lo_half[num] = simplify_gen_subreg (SImode, op,
                                              GET_MODE (op) == VOIDmode
                                              ? DImode : GET_MODE (op), 0);
          hi_half[num] = simplify_gen_subreg (SImode, op,
                                              GET_MODE (op) == VOIDmode
                                              ? DImode : GET_MODE (op), 4);
        }
    }
}
1589
 
1590
bool
1591
bfin_longcall_p (rtx op, int call_cookie)
1592
{
1593
  gcc_assert (GET_CODE (op) == SYMBOL_REF);
1594
  if (call_cookie & CALL_SHORT)
1595
    return 0;
1596
  if (call_cookie & CALL_LONG)
1597
    return 1;
1598
  if (TARGET_LONG_CALLS)
1599
    return 1;
1600
  return 0;
1601
}
1602
 
1603
/* Expand a call instruction.  FNADDR is the call target, RETVAL the return value.
1604
   COOKIE is a CONST_INT holding the call_cookie prepared init_cumulative_args.
1605
   SIBCALL is nonzero if this is a sibling call.  */
1606
 
1607
void
bfin_expand_call (rtx retval, rtx fnaddr, rtx callarg1, rtx cookie, int sibcall)
{
  rtx use = NULL, call;
  rtx callee = XEXP (fnaddr, 0);
  /* The call pattern is a PARALLEL of the call itself, a USE of the
     cookie, and - for sibcalls - a RETURN.  */
  rtx pat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (sibcall ? 3 : 2));

  /* In an untyped call, we can get NULL for operand 2.  */
  if (cookie == NULL_RTX)
    cookie = const0_rtx;

  /* Static functions and indirect calls don't need the pic register.  */
  if (flag_pic
      && GET_CODE (callee) == SYMBOL_REF
      && !SYMBOL_REF_LOCAL_P (callee))
    use_reg (&use, pic_offset_table_rtx);

  /* Force the callee into a register when the address isn't a directly
     callable operand, or when PIC / long-call rules require an indirect
     call.  */
  if ((!register_no_elim_operand (callee, Pmode)
       && GET_CODE (callee) != SYMBOL_REF)
      || (GET_CODE (callee) == SYMBOL_REF
          && (flag_pic
              || bfin_longcall_p (callee, INTVAL (cookie)))))
    {
      callee = copy_to_mode_reg (Pmode, callee);
      fnaddr = gen_rtx_MEM (Pmode, callee);
    }
  call = gen_rtx_CALL (VOIDmode, fnaddr, callarg1);

  if (retval)
    call = gen_rtx_SET (VOIDmode, retval, call);

  XVECEXP (pat, 0, 0) = call;
  XVECEXP (pat, 0, 1) = gen_rtx_USE (VOIDmode, cookie);
  if (sibcall)
    XVECEXP (pat, 0, 2) = gen_rtx_RETURN (VOIDmode);
  call = emit_call_insn (pat);
  if (use)
    CALL_INSN_FUNCTION_USAGE (call) = use;
}
1646
 
1647
/* Return 1 if hard register REGNO can hold a value of machine-mode MODE.  */
1648
 
1649
int
hard_regno_mode_ok (int regno, enum machine_mode mode)
{
  /* Allow only dregs to store value of mode HI or QI */
  enum reg_class class = REGNO_REG_CLASS (regno);

  /* CCmode values never live in a register here; BImode is used for the
     CC register instead.  */
  if (mode == CCmode)
    return 0;

  if (mode == V2HImode)
    return D_REGNO_P (regno);
  if (class == CCREGS)
    return mode == BImode;
  /* The 40 bit accumulators A0/A1 are the only PDImode registers.  */
  if (mode == PDImode)
    return regno == REG_A0 || regno == REG_A1;
  if (mode == SImode
      && TEST_HARD_REG_BIT (reg_class_contents[PROLOGUE_REGS], regno))
    return 1;

  return TEST_HARD_REG_BIT (reg_class_contents[MOST_REGS], regno);
}
1670
 
1671
/* Implements target hook vector_mode_supported_p.  */
1672
 
1673
static bool
bfin_vector_mode_supported_p (enum machine_mode mode)
{
  /* V2HImode (a pair of 16 bit halves) is the only supported vector
     mode.  */
  return mode == V2HImode;
}
1678
 
1679
/* Return the cost of moving data from a register in class CLASS1 to
1680
   one in class CLASS2.  A cost of 2 is the default.  */
1681
 
1682
int
1683
bfin_register_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED,
1684
                         enum reg_class class1, enum reg_class class2)
1685
{
1686
  /* If optimizing for size, always prefer reg-reg over reg-memory moves.  */
1687
  if (optimize_size)
1688
    return 2;
1689
 
1690
  /* There are some stalls involved when moving from a DREG to a different
1691
     class reg, and using the value in one of the following instructions.
1692
     Attempt to model this by slightly discouraging such moves.  */
1693
  if (class1 == DREGS && class2 != DREGS)
1694
    return 2 * 2;
1695
 
1696
  return 2;
1697
}
1698
 
1699
/* Return the cost of moving data of mode M between a
1700
   register and memory.  A value of 2 is the default; this cost is
1701
   relative to those in `REGISTER_MOVE_COST'.
1702
 
1703
   ??? In theory L1 memory has single-cycle latency.  We should add a switch
1704
   that tells the compiler whether we expect to use only L1 memory for the
1705
   program; it'll make the costs more accurate.  */
1706
 
1707
int
1708
bfin_memory_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED,
1709
                       enum reg_class class,
1710
                       int in ATTRIBUTE_UNUSED)
1711
{
1712
  /* Make memory accesses slightly more expensive than any register-register
1713
     move.  Also, penalize non-DP registers, since they need secondary
1714
     reloads to load and store.  */
1715
  if (! reg_class_subset_p (class, DPREGS))
1716
    return 10;
1717
 
1718
  return 8;
1719
}
1720
 
1721
/* Inform reload about cases where moving X with a mode MODE to a register in
1722
   CLASS requires an extra scratch register.  Return the class needed for the
1723
   scratch register.  */
1724
 
1725
enum reg_class
secondary_input_reload_class (enum reg_class class, enum machine_mode mode,
                              rtx x)
{
  /* If we have HImode or QImode, we can only use DREGS as secondary registers;
     in most other cases we can also use PREGS.  */
  enum reg_class default_class = GET_MODE_SIZE (mode) >= 4 ? DPREGS : DREGS;
  enum reg_class x_class = NO_REGS;
  enum rtx_code code = GET_CODE (x);

  /* Look through subregs, then classify a (possibly renumbered) hard
     register; a pseudo with no hard register behaves like memory.  */
  if (code == SUBREG)
    x = SUBREG_REG (x), code = GET_CODE (x);
  if (REG_P (x))
    {
      int regno = REGNO (x);
      if (regno >= FIRST_PSEUDO_REGISTER)
        regno = reg_renumber[regno];

      if (regno == -1)
        code = MEM;
      else
        x_class = REGNO_REG_CLASS (regno);
    }

  /* We can be asked to reload (plus (FP) (large_constant)) into a DREG.
     This happens as a side effect of register elimination, and we need
     a scratch register to do it.  */
  if (fp_plus_const_operand (x, mode))
    {
      rtx op2 = XEXP (x, 1);
      int large_constant_p = ! CONST_7BIT_IMM_P (INTVAL (op2));

      /* PREGS can compute the sum directly, no scratch needed.  */
      if (class == PREGS || class == PREGS_CLOBBERED)
        return NO_REGS;
      /* If destination is a DREG, we can do this without a scratch register
         if the constant is valid for an add instruction.  */
      if (class == DREGS || class == DPREGS)
        return large_constant_p ? PREGS : NO_REGS;
      /* Reloading to anything other than a DREG?  Use a PREG scratch
         register.  */
      return PREGS;
    }

  /* Data can usually be moved freely between registers of most classes.
     AREGS are an exception; they can only move to or from another register
     in AREGS or one in DREGS.  They can also be assigned the constant 0.  */
  if (x_class == AREGS)
    return class == DREGS || class == AREGS ? NO_REGS : DREGS;

  if (class == AREGS)
    {
      if (x != const0_rtx && x_class != DREGS)
        return DREGS;
      else
        return NO_REGS;
    }

  /* CCREGS can only be moved from/to DREGS.  */
  if (class == CCREGS && x_class != DREGS)
    return DREGS;
  if (x_class == CCREGS && class != DREGS)
    return DREGS;
  /* All registers other than AREGS can load arbitrary constants.  The only
     case that remains is MEM.  */
  if (code == MEM)
    if (! reg_class_subset_p (class, default_class))
      return default_class;
  return NO_REGS;
}
1794
 
1795
/* Like secondary_input_reload_class; and all we do is call that function.  */
1796
 
1797
enum reg_class
secondary_output_reload_class (enum reg_class class, enum machine_mode mode,
                               rtx x)
{
  /* Output reloads have the same scratch-register requirements as input
     reloads on this target, so simply delegate.  */
  return secondary_input_reload_class (class, mode, x);
}
1803
 
1804
/* Implement TARGET_HANDLE_OPTION.  */
1805
 
1806
static bool
1807
bfin_handle_option (size_t code, const char *arg, int value)
1808
{
1809
  switch (code)
1810
    {
1811
    case OPT_mshared_library_id_:
1812
      if (value > MAX_LIBRARY_ID)
1813
        error ("-mshared-library-id=%s is not between 0 and %d",
1814
               arg, MAX_LIBRARY_ID);
1815
      bfin_lib_id_given = 1;
1816
      return true;
1817
 
1818
    default:
1819
      return true;
1820
    }
1821
}
1822
 
1823
/* Implement the macro OVERRIDE_OPTIONS.  */
1824
 
1825
void
1826
override_options (void)
1827
{
1828
  if (TARGET_OMIT_LEAF_FRAME_POINTER)
1829
    flag_omit_frame_pointer = 1;
1830
 
1831
  /* Library identification */
1832
  if (bfin_lib_id_given && ! TARGET_ID_SHARED_LIBRARY)
1833
    error ("-mshared-library-id= specified without -mid-shared-library");
1834
 
1835
  if (TARGET_ID_SHARED_LIBRARY)
1836
    /* ??? Provide a way to use a bigger GOT.  */
1837
    flag_pic = 1;
1838
 
1839
  flag_schedule_insns = 0;
1840
}
1841
 
1842
/* Return the destination address of BRANCH.
1843
   We need to use this instead of get_attr_length, because the
1844
   cbranch_with_nops pattern conservatively sets its length to 6, and
1845
   we still prefer to use shorter sequences.  */
1846
 
1847
static int
1848
branch_dest (rtx branch)
1849
{
1850
  rtx dest;
1851
  int dest_uid;
1852
  rtx pat = PATTERN (branch);
1853
  if (GET_CODE (pat) == PARALLEL)
1854
    pat = XVECEXP (pat, 0, 0);
1855
  dest = SET_SRC (pat);
1856
  if (GET_CODE (dest) == IF_THEN_ELSE)
1857
    dest = XEXP (dest, 1);
1858
  dest = XEXP (dest, 0);
1859
  dest_uid = INSN_UID (dest);
1860
  return INSN_ADDRESSES (dest_uid);
1861
}
1862
 
1863
/* Return nonzero if INSN is annotated with a REG_BR_PROB note that indicates
1864
   it's a branch that's predicted taken.  */
1865
 
1866
static int
1867
cbranch_predicted_taken_p (rtx insn)
1868
{
1869
  rtx x = find_reg_note (insn, REG_BR_PROB, 0);
1870
 
1871
  if (x)
1872
    {
1873
      int pred_val = INTVAL (XEXP (x, 0));
1874
 
1875
      return pred_val >= REG_BR_PROB_BASE / 2;
1876
    }
1877
 
1878
  return 0;
1879
}
1880
 
1881
/* Templates for use by asm_conditional_branch.  The first index is
   computed there as (bp << 1) | (BRF or BRT depending on the condition
   code), i.e. the low bit selects the branch sense and the high bit the
   predicted-taken forms.  The second index is the length class:
   0 = short conditional branch, 1 = inverted short branch over a
   jump.s, 2 = inverted short branch over a jump.l.  Operand %3 is the
   destination label.  */

static const char *ccbranch_templates[][3] = {
  { "if !cc jump %3;",  "if cc jump 4 (bp); jump.s %3;",  "if cc jump 6 (bp); jump.l %3;" },
  { "if cc jump %3;",   "if !cc jump 4 (bp); jump.s %3;", "if !cc jump 6 (bp); jump.l %3;" },
  { "if !cc jump %3 (bp);",  "if cc jump 4; jump.s %3;",  "if cc jump 6; jump.l %3;" },
  { "if cc jump %3 (bp);",  "if !cc jump 4; jump.s %3;",  "if !cc jump 6; jump.l %3;" },
};
1889
 
1890
/* Output INSN, which is a conditional branch instruction with operands
   OPERANDS.

   We deal with the various forms of conditional branches that can be generated
   by bfin_reorg to prevent the hardware from doing speculative loads, by
   - emitting a sufficient number of nops, if N_NOPS is nonzero, or
   - always emitting the branch as predicted taken, if PREDICT_TAKEN is true.
   Either of these is only necessary if the branch is short, otherwise the
   template we use ends in an unconditional jump which flushes the pipeline
   anyway.  */

void
asm_conditional_branch (rtx insn, rtx *operands, int n_nops, int predict_taken)
{
  /* Byte distance from this insn to the branch target.  */
  int offset = branch_dest (insn) - INSN_ADDRESSES (INSN_UID (insn));
  /* Note : offset for instructions like if cc jmp; jump.[sl] offset
            is to be taken from start of if cc rather than jump.
            Range for jump.s is (-4094, 4096) instead of (-4096, 4094)
  */
  /* Length class: 0 = short branch, 1 = jump.s sequence, 2 = jump.l
     sequence; indexes the columns of ccbranch_templates.  */
  int len = (offset >= -1024 && offset <= 1022 ? 0
             : offset >= -4094 && offset <= 4096 ? 1
             : 2);
  /* Forcing predicted-taken only matters for the short form; the long
     forms end in an unconditional jump that flushes the pipeline.  */
  int bp = predict_taken && len == 0 ? 1 : cbranch_predicted_taken_p (insn);
  int idx = (bp << 1) | (GET_CODE (operands[0]) == EQ ? BRF : BRT);
  output_asm_insn (ccbranch_templates[idx][len], operands);
  /* NOPs and a predicted-taken branch are alternative workarounds; they
     should never be requested together.  */
  gcc_assert (n_nops == 0 || !bp);
  if (len == 0)
    while (n_nops-- > 0)
      output_asm_insn ("nop;", NULL);
}
1920
 
1921
/* Emit rtl for a comparison operation CMP in mode MODE.  Operands have been
1922
   stored in bfin_compare_op0 and bfin_compare_op1 already.  */
1923
 
1924
rtx
1925
bfin_gen_compare (rtx cmp, enum machine_mode mode ATTRIBUTE_UNUSED)
1926
{
1927
  enum rtx_code code1, code2;
1928
  rtx op0 = bfin_compare_op0, op1 = bfin_compare_op1;
1929
  rtx tem = bfin_cc_rtx;
1930
  enum rtx_code code = GET_CODE (cmp);
1931
 
1932
  /* If we have a BImode input, then we already have a compare result, and
1933
     do not need to emit another comparison.  */
1934
  if (GET_MODE (op0) == BImode)
1935
    {
1936
      gcc_assert ((code == NE || code == EQ) && op1 == const0_rtx);
1937
      tem = op0, code2 = code;
1938
    }
1939
  else
1940
    {
1941
      switch (code) {
1942
        /* bfin has these conditions */
1943
      case EQ:
1944
      case LT:
1945
      case LE:
1946
      case LEU:
1947
      case LTU:
1948
        code1 = code;
1949
        code2 = NE;
1950
        break;
1951
      default:
1952
        code1 = reverse_condition (code);
1953
        code2 = EQ;
1954
        break;
1955
      }
1956
      emit_insn (gen_rtx_SET (BImode, tem,
1957
                              gen_rtx_fmt_ee (code1, BImode, op0, op1)));
1958
    }
1959
 
1960
  return gen_rtx_fmt_ee (code2, BImode, tem, CONST0_RTX (BImode));
1961
}
1962
 
1963
/* Return nonzero iff C has exactly one bit set if it is interpreted
1964
   as a 32 bit constant.  */
1965
 
1966
int
1967
log2constp (unsigned HOST_WIDE_INT c)
1968
{
1969
  c &= 0xFFFFFFFF;
1970
  return c != 0 && (c & (c-1)) == 0;
1971
}
1972
 
1973
/* Returns the number of consecutive least significant zeros in the binary
1974
   representation of *V.
1975
   We modify *V to contain the original value arithmetically shifted right by
1976
   the number of zeroes.  */
1977
 
1978
static int
1979
shiftr_zero (HOST_WIDE_INT *v)
1980
{
1981
  unsigned HOST_WIDE_INT tmp = *v;
1982
  unsigned HOST_WIDE_INT sgn;
1983
  int n = 0;
1984
 
1985
  if (tmp == 0)
1986
    return 0;
1987
 
1988
  sgn = tmp & ((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1));
1989
  while ((tmp & 0x1) == 0 && n <= 32)
1990
    {
1991
      tmp = (tmp >> 1) | sgn;
1992
      n++;
1993
    }
1994
  *v = tmp;
1995
  return n;
1996
}
1997
 
1998
/* After reload, split the load of an immediate constant.  OPERANDS are the
1999
   operands of the movsi_insn pattern which we are splitting.  We return
2000
   nonzero if we emitted a sequence to load the constant, zero if we emitted
2001
   nothing because we want to use the splitter's default sequence.  */
2002
 
2003
int
2004
split_load_immediate (rtx operands[])
2005
{
2006
  HOST_WIDE_INT val = INTVAL (operands[1]);
2007
  HOST_WIDE_INT tmp;
2008
  HOST_WIDE_INT shifted = val;
2009
  HOST_WIDE_INT shifted_compl = ~val;
2010
  int num_zero = shiftr_zero (&shifted);
2011
  int num_compl_zero = shiftr_zero (&shifted_compl);
2012
  unsigned int regno = REGNO (operands[0]);
2013
  enum reg_class class1 = REGNO_REG_CLASS (regno);
2014
 
2015
  /* This case takes care of single-bit set/clear constants, which we could
2016
     also implement with BITSET/BITCLR.  */
2017
  if (num_zero
2018
      && shifted >= -32768 && shifted < 65536
2019
      && (D_REGNO_P (regno)
2020
          || (regno >= REG_P0 && regno <= REG_P7 && num_zero <= 2)))
2021
    {
2022
      emit_insn (gen_movsi (operands[0], GEN_INT (shifted)));
2023
      emit_insn (gen_ashlsi3 (operands[0], operands[0], GEN_INT (num_zero)));
2024
      return 1;
2025
    }
2026
 
2027
  tmp = val & 0xFFFF;
2028
  tmp |= -(tmp & 0x8000);
2029
 
2030
  /* If high word has one bit set or clear, try to use a bit operation.  */
2031
  if (D_REGNO_P (regno))
2032
    {
2033
      if (log2constp (val & 0xFFFF0000))
2034
        {
2035
          emit_insn (gen_movsi (operands[0], GEN_INT (val & 0xFFFF)));
2036
          emit_insn (gen_iorsi3 (operands[0], operands[0], GEN_INT (val & 0xFFFF0000)));
2037
          return 1;
2038
        }
2039
      else if (log2constp (val | 0xFFFF) && (val & 0x8000) != 0)
2040
        {
2041
          emit_insn (gen_movsi (operands[0], GEN_INT (tmp)));
2042
          emit_insn (gen_andsi3 (operands[0], operands[0], GEN_INT (val | 0xFFFF)));
2043
        }
2044
    }
2045
 
2046
  if (D_REGNO_P (regno))
2047
    {
2048
      if (CONST_7BIT_IMM_P (tmp))
2049
        {
2050
          emit_insn (gen_movsi (operands[0], GEN_INT (tmp)));
2051
          emit_insn (gen_movstricthi_high (operands[0], GEN_INT (val & -65536)));
2052
          return 1;
2053
        }
2054
 
2055
      if ((val & 0xFFFF0000) == 0)
2056
        {
2057
          emit_insn (gen_movsi (operands[0], const0_rtx));
2058
          emit_insn (gen_movsi_low (operands[0], operands[0], operands[1]));
2059
          return 1;
2060
        }
2061
 
2062
      if ((val & 0xFFFF0000) == 0xFFFF0000)
2063
        {
2064
          emit_insn (gen_movsi (operands[0], constm1_rtx));
2065
          emit_insn (gen_movsi_low (operands[0], operands[0], operands[1]));
2066
          return 1;
2067
        }
2068
    }
2069
 
2070
  /* Need DREGs for the remaining case.  */
2071
  if (regno > REG_R7)
2072
    return 0;
2073
 
2074
  if (optimize_size
2075
      && num_compl_zero && CONST_7BIT_IMM_P (shifted_compl))
2076
    {
2077
      /* If optimizing for size, generate a sequence that has more instructions
2078
         but is shorter.  */
2079
      emit_insn (gen_movsi (operands[0], GEN_INT (shifted_compl)));
2080
      emit_insn (gen_ashlsi3 (operands[0], operands[0],
2081
                              GEN_INT (num_compl_zero)));
2082
      emit_insn (gen_one_cmplsi2 (operands[0], operands[0]));
2083
      return 1;
2084
    }
2085
  return 0;
2086
}
2087
 
2088
/* Return true if the legitimate memory address for a memory operand of mode
2089
   MODE.  Return false if not.  */
2090
 
2091
static bool
2092
bfin_valid_add (enum machine_mode mode, HOST_WIDE_INT value)
2093
{
2094
  unsigned HOST_WIDE_INT v = value > 0 ? value : -value;
2095
  int sz = GET_MODE_SIZE (mode);
2096
  int shift = sz == 1 ? 0 : sz == 2 ? 1 : 2;
2097
  /* The usual offsettable_memref machinery doesn't work so well for this
2098
     port, so we deal with the problem here.  */
2099
  unsigned HOST_WIDE_INT mask = sz == 8 ? 0x7ffe : 0x7fff;
2100
  return (v & ~(mask << shift)) == 0;
2101
}
2102
 
2103
static bool
2104
bfin_valid_reg_p (unsigned int regno, int strict)
2105
{
2106
  return ((strict && REGNO_OK_FOR_BASE_STRICT_P (regno))
2107
          || (!strict && REGNO_OK_FOR_BASE_NONSTRICT_P (regno)));
2108
}
2109
 
2110
bool
2111
bfin_legitimate_address_p (enum machine_mode mode, rtx x, int strict)
2112
{
2113
  switch (GET_CODE (x)) {
2114
  case REG:
2115
    if (bfin_valid_reg_p (REGNO (x), strict))
2116
      return true;
2117
    break;
2118
  case PLUS:
2119
    if (REG_P (XEXP (x, 0))
2120
        && bfin_valid_reg_p (REGNO (XEXP (x, 0)), strict)
2121
        && (GET_CODE (XEXP (x, 1)) == UNSPEC
2122
            || (GET_CODE (XEXP (x, 1)) == CONST_INT
2123
                && bfin_valid_add (mode, INTVAL (XEXP (x, 1))))))
2124
      return true;
2125
    break;
2126
  case POST_INC:
2127
  case POST_DEC:
2128
    if (LEGITIMATE_MODE_FOR_AUTOINC_P (mode)
2129
        && REG_P (XEXP (x, 0))
2130
        && bfin_valid_reg_p (REGNO (XEXP (x, 0)), strict))
2131
      return true;
2132
  case PRE_DEC:
2133
    if (LEGITIMATE_MODE_FOR_AUTOINC_P (mode)
2134
        && XEXP (x, 0) == stack_pointer_rtx
2135
        && REG_P (XEXP (x, 0))
2136
        && bfin_valid_reg_p (REGNO (XEXP (x, 0)), strict))
2137
      return true;
2138
    break;
2139
  default:
2140
    break;
2141
  }
2142
  return false;
2143
}
2144
 
2145
/* Compute a cost estimate for rtx X, whose parent has code OUTER_CODE,
   storing it in *TOTAL.  Return true when *TOTAL is final, false when
   the caller should also add the costs of X's operands.  */

static bool
bfin_rtx_costs (rtx x, int code, int outer_code, int *total)
{
  /* Baseline cost of a single instruction.  */
  int cost2 = COSTS_N_INSNS (1);

  switch (code)
    {
    case CONST_INT:
      /* A constant is free when it fits the immediate field of the
         surrounding operation, and costs one insn otherwise.  */
      if (outer_code == SET || outer_code == PLUS)
        *total = CONST_7BIT_IMM_P (INTVAL (x)) ? 0 : cost2;
      else if (outer_code == AND)
        *total = log2constp (~INTVAL (x)) ? 0 : cost2;
      else if (outer_code == LE || outer_code == LT || outer_code == EQ)
        *total = (INTVAL (x) >= -4 && INTVAL (x) <= 3) ? 0 : cost2;
      else if (outer_code == LEU || outer_code == LTU)
        *total = (INTVAL (x) >= 0 && INTVAL (x) <= 7) ? 0 : cost2;
      else if (outer_code == MULT)
        *total = (INTVAL (x) == 2 || INTVAL (x) == 4) ? 0 : cost2;
      else if (outer_code == ASHIFT && (INTVAL (x) == 1 || INTVAL (x) == 2))
        *total = 0;
      else if (outer_code == ASHIFT || outer_code == ASHIFTRT
               || outer_code == LSHIFTRT)
        *total = (INTVAL (x) >= 0 && INTVAL (x) <= 31) ? 0 : cost2;
      else if (outer_code == IOR || outer_code == XOR)
        *total = (INTVAL (x) & (INTVAL (x) - 1)) == 0 ? 0 : cost2;
      else
        *total = cost2;
      return true;

    case CONST:
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_DOUBLE:
      /* Symbolic and double constants take a two-insn load.  */
      *total = COSTS_N_INSNS (2);
      return true;

    case PLUS:
      if (GET_MODE (x) == Pmode)
        {
          /* reg + reg * {2,4} can be done with a single scaled-index
             address computation.  */
          if (GET_CODE (XEXP (x, 0)) == MULT
              && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
            {
              HOST_WIDE_INT val = INTVAL (XEXP (XEXP (x, 0), 1));
              if (val == 2 || val == 4)
                {
                  *total = cost2;
                  *total += rtx_cost (XEXP (XEXP (x, 0), 0), outer_code);
                  *total += rtx_cost (XEXP (x, 1), outer_code);
                  return true;
                }
            }
        }

      /* fall through */

    case MINUS:
    case ASHIFT:
    case ASHIFTRT:
    case LSHIFTRT:
      /* DImode arithmetic is synthesized from multiple SImode insns.  */
      if (GET_MODE (x) == DImode)
        *total = 6 * cost2;
      return false;

    case AND:
    case IOR:
    case XOR:
      if (GET_MODE (x) == DImode)
        *total = 2 * cost2;
      return false;

    case MULT:
      if (GET_MODE_SIZE (GET_MODE (x)) <= UNITS_PER_WORD)
        *total = COSTS_N_INSNS (3);
      return false;

    default:
      return false;
    }
}
2224
 
2225
static void
2226
bfin_internal_label (FILE *stream, const char *prefix, unsigned long num)
2227
{
2228
  fprintf (stream, "%s%s$%ld:\n", LOCAL_LABEL_PREFIX, prefix, num);
2229
}
2230
 
2231
/* Used for communication between {push,pop}_multiple_operation (which
   we use not only as a predicate) and the corresponding output functions.
   They hold the index of the first D register (within r7:N) and the first
   P register (within p5:M) of the multi-register push/pop most recently
   validated; 8 resp. 6 mean "no registers of that class".  */
static int first_preg_to_save, first_dreg_to_save;
2234
 
2235
/* Return nonzero if OP is a PARALLEL describing a valid multi-register
   push ([--sp] = (r7:N, p5:M)): consecutive stores of a run of D
   registers ending at R7, optionally followed by a run of P registers
   ending at P5, at decreasing offsets from SP.  As a side effect,
   record the first register of each class in first_dreg_to_save and
   first_preg_to_save.  */

int
push_multiple_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  int lastdreg = 8, lastpreg = 6;
  int i, group;

  first_preg_to_save = lastpreg;
  first_dreg_to_save = lastdreg;
  /* GROUP tracks the state machine: 0 = nothing matched yet,
     1 = matching D registers, 2 = matching P registers.  Element 0 of
     the PARALLEL and the last element are not register stores.  */
  for (i = 1, group = 0; i < XVECLEN (op, 0) - 1; i++)
    {
      rtx t = XVECEXP (op, 0, i);
      rtx src, dest;
      int regno;

      if (GET_CODE (t) != SET)
        return 0;

      src = SET_SRC (t);
      dest = SET_DEST (t);
      /* Each element must store a register to memory.  */
      if (GET_CODE (dest) != MEM || ! REG_P (src))
        return 0;
      dest = XEXP (dest, 0);
      /* The I'th store must be at SP - 4*I.  */
      if (GET_CODE (dest) != PLUS
          || ! REG_P (XEXP (dest, 0))
          || REGNO (XEXP (dest, 0)) != REG_SP
          || GET_CODE (XEXP (dest, 1)) != CONST_INT
          || INTVAL (XEXP (dest, 1)) != -i * 4)
        return 0;

      regno = REGNO (src);
      if (group == 0)
        {
          /* First store decides which class the run starts with.  */
          if (D_REGNO_P (regno))
            {
              group = 1;
              first_dreg_to_save = lastdreg = regno - REG_R0;
            }
          else if (regno >= REG_P0 && regno <= REG_P7)
            {
              group = 2;
              first_preg_to_save = lastpreg = regno - REG_P0;
            }
          else
            return 0;

          continue;
        }

      if (group == 1)
        {
          /* A P register may start the second run; otherwise D registers
             must be consecutive.  */
          if (regno >= REG_P0 && regno <= REG_P7)
            {
              group = 2;
              first_preg_to_save = lastpreg = regno - REG_P0;
            }
          else if (regno != REG_R0 + lastdreg + 1)
            return 0;
          else
            lastdreg++;
        }
      else if (group == 2)
        {
          /* P registers must be consecutive; nothing may follow them.  */
          if (regno != REG_P0 + lastpreg + 1)
            return 0;
          lastpreg++;
        }
    }
  return 1;
}
2304
 
2305
/* Return nonzero if OP is a PARALLEL describing a valid multi-register
   pop ((r7:N, p5:M) = [sp++]): consecutive loads from increasing SP
   offsets into a descending run of P registers starting at P5, then a
   descending run of D registers starting at R7.  On success, record the
   first register of each class in first_dreg_to_save and
   first_preg_to_save.  */

int
pop_multiple_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  int lastdreg = 8, lastpreg = 6;
  int i, group;

  /* GROUP: 0 = matching P registers (downward from P5), 1 = matching
     D registers (downward from R7).  Element 0 is not a register load.  */
  for (i = 1, group = 0; i < XVECLEN (op, 0); i++)
    {
      rtx t = XVECEXP (op, 0, i);
      rtx src, dest;
      int regno;

      if (GET_CODE (t) != SET)
        return 0;

      src = SET_SRC (t);
      dest = SET_DEST (t);
      /* Each element must load a register from memory.  */
      if (GET_CODE (src) != MEM || ! REG_P (dest))
        return 0;
      src = XEXP (src, 0);

      /* The first load is at [SP]; the I'th at [SP + 4*(I-1)].  */
      if (i == 1)
        {
          if (! REG_P (src) || REGNO (src) != REG_SP)
            return 0;
        }
      else if (GET_CODE (src) != PLUS
               || ! REG_P (XEXP (src, 0))
               || REGNO (XEXP (src, 0)) != REG_SP
               || GET_CODE (XEXP (src, 1)) != CONST_INT
               || INTVAL (XEXP (src, 1)) != (i - 1) * 4)
        return 0;

      regno = REGNO (dest);
      if (group == 0)
        {
          /* R7 starts the D-register run; otherwise P registers must
             descend consecutively.  */
          if (regno == REG_R7)
            {
              group = 1;
              lastdreg = 7;
            }
          else if (regno != REG_P0 + lastpreg - 1)
            return 0;
          else
            lastpreg--;
        }
      else if (group == 1)
        {
          /* D registers must descend consecutively.  */
          if (regno != REG_R0 + lastdreg - 1)
            return 0;
          else
            lastdreg--;
        }
    }
  first_dreg_to_save = lastdreg;
  first_preg_to_save = lastpreg;
  return 1;
}
2363
 
2364
/* Emit assembly code for one multi-register push described by INSN, with
2365
   operands in OPERANDS.  */
2366
 
2367
void
2368
output_push_multiple (rtx insn, rtx *operands)
2369
{
2370
  char buf[80];
2371
  int ok;
2372
 
2373
  /* Validate the insn again, and compute first_[dp]reg_to_save. */
2374
  ok = push_multiple_operation (PATTERN (insn), VOIDmode);
2375
  gcc_assert (ok);
2376
 
2377
  if (first_dreg_to_save == 8)
2378
    sprintf (buf, "[--sp] = ( p5:%d );\n", first_preg_to_save);
2379
  else if (first_preg_to_save == 6)
2380
    sprintf (buf, "[--sp] = ( r7:%d );\n", first_dreg_to_save);
2381
  else
2382
    sprintf (buf, "[--sp] = ( r7:%d, p5:%d );\n",
2383
             first_dreg_to_save, first_preg_to_save);
2384
 
2385
  output_asm_insn (buf, operands);
2386
}
2387
 
2388
/* Emit assembly code for one multi-register pop described by INSN, with
2389
   operands in OPERANDS.  */
2390
 
2391
void
2392
output_pop_multiple (rtx insn, rtx *operands)
2393
{
2394
  char buf[80];
2395
  int ok;
2396
 
2397
  /* Validate the insn again, and compute first_[dp]reg_to_save. */
2398
  ok = pop_multiple_operation (PATTERN (insn), VOIDmode);
2399
  gcc_assert (ok);
2400
 
2401
  if (first_dreg_to_save == 8)
2402
    sprintf (buf, "( p5:%d ) = [sp++];\n", first_preg_to_save);
2403
  else if (first_preg_to_save == 6)
2404
    sprintf (buf, "( r7:%d ) = [sp++];\n", first_dreg_to_save);
2405
  else
2406
    sprintf (buf, "( r7:%d, p5:%d ) = [sp++];\n",
2407
             first_dreg_to_save, first_preg_to_save);
2408
 
2409
  output_asm_insn (buf, operands);
2410
}
2411
 
2412
/* Adjust DST and SRC by OFFSET bytes, and generate one move in mode MODE.  */
2413
 
2414
static void
2415
single_move_for_movmem (rtx dst, rtx src, enum machine_mode mode, HOST_WIDE_INT offset)
2416
{
2417
  rtx scratch = gen_reg_rtx (mode);
2418
  rtx srcmem, dstmem;
2419
 
2420
  srcmem = adjust_address_nv (src, mode, offset);
2421
  dstmem = adjust_address_nv (dst, mode, offset);
2422
  emit_move_insn (scratch, srcmem);
2423
  emit_move_insn (dstmem, scratch);
2424
}
2425
 
2426
/* Expand a string move operation of COUNT_EXP bytes from SRC to DST, with
   alignment ALIGN_EXP.  Return true if successful, false if we should fall
   back on a different method.  */

bool
bfin_expand_movmem (rtx dst, rtx src, rtx count_exp, rtx align_exp)
{
  rtx srcreg, destreg, countreg;
  HOST_WIDE_INT align = 0;
  unsigned HOST_WIDE_INT count = 0;

  /* ALIGN and COUNT stay 0 when not compile-time constants.  */
  if (GET_CODE (align_exp) == CONST_INT)
    align = INTVAL (align_exp);
  if (GET_CODE (count_exp) == CONST_INT)
    {
      count = INTVAL (count_exp);
#if 0
      if (!TARGET_INLINE_ALL_STRINGOPS && count > 64)
        return false;
#endif
    }

  /* If optimizing for size, only do single copies inline.  */
  if (optimize_size)
    {
      if (count == 2 && align < 2)
        return false;
      if (count == 4 && align < 4)
        return false;
      if (count != 1 && count != 2 && count != 4)
        return false;
    }
  /* Unaligned multi-byte copies are not handled here.  */
  if (align < 2 && count != 1)
    return false;

  /* Force both addresses into registers and rewrite the MEMs to use
     them, so the loop insns below can update the pointers.  */
  destreg = copy_to_mode_reg (Pmode, XEXP (dst, 0));
  if (destreg != XEXP (dst, 0))
    dst = replace_equiv_address_nv (dst, destreg);
  srcreg = copy_to_mode_reg (Pmode, XEXP (src, 0));
  if (srcreg != XEXP (src, 0))
    src = replace_equiv_address_nv (src, srcreg);

  if (count != 0 && align >= 2)
    {
      unsigned HOST_WIDE_INT offset = 0;

      if (align >= 4)
        {
          /* Exactly one word: a single move beats a loop.  */
          if ((count & ~3) == 4)
            {
              single_move_for_movmem (dst, src, SImode, offset);
              offset = 4;
            }
          else if (count & ~3)
            {
              /* Multiple words: hardware loop copying COUNT/4 words.
                 rep_movsi advances DESTREG/SRCREG, so the tail moves
                 below keep OFFSET at 0 relative to the updated
                 pointers.  */
              HOST_WIDE_INT new_count = ((count >> 2) & 0x3fffffff) - 1;
              countreg = copy_to_mode_reg (Pmode, GEN_INT (new_count));

              emit_insn (gen_rep_movsi (destreg, srcreg, countreg, destreg, srcreg));
            }
          /* Copy a trailing halfword, if any.  */
          if (count & 2)
            {
              single_move_for_movmem (dst, src, HImode, offset);
              offset += 2;
            }
        }
      else
        {
          /* Halfword alignment: same structure with HImode units.  */
          if ((count & ~1) == 2)
            {
              single_move_for_movmem (dst, src, HImode, offset);
              offset = 2;
            }
          else if (count & ~1)
            {
              HOST_WIDE_INT new_count = ((count >> 1) & 0x7fffffff) - 1;
              countreg = copy_to_mode_reg (Pmode, GEN_INT (new_count));

              emit_insn (gen_rep_movhi (destreg, srcreg, countreg, destreg, srcreg));
            }
        }
      /* Copy a trailing byte, if any.  */
      if (count & 1)
        {
          single_move_for_movmem (dst, src, QImode, offset);
        }
      return true;
    }
  return false;
}
2515
 
2516
 
2517
/* Adjust COST, the scheduler's latency estimate for the dependence LINK
   between DEP_INSN (producer) and INSN (consumer), and return the new
   value.  */

static int
bfin_adjust_cost (rtx insn, rtx link, rtx dep_insn, int cost)
{
  enum attr_type insn_type, dep_insn_type;
  int dep_insn_code_number;

  /* Anti and output dependencies have zero cost.  */
  if (REG_NOTE_KIND (link) != 0)
    return 0;

  dep_insn_code_number = recog_memoized (dep_insn);

  /* If we can't recognize the insns, we can't really do anything.  */
  if (dep_insn_code_number < 0 || recog_memoized (insn) < 0)
    return cost;

  insn_type = get_attr_type (insn);
  dep_insn_type = get_attr_type (dep_insn);

  if (dep_insn_type == TYPE_MOVE || dep_insn_type == TYPE_MCLD)
    {
      rtx pat = PATTERN (dep_insn);
      rtx dest = SET_DEST (pat);
      rtx src = SET_SRC (pat);
      /* Only a D-register moved into an address register incurs the
         extra latency penalized below.  */
      if (! ADDRESS_REGNO_P (REGNO (dest)) || ! D_REGNO_P (REGNO (src)))
        return cost;
      /* Add 4 stall cycles after a register move, 3 after a load.  */
      return cost + (dep_insn_type == TYPE_MOVE ? 4 : 3);
    }

  return cost;
}
2548
 
2549
/* We use the machine specific reorg pass for emitting CSYNC instructions
2550
   after conditional branches as needed.
2551
 
2552
   The Blackfin is unusual in that a code sequence like
2553
     if cc jump label
2554
     r0 = (p0)
2555
   may speculatively perform the load even if the condition isn't true.  This
2556
   happens for a branch that is predicted not taken, because the pipeline
2557
   isn't flushed or stalled, so the early stages of the following instructions,
2558
   which perform the memory reference, are allowed to execute before the
2559
   jump condition is evaluated.
2560
   Therefore, we must insert additional instructions in all places where this
2561
   could lead to incorrect behavior.  The manual recommends CSYNC, while
2562
   VDSP seems to use NOPs (even though its corresponding compiler option is
2563
   named CSYNC).
2564
 
2565
   When optimizing for speed, we emit NOPs, which seems faster than a CSYNC.
2566
   When optimizing for size, we turn the branch into a predicted taken one.
2567
   This may be slower due to mispredicts, but saves code size.  */
2568
 
2569
static void
2570
bfin_reorg (void)
2571
{
2572
  rtx insn, last_condjump = NULL_RTX;
2573
  int cycles_since_jump = INT_MAX;
2574
 
2575
  if (! TARGET_SPECLD_ANOMALY || ! TARGET_CSYNC_ANOMALY)
2576
    return;
2577
 
2578
  /* First pass: find predicted-false branches; if something after them
2579
     needs nops, insert them or change the branch to predict true.  */
2580
  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2581
    {
2582
      rtx pat;
2583
 
2584
      if (NOTE_P (insn) || BARRIER_P (insn) || LABEL_P (insn))
2585
        continue;
2586
 
2587
      pat = PATTERN (insn);
2588
      if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER
2589
          || GET_CODE (pat) == ASM_INPUT || GET_CODE (pat) == ADDR_VEC
2590
          || GET_CODE (pat) == ADDR_DIFF_VEC || asm_noperands (pat) >= 0)
2591
        continue;
2592
 
2593
      if (JUMP_P (insn))
2594
        {
2595
          if (any_condjump_p (insn)
2596
              && ! cbranch_predicted_taken_p (insn))
2597
            {
2598
              last_condjump = insn;
2599
              cycles_since_jump = 0;
2600
            }
2601
          else
2602
            cycles_since_jump = INT_MAX;
2603
        }
2604
      else if (INSN_P (insn))
2605
        {
2606
          enum attr_type type = get_attr_type (insn);
2607
          int delay_needed = 0;
2608
          if (cycles_since_jump < INT_MAX)
2609
            cycles_since_jump++;
2610
 
2611
          if (type == TYPE_MCLD && TARGET_SPECLD_ANOMALY)
2612
            {
2613
              rtx pat = single_set (insn);
2614
              if (may_trap_p (SET_SRC (pat)))
2615
                delay_needed = 3;
2616
            }
2617
          else if (type == TYPE_SYNC && TARGET_CSYNC_ANOMALY)
2618
            delay_needed = 4;
2619
 
2620
          if (delay_needed > cycles_since_jump)
2621
            {
2622
              rtx pat;
2623
              int num_clobbers;
2624
              rtx *op = recog_data.operand;
2625
 
2626
              delay_needed -= cycles_since_jump;
2627
 
2628
              extract_insn (last_condjump);
2629
              if (optimize_size)
2630
                {
2631
                  pat = gen_cbranch_predicted_taken (op[0], op[1], op[2],
2632
                                                     op[3]);
2633
                  cycles_since_jump = INT_MAX;
2634
                }
2635
              else
2636
                /* Do not adjust cycles_since_jump in this case, so that
2637
                   we'll increase the number of NOPs for a subsequent insn
2638
                   if necessary.  */
2639
                pat = gen_cbranch_with_nops (op[0], op[1], op[2], op[3],
2640
                                             GEN_INT (delay_needed));
2641
              PATTERN (last_condjump) = pat;
2642
              INSN_CODE (last_condjump) = recog (pat, insn, &num_clobbers);
2643
            }
2644
        }
2645
    }
2646
  /* Second pass: for predicted-true branches, see if anything at the
2647
     branch destination needs extra nops.  */
2648
  if (! TARGET_CSYNC_ANOMALY)
2649
    return;
2650
 
2651
  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2652
    {
2653
      if (JUMP_P (insn)
2654
          && any_condjump_p (insn)
2655
          && (INSN_CODE (insn) == CODE_FOR_cbranch_predicted_taken
2656
              || cbranch_predicted_taken_p (insn)))
2657
        {
2658
          rtx target = JUMP_LABEL (insn);
2659
          rtx label = target;
2660
          cycles_since_jump = 0;
2661
          for (; target && cycles_since_jump < 3; target = NEXT_INSN (target))
2662
            {
2663
              rtx pat;
2664
 
2665
              if (NOTE_P (target) || BARRIER_P (target) || LABEL_P (target))
2666
                continue;
2667
 
2668
              pat = PATTERN (target);
2669
              if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER
2670
                  || GET_CODE (pat) == ASM_INPUT || GET_CODE (pat) == ADDR_VEC
2671
                  || GET_CODE (pat) == ADDR_DIFF_VEC || asm_noperands (pat) >= 0)
2672
                continue;
2673
 
2674
              if (INSN_P (target))
2675
                {
2676
                  enum attr_type type = get_attr_type (target);
2677
                  int delay_needed = 0;
2678
                  if (cycles_since_jump < INT_MAX)
2679
                    cycles_since_jump++;
2680
 
2681
                  if (type == TYPE_SYNC && TARGET_CSYNC_ANOMALY)
2682
                    delay_needed = 2;
2683
 
2684
                  if (delay_needed > cycles_since_jump)
2685
                    {
2686
                      rtx prev = prev_real_insn (label);
2687
                      delay_needed -= cycles_since_jump;
2688
                      if (dump_file)
2689
                        fprintf (dump_file, "Adding %d nops after %d\n",
2690
                                 delay_needed, INSN_UID (label));
2691
                      if (JUMP_P (prev)
2692
                          && INSN_CODE (prev) == CODE_FOR_cbranch_with_nops)
2693
                        {
2694
                          rtx x;
2695
                          HOST_WIDE_INT v;
2696
 
2697
                          if (dump_file)
2698
                            fprintf (dump_file,
2699
                                     "Reducing nops on insn %d.\n",
2700
                                     INSN_UID (prev));
2701
                          x = PATTERN (prev);
2702
                          x = XVECEXP (x, 0, 1);
2703
                          v = INTVAL (XVECEXP (x, 0, 0)) - delay_needed;
2704
                          XVECEXP (x, 0, 0) = GEN_INT (v);
2705
                        }
2706
                      while (delay_needed-- > 0)
2707
                        emit_insn_after (gen_nop (), label);
2708
                      break;
2709
                    }
2710
                }
2711
            }
2712
        }
2713
    }
2714
}
2715
 
2716
/* Handle interrupt_handler, exception_handler and nmi_handler function
2717
   attributes; arguments as in struct attribute_spec.handler.  */
2718
 
2719
static tree
2720
handle_int_attribute (tree *node, tree name,
2721
                      tree args ATTRIBUTE_UNUSED,
2722
                      int flags ATTRIBUTE_UNUSED,
2723
                      bool *no_add_attrs)
2724
{
2725
  tree x = *node;
2726
  if (TREE_CODE (x) == FUNCTION_DECL)
2727
    x = TREE_TYPE (x);
2728
 
2729
  if (TREE_CODE (x) != FUNCTION_TYPE)
2730
    {
2731
      warning (OPT_Wattributes, "%qs attribute only applies to functions",
2732
               IDENTIFIER_POINTER (name));
2733
      *no_add_attrs = true;
2734
    }
2735
  else if (funkind (x) != SUBROUTINE)
2736
    error ("multiple function type attributes specified");
2737
 
2738
  return NULL_TREE;
2739
}
2740
 
2741
/* Return 0 if the attributes for two types are incompatible, 1 if they
2742
   are compatible, and 2 if they are nearly compatible (which causes a
2743
   warning to be generated).  */
2744
 
2745
static int
2746
bfin_comp_type_attributes (tree type1, tree type2)
2747
{
2748
  e_funkind kind1, kind2;
2749
 
2750
  if (TREE_CODE (type1) != FUNCTION_TYPE)
2751
    return 1;
2752
 
2753
  kind1 = funkind (type1);
2754
  kind2 = funkind (type2);
2755
 
2756
  if (kind1 != kind2)
2757
    return 0;
2758
 
2759
  /*  Check for mismatched modifiers */
2760
  if (!lookup_attribute ("nesting", TYPE_ATTRIBUTES (type1))
2761
      != !lookup_attribute ("nesting", TYPE_ATTRIBUTES (type2)))
2762
    return 0;
2763
 
2764
  if (!lookup_attribute ("saveall", TYPE_ATTRIBUTES (type1))
2765
      != !lookup_attribute ("saveall", TYPE_ATTRIBUTES (type2)))
2766
    return 0;
2767
 
2768
  if (!lookup_attribute ("kspisusp", TYPE_ATTRIBUTES (type1))
2769
      != !lookup_attribute ("kspisusp", TYPE_ATTRIBUTES (type2)))
2770
    return 0;
2771
 
2772
  if (!lookup_attribute ("longcall", TYPE_ATTRIBUTES (type1))
2773
      != !lookup_attribute ("longcall", TYPE_ATTRIBUTES (type2)))
2774
    return 0;
2775
 
2776
  return 1;
2777
}
2778
 
2779
/* Handle a "longcall" or "shortcall" attribute; arguments as in
2780
   struct attribute_spec.handler.  */
2781
 
2782
static tree
2783
bfin_handle_longcall_attribute (tree *node, tree name,
2784
                                tree args ATTRIBUTE_UNUSED,
2785
                                int flags ATTRIBUTE_UNUSED,
2786
                                bool *no_add_attrs)
2787
{
2788
  if (TREE_CODE (*node) != FUNCTION_TYPE
2789
      && TREE_CODE (*node) != FIELD_DECL
2790
      && TREE_CODE (*node) != TYPE_DECL)
2791
    {
2792
      warning (OPT_Wattributes, "`%s' attribute only applies to functions",
2793
               IDENTIFIER_POINTER (name));
2794
      *no_add_attrs = true;
2795
    }
2796
 
2797
  if ((strcmp (IDENTIFIER_POINTER (name), "longcall") == 0
2798
       && lookup_attribute ("shortcall", TYPE_ATTRIBUTES (*node)))
2799
      || (strcmp (IDENTIFIER_POINTER (name), "shortcall") == 0
2800
          && lookup_attribute ("longcall", TYPE_ATTRIBUTES (*node))))
2801
    {
2802
      warning (OPT_Wattributes,
2803
               "can't apply both longcall and shortcall attributes to the same function");
2804
      *no_add_attrs = true;
2805
    }
2806
 
2807
  return NULL_TREE;
2808
}
2809
 
2810
/* Table of valid machine attributes.  */
const struct attribute_spec bfin_attribute_table[] =
{
  /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
  /* The three handler-kind attributes share a validation handler that
     rejects combinations (see handle_int_attribute above).  */
  { "interrupt_handler", 0, 0, false, true,  true, handle_int_attribute },
  { "exception_handler", 0, 0, false, true,  true, handle_int_attribute },
  { "nmi_handler", 0, 0, false, true,  true, handle_int_attribute },
  /* Modifier attributes; no handler, but bfin_comp_type_attributes
     requires them to match between compatible function types.  */
  { "nesting", 0, 0, false, true,  true, NULL },
  { "kspisusp", 0, 0, false, true,  true, NULL },
  { "saveall", 0, 0, false, true,  true, NULL },
  /* Call-distance attributes; the shared handler rejects applying
     both to the same function.  */
  { "longcall",  0, 0, false, true,  true,  bfin_handle_longcall_attribute },
  { "shortcall", 0, 0, false, true,  true,  bfin_handle_longcall_attribute },
  { NULL, 0, 0, false, false, false, NULL }
};
2824
 
2825
/* Output the assembler code for a thunk function.  THUNK_DECL is the
   declaration for the thunk function itself, FUNCTION is the decl for
   the target function.  DELTA is an immediate constant offset to be
   added to THIS.  If VCALL_OFFSET is nonzero, the word at
   *(*this + vcall_offset) should be added to THIS.  */

static void
bfin_output_mi_thunk (FILE *file ATTRIBUTE_UNUSED,
		      tree thunk ATTRIBUTE_UNUSED, HOST_WIDE_INT delta,
		      HOST_WIDE_INT vcall_offset, tree function)
{
  rtx xops[3];
  /* The this parameter is passed as the first argument.  */
  rtx this = gen_rtx_REG (Pmode, REG_R0);

  /* Adjust the this parameter by a fixed constant.  */
  if (delta)
    {
      xops[1] = this;
      if (delta >= -64 && delta <= 63)
	{
	  /* Fits in a single 7-bit-immediate add.  */
	  xops[0] = GEN_INT (delta);
	  output_asm_insn ("%1 += %0;", xops);
	}
      else if (delta >= -128 && delta < -64)
	{
	  /* Split into two adds; the remainder (delta + 64) fits
	     in the immediate range.  */
	  xops[0] = GEN_INT (delta + 64);
	  output_asm_insn ("%1 += -64; %1 += %0;", xops);
	}
      else if (delta > 63 && delta <= 126)
	{
	  /* Symmetric positive case: 63 plus a remainder <= 63.  */
	  xops[0] = GEN_INT (delta - 63);
	  output_asm_insn ("%1 += 63; %1 += %0;", xops);
	}
      else
	{
	  /* Too large for immediates: materialize delta in R3
	     (halves loaded separately) and add.  NOTE(review): this
	     clobbers R3 without saving it — presumably R3 is free in
	     a thunk prologue; confirm against the ABI.  */
	  xops[0] = GEN_INT (delta);
	  output_asm_insn ("r3.l = %h0; r3.h = %d0; %1 = %1 + r3;", xops);
	}
    }

  /* Adjust the this parameter by a value stored in the vtable.  */
  if (vcall_offset)
    {
      rtx p2tmp = gen_rtx_REG (Pmode, REG_P2);
      rtx tmp = gen_rtx_REG (Pmode, REG_R2);

      xops[1] = tmp;
      xops[2] = p2tmp;
      /* Load the vtable pointer (*this) into P2.  */
      output_asm_insn ("%2 = r0; %2 = [%2];", xops);

      /* Adjust the this parameter.  */
      xops[0] = gen_rtx_MEM (Pmode, plus_constant (p2tmp, vcall_offset));
      if (!memory_operand (xops[0], Pmode))
	{
	  /* Offset too large for a reg+offset address: materialize it
	     in P1 and add it into P2, then use a plain [P2] load.  */
	  rtx tmp2 = gen_rtx_REG (Pmode, REG_P1);
	  xops[0] = GEN_INT (vcall_offset);
	  xops[1] = tmp2;
	  output_asm_insn ("%h1 = %h0; %d1 = %d0; %2 = %2 + %1", xops);
	  xops[0] = gen_rtx_MEM (Pmode, p2tmp);
	}
      /* this += *(vtable + vcall_offset).  */
      xops[2] = this;
      output_asm_insn ("%1 = %0; %2 = %2 + %1;", xops);
    }

  xops[0] = XEXP (DECL_RTL (function), 0);
  /* The "1 ||" makes this condition always true, so the direct jump is
     emitted unconditionally — the PIC/binds_local test is effectively
     disabled (deliberately, it appears; the long-form jump.l reaches
     any destination).  */
  if (1 || !flag_pic || (*targetm.binds_local_p) (function))
    output_asm_insn ("jump.l\t%P0", xops);
}
2894
 
2895
/* Codes for all the Blackfin builtins.  */
enum bfin_builtins
{
  BFIN_BUILTIN_CSYNC,	/* __builtin_bfin_csync: expands via gen_csync.  */
  BFIN_BUILTIN_SSYNC,	/* __builtin_bfin_ssync: expands via gen_ssync.  */
  BFIN_BUILTIN_MAX	/* Sentinel: number of builtins, not a builtin.  */
};
2902
 
2903
/* Register a machine-specific builtin named NAME with function type
   TYPE and function code CODE (one of enum bfin_builtins).  */
#define def_builtin(NAME, TYPE, CODE)					\
do {									\
  lang_hooks.builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD,     \
			       NULL, NULL_TREE);                        \
} while (0)
2908
 
2909
/* Set up all builtin functions for this target.  */
static void
bfin_init_builtins (void)
{
  /* void f(void) — both sync builtins take no arguments and return
     nothing.  */
  tree void_ftype_void
    = build_function_type (void_type_node, void_list_node);

  /* Register the core-sync and system-sync builtins.  (The original
     comment here mentioned MMX insns — stale copy from the i386
     backend.)  */
  def_builtin ("__builtin_bfin_csync", void_ftype_void, BFIN_BUILTIN_CSYNC);
  def_builtin ("__builtin_bfin_ssync", void_ftype_void, BFIN_BUILTIN_SSYNC);
}
2920
 
2921
/* Expand an expression EXP that calls a built-in function,
2922
   with result going to TARGET if that's convenient
2923
   (and in mode MODE if that's convenient).
2924
   SUBTARGET may be used as the target for computing one of EXP's operands.
2925
   IGNORE is nonzero if the value is to be ignored.  */
2926
 
2927
static rtx
2928
bfin_expand_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
2929
                     rtx subtarget ATTRIBUTE_UNUSED,
2930
                     enum machine_mode mode ATTRIBUTE_UNUSED,
2931
                     int ignore ATTRIBUTE_UNUSED)
2932
{
2933
  tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
2934
  unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
2935
 
2936
  switch (fcode)
2937
    {
2938
    case BFIN_BUILTIN_CSYNC:
2939
      emit_insn (gen_csync ());
2940
      return 0;
2941
    case BFIN_BUILTIN_SSYNC:
2942
      emit_insn (gen_ssync ());
2943
      return 0;
2944
 
2945
    default:
2946
      gcc_unreachable ();
2947
    }
2948
}
2949
 
2950
#undef TARGET_INIT_BUILTINS
2951
#define TARGET_INIT_BUILTINS bfin_init_builtins
2952
 
2953
#undef TARGET_EXPAND_BUILTIN
2954
#define TARGET_EXPAND_BUILTIN bfin_expand_builtin
2955
 
2956
#undef TARGET_ASM_GLOBALIZE_LABEL
2957
#define TARGET_ASM_GLOBALIZE_LABEL bfin_globalize_label 
2958
 
2959
#undef TARGET_ASM_FILE_START
2960
#define TARGET_ASM_FILE_START output_file_start
2961
 
2962
#undef TARGET_ATTRIBUTE_TABLE
2963
#define TARGET_ATTRIBUTE_TABLE bfin_attribute_table
2964
 
2965
#undef TARGET_COMP_TYPE_ATTRIBUTES
2966
#define TARGET_COMP_TYPE_ATTRIBUTES bfin_comp_type_attributes
2967
 
2968
#undef TARGET_RTX_COSTS
2969
#define TARGET_RTX_COSTS bfin_rtx_costs
2970
 
2971
#undef  TARGET_ADDRESS_COST
2972
#define TARGET_ADDRESS_COST bfin_address_cost
2973
 
2974
#undef TARGET_ASM_INTERNAL_LABEL
2975
#define TARGET_ASM_INTERNAL_LABEL bfin_internal_label
2976
 
2977
#undef TARGET_MACHINE_DEPENDENT_REORG
2978
#define TARGET_MACHINE_DEPENDENT_REORG bfin_reorg
2979
 
2980
#undef TARGET_FUNCTION_OK_FOR_SIBCALL
2981
#define TARGET_FUNCTION_OK_FOR_SIBCALL bfin_function_ok_for_sibcall
2982
 
2983
#undef TARGET_ASM_OUTPUT_MI_THUNK
2984
#define TARGET_ASM_OUTPUT_MI_THUNK bfin_output_mi_thunk
2985
#undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
2986
#define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_tree_hwi_hwi_tree_true
2987
 
2988
#undef TARGET_SCHED_ADJUST_COST
2989
#define TARGET_SCHED_ADJUST_COST bfin_adjust_cost
2990
 
2991
#undef TARGET_PROMOTE_PROTOTYPES
2992
#define TARGET_PROMOTE_PROTOTYPES hook_bool_tree_true
2993
#undef TARGET_PROMOTE_FUNCTION_ARGS
2994
#define TARGET_PROMOTE_FUNCTION_ARGS hook_bool_tree_true
2995
#undef TARGET_PROMOTE_FUNCTION_RETURN
2996
#define TARGET_PROMOTE_FUNCTION_RETURN hook_bool_tree_true
2997
 
2998
#undef TARGET_ARG_PARTIAL_BYTES
2999
#define TARGET_ARG_PARTIAL_BYTES bfin_arg_partial_bytes
3000
 
3001
#undef TARGET_PASS_BY_REFERENCE
3002
#define TARGET_PASS_BY_REFERENCE bfin_pass_by_reference
3003
 
3004
#undef TARGET_SETUP_INCOMING_VARARGS
3005
#define TARGET_SETUP_INCOMING_VARARGS setup_incoming_varargs
3006
 
3007
#undef TARGET_STRUCT_VALUE_RTX
3008
#define TARGET_STRUCT_VALUE_RTX bfin_struct_value_rtx
3009
 
3010
#undef TARGET_VECTOR_MODE_SUPPORTED_P
3011
#define TARGET_VECTOR_MODE_SUPPORTED_P bfin_vector_mode_supported_p
3012
 
3013
#undef TARGET_HANDLE_OPTION
3014
#define TARGET_HANDLE_OPTION bfin_handle_option
3015
 
3016
#undef TARGET_DEFAULT_TARGET_FLAGS
3017
#define TARGET_DEFAULT_TARGET_FLAGS TARGET_DEFAULT
3018
 
3019
struct gcc_target targetm = TARGET_INITIALIZER;

powered by: WebSVN 2.1.0

© copyright 1999-2024 OpenCores.org, equivalent to Oliscience, all rights reserved. OpenCores®, registered trademark.