OpenCores
URL https://opencores.org/ocsvn/openrisc/openrisc/trunk

Subversion Repositories openrisc

[/] [openrisc/] [trunk/] [gnu-old/] [gcc-4.2.2/] [gcc/] [config/] [bfin/] [bfin.c] - Blame information for rev 816

Details | Compare with Previous | View Log

Line No. Rev Author Line
1 38 julius
/* The Blackfin code generation auxiliary output file.
2
   Copyright (C) 2005, 2006, 2007  Free Software Foundation, Inc.
3
   Contributed by Analog Devices.
4
 
5
   This file is part of GCC.
6
 
7
   GCC is free software; you can redistribute it and/or modify it
8
   under the terms of the GNU General Public License as published
9
   by the Free Software Foundation; either version 3, or (at your
10
   option) any later version.
11
 
12
   GCC is distributed in the hope that it will be useful, but WITHOUT
13
   ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
14
   or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public
15
   License for more details.
16
 
17
   You should have received a copy of the GNU General Public License
18
   along with GCC; see the file COPYING3.  If not see
19
   <http://www.gnu.org/licenses/>.  */
20
 
21
#include "config.h"
22
#include "system.h"
23
#include "coretypes.h"
24
#include "tm.h"
25
#include "rtl.h"
26
#include "regs.h"
27
#include "hard-reg-set.h"
28
#include "real.h"
29
#include "insn-config.h"
30
#include "insn-codes.h"
31
#include "conditions.h"
32
#include "insn-flags.h"
33
#include "output.h"
34
#include "insn-attr.h"
35
#include "tree.h"
36
#include "flags.h"
37
#include "except.h"
38
#include "function.h"
39
#include "input.h"
40
#include "target.h"
41
#include "target-def.h"
42
#include "expr.h"
43
#include "toplev.h"
44
#include "recog.h"
45
#include "optabs.h"
46
#include "ggc.h"
47
#include "integrate.h"
48
#include "cgraph.h"
49
#include "langhooks.h"
50
#include "bfin-protos.h"
51
#include "tm-preds.h"
52
#include "gt-bfin.h"
53
#include "basic-block.h"
54
 
55
/* A C structure for machine-specific, per-function data.
   This is added to the cfun structure.  */
struct machine_function GTY(())
{
  /* Set nonzero when a hardware loop (LSETUP) has been generated for
     this function.  */
  int has_hardware_loops;
};
61
 
62
/* Test and compare insns in bfin.md store the information needed to
   generate branch and scc insns here.  */
rtx bfin_compare_op0, bfin_compare_op1;

/* RTX for condition code flag register and RETS register.
   Declared extern with GTY(()) so the garbage collector roots them;
   defined immediately below and initialized in
   conditional_register_usage.  */
extern GTY(()) rtx bfin_cc_rtx;
extern GTY(()) rtx bfin_rets_rtx;
rtx bfin_cc_rtx, bfin_rets_rtx;

/* Number of argument-passing registers; computed once in
   output_file_start by scanning arg_regs.  */
int max_arg_registers = 0;

/* Arrays used when emitting register names.  */
const char *short_reg_names[]  =  SHORT_REGISTER_NAMES;
const char *high_reg_names[]   =  HIGH_REGISTER_NAMES;
const char *dregs_pair_names[] =  DREGS_PAIR_NAMES;
const char *byte_reg_names[]   =  BYTE_REGISTER_NAMES;

/* Argument registers; scanned up to the first negative entry
   (presumably a -1 terminator -- see output_file_start).  */
static int arg_regs[] = FUNCTION_ARG_REGISTERS;

/* Nonzero if -mshared-library-id was given.  */
static int bfin_lib_id_given;
83
 
84
/* Emit the assembler directive that makes NAME a global symbol:
   ".global <name>;" followed by a newline, on STREAM.  */

static void
bfin_globalize_label (FILE *stream, const char *name)
{
  fprintf (stream, ".global ");
  assemble_name (stream, name);
  fputs (";\n", stream);
}
92
 
93
static void
94
output_file_start (void)
95
{
96
  FILE *file = asm_out_file;
97
  int i;
98
 
99
  fprintf (file, ".file \"%s\";\n", input_filename);
100
 
101
  for (i = 0; arg_regs[i] >= 0; i++)
102
    ;
103
  max_arg_registers = i;        /* how many arg reg used  */
104
}
105
 
106
/* Called early in the compilation to conditionally modify
107
   fixed_regs/call_used_regs.  */
108
 
109
void
110
conditional_register_usage (void)
111
{
112
  /* initialize condition code flag register rtx */
113
  bfin_cc_rtx = gen_rtx_REG (BImode, REG_CC);
114
  bfin_rets_rtx = gen_rtx_REG (Pmode, REG_RETS);
115
}
116
 
117
/* Examine machine-dependent attributes of function type FUNTYPE and return its
118
   type.  See the definition of E_FUNKIND.  */
119
 
120
static e_funkind funkind (tree funtype)
121
{
122
  tree attrs = TYPE_ATTRIBUTES (funtype);
123
  if (lookup_attribute ("interrupt_handler", attrs))
124
    return INTERRUPT_HANDLER;
125
  else if (lookup_attribute ("exception_handler", attrs))
126
    return EXCPT_HANDLER;
127
  else if (lookup_attribute ("nmi_handler", attrs))
128
    return NMI_HANDLER;
129
  else
130
    return SUBROUTINE;
131
}
132
 
133
/* Legitimize PIC addresses.  If the address is already position-independent,
   we return ORIG.  Newly generated position-independent addresses go into a
   reg.  This is REG if nonzero, otherwise we allocate register(s) as
   necessary.  PICREG is the register holding the pointer to the PIC offset
   table.  */

static rtx
legitimize_pic_address (rtx orig, rtx reg, rtx picreg)
{
  rtx addr = orig;
  rtx new = orig;

  if (GET_CODE (addr) == SYMBOL_REF || GET_CODE (addr) == LABEL_REF)
    {
      /* Constant-pool entries are already addressable; no GOT load.  */
      if (GET_CODE (addr) == SYMBOL_REF && CONSTANT_POOL_ADDRESS_P (addr))
        reg = new = orig;
      else
        {
          int unspec;
          rtx tmp;

          /* Pick the flavor of GOT reference: ID-shared-library PIC,
             FDPIC function descriptor, or plain FDPIC data.  */
          if (TARGET_ID_SHARED_LIBRARY)
            unspec = UNSPEC_MOVE_PIC;
          else if (GET_CODE (addr) == SYMBOL_REF
                   && SYMBOL_REF_FUNCTION_P (addr))
            {
              unspec = UNSPEC_FUNCDESC_GOT17M4;
            }
          else
            {
              unspec = UNSPEC_MOVE_FDPIC;
            }

          if (reg == 0)
            {
              gcc_assert (!no_new_pseudos);
              reg = gen_reg_rtx (Pmode);
            }

          /* Load the address from the GOT slot at PICREG + unspec(addr).  */
          tmp = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), unspec);
          new = gen_const_mem (Pmode, gen_rtx_PLUS (Pmode, picreg, tmp));

          emit_move_insn (reg, new);
        }
      if (picreg == pic_offset_table_rtx)
        current_function_uses_pic_offset_table = 1;
      return reg;
    }

  else if (GET_CODE (addr) == CONST || GET_CODE (addr) == PLUS)
    {
      rtx base;

      /* Strip the CONST wrapper; what remains must be a PLUS.  */
      if (GET_CODE (addr) == CONST)
        {
          addr = XEXP (addr, 0);
          gcc_assert (GET_CODE (addr) == PLUS);
        }

      /* Already an offset from the PIC register: nothing to do.  */
      if (XEXP (addr, 0) == picreg)
        return orig;

      if (reg == 0)
        {
          gcc_assert (!no_new_pseudos);
          reg = gen_reg_rtx (Pmode);
        }

      /* Legitimize both halves; reuse REG for the second operand only
         if the first did not consume it.  */
      base = legitimize_pic_address (XEXP (addr, 0), reg, picreg);
      addr = legitimize_pic_address (XEXP (addr, 1),
                                     base == reg ? NULL_RTX : reg,
                                     picreg);

      if (GET_CODE (addr) == CONST_INT)
        {
          gcc_assert (! reload_in_progress && ! reload_completed);
          addr = force_reg (Pmode, addr);
        }

      /* Reassociate (base + (x + const)) into ((base + x) + const).  */
      if (GET_CODE (addr) == PLUS && CONSTANT_P (XEXP (addr, 1)))
        {
          base = gen_rtx_PLUS (Pmode, base, XEXP (addr, 0));
          addr = XEXP (addr, 1);
        }

      return gen_rtx_PLUS (Pmode, base, addr);
    }

  return new;
}
223
 
224
/* Stack frame layout. */
225
 
226
/* Compute the number of DREGS to save with a push_multiple operation.
   This could include registers that aren't modified in the function,
   since push_multiple only takes a range of registers.
   If IS_INTHANDLER, then everything that is live must be saved, even
   if normally call-clobbered.  */

static int
n_dregs_to_save (bool is_inthandler)
{
  unsigned i;

  /* Find the lowest R-register that must be saved; because
     push_multiple saves a contiguous range ending at R7, the answer is
     the count from that register up to R7.  */
  for (i = REG_R0; i <= REG_R7; i++)
    {
      if (regs_ever_live[i] && (is_inthandler || ! call_used_regs[i]))
        return REG_R7 - i + 1;

      /* EH return data registers must also be preserved, even if they
         are otherwise call-clobbered.  */
      if (current_function_calls_eh_return)
        {
          unsigned j;
          for (j = 0; ; j++)
            {
              unsigned test = EH_RETURN_DATA_REGNO (j);
              if (test == INVALID_REGNUM)
                break;
              if (test == i)
                return REG_R7 - i + 1;
            }
        }

    }
  return 0;
}
258
 
259
/* Like n_dregs_to_save, but compute number of PREGS to save.  */

static int
n_pregs_to_save (bool is_inthandler)
{
  unsigned i;

  /* As above: the saved range is contiguous and ends at P5, so return
     the count from the first register that needs saving.  The PIC
     offset table register also forces a save when it is actually used
     (or might be, in a non-leaf ID-shared-library function) and we are
     not in FDPIC mode.  */
  for (i = REG_P0; i <= REG_P5; i++)
    if ((regs_ever_live[i] && (is_inthandler || ! call_used_regs[i]))
        || (!TARGET_FDPIC
            && i == PIC_OFFSET_TABLE_REGNUM
            && (current_function_uses_pic_offset_table
                || (TARGET_ID_SHARED_LIBRARY && ! current_function_is_leaf))))
      return REG_P5 - i + 1;
  return 0;
}
275
 
276
/* Determine if we are going to save the frame pointer in the prologue.  */
277
 
278
static bool
279
must_save_fp_p (void)
280
{
281
  return frame_pointer_needed || regs_ever_live[REG_FP];
282
}
283
 
284
static bool
285
stack_frame_needed_p (void)
286
{
287
  /* EH return puts a new return address into the frame using an
288
     address relative to the frame pointer.  */
289
  if (current_function_calls_eh_return)
290
    return true;
291
  return frame_pointer_needed;
292
}
293
 
294
/* Emit code to save registers in the prologue.  SAVEALL is nonzero if we
295
   must save all registers; this is used for interrupt handlers.
296
   SPREG contains (reg:SI REG_SP).  IS_INTHANDLER is true if we're doing
297
   this for an interrupt (or exception) handler.  */
298
 
299
static void
300
expand_prologue_reg_save (rtx spreg, int saveall, bool is_inthandler)
301
{
302
  int ndregs = saveall ? 8 : n_dregs_to_save (is_inthandler);
303
  int npregs = saveall ? 6 : n_pregs_to_save (is_inthandler);
304
  int dregno = REG_R7 + 1 - ndregs;
305
  int pregno = REG_P5 + 1 - npregs;
306
  int total = ndregs + npregs;
307
  int i;
308
  rtx pat, insn, val;
309
 
310
  if (total == 0)
311
    return;
312
 
313
  val = GEN_INT (-total * 4);
314
  pat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (total + 2));
315
  XVECEXP (pat, 0, 0) = gen_rtx_UNSPEC (VOIDmode, gen_rtvec (1, val),
316
                                        UNSPEC_PUSH_MULTIPLE);
317
  XVECEXP (pat, 0, total + 1) = gen_rtx_SET (VOIDmode, spreg,
318
                                             gen_rtx_PLUS (Pmode, spreg,
319
                                                           val));
320
  RTX_FRAME_RELATED_P (XVECEXP (pat, 0, total + 1)) = 1;
321
  for (i = 0; i < total; i++)
322
    {
323
      rtx memref = gen_rtx_MEM (word_mode,
324
                                gen_rtx_PLUS (Pmode, spreg,
325
                                              GEN_INT (- i * 4 - 4)));
326
      rtx subpat;
327
      if (ndregs > 0)
328
        {
329
          subpat = gen_rtx_SET (VOIDmode, memref, gen_rtx_REG (word_mode,
330
                                                               dregno++));
331
          ndregs--;
332
        }
333
      else
334
        {
335
          subpat = gen_rtx_SET (VOIDmode, memref, gen_rtx_REG (word_mode,
336
                                                               pregno++));
337
          npregs++;
338
        }
339
      XVECEXP (pat, 0, i + 1) = subpat;
340
      RTX_FRAME_RELATED_P (subpat) = 1;
341
    }
342
  insn = emit_insn (pat);
343
  RTX_FRAME_RELATED_P (insn) = 1;
344
}
345
 
346
/* Emit code to restore registers in the epilogue.  SAVEALL is nonzero if we
   must save all registers; this is used for interrupt handlers.
   SPREG contains (reg:SI REG_SP).  IS_INTHANDLER is true if we're doing
   this for an interrupt (or exception) handler.  */

static void
expand_epilogue_reg_restore (rtx spreg, bool saveall, bool is_inthandler)
{
  int ndregs = saveall ? 8 : n_dregs_to_save (is_inthandler);
  int npregs = saveall ? 6 : n_pregs_to_save (is_inthandler);
  int total = ndregs + npregs;
  int i, regno;
  rtx pat, insn;

  if (total == 0)
    return;

  /* One PARALLEL: [0] bumps SP back up, [1..total] the register loads.  */
  pat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (total + 1));
  XVECEXP (pat, 0, 0) = gen_rtx_SET (VOIDmode, spreg,
                                     gen_rtx_PLUS (Pmode, spreg,
                                                   GEN_INT (total * 4)));

  /* Restores walk downward from P5 (then from R7) -- the reverse of
     the save order in expand_prologue_reg_save.  */
  if (npregs > 0)
    regno = REG_P5 + 1;
  else
    regno = REG_R7 + 1;

  for (i = 0; i < total; i++)
    {
      /* Slot i is at SP + 4*i; the first slot is at SP itself.  */
      rtx addr = (i > 0
                  ? gen_rtx_PLUS (Pmode, spreg, GEN_INT (i * 4))
                  : spreg);
      rtx memref = gen_rtx_MEM (word_mode, addr);

      regno--;
      XVECEXP (pat, 0, i + 1)
        = gen_rtx_SET (VOIDmode, gen_rtx_REG (word_mode, regno), memref);

      /* When the PREGS are exhausted, switch over to the DREGS range.  */
      if (npregs > 0)
        {
          if (--npregs == 0)
            regno = REG_R7 + 1;
        }
    }

  insn = emit_insn (pat);
  RTX_FRAME_RELATED_P (insn) = 1;
}
394
 
395
/* Perform any needed actions needed for a function that is receiving a
   variable number of arguments.

   CUM is as above.

   MODE and TYPE are the mode and type of the current parameter.

   PRETEND_SIZE is a variable that should be set to the amount of stack
   that must be pushed by the prolog to pretend that our caller pushed
   it.

   Normally, this macro will push all remaining incoming registers on the
   stack and set PRETEND_SIZE to the length of the registers pushed.

   Blackfin specific :
   - VDSP C compiler manual (our ABI) says that a variable args function
     should save the R0, R1 and R2 registers in the stack.
   - The caller will always leave space on the stack for the
     arguments that are passed in registers, so we dont have
     to leave any extra space.
   - now, the vastart pointer can access all arguments from the stack.  */

static void
setup_incoming_varargs (CUMULATIVE_ARGS *cum,
                        enum machine_mode mode ATTRIBUTE_UNUSED,
                        tree type ATTRIBUTE_UNUSED, int *pretend_size,
                        int no_rtl)
{
  rtx mem;
  int i;

  if (no_rtl)
    return;

  /* The move for named arguments will be generated automatically by the
     compiler.  We need to generate the move rtx for the unnamed arguments
     if they are in the first 3 words.  We assume at least 1 named argument
     exists, so we never generate [ARGP] = R0 here.  */

  for (i = cum->words + 1; i < max_arg_registers; i++)
    {
      /* Spill register argument I into its caller-allocated stack slot.  */
      mem = gen_rtx_MEM (Pmode,
                         plus_constant (arg_pointer_rtx, (i * UNITS_PER_WORD)));
      emit_move_insn (mem, gen_rtx_REG (Pmode, i));
    }

  /* The caller already reserved the space, so no pretend area needed.  */
  *pretend_size = 0;
}
443
 
444
/* Value should be nonzero if functions must have frame pointers.
445
   Zero means the frame pointer need not be set up (and parms may
446
   be accessed via the stack pointer) in functions that seem suitable.  */
447
 
448
int
449
bfin_frame_pointer_required (void)
450
{
451
  e_funkind fkind = funkind (TREE_TYPE (current_function_decl));
452
 
453
  if (fkind != SUBROUTINE)
454
    return 1;
455
 
456
  /* We turn on -fomit-frame-pointer if -momit-leaf-frame-pointer is used,
457
     so we have to override it for non-leaf functions.  */
458
  if (TARGET_OMIT_LEAF_FRAME_POINTER && ! current_function_is_leaf)
459
    return 1;
460
 
461
  return 0;
462
}
463
 
464
/* Return the number of registers pushed during the prologue.
   This count must agree exactly with what bfin_expand_prologue and
   expand_interrupt_handler_prologue actually emit, since it feeds the
   ARG_POINTER elimination offset.  */

static int
n_regs_saved_by_prologue (void)
{
  e_funkind fkind = funkind (TREE_TYPE (current_function_decl));
  bool is_inthandler = fkind != SUBROUTINE;
  tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl));
  bool all = (lookup_attribute ("saveall", attrs) != NULL_TREE
              || (is_inthandler && !current_function_is_leaf));
  int ndregs = all ? 8 : n_dregs_to_save (is_inthandler);
  int npregs = all ? 6 : n_pregs_to_save (is_inthandler);
  int n = ndregs + npregs;

  if (all || stack_frame_needed_p ())
    /* We use a LINK instruction in this case.  */
    n += 2;
  else
    {
      /* Otherwise FP and RETS are pushed individually as needed.  */
      if (must_save_fp_p ())
        n++;
      if (! current_function_is_leaf)
        n++;
    }

  if (fkind != SUBROUTINE)
    {
      int i;

      /* Increment once for ASTAT.  */
      n++;

      /* RETE/X/N.  */
      if (lookup_attribute ("nesting", attrs))
        n++;

      /* Count the extended registers saved by the handler prologue;
         the accumulators A0/A1 occupy two slots each (PDImode).  */
      for (i = REG_P7 + 1; i < REG_CC; i++)
        if (all
            || regs_ever_live[i]
            || (!leaf_function_p () && call_used_regs[i]))
          n += i == REG_A0 || i == REG_A1 ? 2 : 1;
    }
  return n;
}
508
 
509
/* Return the offset between two registers, one to be eliminated, and the other
510
   its replacement, at the start of a routine.  */
511
 
512
HOST_WIDE_INT
513
bfin_initial_elimination_offset (int from, int to)
514
{
515
  HOST_WIDE_INT offset = 0;
516
 
517
  if (from == ARG_POINTER_REGNUM)
518
    offset = n_regs_saved_by_prologue () * 4;
519
 
520
  if (to == STACK_POINTER_REGNUM)
521
    {
522
      if (current_function_outgoing_args_size >= FIXED_STACK_AREA)
523
        offset += current_function_outgoing_args_size;
524
      else if (current_function_outgoing_args_size)
525
        offset += FIXED_STACK_AREA;
526
 
527
      offset += get_frame_size ();
528
    }
529
 
530
  return offset;
531
}
532
 
533
/* Emit code to load a constant CONSTANT into register REG; setting
   RTX_FRAME_RELATED_P on all insns we generate if RELATED is true.
   Make sure that the insns we generate need not be split.  */

static void
frame_related_constant_load (rtx reg, HOST_WIDE_INT constant, bool related)
{
  rtx insn;
  rtx cst = GEN_INT (constant);

  /* Values in [-32768, 65535] fit a single move; anything else needs
     a movsi_high / movsi_low pair.  */
  if (constant >= -32768 && constant < 65536)
    insn = emit_move_insn (reg, cst);
  else
    {
      /* We don't call split_load_immediate here, since dwarf2out.c can get
         confused about some of the more clever sequences it can generate.  */
      insn = emit_insn (gen_movsi_high (reg, cst));
      if (related)
        RTX_FRAME_RELATED_P (insn) = 1;
      insn = emit_insn (gen_movsi_low (reg, reg, cst));
    }
  /* Mark the final (or only) insn; the high half was marked above.  */
  if (related)
    RTX_FRAME_RELATED_P (insn) = 1;
}
557
 
558
/* Generate efficient code to add a value to the frame pointer.  We
   can use P1 as a scratch register.  Set RTX_FRAME_RELATED_P on the
   generated insns if FRAME is nonzero.  */

static void
add_to_sp (rtx spreg, HOST_WIDE_INT value, int frame)
{
  if (value == 0)
    return;

  /* Choose whether to use a sequence using a temporary register, or
     a sequence with multiple adds.  We can add a signed 7 bit value
     in one instruction.  */
  if (value > 120 || value < -120)
    {
      /* Large adjustment: load VALUE into scratch P1, then one add.  */
      rtx tmpreg = gen_rtx_REG (SImode, REG_P1);
      rtx insn;

      if (frame)
        frame_related_constant_load (tmpreg, value, TRUE);
      else
        {
          insn = emit_move_insn (tmpreg, GEN_INT (value));
          if (frame)
            RTX_FRAME_RELATED_P (insn) = 1;
        }

      insn = emit_insn (gen_addsi3 (spreg, spreg, tmpreg));
      if (frame)
        RTX_FRAME_RELATED_P (insn) = 1;
    }
  else
    /* Small adjustment: emit one or more immediate adds, in chunks
       that keep the stack 4-byte aligned.  */
    do
      {
        int size = value;
        rtx insn;

        if (size > 60)
          size = 60;
        else if (size < -60)
          /* We could use -62, but that would leave the stack unaligned, so
             it's no good.  */
          size = -60;

        insn = emit_insn (gen_addsi3 (spreg, spreg, GEN_INT (size)));
        if (frame)
          RTX_FRAME_RELATED_P (insn) = 1;
        value -= size;
      }
    while (value != 0);
}
609
 
610
/* Generate a LINK insn for a frame sized FRAME_SIZE.  If this constant
   is too large, generate a sequence of insns that has the same effect.
   SPREG contains (reg:SI REG_SP).  */

static void
emit_link_insn (rtx spreg, HOST_WIDE_INT frame_size)
{
  HOST_WIDE_INT link_size = frame_size;
  rtx insn;
  int i;

  /* LINK's immediate operand is limited; clamp and handle the rest below.  */
  if (link_size > 262140)
    link_size = 262140;

  /* Use a LINK insn with as big a constant as possible, then subtract
     any remaining size from the SP.  The extra 8 covers the saved
     FP and RETS slots that LINK itself pushes.  */
  insn = emit_insn (gen_link (GEN_INT (-8 - link_size)));
  RTX_FRAME_RELATED_P (insn) = 1;

  /* Mark every SET inside the LINK parallel as frame-related so the
     unwinder sees the FP/RETS saves and the SP adjustment.  */
  for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
    {
      rtx set = XVECEXP (PATTERN (insn), 0, i);
      gcc_assert (GET_CODE (set) == SET);
      RTX_FRAME_RELATED_P (set) = 1;
    }

  frame_size -= link_size;

  if (frame_size > 0)
    {
      /* Must use a call-clobbered PREG that isn't the static chain.  */
      rtx tmpreg = gen_rtx_REG (Pmode, REG_P1);

      frame_related_constant_load (tmpreg, -frame_size, TRUE);
      insn = emit_insn (gen_addsi3 (spreg, spreg, tmpreg));
      RTX_FRAME_RELATED_P (insn) = 1;
    }
}
648
 
649
/* Return the number of bytes we must reserve for outgoing arguments
650
   in the current function's stack frame.  */
651
 
652
static HOST_WIDE_INT
653
arg_area_size (void)
654
{
655
  if (current_function_outgoing_args_size)
656
    {
657
      if (current_function_outgoing_args_size >= FIXED_STACK_AREA)
658
        return current_function_outgoing_args_size;
659
      else
660
        return FIXED_STACK_AREA;
661
    }
662
  return 0;
663
}
664
 
665
/* Save RETS and FP, and allocate a stack frame.  ALL is true if the
   function must save all its registers (true only for certain interrupt
   handlers).  */

static void
do_link (rtx spreg, HOST_WIDE_INT frame_size, bool all)
{
  frame_size += arg_area_size ();

  /* Use a real LINK when a frame is required; otherwise push only what
     is needed and adjust SP directly.  */
  if (all || stack_frame_needed_p ()
      || (must_save_fp_p () && ! current_function_is_leaf))
    emit_link_insn (spreg, frame_size);
  else
    {
      /* Non-leaf: RETS is clobbered by calls, so push it.  */
      if (! current_function_is_leaf)
        {
          rtx pat = gen_movsi (gen_rtx_MEM (Pmode,
                                            gen_rtx_PRE_DEC (Pmode, spreg)),
                               bfin_rets_rtx);
          rtx insn = emit_insn (pat);
          RTX_FRAME_RELATED_P (insn) = 1;
        }
      if (must_save_fp_p ())
        {
          rtx pat = gen_movsi (gen_rtx_MEM (Pmode,
                                            gen_rtx_PRE_DEC (Pmode, spreg)),
                               gen_rtx_REG (Pmode, REG_FP));
          rtx insn = emit_insn (pat);
          RTX_FRAME_RELATED_P (insn) = 1;
        }
      add_to_sp (spreg, -frame_size, 1);
    }
}
698
 
699
/* Like do_link, but used for epilogues to deallocate the stack frame.  */

static void
do_unlink (rtx spreg, HOST_WIDE_INT frame_size, bool all)
{
  frame_size += arg_area_size ();

  if (all || stack_frame_needed_p ())
    emit_insn (gen_unlink ());
  else
    {
      rtx postinc = gen_rtx_MEM (Pmode, gen_rtx_POST_INC (Pmode, spreg));

      /* Pop in the reverse order of do_link: SP adjust, then FP,
         then RETS.  */
      add_to_sp (spreg, frame_size, 0);
      if (must_save_fp_p ())
        {
          rtx fpreg = gen_rtx_REG (Pmode, REG_FP);
          emit_move_insn (fpreg, postinc);
          /* The USE keeps the restore from being deleted as dead.  */
          emit_insn (gen_rtx_USE (VOIDmode, fpreg));
        }
      if (! current_function_is_leaf)
        {
          emit_move_insn (bfin_rets_rtx, postinc);
          emit_insn (gen_rtx_USE (VOIDmode, bfin_rets_rtx));
        }
    }
}
726
 
727
/* Generate a prologue suitable for a function of kind FKIND.  This is
   called for interrupt and exception handler prologues.
   SPREG contains (reg:SI REG_SP).  */

static void
expand_interrupt_handler_prologue (rtx spreg, e_funkind fkind)
{
  int i;
  HOST_WIDE_INT frame_size = get_frame_size ();
  rtx predec1 = gen_rtx_PRE_DEC (SImode, spreg);
  rtx predec = gen_rtx_MEM (SImode, predec1);
  rtx insn;
  tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl));
  bool all = lookup_attribute ("saveall", attrs) != NULL_TREE;
  tree kspisusp = lookup_attribute ("kspisusp", attrs);

  /* "kspisusp": the kernel stack pointer is saved in USP; reload SP
     from there first.  */
  if (kspisusp)
    {
      insn = emit_move_insn (spreg, gen_rtx_REG (Pmode, REG_USP));
      RTX_FRAME_RELATED_P (insn) = 1;
    }

  /* We need space on the stack in case we need to save the argument
     registers.  */
  if (fkind == EXCPT_HANDLER)
    {
      insn = emit_insn (gen_addsi3 (spreg, spreg, GEN_INT (-12)));
      RTX_FRAME_RELATED_P (insn) = 1;
    }

  /* Push the status register first.  */
  insn = emit_move_insn (predec, gen_rtx_REG (SImode, REG_ASTAT));
  RTX_FRAME_RELATED_P (insn) = 1;

  /* If we're calling other functions, they won't save their call-clobbered
     registers, so we must save everything here.  */
  if (!current_function_is_leaf)
    all = true;
  expand_prologue_reg_save (spreg, all, true);

  /* Push the extended registers (I/B/L/M, accumulators, etc.) that the
     handler may clobber; A0/A1 are pushed in PDImode (two slots).  */
  for (i = REG_P7 + 1; i < REG_CC; i++)
    if (all
        || regs_ever_live[i]
        || (!leaf_function_p () && call_used_regs[i]))
      {
        if (i == REG_A0 || i == REG_A1)
          insn = emit_move_insn (gen_rtx_MEM (PDImode, predec1),
                                 gen_rtx_REG (PDImode, i));
        else
          insn = emit_move_insn (predec, gen_rtx_REG (SImode, i));
        RTX_FRAME_RELATED_P (insn) = 1;
      }

  /* "nesting": save the appropriate return-address register so nested
     interrupts can be re-enabled.  */
  if (lookup_attribute ("nesting", attrs))
    {
      rtx srcreg = gen_rtx_REG (Pmode, (fkind == EXCPT_HANDLER ? REG_RETX
                                        : fkind == NMI_HANDLER ? REG_RETN
                                        : REG_RETI));
      insn = emit_move_insn (predec, srcreg);
      RTX_FRAME_RELATED_P (insn) = 1;
    }

  do_link (spreg, frame_size, all);

  if (fkind == EXCPT_HANDLER)
    {
      /* Set up R0..R2 for the exception dispatcher; each insn gets a
         REG_MAYBE_DEAD note since the values may go unused.  */
      rtx r0reg = gen_rtx_REG (SImode, REG_R0);
      rtx r1reg = gen_rtx_REG (SImode, REG_R1);
      rtx r2reg = gen_rtx_REG (SImode, REG_R2);
      rtx insn;

      /* R0 = SEQSTAT with all but the top 6 bits cleared (shift right
         then left by 26).  */
      insn = emit_move_insn (r0reg, gen_rtx_REG (SImode, REG_SEQSTAT));
      REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx,
                                            NULL_RTX);
      insn = emit_insn (gen_ashrsi3 (r0reg, r0reg, GEN_INT (26)));
      REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx,
                                            NULL_RTX);
      insn = emit_insn (gen_ashlsi3 (r0reg, r0reg, GEN_INT (26)));
      REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx,
                                            NULL_RTX);
      insn = emit_move_insn (r1reg, spreg);
      REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx,
                                            NULL_RTX);
      insn = emit_move_insn (r2reg, gen_rtx_REG (Pmode, REG_FP));
      REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx,
                                            NULL_RTX);
      insn = emit_insn (gen_addsi3 (r2reg, r2reg, GEN_INT (8)));
      REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx,
                                            NULL_RTX);
    }
}
817
 
818
/* Generate an epilogue suitable for a function of kind FKIND.  This is
   called for interrupt and exception handler epilogues.
   SPREG contains (reg:SI REG_SP).  */

static void
expand_interrupt_handler_epilogue (rtx spreg, e_funkind fkind)
{
  int i;
  rtx postinc1 = gen_rtx_POST_INC (SImode, spreg);
  rtx postinc = gen_rtx_MEM (SImode, postinc1);
  tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl));
  bool all = lookup_attribute ("saveall", attrs) != NULL_TREE;

  /* A slightly crude technique to stop flow from trying to delete "dead"
     insns.  */
  MEM_VOLATILE_P (postinc) = 1;

  do_unlink (spreg, get_frame_size (), all);

  /* Restore in strict reverse order of the prologue: nesting return
     register, extended registers, D/P registers, then ASTAT.  */
  if (lookup_attribute ("nesting", attrs))
    {
      rtx srcreg = gen_rtx_REG (Pmode, (fkind == EXCPT_HANDLER ? REG_RETX
                                        : fkind == NMI_HANDLER ? REG_RETN
                                        : REG_RETI));
      emit_move_insn (srcreg, postinc);
    }

  /* If we're calling other functions, they won't save their call-clobbered
     registers, so we must save (and restore) everything here.  */
  if (!current_function_is_leaf)
    all = true;

  for (i = REG_CC - 1; i > REG_P7; i--)
    if (all
        || regs_ever_live[i]
        || (!leaf_function_p () && call_used_regs[i]))
      {
        if (i == REG_A0 || i == REG_A1)
          {
            /* Accumulators are popped in PDImode (two slots).  */
            rtx mem = gen_rtx_MEM (PDImode, postinc1);
            MEM_VOLATILE_P (mem) = 1;
            emit_move_insn (gen_rtx_REG (PDImode, i), mem);
          }
        else
          emit_move_insn (gen_rtx_REG (SImode, i), postinc);
      }

  expand_epilogue_reg_restore (spreg, all, true);

  emit_move_insn (gen_rtx_REG (SImode, REG_ASTAT), postinc);

  /* Deallocate any space we left on the stack in case we needed to save the
     argument registers.  */
  if (fkind == EXCPT_HANDLER)
    emit_insn (gen_addsi3 (spreg, spreg, GEN_INT (12)));

  /* Return with the RTI/RTX/RTN appropriate for this handler kind.  */
  emit_jump_insn (gen_return_internal (GEN_INT (fkind)));
}
876
 
877
/* Used while emitting the prologue to generate code to load the correct value
   into the PIC register, which is passed in DEST.  Returns the register
   actually holding the PIC pointer (DEST, or pic_offset_table_rtx when no
   reload was needed).  */

static rtx
bfin_load_pic_reg (rtx dest)
{
  struct cgraph_local_info *i = NULL;
  rtx addr, insn;

  /* Call-graph local info is only available in unit-at-a-time mode.  */
  if (flag_unit_at_a_time)
    i = cgraph_local_info (current_function_decl);

  /* Functions local to the translation unit don't need to reload the
     pic reg, since the caller always passes a usable one.  */
  if (i && i->local)
    return pic_offset_table_rtx;

  /* With a fixed library id, the GOT address sits at a known negative
     offset from the incoming PIC pointer; otherwise use the
     UNSPEC_LIBRARY_OFFSET relocation.  */
  if (bfin_lib_id_given)
    addr = plus_constant (pic_offset_table_rtx, -4 - bfin_library_id * 4);
  else
    addr = gen_rtx_PLUS (Pmode, pic_offset_table_rtx,
                         gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx),
                                         UNSPEC_LIBRARY_OFFSET));
  insn = emit_insn (gen_movsi (dest, gen_rtx_MEM (Pmode, addr)));
  /* Mark the load as possibly dead; later passes may delete it.  */
  REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx, NULL);
  return dest;
}
904
 
905
/* Generate RTL for the prologue of the current function.  */
906
 
907
void
908
bfin_expand_prologue (void)
909
{
910
  rtx insn;
911
  HOST_WIDE_INT frame_size = get_frame_size ();
912
  rtx spreg = gen_rtx_REG (Pmode, REG_SP);
913
  e_funkind fkind = funkind (TREE_TYPE (current_function_decl));
914
  rtx pic_reg_loaded = NULL_RTX;
915
 
916
  if (fkind != SUBROUTINE)
917
    {
918
      expand_interrupt_handler_prologue (spreg, fkind);
919
      return;
920
    }
921
 
922
  if (current_function_limit_stack)
923
    {
924
      HOST_WIDE_INT offset
925
        = bfin_initial_elimination_offset (ARG_POINTER_REGNUM,
926
                                           STACK_POINTER_REGNUM);
927
      rtx lim = stack_limit_rtx;
928
 
929
      if (GET_CODE (lim) == SYMBOL_REF)
930
        {
931
          rtx p2reg = gen_rtx_REG (Pmode, REG_P2);
932
          if (TARGET_ID_SHARED_LIBRARY)
933
            {
934
              rtx p1reg = gen_rtx_REG (Pmode, REG_P1);
935
              rtx val;
936
              pic_reg_loaded = bfin_load_pic_reg (p2reg);
937
              val = legitimize_pic_address (stack_limit_rtx, p1reg,
938
                                            pic_reg_loaded);
939
              emit_move_insn (p1reg, val);
940
              frame_related_constant_load (p2reg, offset, FALSE);
941
              emit_insn (gen_addsi3 (p2reg, p2reg, p1reg));
942
              lim = p2reg;
943
            }
944
          else
945
            {
946
              rtx limit = plus_constant (stack_limit_rtx, offset);
947
              emit_move_insn (p2reg, limit);
948
              lim = p2reg;
949
            }
950
        }
951
      emit_insn (gen_compare_lt (bfin_cc_rtx, spreg, lim));
952
      emit_insn (gen_trapifcc ());
953
    }
954
  expand_prologue_reg_save (spreg, 0, false);
955
 
956
  do_link (spreg, frame_size, false);
957
 
958
  if (TARGET_ID_SHARED_LIBRARY
959
      && (current_function_uses_pic_offset_table
960
          || !current_function_is_leaf))
961
    bfin_load_pic_reg (pic_offset_table_rtx);
962
}
963
 
964
/* Generate RTL for the epilogue of the current function.  NEED_RETURN is zero
965
   if this is for a sibcall.  EH_RETURN is nonzero if we're expanding an
966
   eh_return pattern.  */
967
 
968
void
969
bfin_expand_epilogue (int need_return, int eh_return)
970
{
971
  rtx spreg = gen_rtx_REG (Pmode, REG_SP);
972
  e_funkind fkind = funkind (TREE_TYPE (current_function_decl));
973
 
974
  if (fkind != SUBROUTINE)
975
    {
976
      expand_interrupt_handler_epilogue (spreg, fkind);
977
      return;
978
    }
979
 
980
  do_unlink (spreg, get_frame_size (), false);
981
 
982
  expand_epilogue_reg_restore (spreg, false, false);
983
 
984
  /* Omit the return insn if this is for a sibcall.  */
985
  if (! need_return)
986
    return;
987
 
988
  if (eh_return)
989
    emit_insn (gen_addsi3 (spreg, spreg, gen_rtx_REG (Pmode, REG_P2)));
990
 
991
  emit_jump_insn (gen_return_internal (GEN_INT (SUBROUTINE)));
992
}
993
 
994
/* Return nonzero if register OLD_REG can be renamed to register NEW_REG.  */
995
 
996
int
997
bfin_hard_regno_rename_ok (unsigned int old_reg ATTRIBUTE_UNUSED,
998
                           unsigned int new_reg)
999
{
1000
  /* Interrupt functions can only use registers that have already been
1001
     saved by the prologue, even if they would normally be
1002
     call-clobbered.  */
1003
 
1004
  if (funkind (TREE_TYPE (current_function_decl)) != SUBROUTINE
1005
      && !regs_ever_live[new_reg])
1006
    return 0;
1007
 
1008
  return 1;
1009
}
1010
 
1011
/* Return the value of the return address for the frame COUNT steps up
1012
   from the current frame, after the prologue.
1013
   We punt for everything but the current frame by returning const0_rtx.  */
1014
 
1015
rtx
1016
bfin_return_addr_rtx (int count)
1017
{
1018
  if (count != 0)
1019
    return const0_rtx;
1020
 
1021
  return get_hard_reg_initial_val (Pmode, REG_RETS);
1022
}
1023
 
1024
/* Try machine-dependent ways of modifying an illegitimate address X
   to be legitimate.  If we find one, return the new, valid address,
   otherwise return NULL_RTX.

   OLDX is the address as it was before break_out_memory_refs was called.
   In some cases it is useful to look at this to decide what needs to be done.

   MODE is the mode of the memory reference.  */

rtx
legitimize_address (rtx x ATTRIBUTE_UNUSED, rtx oldx ATTRIBUTE_UNUSED,
                    enum machine_mode mode ATTRIBUTE_UNUSED)
{
  /* The Blackfin port has no special legitimization; always punt and let
     the generic code handle the address.  */
  return NULL_RTX;
}
1039
 
1040
static rtx
1041
bfin_delegitimize_address (rtx orig_x)
1042
{
1043
  rtx x = orig_x, y;
1044
 
1045
  if (GET_CODE (x) != MEM)
1046
    return orig_x;
1047
 
1048
  x = XEXP (x, 0);
1049
  if (GET_CODE (x) == PLUS
1050
      && GET_CODE (XEXP (x, 1)) == UNSPEC
1051
      && XINT (XEXP (x, 1), 1) == UNSPEC_MOVE_PIC
1052
      && GET_CODE (XEXP (x, 0)) == REG
1053
      && REGNO (XEXP (x, 0)) == PIC_OFFSET_TABLE_REGNUM)
1054
    return XVECEXP (XEXP (x, 1), 0, 0);
1055
 
1056
  return orig_x;
1057
}
1058
 
1059
/* This predicate is used to compute the length of a load/store insn.
   OP is a MEM rtx, we return nonzero if its addressing mode requires a
   32 bit instruction.  */

int
effective_address_32bit_p (rtx op, enum machine_mode mode)
{
  HOST_WIDE_INT offset;

  /* NOTE(review): the incoming MODE parameter is immediately overwritten
     with the mode of OP itself, so the parameter value is never used —
     presumably callers pass GET_MODE (op) anyway; confirm before relying
     on passing a different mode here.  */
  mode = GET_MODE (op);
  op = XEXP (op, 0);

  /* Plain register and auto-inc/dec addresses always fit in 16 bits.  */
  if (GET_CODE (op) != PLUS)
    {
      gcc_assert (REG_P (op) || GET_CODE (op) == POST_INC
                  || GET_CODE (op) == PRE_DEC || GET_CODE (op) == POST_DEC);
      return 0;
    }

  offset = INTVAL (XEXP (op, 1));

  /* All byte loads use a 16 bit offset.  */
  if (GET_MODE_SIZE (mode) == 1)
    return 1;

  if (GET_MODE_SIZE (mode) == 4)
    {
      /* Frame pointer relative loads can use a negative offset, all others
         are restricted to a small positive one.  */
      if (XEXP (op, 0) == frame_pointer_rtx)
        return offset < -128 || offset > 60;
      return offset < 0 || offset > 60;
    }

  /* Must be HImode now.  */
  return offset < 0 || offset > 30;
}
1096
 
1097
/* Returns true if X is a memory reference using an I register.  */
1098
bool
1099
bfin_dsp_memref_p (rtx x)
1100
{
1101
  if (! MEM_P (x))
1102
    return false;
1103
  x = XEXP (x, 0);
1104
  if (GET_CODE (x) == POST_INC || GET_CODE (x) == PRE_INC
1105
      || GET_CODE (x) == POST_DEC || GET_CODE (x) == PRE_DEC)
1106
    x = XEXP (x, 0);
1107
  return IREG_P (x);
1108
}
1109
 
1110
/* Return cost of the memory address ADDR.
   All addressing modes are equally cheap on the Blackfin.  */

static int
bfin_address_cost (rtx addr ATTRIBUTE_UNUSED)
{
  /* Flat cost: no addressing mode is preferred over another.  */
  return 1;
}
1118
 
1119
/* Subroutine of print_operand; used to print a memory reference X to FILE.  */
1120
 
1121
void
1122
print_address_operand (FILE *file, rtx x)
1123
{
1124
  switch (GET_CODE (x))
1125
    {
1126
    case PLUS:
1127
      output_address (XEXP (x, 0));
1128
      fprintf (file, "+");
1129
      output_address (XEXP (x, 1));
1130
      break;
1131
 
1132
    case PRE_DEC:
1133
      fprintf (file, "--");
1134
      output_address (XEXP (x, 0));
1135
      break;
1136
    case POST_INC:
1137
      output_address (XEXP (x, 0));
1138
      fprintf (file, "++");
1139
      break;
1140
    case POST_DEC:
1141
      output_address (XEXP (x, 0));
1142
      fprintf (file, "--");
1143
      break;
1144
 
1145
    default:
1146
      gcc_assert (GET_CODE (x) != MEM);
1147
      print_operand (file, x, 0);
1148
      break;
1149
    }
1150
}
1151
 
1152
/* Adding intp DImode support by Tony
 * -- Q: (low  word)
 * -- R: (high word)
 */

/* Print operand X to FILE, applying modifier CODE.
   Modifiers handled here:
     'j'/'J'  condition-code suffix for a comparison rtx ('J' is the
              reversed condition);
     'h'/'d'  low/high half of a D register, or low/high 16 bits of a
              constant;
     'w'/'x'  .w/.x part of an accumulator register (A0/A1);
     'D'      D-register pair name;
     'H'      the second register of a DImode/DFmode register pair;
     'T'      byte register name of a D register;
     'M'      MAC flag suffix such as "(FU)" for a constant flag value;
     'b'      "+=" or "-=" depending on a 0/1 constant;
     'X'/'Y'  log2 of the constant / of its complement;
     'Z'      LINK-insn frame size encoding (-8 - value).  */

void
print_operand (FILE *file, rtx x, char code)
{
  enum machine_mode mode = GET_MODE (x);

  switch (code)
    {
    case 'j':
      /* Print the branch condition suffix for a comparison code.
         NOTE(review): unsigned codes (GTU/LTU/GEU/LEU) print the same
         suffixes as the signed ones — presumably the CC bit was already
         set by an unsigned compare; confirm against the compare
         patterns.  */
      switch (GET_CODE (x))
        {
        case EQ:
          fprintf (file, "e");
          break;
        case NE:
          fprintf (file, "ne");
          break;
        case GT:
          fprintf (file, "g");
          break;
        case LT:
          fprintf (file, "l");
          break;
        case GE:
          fprintf (file, "ge");
          break;
        case LE:
          fprintf (file, "le");
          break;
        case GTU:
          fprintf (file, "g");
          break;
        case LTU:
          fprintf (file, "l");
          break;
        case GEU:
          fprintf (file, "ge");
          break;
        case LEU:
          fprintf (file, "le");
          break;
        default:
          output_operand_lossage ("invalid %%j value");
        }
      break;

    case 'J':                                    /* reverse logic */
      switch (GET_CODE(x))
        {
        case EQ:
          fprintf (file, "ne");
          break;
        case NE:
          fprintf (file, "e");
          break;
        case GT:
          fprintf (file, "le");
          break;
        case LT:
          fprintf (file, "ge");
          break;
        case GE:
          fprintf (file, "l");
          break;
        case LE:
          fprintf (file, "g");
          break;
        case GTU:
          fprintf (file, "le");
          break;
        case LTU:
          fprintf (file, "ge");
          break;
        case GEU:
          fprintf (file, "l");
          break;
        case LEU:
          fprintf (file, "g");
          break;
        default:
          output_operand_lossage ("invalid %%J value");
        }
      break;

    default:
      switch (GET_CODE (x))
        {
        case REG:
          if (code == 'h')
            {
              gcc_assert (REGNO (x) < 32);
              fprintf (file, "%s", short_reg_names[REGNO (x)]);
              /*fprintf (file, "\n%d\n ", REGNO (x));*/
              break;
            }
          else if (code == 'd')
            {
              gcc_assert (REGNO (x) < 32);
              fprintf (file, "%s", high_reg_names[REGNO (x)]);
              break;
            }
          else if (code == 'w')
            {
              gcc_assert (REGNO (x) == REG_A0 || REGNO (x) == REG_A1);
              fprintf (file, "%s.w", reg_names[REGNO (x)]);
            }
          else if (code == 'x')
            {
              gcc_assert (REGNO (x) == REG_A0 || REGNO (x) == REG_A1);
              fprintf (file, "%s.x", reg_names[REGNO (x)]);
            }
          else if (code == 'D')
            {
              fprintf (file, "%s", dregs_pair_names[REGNO (x)]);
            }
          else if (code == 'H')
            {
              /* Second register of a 64-bit register pair.  */
              gcc_assert (mode == DImode || mode == DFmode);
              gcc_assert (REG_P (x));
              fprintf (file, "%s", reg_names[REGNO (x) + 1]);
            }
          else if (code == 'T')
            {
              gcc_assert (D_REGNO_P (REGNO (x)));
              fprintf (file, "%s", byte_reg_names[REGNO (x)]);
            }
          else
            fprintf (file, "%s", reg_names[REGNO (x)]);
          break;

        case MEM:
          /* Memory operands are printed in brackets.  */
          fputc ('[', file);
          x = XEXP (x,0);
          print_address_operand (file, x);
          fputc (']', file);
          break;

        case CONST_INT:
          if (code == 'M')
            {
              /* MAC flag: print the assembler suffix for the flag value.  */
              switch (INTVAL (x))
                {
                case MACFLAG_NONE:
                  break;
                case MACFLAG_FU:
                  fputs ("(FU)", file);
                  break;
                case MACFLAG_T:
                  fputs ("(T)", file);
                  break;
                case MACFLAG_TFU:
                  fputs ("(TFU)", file);
                  break;
                case MACFLAG_W32:
                  fputs ("(W32)", file);
                  break;
                case MACFLAG_IS:
                  fputs ("(IS)", file);
                  break;
                case MACFLAG_IU:
                  fputs ("(IU)", file);
                  break;
                case MACFLAG_IH:
                  fputs ("(IH)", file);
                  break;
                case MACFLAG_M:
                  fputs ("(M)", file);
                  break;
                case MACFLAG_ISS2:
                  fputs ("(ISS2)", file);
                  break;
                case MACFLAG_S2RND:
                  fputs ("(S2RND)", file);
                  break;
                default:
                  gcc_unreachable ();
                }
              break;
            }
          else if (code == 'b')
            {
              if (INTVAL (x) == 0)
                fputs ("+=", file);
              else if (INTVAL (x) == 1)
                fputs ("-=", file);
              else
                gcc_unreachable ();
              break;
            }
          /* Moves to half registers with d or h modifiers always use unsigned
             constants.  */
          else if (code == 'd')
            x = GEN_INT ((INTVAL (x) >> 16) & 0xffff);
          else if (code == 'h')
            x = GEN_INT (INTVAL (x) & 0xffff);
          else if (code == 'X')
            x = GEN_INT (exact_log2 (0xffffffff & INTVAL (x)));
          else if (code == 'Y')
            x = GEN_INT (exact_log2 (0xffffffff & ~INTVAL (x)));
          else if (code == 'Z')
            /* Used for LINK insns.  */
            x = GEN_INT (-8 - INTVAL (x));

          /* fall through */

        case SYMBOL_REF:
          output_addr_const (file, x);
          break;

        case CONST_DOUBLE:
          output_operand_lossage ("invalid const_double operand");
          break;

        case UNSPEC:
          /* PIC/FDPIC relocations: symbol plus a relocation suffix.  */
          switch (XINT (x, 1))
            {
            case UNSPEC_MOVE_PIC:
              output_addr_const (file, XVECEXP (x, 0, 0));
              fprintf (file, "@GOT");
              break;

            case UNSPEC_MOVE_FDPIC:
              output_addr_const (file, XVECEXP (x, 0, 0));
              fprintf (file, "@GOT17M4");
              break;

            case UNSPEC_FUNCDESC_GOT17M4:
              output_addr_const (file, XVECEXP (x, 0, 0));
              fprintf (file, "@FUNCDESC_GOT17M4");
              break;

            case UNSPEC_LIBRARY_OFFSET:
              fprintf (file, "_current_shared_library_p5_offset_");
              break;

            default:
              gcc_unreachable ();
            }
          break;

        default:
          output_addr_const (file, x);
        }
    }
}
1401
 
1402
/* Argument support functions.  */
1403
 
1404
/* Initialize a variable CUM of type CUMULATIVE_ARGS
1405
   for a call to a function whose data type is FNTYPE.
1406
   For a library call, FNTYPE is 0.
1407
   VDSP C Compiler manual, our ABI says that
1408
   first 3 words of arguments will use R0, R1 and R2.
1409
*/
1410
 
1411
void
1412
init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype,
1413
                      rtx libname ATTRIBUTE_UNUSED)
1414
{
1415
  static CUMULATIVE_ARGS zero_cum;
1416
 
1417
  *cum = zero_cum;
1418
 
1419
  /* Set up the number of registers to use for passing arguments.  */
1420
 
1421
  cum->nregs = max_arg_registers;
1422
  cum->arg_regs = arg_regs;
1423
 
1424
  cum->call_cookie = CALL_NORMAL;
1425
  /* Check for a longcall attribute.  */
1426
  if (fntype && lookup_attribute ("shortcall", TYPE_ATTRIBUTES (fntype)))
1427
    cum->call_cookie |= CALL_SHORT;
1428
  else if (fntype && lookup_attribute ("longcall", TYPE_ATTRIBUTES (fntype)))
1429
    cum->call_cookie |= CALL_LONG;
1430
 
1431
  return;
1432
}
1433
 
1434
/* Update the data in CUM to advance over an argument
1435
   of mode MODE and data type TYPE.
1436
   (TYPE is null for libcalls where that information may not be available.)  */
1437
 
1438
void
1439
function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type,
1440
                      int named ATTRIBUTE_UNUSED)
1441
{
1442
  int count, bytes, words;
1443
 
1444
  bytes = (mode == BLKmode) ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
1445
  words = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
1446
 
1447
  cum->words += words;
1448
  cum->nregs -= words;
1449
 
1450
  if (cum->nregs <= 0)
1451
    {
1452
      cum->nregs = 0;
1453
      cum->arg_regs = NULL;
1454
    }
1455
  else
1456
    {
1457
      for (count = 1; count <= words; count++)
1458
        cum->arg_regs++;
1459
    }
1460
 
1461
  return;
1462
}
1463
 
1464
/* Define where to put the arguments to a function.
1465
   Value is zero to push the argument on the stack,
1466
   or a hard register in which to store the argument.
1467
 
1468
   MODE is the argument's machine mode.
1469
   TYPE is the data type of the argument (as a tree).
1470
    This is null for libcalls where that information may
1471
    not be available.
1472
   CUM is a variable of type CUMULATIVE_ARGS which gives info about
1473
    the preceding args and about the function being called.
1474
   NAMED is nonzero if this argument is a named parameter
1475
    (otherwise it is an extra parameter matching an ellipsis).  */
1476
 
1477
struct rtx_def *
1478
function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type,
1479
              int named ATTRIBUTE_UNUSED)
1480
{
1481
  int bytes
1482
    = (mode == BLKmode) ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
1483
 
1484
  if (mode == VOIDmode)
1485
    /* Compute operand 2 of the call insn.  */
1486
    return GEN_INT (cum->call_cookie);
1487
 
1488
  if (bytes == -1)
1489
    return NULL_RTX;
1490
 
1491
  if (cum->nregs)
1492
    return gen_rtx_REG (mode, *(cum->arg_regs));
1493
 
1494
  return NULL_RTX;
1495
}
1496
 
1497
/* For an arg passed partly in registers and partly in memory,
1498
   this is the number of bytes passed in registers.
1499
   For args passed entirely in registers or entirely in memory, zero.
1500
 
1501
   Refer VDSP C Compiler manual, our ABI.
1502
   First 3 words are in registers. So, if a an argument is larger
1503
   than the registers available, it will span the register and
1504
   stack.   */
1505
 
1506
static int
1507
bfin_arg_partial_bytes (CUMULATIVE_ARGS *cum, enum machine_mode mode,
1508
                        tree type ATTRIBUTE_UNUSED,
1509
                        bool named ATTRIBUTE_UNUSED)
1510
{
1511
  int bytes
1512
    = (mode == BLKmode) ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
1513
  int bytes_left = cum->nregs * UNITS_PER_WORD;
1514
 
1515
  if (bytes == -1)
1516
    return 0;
1517
 
1518
  if (bytes_left == 0)
1519
    return 0;
1520
  if (bytes > bytes_left)
1521
    return bytes_left;
1522
  return 0;
1523
}
1524
 
1525
/* Variable sized types are passed by reference.  */
1526
 
1527
static bool
1528
bfin_pass_by_reference (CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED,
1529
                        enum machine_mode mode ATTRIBUTE_UNUSED,
1530
                        tree type, bool named ATTRIBUTE_UNUSED)
1531
{
1532
  return type && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST;
1533
}
1534
 
1535
/* Decide whether a type should be returned in memory (true)
1536
   or in a register (false).  This is called by the macro
1537
   RETURN_IN_MEMORY.  */
1538
 
1539
int
1540
bfin_return_in_memory (tree type)
1541
{
1542
  int size = int_size_in_bytes (type);
1543
  return size > 2 * UNITS_PER_WORD || size == -1;
1544
}
1545
 
1546
/* Register in which address to store a structure value
   is passed to a function.  */
static rtx
bfin_struct_value_rtx (tree fntype ATTRIBUTE_UNUSED,
                      int incoming ATTRIBUTE_UNUSED)
{
  /* The hidden struct-return pointer always travels in P0.  */
  return gen_rtx_REG (Pmode, REG_P0);
}
1554
 
1555
/* Return true when register may be used to pass function parameters.  */
1556
 
1557
bool
1558
function_arg_regno_p (int n)
1559
{
1560
  int i;
1561
  for (i = 0; arg_regs[i] != -1; i++)
1562
    if (n == arg_regs[i])
1563
      return true;
1564
  return false;
1565
}
1566
 
1567
/* Returns 1 if OP contains a symbol reference */
1568
 
1569
int
1570
symbolic_reference_mentioned_p (rtx op)
1571
{
1572
  register const char *fmt;
1573
  register int i;
1574
 
1575
  if (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF)
1576
    return 1;
1577
 
1578
  fmt = GET_RTX_FORMAT (GET_CODE (op));
1579
  for (i = GET_RTX_LENGTH (GET_CODE (op)) - 1; i >= 0; i--)
1580
    {
1581
      if (fmt[i] == 'E')
1582
        {
1583
          register int j;
1584
 
1585
          for (j = XVECLEN (op, i) - 1; j >= 0; j--)
1586
            if (symbolic_reference_mentioned_p (XVECEXP (op, i, j)))
1587
              return 1;
1588
        }
1589
 
1590
      else if (fmt[i] == 'e' && symbolic_reference_mentioned_p (XEXP (op, i)))
1591
        return 1;
1592
    }
1593
 
1594
  return 0;
1595
}
1596
 
1597
/* Decide whether we can make a sibling call to a function.  DECL is the
1598
   declaration of the function being targeted by the call and EXP is the
1599
   CALL_EXPR representing the call.  */
1600
 
1601
static bool
1602
bfin_function_ok_for_sibcall (tree decl ATTRIBUTE_UNUSED,
1603
                              tree exp ATTRIBUTE_UNUSED)
1604
{
1605
  e_funkind fkind = funkind (TREE_TYPE (current_function_decl));
1606
  return fkind == SUBROUTINE;
1607
}
1608
 
1609
/* Emit RTL insns to initialize the variable parts of a trampoline at
1610
   TRAMP. FNADDR is an RTX for the address of the function's pure
1611
   code.  CXT is an RTX for the static chain value for the function.  */
1612
 
1613
void
1614
initialize_trampoline (tramp, fnaddr, cxt)
1615
     rtx tramp, fnaddr, cxt;
1616
{
1617
  rtx t1 = copy_to_reg (fnaddr);
1618
  rtx t2 = copy_to_reg (cxt);
1619
  rtx addr;
1620
  int i = 0;
1621
 
1622
  if (TARGET_FDPIC)
1623
    {
1624
      rtx a = memory_address (Pmode, plus_constant (tramp, 8));
1625
      addr = memory_address (Pmode, tramp);
1626
      emit_move_insn (gen_rtx_MEM (SImode, addr), a);
1627
      i = 8;
1628
    }
1629
 
1630
  addr = memory_address (Pmode, plus_constant (tramp, i + 2));
1631
  emit_move_insn (gen_rtx_MEM (HImode, addr), gen_lowpart (HImode, t1));
1632
  emit_insn (gen_ashrsi3 (t1, t1, GEN_INT (16)));
1633
  addr = memory_address (Pmode, plus_constant (tramp, i + 6));
1634
  emit_move_insn (gen_rtx_MEM (HImode, addr), gen_lowpart (HImode, t1));
1635
 
1636
  addr = memory_address (Pmode, plus_constant (tramp, i + 10));
1637
  emit_move_insn (gen_rtx_MEM (HImode, addr), gen_lowpart (HImode, t2));
1638
  emit_insn (gen_ashrsi3 (t2, t2, GEN_INT (16)));
1639
  addr = memory_address (Pmode, plus_constant (tramp, i + 14));
1640
  emit_move_insn (gen_rtx_MEM (HImode, addr), gen_lowpart (HImode, t2));
1641
}
1642
 
1643
/* Emit insns to move operands[1] into operands[0].  */
1644
 
1645
void
1646
emit_pic_move (rtx *operands, enum machine_mode mode ATTRIBUTE_UNUSED)
1647
{
1648
  rtx temp = reload_in_progress ? operands[0] : gen_reg_rtx (Pmode);
1649
 
1650
  gcc_assert (!TARGET_FDPIC || !(reload_in_progress || reload_completed));
1651
  if (GET_CODE (operands[0]) == MEM && SYMBOLIC_CONST (operands[1]))
1652
    operands[1] = force_reg (SImode, operands[1]);
1653
  else
1654
    operands[1] = legitimize_pic_address (operands[1], temp,
1655
                                          TARGET_FDPIC ? OUR_FDPIC_REG
1656
                                          : pic_offset_table_rtx);
1657
}
1658
 
1659
/* Expand a move operation in mode MODE.  The operands are in OPERANDS.  */
1660
 
1661
void
1662
expand_move (rtx *operands, enum machine_mode mode)
1663
{
1664
  rtx op = operands[1];
1665
  if ((TARGET_ID_SHARED_LIBRARY || TARGET_FDPIC)
1666
      && SYMBOLIC_CONST (op))
1667
    emit_pic_move (operands, mode);
1668
  /* Don't generate memory->memory or constant->memory moves, go through a
1669
     register */
1670
  else if ((reload_in_progress | reload_completed) == 0
1671
           && GET_CODE (operands[0]) == MEM
1672
           && GET_CODE (operands[1]) != REG)
1673
    operands[1] = force_reg (mode, operands[1]);
1674
}
1675
 
1676
/* Split one or more DImode RTL references into pairs of SImode
1677
   references.  The RTL can be REG, offsettable MEM, integer constant, or
1678
   CONST_DOUBLE.  "operands" is a pointer to an array of DImode RTL to
1679
   split and "num" is its length.  lo_half and hi_half are output arrays
1680
   that parallel "operands".  */
1681
 
1682
void
1683
split_di (rtx operands[], int num, rtx lo_half[], rtx hi_half[])
1684
{
1685
  while (num--)
1686
    {
1687
      rtx op = operands[num];
1688
 
1689
      /* simplify_subreg refuse to split volatile memory addresses,
1690
         but we still have to handle it.  */
1691
      if (GET_CODE (op) == MEM)
1692
        {
1693
          lo_half[num] = adjust_address (op, SImode, 0);
1694
          hi_half[num] = adjust_address (op, SImode, 4);
1695
        }
1696
      else
1697
        {
1698
          lo_half[num] = simplify_gen_subreg (SImode, op,
1699
                                              GET_MODE (op) == VOIDmode
1700
                                              ? DImode : GET_MODE (op), 0);
1701
          hi_half[num] = simplify_gen_subreg (SImode, op,
1702
                                              GET_MODE (op) == VOIDmode
1703
                                              ? DImode : GET_MODE (op), 4);
1704
        }
1705
    }
1706
}
1707
 
1708
bool
1709
bfin_longcall_p (rtx op, int call_cookie)
1710
{
1711
  gcc_assert (GET_CODE (op) == SYMBOL_REF);
1712
  if (call_cookie & CALL_SHORT)
1713
    return 0;
1714
  if (call_cookie & CALL_LONG)
1715
    return 1;
1716
  if (TARGET_LONG_CALLS)
1717
    return 1;
1718
  return 0;
1719
}
1720
 
1721
/* Expand a call instruction.  FNADDR is the call target, RETVAL the return value.
   COOKIE is a CONST_INT holding the call_cookie prepared init_cumulative_args.
   SIBCALL is nonzero if this is a sibling call.

   Builds a PARALLEL containing the CALL (or SET of RETVAL), a USE of the
   FDPIC register when targeting FDPIC, a USE of the cookie, and a RETURN
   for sibcalls, then emits it as a call insn.  */

void
bfin_expand_call (rtx retval, rtx fnaddr, rtx callarg1, rtx cookie, int sibcall)
{
  rtx use = NULL, call;
  rtx callee = XEXP (fnaddr, 0);
  /* PARALLEL element count: call + cookie USE, plus RETURN for sibcalls,
     plus the FDPIC-register USE added below for TARGET_FDPIC.  */
  int nelts = 2 + !!sibcall;
  rtx pat;
  rtx picreg = get_hard_reg_initial_val (SImode, FDPIC_REGNO);
  int n;

  /* In an untyped call, we can get NULL for operand 2.  */
  if (cookie == NULL_RTX)
    cookie = const0_rtx;

  /* Static functions and indirect calls don't need the pic register.  */
  if (!TARGET_FDPIC && flag_pic
      && GET_CODE (callee) == SYMBOL_REF
      && !SYMBOL_REF_LOCAL_P (callee))
    use_reg (&use, pic_offset_table_rtx);

  if (TARGET_FDPIC)
    {
      /* Indirect or long calls must go through the function descriptor:
         load the code address and the callee's FDPIC value from it.  */
      if (GET_CODE (callee) != SYMBOL_REF
          || bfin_longcall_p (callee, INTVAL (cookie)))
        {
          rtx addr = callee;
          if (! address_operand (addr, Pmode))
            addr = force_reg (Pmode, addr);

          fnaddr = gen_reg_rtx (SImode);
          emit_insn (gen_load_funcdescsi (fnaddr, addr));
          fnaddr = gen_rtx_MEM (Pmode, fnaddr);

          picreg = gen_reg_rtx (SImode);
          emit_insn (gen_load_funcdescsi (picreg,
                                          plus_constant (addr, 4)));
        }

      nelts++;
    }
  else if ((!register_no_elim_operand (callee, Pmode)
            && GET_CODE (callee) != SYMBOL_REF)
           || (GET_CODE (callee) == SYMBOL_REF
               && (flag_pic
                   || bfin_longcall_p (callee, INTVAL (cookie)))))
    {
      /* Non-FDPIC: force the target into a register for indirect, PIC,
         or long calls.  */
      callee = copy_to_mode_reg (Pmode, callee);
      fnaddr = gen_rtx_MEM (Pmode, callee);
    }
  call = gen_rtx_CALL (VOIDmode, fnaddr, callarg1);

  if (retval)
    call = gen_rtx_SET (VOIDmode, retval, call);

  /* Assemble the PARALLEL in the order the patterns expect.  */
  pat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nelts));
  n = 0;
  XVECEXP (pat, 0, n++) = call;
  if (TARGET_FDPIC)
    XVECEXP (pat, 0, n++) = gen_rtx_USE (VOIDmode, picreg);
  XVECEXP (pat, 0, n++) = gen_rtx_USE (VOIDmode, cookie);
  if (sibcall)
    XVECEXP (pat, 0, n++) = gen_rtx_RETURN (VOIDmode);
  call = emit_call_insn (pat);
  if (use)
    CALL_INSN_FUNCTION_USAGE (call) = use;
}
1791
 
1792
/* Return 1 if hard register REGNO can hold a value of machine-mode MODE.  */
1793
 
1794
int
1795
hard_regno_mode_ok (int regno, enum machine_mode mode)
1796
{
1797
  /* Allow only dregs to store value of mode HI or QI */
1798
  enum reg_class class = REGNO_REG_CLASS (regno);
1799
 
1800
  if (mode == CCmode)
1801
    return 0;
1802
 
1803
  if (mode == V2HImode)
1804
    return D_REGNO_P (regno);
1805
  if (class == CCREGS)
1806
    return mode == BImode;
1807
  if (mode == PDImode || mode == V2PDImode)
1808
    return regno == REG_A0 || regno == REG_A1;
1809
  if (mode == SImode
1810
      && TEST_HARD_REG_BIT (reg_class_contents[PROLOGUE_REGS], regno))
1811
    return 1;
1812
 
1813
  return TEST_HARD_REG_BIT (reg_class_contents[MOST_REGS], regno);
1814
}
1815
 
1816
/* Implements target hook vector_mode_supported_p.  */

static bool
bfin_vector_mode_supported_p (enum machine_mode mode)
{
  /* V2HImode is the only vector mode the Blackfin DSP instructions
     operate on.  */
  return mode == V2HImode;
}
1823
 
1824
/* Return the cost of moving data from a register in class CLASS1 to
1825
   one in class CLASS2.  A cost of 2 is the default.  */
1826
 
1827
int
1828
bfin_register_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED,
1829
                         enum reg_class class1, enum reg_class class2)
1830
{
1831
  /* These need secondary reloads, so they're more expensive.  */
1832
  if ((class1 == CCREGS && class2 != DREGS)
1833
      || (class1 != DREGS && class2 == CCREGS))
1834
    return 4;
1835
 
1836
  /* If optimizing for size, always prefer reg-reg over reg-memory moves.  */
1837
  if (optimize_size)
1838
    return 2;
1839
 
1840
  /* There are some stalls involved when moving from a DREG to a different
1841
     class reg, and using the value in one of the following instructions.
1842
     Attempt to model this by slightly discouraging such moves.  */
1843
  if (class1 == DREGS && class2 != DREGS)
1844
    return 2 * 2;
1845
 
1846
  return 2;
1847
}
1848
 
1849
/* Return the cost of moving data of mode M between a
1850
   register and memory.  A value of 2 is the default; this cost is
1851
   relative to those in `REGISTER_MOVE_COST'.
1852
 
1853
   ??? In theory L1 memory has single-cycle latency.  We should add a switch
1854
   that tells the compiler whether we expect to use only L1 memory for the
1855
   program; it'll make the costs more accurate.  */
1856
 
1857
int
1858
bfin_memory_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED,
1859
                       enum reg_class class,
1860
                       int in ATTRIBUTE_UNUSED)
1861
{
1862
  /* Make memory accesses slightly more expensive than any register-register
1863
     move.  Also, penalize non-DP registers, since they need secondary
1864
     reloads to load and store.  */
1865
  if (! reg_class_subset_p (class, DPREGS))
1866
    return 10;
1867
 
1868
  return 8;
1869
}
1870
 
1871
/* Inform reload about cases where moving X with a mode MODE to a register in
   CLASS requires an extra scratch register.  Return the class needed for the
   scratch register.  IN_P is unused by this port.  Implements the
   TARGET_SECONDARY_RELOAD hook.  */

static enum reg_class
bfin_secondary_reload (bool in_p, rtx x, enum reg_class class,
		     enum machine_mode mode, secondary_reload_info *sri)
{
  /* If we have HImode or QImode, we can only use DREGS as secondary registers;
     in most other cases we can also use PREGS.  */
  enum reg_class default_class = GET_MODE_SIZE (mode) >= 4 ? DPREGS : DREGS;
  enum reg_class x_class = NO_REGS;
  enum rtx_code code = GET_CODE (x);

  /* Look through a SUBREG to the underlying register or value.  */
  if (code == SUBREG)
    x = SUBREG_REG (x), code = GET_CODE (x);
  if (REG_P (x))
    {
      int regno = REGNO (x);
      /* A pseudo may already have been assigned a hard register.  */
      if (regno >= FIRST_PSEUDO_REGISTER)
        regno = reg_renumber[regno];

      /* An unallocated pseudo lives in memory; treat it like a MEM.  */
      if (regno == -1)
        code = MEM;
      else
        x_class = REGNO_REG_CLASS (regno);
    }

  /* We can be asked to reload (plus (FP) (large_constant)) into a DREG.
     This happens as a side effect of register elimination, and we need
     a scratch register to do it.  */
  if (fp_plus_const_operand (x, mode))
    {
      rtx op2 = XEXP (x, 1);
      int large_constant_p = ! CONST_7BIT_IMM_P (INTVAL (op2));

      if (class == PREGS || class == PREGS_CLOBBERED)
        return NO_REGS;
      /* If destination is a DREG, we can do this without a scratch register
         if the constant is valid for an add instruction.  */
      if ((class == DREGS || class == DPREGS)
          && ! large_constant_p)
        return NO_REGS;
      /* Reloading to anything other than a DREG?  Use a PREG scratch
         register.  */
      sri->icode = CODE_FOR_reload_insi;
      return NO_REGS;
    }

  /* Data can usually be moved freely between registers of most classes.
     AREGS are an exception; they can only move to or from another register
     in AREGS or one in DREGS.  They can also be assigned the constant 0.  */
  if (x_class == AREGS)
    return class == DREGS || class == AREGS ? NO_REGS : DREGS;

  if (class == AREGS)
    {
      if (x != const0_rtx && x_class != DREGS)
        return DREGS;
      else
        return NO_REGS;
    }

  /* CCREGS can only be moved from/to DREGS.  */
  if (class == CCREGS && x_class != DREGS)
    return DREGS;
  if (x_class == CCREGS && class != DREGS)
    return DREGS;

  /* All registers other than AREGS can load arbitrary constants.  The only
     case that remains is MEM.  */
  if (code == MEM)
    if (! reg_class_subset_p (class, default_class))
      return default_class;
  return NO_REGS;
}
1947
 
1948
/* Implement TARGET_HANDLE_OPTION.  */
1949
 
1950
static bool
1951
bfin_handle_option (size_t code, const char *arg, int value)
1952
{
1953
  switch (code)
1954
    {
1955
    case OPT_mshared_library_id_:
1956
      if (value > MAX_LIBRARY_ID)
1957
        error ("-mshared-library-id=%s is not between 0 and %d",
1958
               arg, MAX_LIBRARY_ID);
1959
      bfin_lib_id_given = 1;
1960
      return true;
1961
 
1962
    default:
1963
      return true;
1964
    }
1965
}
1966
 
1967
static struct machine_function *
1968
bfin_init_machine_status (void)
1969
{
1970
  struct machine_function *f;
1971
 
1972
  f = ggc_alloc_cleared (sizeof (struct machine_function));
1973
 
1974
  return f;
1975
}
1976
 
1977
/* Implement the macro OVERRIDE_OPTIONS.  Validate the combination of
   command-line options and derive dependent flags.  Note the ordering:
   flag_pic may be turned on for -mid-shared-library before the later
   check that turns it off for unsupported configurations.  */

void
override_options (void)
{
  if (TARGET_OMIT_LEAF_FRAME_POINTER)
    flag_omit_frame_pointer = 1;

  /* Library identification */
  if (bfin_lib_id_given && ! TARGET_ID_SHARED_LIBRARY)
    error ("-mshared-library-id= specified without -mid-shared-library");

  /* ID shared libraries require position-independent code.  */
  if (TARGET_ID_SHARED_LIBRARY && flag_pic == 0)
    flag_pic = 1;

  if (TARGET_ID_SHARED_LIBRARY && TARGET_FDPIC)
      error ("ID shared libraries and FD-PIC mode can't be used together.");

  /* There is no single unaligned SI op for PIC code.  Sometimes we
     need to use ".4byte" and sometimes we need to use ".picptr".
     See bfin_assemble_integer for details.  */
  if (TARGET_FDPIC)
    targetm.asm_out.unaligned_op.si = 0;

  /* Silently turn off flag_pic if not doing FDPIC or ID shared libraries,
     since we don't support it and it'll just break.  */
  if (flag_pic && !TARGET_FDPIC && !TARGET_ID_SHARED_LIBRARY)
    flag_pic = 0;

  /* Insn scheduling is disabled unconditionally for this port.  */
  flag_schedule_insns = 0;

  init_machine_status = bfin_init_machine_status;
}
2010
 
2011
/* Return the destination address of BRANCH.
2012
   We need to use this instead of get_attr_length, because the
2013
   cbranch_with_nops pattern conservatively sets its length to 6, and
2014
   we still prefer to use shorter sequences.  */
2015
 
2016
static int
2017
branch_dest (rtx branch)
2018
{
2019
  rtx dest;
2020
  int dest_uid;
2021
  rtx pat = PATTERN (branch);
2022
  if (GET_CODE (pat) == PARALLEL)
2023
    pat = XVECEXP (pat, 0, 0);
2024
  dest = SET_SRC (pat);
2025
  if (GET_CODE (dest) == IF_THEN_ELSE)
2026
    dest = XEXP (dest, 1);
2027
  dest = XEXP (dest, 0);
2028
  dest_uid = INSN_UID (dest);
2029
  return INSN_ADDRESSES (dest_uid);
2030
}
2031
 
2032
/* Return nonzero if INSN is annotated with a REG_BR_PROB note that indicates
2033
   it's a branch that's predicted taken.  */
2034
 
2035
static int
2036
cbranch_predicted_taken_p (rtx insn)
2037
{
2038
  rtx x = find_reg_note (insn, REG_BR_PROB, 0);
2039
 
2040
  if (x)
2041
    {
2042
      int pred_val = INTVAL (XEXP (x, 0));
2043
 
2044
      return pred_val >= REG_BR_PROB_BASE / 2;
2045
    }
2046
 
2047
  return 0;
2048
}
2049
 
2050
/* Templates for use by asm_conditional_branch.  The first index is
   (bp << 1) | polarity, where bp is the predicted-taken bit and the
   polarity is BRF for an EQ comparison, BRT otherwise; the second
   index is the length selector computed there: 0 = short conditional
   jump, 1 = conditional skip around a jump.s, 2 = conditional skip
   around a jump.l.  */

static const char *ccbranch_templates[][3] = {
  { "if !cc jump %3;",  "if cc jump 4 (bp); jump.s %3;",  "if cc jump 6 (bp); jump.l %3;" },
  { "if cc jump %3;",   "if !cc jump 4 (bp); jump.s %3;", "if !cc jump 6 (bp); jump.l %3;" },
  { "if !cc jump %3 (bp);",  "if cc jump 4; jump.s %3;",  "if cc jump 6; jump.l %3;" },
  { "if cc jump %3 (bp);",  "if !cc jump 4; jump.s %3;",  "if !cc jump 6; jump.l %3;" },
};
2058
 
2059
/* Output INSN, which is a conditional branch instruction with operands
   OPERANDS.

   We deal with the various forms of conditional branches that can be generated
   by bfin_reorg to prevent the hardware from doing speculative loads, by
   - emitting a sufficient number of nops, if N_NOPS is nonzero, or
   - always emitting the branch as predicted taken, if PREDICT_TAKEN is true.
   Either of these is only necessary if the branch is short, otherwise the
   template we use ends in an unconditional jump which flushes the pipeline
   anyway.  */

void
asm_conditional_branch (rtx insn, rtx *operands, int n_nops, int predict_taken)
{
  /* Byte offset from this insn to the branch target.  */
  int offset = branch_dest (insn) - INSN_ADDRESSES (INSN_UID (insn));
  /* Note : offset for instructions like if cc jmp; jump.[sl] offset
            is to be taken from start of if cc rather than jump.
            Range for jump.s is (-4094, 4096) instead of (-4096, 4094)
  */
  /* Pick the template column: 0 = short conditional jump, 1 = skip
     around jump.s, 2 = skip around jump.l (see ccbranch_templates).  */
  int len = (offset >= -1024 && offset <= 1022 ? 0
             : offset >= -4094 && offset <= 4096 ? 1
             : 2);
  /* Forcing predicted-taken only applies to the short form; otherwise
     fall back to the insn's own REG_BR_PROB annotation.  */
  int bp = predict_taken && len == 0 ? 1 : cbranch_predicted_taken_p (insn);
  /* Row: predicted-taken bit plus branch polarity (BRF for EQ).  */
  int idx = (bp << 1) | (GET_CODE (operands[0]) == EQ ? BRF : BRT);
  output_asm_insn (ccbranch_templates[idx][len], operands);
  /* Callers never request both nop padding and predicted-taken.  */
  gcc_assert (n_nops == 0 || !bp);
  /* Nops are only needed after the short form; the longer templates end
     in an unconditional jump which flushes the pipeline anyway.  */
  if (len == 0)
    while (n_nops-- > 0)
      output_asm_insn ("nop;", NULL);
}
2089
 
2090
/* Emit rtl for a comparison operation CMP in mode MODE.  Operands have been
   stored in bfin_compare_op0 and bfin_compare_op1 already.  Returns a
   BImode comparison of the CC register (or the BImode operand itself)
   against zero, suitable for use in a conditional branch.  */

rtx
bfin_gen_compare (rtx cmp, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  enum rtx_code code1, code2;
  rtx op0 = bfin_compare_op0, op1 = bfin_compare_op1;
  rtx tem = bfin_cc_rtx;
  enum rtx_code code = GET_CODE (cmp);

  /* If we have a BImode input, then we already have a compare result, and
     do not need to emit another comparison.  */
  if (GET_MODE (op0) == BImode)
    {
      gcc_assert ((code == NE || code == EQ) && op1 == const0_rtx);
      tem = op0, code2 = code;
    }
  else
    {
      switch (code) {
        /* bfin has these conditions */
      case EQ:
      case LT:
      case LE:
      case LEU:
      case LTU:
        /* Directly supported: compute the condition into CC and test
           CC against zero with NE.  */
        code1 = code;
        code2 = NE;
        break;
      default:
        /* Unsupported condition: compute the reversed condition into
           CC and invert the final test (EQ against zero).  */
        code1 = reverse_condition (code);
        code2 = EQ;
        break;
      }
      emit_insn (gen_rtx_SET (BImode, tem,
                              gen_rtx_fmt_ee (code1, BImode, op0, op1)));
    }

  return gen_rtx_fmt_ee (code2, BImode, tem, CONST0_RTX (BImode));
}
2131
 
2132
/* Return nonzero iff C has exactly one bit set if it is interpreted
2133
   as a 32 bit constant.  */
2134
 
2135
int
2136
log2constp (unsigned HOST_WIDE_INT c)
2137
{
2138
  c &= 0xFFFFFFFF;
2139
  return c != 0 && (c & (c-1)) == 0;
2140
}
2141
 
2142
/* Returns the number of consecutive least significant zeros in the binary
2143
   representation of *V.
2144
   We modify *V to contain the original value arithmetically shifted right by
2145
   the number of zeroes.  */
2146
 
2147
static int
2148
shiftr_zero (HOST_WIDE_INT *v)
2149
{
2150
  unsigned HOST_WIDE_INT tmp = *v;
2151
  unsigned HOST_WIDE_INT sgn;
2152
  int n = 0;
2153
 
2154
  if (tmp == 0)
2155
    return 0;
2156
 
2157
  sgn = tmp & ((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1));
2158
  while ((tmp & 0x1) == 0 && n <= 32)
2159
    {
2160
      tmp = (tmp >> 1) | sgn;
2161
      n++;
2162
    }
2163
  *v = tmp;
2164
  return n;
2165
}
2166
 
2167
/* After reload, split the load of an immediate constant.  OPERANDS are the
2168
   operands of the movsi_insn pattern which we are splitting.  We return
2169
   nonzero if we emitted a sequence to load the constant, zero if we emitted
2170
   nothing because we want to use the splitter's default sequence.  */
2171
 
2172
int
2173
split_load_immediate (rtx operands[])
2174
{
2175
  HOST_WIDE_INT val = INTVAL (operands[1]);
2176
  HOST_WIDE_INT tmp;
2177
  HOST_WIDE_INT shifted = val;
2178
  HOST_WIDE_INT shifted_compl = ~val;
2179
  int num_zero = shiftr_zero (&shifted);
2180
  int num_compl_zero = shiftr_zero (&shifted_compl);
2181
  unsigned int regno = REGNO (operands[0]);
2182
  enum reg_class class1 = REGNO_REG_CLASS (regno);
2183
 
2184
  /* This case takes care of single-bit set/clear constants, which we could
2185
     also implement with BITSET/BITCLR.  */
2186
  if (num_zero
2187
      && shifted >= -32768 && shifted < 65536
2188
      && (D_REGNO_P (regno)
2189
          || (regno >= REG_P0 && regno <= REG_P7 && num_zero <= 2)))
2190
    {
2191
      emit_insn (gen_movsi (operands[0], GEN_INT (shifted)));
2192
      emit_insn (gen_ashlsi3 (operands[0], operands[0], GEN_INT (num_zero)));
2193
      return 1;
2194
    }
2195
 
2196
  tmp = val & 0xFFFF;
2197
  tmp |= -(tmp & 0x8000);
2198
 
2199
  /* If high word has one bit set or clear, try to use a bit operation.  */
2200
  if (D_REGNO_P (regno))
2201
    {
2202
      if (log2constp (val & 0xFFFF0000))
2203
        {
2204
          emit_insn (gen_movsi (operands[0], GEN_INT (val & 0xFFFF)));
2205
          emit_insn (gen_iorsi3 (operands[0], operands[0], GEN_INT (val & 0xFFFF0000)));
2206
          return 1;
2207
        }
2208
      else if (log2constp (val | 0xFFFF) && (val & 0x8000) != 0)
2209
        {
2210
          emit_insn (gen_movsi (operands[0], GEN_INT (tmp)));
2211
          emit_insn (gen_andsi3 (operands[0], operands[0], GEN_INT (val | 0xFFFF)));
2212
        }
2213
    }
2214
 
2215
  if (D_REGNO_P (regno))
2216
    {
2217
      if (CONST_7BIT_IMM_P (tmp))
2218
        {
2219
          emit_insn (gen_movsi (operands[0], GEN_INT (tmp)));
2220
          emit_insn (gen_movstricthi_high (operands[0], GEN_INT (val & -65536)));
2221
          return 1;
2222
        }
2223
 
2224
      if ((val & 0xFFFF0000) == 0)
2225
        {
2226
          emit_insn (gen_movsi (operands[0], const0_rtx));
2227
          emit_insn (gen_movsi_low (operands[0], operands[0], operands[1]));
2228
          return 1;
2229
        }
2230
 
2231
      if ((val & 0xFFFF0000) == 0xFFFF0000)
2232
        {
2233
          emit_insn (gen_movsi (operands[0], constm1_rtx));
2234
          emit_insn (gen_movsi_low (operands[0], operands[0], operands[1]));
2235
          return 1;
2236
        }
2237
    }
2238
 
2239
  /* Need DREGs for the remaining case.  */
2240
  if (regno > REG_R7)
2241
    return 0;
2242
 
2243
  if (optimize_size
2244
      && num_compl_zero && CONST_7BIT_IMM_P (shifted_compl))
2245
    {
2246
      /* If optimizing for size, generate a sequence that has more instructions
2247
         but is shorter.  */
2248
      emit_insn (gen_movsi (operands[0], GEN_INT (shifted_compl)));
2249
      emit_insn (gen_ashlsi3 (operands[0], operands[0],
2250
                              GEN_INT (num_compl_zero)));
2251
      emit_insn (gen_one_cmplsi2 (operands[0], operands[0]));
2252
      return 1;
2253
    }
2254
  return 0;
2255
}
2256
 
2257
/* Return true if the legitimate memory address for a memory operand of mode
2258
   MODE.  Return false if not.  */
2259
 
2260
static bool
2261
bfin_valid_add (enum machine_mode mode, HOST_WIDE_INT value)
2262
{
2263
  unsigned HOST_WIDE_INT v = value > 0 ? value : -value;
2264
  int sz = GET_MODE_SIZE (mode);
2265
  int shift = sz == 1 ? 0 : sz == 2 ? 1 : 2;
2266
  /* The usual offsettable_memref machinery doesn't work so well for this
2267
     port, so we deal with the problem here.  */
2268
  unsigned HOST_WIDE_INT mask = sz == 8 ? 0x7ffe : 0x7fff;
2269
  return (v & ~(mask << shift)) == 0;
2270
}
2271
 
2272
static bool
2273
bfin_valid_reg_p (unsigned int regno, int strict, enum machine_mode mode,
2274
                  enum rtx_code outer_code)
2275
{
2276
  if (strict)
2277
    return REGNO_OK_FOR_BASE_STRICT_P (regno, mode, outer_code, SCRATCH);
2278
  else
2279
    return REGNO_OK_FOR_BASE_NONSTRICT_P (regno, mode, outer_code, SCRATCH);
2280
}
2281
 
2282
/* Return true if X is a legitimate memory address for an access of mode
   MODE.  STRICT selects strict checking of base registers.  */

bool
bfin_legitimate_address_p (enum machine_mode mode, rtx x, int strict)
{
  switch (GET_CODE (x)) {
  case REG:
    /* A plain base register.  */
    if (bfin_valid_reg_p (REGNO (x), strict, mode, MEM))
      return true;
    break;
  case PLUS:
    /* Base register plus either an UNSPEC (SImode only) or a constant
       displacement that fits the mode's offset range.  */
    if (REG_P (XEXP (x, 0))
        && bfin_valid_reg_p (REGNO (XEXP (x, 0)), strict, mode, PLUS)
        && ((GET_CODE (XEXP (x, 1)) == UNSPEC && mode == SImode)
            || (GET_CODE (XEXP (x, 1)) == CONST_INT
                && bfin_valid_add (mode, INTVAL (XEXP (x, 1))))))
      return true;
    break;
  case POST_INC:
  case POST_DEC:
    if (LEGITIMATE_MODE_FOR_AUTOINC_P (mode)
        && REG_P (XEXP (x, 0))
        && bfin_valid_reg_p (REGNO (XEXP (x, 0)), strict, mode, POST_INC))
      return true;
  /* NOTE(review): no break above -- POST_INC/POST_DEC falls through
     into the PRE_DEC checks.  Harmless as written, since the PRE_DEC
     test requires the stack pointer, but confirm it is intentional.  */
  case PRE_DEC:
    /* Pre-decrement is only allowed on the stack pointer.  */
    if (LEGITIMATE_MODE_FOR_AUTOINC_P (mode)
        && XEXP (x, 0) == stack_pointer_rtx
        && REG_P (XEXP (x, 0))
        && bfin_valid_reg_p (REGNO (XEXP (x, 0)), strict, mode, PRE_DEC))
      return true;
    break;
  default:
    break;
  }
  return false;
}
2316
 
2317
/* Compute the cost of rtx X appearing inside OUTER_CODE, storing it in
   *TOTAL.  CODE is GET_CODE (X).  Returns true when *TOTAL is final,
   false to let the caller recurse into the operands.  */

static bool
bfin_rtx_costs (rtx x, int code, int outer_code, int *total)
{
  int cost2 = COSTS_N_INSNS (1);

  switch (code)
    {
    case CONST_INT:
      /* Constants that fit the immediate field of the enclosing
         operation are free; anything else costs one insn to load.  */
      if (outer_code == SET || outer_code == PLUS)
        *total = CONST_7BIT_IMM_P (INTVAL (x)) ? 0 : cost2;
      else if (outer_code == AND)
        *total = log2constp (~INTVAL (x)) ? 0 : cost2;
      else if (outer_code == LE || outer_code == LT || outer_code == EQ)
        *total = (INTVAL (x) >= -4 && INTVAL (x) <= 3) ? 0 : cost2;
      else if (outer_code == LEU || outer_code == LTU)
        *total = (INTVAL (x) >= 0 && INTVAL (x) <= 7) ? 0 : cost2;
      else if (outer_code == MULT)
        *total = (INTVAL (x) == 2 || INTVAL (x) == 4) ? 0 : cost2;
      else if (outer_code == ASHIFT && (INTVAL (x) == 1 || INTVAL (x) == 2))
        *total = 0;
      else if (outer_code == ASHIFT || outer_code == ASHIFTRT
               || outer_code == LSHIFTRT)
        *total = (INTVAL (x) >= 0 && INTVAL (x) <= 31) ? 0 : cost2;
      else if (outer_code == IOR || outer_code == XOR)
        *total = (INTVAL (x) & (INTVAL (x) - 1)) == 0 ? 0 : cost2;
      else
        *total = cost2;
      return true;

    case CONST:
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_DOUBLE:
      /* Symbolic and double constants take a two-insn load.  */
      *total = COSTS_N_INSNS (2);
      return true;

    case PLUS:
      if (GET_MODE (x) == Pmode)
        {
          /* reg + reg * {2,4} can be done as a single scaled-index
             address computation.  */
          if (GET_CODE (XEXP (x, 0)) == MULT
              && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
            {
              HOST_WIDE_INT val = INTVAL (XEXP (XEXP (x, 0), 1));
              if (val == 2 || val == 4)
                {
                  *total = cost2;
                  *total += rtx_cost (XEXP (XEXP (x, 0), 0), outer_code);
                  *total += rtx_cost (XEXP (x, 1), outer_code);
                  return true;
                }
            }
        }

      /* fall through */

    case MINUS:
    case ASHIFT:
    case ASHIFTRT:
    case LSHIFTRT:
      /* DImode arithmetic and shifts are expanded to multiple insns.  */
      if (GET_MODE (x) == DImode)
        *total = 6 * cost2;
      return false;

    case AND:
    case IOR:
    case XOR:
      /* DImode logical ops are done as two SImode ops.  */
      if (GET_MODE (x) == DImode)
        *total = 2 * cost2;
      return false;

    case MULT:
      if (GET_MODE_SIZE (GET_MODE (x)) <= UNITS_PER_WORD)
        *total = COSTS_N_INSNS (3);
      return false;

    case UDIV:
    case UMOD:
      /* No hardware divide; charge a long libcall-like sequence.  */
      *total = COSTS_N_INSNS (32);
      return true;

    case VEC_CONCAT:
    case VEC_SELECT:
      if (outer_code == SET)
        *total = cost2;
      return true;

    default:
      return false;
    }
}
2407
 
2408
/* Output an internal (compiler-generated) label to STREAM, built from
   PREFIX and NUM in the form "<LOCAL_LABEL_PREFIX><PREFIX>$<NUM>:".  */

static void
bfin_internal_label (FILE *stream, const char *prefix, unsigned long num)
{
  fprintf (stream, "%s%s$%ld:\n", LOCAL_LABEL_PREFIX, prefix, num);
}
2413
 
2414
/* Used for communication between {push,pop}_multiple_operation (which
   we use not only as a predicate) and the corresponding output functions.
   They record the lowest-numbered P and D register covered by the
   multi-register push/pop being processed (8 / 6 mean "none").  */
static int first_preg_to_save, first_dreg_to_save;
2417
 
2418
/* Predicate: return 1 if OP is a PARALLEL describing a valid
   multi-register push ([--sp] = ( r7:N, p5:M )): consecutive stores of
   a run of D registers ending at R7, optionally followed by a run of
   P registers ending at P5, at decreasing offsets from SP.  As a side
   effect, records the lowest saved registers in first_dreg_to_save and
   first_preg_to_save for the output functions.  */

int
push_multiple_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  int lastdreg = 8, lastpreg = 6;
  int i, group;

  first_preg_to_save = lastpreg;
  first_dreg_to_save = lastdreg;
  /* Element 0 is the SP update and the last element the clobber/use;
     walk the register stores in between.  GROUP tracks whether we are
     in the D-register run (1) or the P-register run (2).  */
  for (i = 1, group = 0; i < XVECLEN (op, 0) - 1; i++)
    {
      rtx t = XVECEXP (op, 0, i);
      rtx src, dest;
      int regno;

      if (GET_CODE (t) != SET)
        return 0;

      src = SET_SRC (t);
      dest = SET_DEST (t);
      if (GET_CODE (dest) != MEM || ! REG_P (src))
        return 0;
      dest = XEXP (dest, 0);
      /* Each store must target SP plus the next lower word offset.  */
      if (GET_CODE (dest) != PLUS
          || ! REG_P (XEXP (dest, 0))
          || REGNO (XEXP (dest, 0)) != REG_SP
          || GET_CODE (XEXP (dest, 1)) != CONST_INT
          || INTVAL (XEXP (dest, 1)) != -i * 4)
        return 0;

      regno = REGNO (src);
      if (group == 0)
        {
          /* First register store decides which run we start in.  */
          if (D_REGNO_P (regno))
            {
              group = 1;
              first_dreg_to_save = lastdreg = regno - REG_R0;
            }
          else if (regno >= REG_P0 && regno <= REG_P7)
            {
              group = 2;
              first_preg_to_save = lastpreg = regno - REG_P0;
            }
          else
            return 0;

          continue;
        }

      if (group == 1)
        {
          /* Either switch to the P-register run, or continue the
             consecutive D-register run.  */
          if (regno >= REG_P0 && regno <= REG_P7)
            {
              group = 2;
              first_preg_to_save = lastpreg = regno - REG_P0;
            }
          else if (regno != REG_R0 + lastdreg + 1)
            return 0;
          else
            lastdreg++;
        }
      else if (group == 2)
        {
          /* P registers must stay consecutive.  */
          if (regno != REG_P0 + lastpreg + 1)
            return 0;
          lastpreg++;
        }
    }
  return 1;
}
2487
 
2488
/* Predicate: return 1 if OP is a PARALLEL describing a valid
   multi-register pop (( r7:N, p5:M ) = [sp++]): consecutive loads of
   P registers counting down from P5, optionally followed by D
   registers counting down from R7, at increasing offsets from SP.
   As a side effect, records the lowest restored registers in
   first_dreg_to_save and first_preg_to_save.  */

int
pop_multiple_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  int lastdreg = 8, lastpreg = 6;
  int i, group;

  /* Element 0 is the SP update; GROUP is 0 while scanning the
     P-register run and 1 once the D-register run has started.  */
  for (i = 1, group = 0; i < XVECLEN (op, 0); i++)
    {
      rtx t = XVECEXP (op, 0, i);
      rtx src, dest;
      int regno;

      if (GET_CODE (t) != SET)
        return 0;

      src = SET_SRC (t);
      dest = SET_DEST (t);
      if (GET_CODE (src) != MEM || ! REG_P (dest))
        return 0;
      src = XEXP (src, 0);

      /* The first load comes from [SP]; subsequent ones from
         SP plus the next higher word offset.  */
      if (i == 1)
        {
          if (! REG_P (src) || REGNO (src) != REG_SP)
            return 0;
        }
      else if (GET_CODE (src) != PLUS
               || ! REG_P (XEXP (src, 0))
               || REGNO (XEXP (src, 0)) != REG_SP
               || GET_CODE (XEXP (src, 1)) != CONST_INT
               || INTVAL (XEXP (src, 1)) != (i - 1) * 4)
        return 0;

      regno = REGNO (dest);
      if (group == 0)
        {
          /* R7 marks the start of the D-register run; otherwise the
             P registers must count down consecutively.  */
          if (regno == REG_R7)
            {
              group = 1;
              lastdreg = 7;
            }
          else if (regno != REG_P0 + lastpreg - 1)
            return 0;
          else
            lastpreg--;
        }
      else if (group == 1)
        {
          if (regno != REG_R0 + lastdreg - 1)
            return 0;
          else
            lastdreg--;
        }
    }
  first_dreg_to_save = lastdreg;
  first_preg_to_save = lastpreg;
  return 1;
}
2546
 
2547
/* Emit assembly code for one multi-register push described by INSN, with
2548
   operands in OPERANDS.  */
2549
 
2550
void
2551
output_push_multiple (rtx insn, rtx *operands)
2552
{
2553
  char buf[80];
2554
  int ok;
2555
 
2556
  /* Validate the insn again, and compute first_[dp]reg_to_save. */
2557
  ok = push_multiple_operation (PATTERN (insn), VOIDmode);
2558
  gcc_assert (ok);
2559
 
2560
  if (first_dreg_to_save == 8)
2561
    sprintf (buf, "[--sp] = ( p5:%d );\n", first_preg_to_save);
2562
  else if (first_preg_to_save == 6)
2563
    sprintf (buf, "[--sp] = ( r7:%d );\n", first_dreg_to_save);
2564
  else
2565
    sprintf (buf, "[--sp] = ( r7:%d, p5:%d );\n",
2566
             first_dreg_to_save, first_preg_to_save);
2567
 
2568
  output_asm_insn (buf, operands);
2569
}
2570
 
2571
/* Emit assembly code for one multi-register pop described by INSN, with
2572
   operands in OPERANDS.  */
2573
 
2574
void
2575
output_pop_multiple (rtx insn, rtx *operands)
2576
{
2577
  char buf[80];
2578
  int ok;
2579
 
2580
  /* Validate the insn again, and compute first_[dp]reg_to_save. */
2581
  ok = pop_multiple_operation (PATTERN (insn), VOIDmode);
2582
  gcc_assert (ok);
2583
 
2584
  if (first_dreg_to_save == 8)
2585
    sprintf (buf, "( p5:%d ) = [sp++];\n", first_preg_to_save);
2586
  else if (first_preg_to_save == 6)
2587
    sprintf (buf, "( r7:%d ) = [sp++];\n", first_dreg_to_save);
2588
  else
2589
    sprintf (buf, "( r7:%d, p5:%d ) = [sp++];\n",
2590
             first_dreg_to_save, first_preg_to_save);
2591
 
2592
  output_asm_insn (buf, operands);
2593
}
2594
 
2595
/* Adjust DST and SRC by OFFSET bytes, and generate one move in mode MODE.  */
2596
 
2597
static void
2598
single_move_for_movmem (rtx dst, rtx src, enum machine_mode mode, HOST_WIDE_INT offset)
2599
{
2600
  rtx scratch = gen_reg_rtx (mode);
2601
  rtx srcmem, dstmem;
2602
 
2603
  srcmem = adjust_address_nv (src, mode, offset);
2604
  dstmem = adjust_address_nv (dst, mode, offset);
2605
  emit_move_insn (scratch, srcmem);
2606
  emit_move_insn (dstmem, scratch);
2607
}
2608
 
2609
/* Expand a string move operation of COUNT_EXP bytes from SRC to DST, with
   alignment ALIGN_EXP.  Return true if successful, false if we should fall
   back on a different method.  Only constant counts with at least
   halfword alignment are handled inline.  */

bool
bfin_expand_movmem (rtx dst, rtx src, rtx count_exp, rtx align_exp)
{
  rtx srcreg, destreg, countreg;
  HOST_WIDE_INT align = 0;
  unsigned HOST_WIDE_INT count = 0;

  if (GET_CODE (align_exp) == CONST_INT)
    align = INTVAL (align_exp);
  if (GET_CODE (count_exp) == CONST_INT)
    {
      count = INTVAL (count_exp);
#if 0
      if (!TARGET_INLINE_ALL_STRINGOPS && count > 64)
        return false;
#endif
    }

  /* If optimizing for size, only do single copies inline.  */
  if (optimize_size)
    {
      if (count == 2 && align < 2)
        return false;
      if (count == 4 && align < 4)
        return false;
      if (count != 1 && count != 2 && count != 4)
        return false;
    }
  if (align < 2 && count != 1)
    return false;

  /* Force both addresses into fresh registers so the loop insns below
     can update them, and rewrite the MEMs to use those registers.  */
  destreg = copy_to_mode_reg (Pmode, XEXP (dst, 0));
  if (destreg != XEXP (dst, 0))
    dst = replace_equiv_address_nv (dst, destreg);
  srcreg = copy_to_mode_reg (Pmode, XEXP (src, 0));
  if (srcreg != XEXP (src, 0))
    src = replace_equiv_address_nv (src, srcreg);

  if (count != 0 && align >= 2)
    {
      unsigned HOST_WIDE_INT offset = 0;

      if (align >= 4)
        {
          /* Word-aligned: copy words, then a trailing halfword/byte.  */
          if ((count & ~3) == 4)
            {
              single_move_for_movmem (dst, src, SImode, offset);
              offset = 4;
            }
          else if (count & ~3)
            {
              HOST_WIDE_INT new_count = ((count >> 2) & 0x3fffffff) - 1;
              countreg = copy_to_mode_reg (Pmode, GEN_INT (new_count));

              /* rep_movsi both reads and writes destreg/srcreg, leaving
                 them past the copied words -- so OFFSET stays 0 here and
                 the trailing moves below still address correctly.  */
              emit_insn (gen_rep_movsi (destreg, srcreg, countreg, destreg, srcreg));
            }
          if (count & 2)
            {
              single_move_for_movmem (dst, src, HImode, offset);
              offset += 2;
            }
        }
      else
        {
          /* Halfword-aligned: copy halfwords, then a trailing byte.  */
          if ((count & ~1) == 2)
            {
              single_move_for_movmem (dst, src, HImode, offset);
              offset = 2;
            }
          else if (count & ~1)
            {
              HOST_WIDE_INT new_count = ((count >> 1) & 0x7fffffff) - 1;
              countreg = copy_to_mode_reg (Pmode, GEN_INT (new_count));

              emit_insn (gen_rep_movhi (destreg, srcreg, countreg, destreg, srcreg));
            }
        }
      if (count & 1)
        {
          single_move_for_movmem (dst, src, QImode, offset);
        }
      return true;
    }
  return false;
}
2698
 
2699
 
2700
/* Scheduler hook: adjust COST of the dependence of INSN on DEP_INSN,
   where LINK describes the kind of dependence.  Models the extra
   latency when a value moved into an address register from a DREG is
   used soon afterwards.  */

static int
bfin_adjust_cost (rtx insn, rtx link, rtx dep_insn, int cost)
{
  enum attr_type insn_type, dep_insn_type;
  int dep_insn_code_number;

  /* Anti and output dependencies have zero cost.  */
  if (REG_NOTE_KIND (link) != 0)
    return 0;

  dep_insn_code_number = recog_memoized (dep_insn);

  /* If we can't recognize the insns, we can't really do anything.  */
  if (dep_insn_code_number < 0 || recog_memoized (insn) < 0)
    return cost;

  insn_type = get_attr_type (insn);
  dep_insn_type = get_attr_type (dep_insn);

  if (dep_insn_type == TYPE_MOVE || dep_insn_type == TYPE_MCLD)
    {
      rtx pat = PATTERN (dep_insn);
      rtx dest = SET_DEST (pat);
      rtx src = SET_SRC (pat);
      /* Only moves of a DREG into an address register incur the stall.
         NOTE(review): for TYPE_MCLD the source is presumably a MEM, in
         which case REGNO (src) is dubious -- confirm the insn patterns
         guarantee a REG source here.  */
      if (! ADDRESS_REGNO_P (REGNO (dest)) || ! D_REGNO_P (REGNO (src)))
        return cost;
      /* Charge 4 extra cycles after a MOVE, 3 after a load.  */
      return cost + (dep_insn_type == TYPE_MOVE ? 4 : 3);
    }

  return cost;
}
2731
 
2732
 
2733
/* Increment the counter for the number of loop instructions in the
2734
   current function.  */
2735
 
2736
void
2737
bfin_hardware_loop (void)
2738
{
2739
  cfun->machine->has_hardware_loops++;
2740
}
2741
 
2742
/* Maximum loop nesting depth the hardware-loop optimizer handles.  */
#define MAX_LOOP_DEPTH 2

/* Maximum size of a loop (presumably in bytes -- confirm against the
   users of this constant).  */
#define MAX_LOOP_LENGTH 2042

/* We need to keep a vector of loops */
typedef struct loop_info *loop_info;
DEF_VEC_P (loop_info);
DEF_VEC_ALLOC_P (loop_info,heap);
2752
 
2753
/* Information about a loop we have found (or are in the process of
   finding).  */
struct loop_info GTY (())
{
  /* loop number, for dumps */
  int loop_no;

  /* Predecessor block of the loop.   This is the one that falls into
     the loop and contains the initialization instruction.  */
  basic_block predecessor;

  /* First block in the loop.  This is the one branched to by the loop_end
     insn.  */
  basic_block head;

  /* Last block in the loop (the one with the loop_end insn).  */
  basic_block tail;

  /* The successor block of the loop.  This is the one the loop_end insn
     falls into.  */
  basic_block successor;

  /* The last instruction in the tail.  */
  rtx last_insn;

  /* The loop_end insn.  */
  rtx loop_end;

  /* The iteration register.  */
  rtx iter_reg;

  /* The new initialization insn: an optional move of the iteration
     count into the LC register, or NULL_RTX when none is needed.  */
  rtx init;

  /* The new LSETUP-style loop setup instruction.  */
  rtx loop_init;

  /* The new label placed at the beginning of the loop. */
  rtx start_label;

  /* The new label placed at the end of the loop. */
  rtx end_label;

  /* The length of the loop.  */
  int length;

  /* The nesting depth of the loop.  */
  int depth;

  /* Nonzero if we can't optimize this loop.  */
  int bad;

  /* True if we have visited this loop.  */
  int visited;

  /* True if this loop body clobbers any of LC0, LT0, or LB0.  */
  int clobber_loop0;

  /* True if this loop body clobbers any of LC1, LT1, or LB1.  */
  int clobber_loop1;

  /* Next loop in the graph. */
  struct loop_info *next;

  /* Immediate outer loop of this loop.  */
  struct loop_info *outer;

  /* Vector of blocks only within the loop, including those within
     inner loops.  */
  VEC (basic_block,heap) *blocks;

  /* Same information in a bitmap.  */
  bitmap block_bitmap;

  /* Vector of inner loops within this loop  */
  VEC (loop_info,heap) *loops;
};
2830
 
2831
static void
2832
bfin_dump_loops (loop_info loops)
2833
{
2834
  loop_info loop;
2835
 
2836
  for (loop = loops; loop; loop = loop->next)
2837
    {
2838
      loop_info i;
2839
      basic_block b;
2840
      unsigned ix;
2841
 
2842
      fprintf (dump_file, ";; loop %d: ", loop->loop_no);
2843
      if (loop->bad)
2844
        fprintf (dump_file, "(bad) ");
2845
      fprintf (dump_file, "{head:%d, depth:%d}", loop->head->index, loop->depth);
2846
 
2847
      fprintf (dump_file, " blocks: [ ");
2848
      for (ix = 0; VEC_iterate (basic_block, loop->blocks, ix, b); ix++)
2849
        fprintf (dump_file, "%d ", b->index);
2850
      fprintf (dump_file, "] ");
2851
 
2852
      fprintf (dump_file, " inner loops: [ ");
2853
      for (ix = 0; VEC_iterate (loop_info, loop->loops, ix, i); ix++)
2854
        fprintf (dump_file, "%d ", i->loop_no);
2855
      fprintf (dump_file, "]\n");
2856
    }
2857
  fprintf (dump_file, "\n");
2858
}
2859
 
2860
/* Scan the blocks of LOOP (and its inferiors) looking for basic block
2861
   BB. Return true, if we find it.  */
2862
 
2863
static bool
2864
bfin_bb_in_loop (loop_info loop, basic_block bb)
2865
{
2866
  return bitmap_bit_p (loop->block_bitmap, bb->index);
2867
}
2868
 
2869
/* Scan the blocks of LOOP (and its inferiors) looking for uses of
2870
   REG.  Return true, if we find any.  Don't count the loop's loop_end
2871
   insn if it matches LOOP_END.  */
2872
 
2873
static bool
2874
bfin_scan_loop (loop_info loop, rtx reg, rtx loop_end)
2875
{
2876
  unsigned ix;
2877
  basic_block bb;
2878
 
2879
  for (ix = 0; VEC_iterate (basic_block, loop->blocks, ix, bb); ix++)
2880
    {
2881
      rtx insn;
2882
 
2883
      for (insn = BB_HEAD (bb);
2884
           insn != NEXT_INSN (BB_END (bb));
2885
           insn = NEXT_INSN (insn))
2886
        {
2887
          if (!INSN_P (insn))
2888
            continue;
2889
          if (insn == loop_end)
2890
            continue;
2891
          if (reg_mentioned_p (reg, PATTERN (insn)))
2892
            return true;
2893
        }
2894
    }
2895
  return false;
2896
}
2897
 
2898
/* Optimize LOOP.  Try to turn its counted-loop form (decrement, compare
   and conditional branch represented by the loop_end pattern) into a
   Blackfin hardware loop using the LSETUP/LC/LT/LB registers.  If the
   loop fails any of the suitability checks below, mark it bad and fall
   back to emitting an explicit decrement/compare/branch sequence.  */

static void
bfin_optimize_loop (loop_info loop)
{
  basic_block bb;
  loop_info inner;
  rtx insn, init_insn, last_insn, nop_insn;
  rtx loop_init, start_label, end_label;
  rtx reg_lc0, reg_lc1, reg_lt0, reg_lt1, reg_lb0, reg_lb1;
  rtx iter_reg;
  rtx lc_reg, lt_reg, lb_reg;
  rtx seq;
  int length;
  unsigned ix;
  int inner_depth = 0;

  /* Each loop is processed at most once (this function recurses into
     inner loops below).  */
  if (loop->visited)
    return;

  loop->visited = 1;

  if (loop->bad)
    {
      if (dump_file)
        fprintf (dump_file, ";; loop %d bad when found\n", loop->loop_no);
      goto bad_loop;
    }

  /* Every loop contains in its list of inner loops every loop nested inside
     it, even if there are intermediate loops.  This works because we're doing
     a depth-first search here and never visit a loop more than once.  */
  for (ix = 0; VEC_iterate (loop_info, loop->loops, ix, inner); ix++)
    {
      bfin_optimize_loop (inner);

      if (!inner->bad && inner_depth < inner->depth)
        {
          inner_depth = inner->depth;

          /* Inherit the inner loop's hardware-register usage so we do
             not pick a loop register pair it already consumed.  */
          loop->clobber_loop0 |= inner->clobber_loop0;
          loop->clobber_loop1 |= inner->clobber_loop1;
        }
    }

  /* Only two hardware loop register sets exist, so nesting deeper than
     MAX_LOOP_DEPTH cannot be done entirely in hardware.  */
  loop->depth = inner_depth + 1;
  if (loop->depth > MAX_LOOP_DEPTH)
    {
      if (dump_file)
        fprintf (dump_file, ";; loop %d too deep\n", loop->loop_no);
      goto bad_loop;
    }

  /* Get the loop iteration register.  */
  iter_reg = loop->iter_reg;

  if (!DPREG_P (iter_reg))
    {
      if (dump_file)
        fprintf (dump_file, ";; loop %d iteration count NOT in PREG or DREG\n",
                 loop->loop_no);
      goto bad_loop;
    }

  /* Check if start_label appears before loop_end and calculate the
     offset between them.  We calculate the length of instructions
     conservatively.  */
  length = 0;
  for (insn = loop->start_label;
       insn && insn != loop->loop_end;
       insn = NEXT_INSN (insn))
    {
      /* Conditional branches and labels may later grow extra NOPs/CSYNCs
         in bfin_reorg to work around hardware anomalies; account for the
         worst case here.  */
      if (JUMP_P (insn) && any_condjump_p (insn) && !optimize_size)
        {
          if (TARGET_CSYNC_ANOMALY)
            length += 8;
          else if (TARGET_SPECLD_ANOMALY)
            length += 6;
        }
      else if (LABEL_P (insn))
        {
          if (TARGET_CSYNC_ANOMALY)
            length += 4;
        }

      if (INSN_P (insn))
        length += get_attr_length (insn);
    }

  /* INSN is NULL here iff we fell off the end of the insn chain, i.e.
     start_label did not appear before loop_end.  */
  if (!insn)
    {
      if (dump_file)
        fprintf (dump_file, ";; loop %d start_label not before loop_end\n",
                 loop->loop_no);
      goto bad_loop;
    }

  /* The LSETUP instruction's loop-bottom offset has a limited range.  */
  loop->length = length;
  if (loop->length > MAX_LOOP_LENGTH)
    {
      if (dump_file)
        fprintf (dump_file, ";; loop %d too long\n", loop->loop_no);
      goto bad_loop;
    }

  /* Scan all the blocks to make sure they don't use iter_reg.  */
  if (bfin_scan_loop (loop, iter_reg, loop->loop_end))
    {
      if (dump_file)
        fprintf (dump_file, ";; loop %d uses iterator\n", loop->loop_no);
      goto bad_loop;
    }

  /* Scan all the insns to see if the loop body clobber
     any hardware loop registers. */

  reg_lc0 = gen_rtx_REG (SImode, REG_LC0);
  reg_lc1 = gen_rtx_REG (SImode, REG_LC1);
  reg_lt0 = gen_rtx_REG (SImode, REG_LT0);
  reg_lt1 = gen_rtx_REG (SImode, REG_LT1);
  reg_lb0 = gen_rtx_REG (SImode, REG_LB0);
  reg_lb1 = gen_rtx_REG (SImode, REG_LB1);

  for (ix = 0; VEC_iterate (basic_block, loop->blocks, ix, bb); ix++)
    {
      rtx insn;

      for (insn = BB_HEAD (bb);
           insn != NEXT_INSN (BB_END (bb));
           insn = NEXT_INSN (insn))
        {
          if (!INSN_P (insn))
            continue;

          if (reg_set_p (reg_lc0, insn)
              || reg_set_p (reg_lt0, insn)
              || reg_set_p (reg_lb0, insn))
            loop->clobber_loop0 = 1;

          if (reg_set_p (reg_lc1, insn)
              || reg_set_p (reg_lt1, insn)
              || reg_set_p (reg_lb1, insn))
            loop->clobber_loop1 |= 1;
        }
    }

  /* Give up if no register set is free: either both are clobbered, or
     we are at maximum depth and the only set an outer loop could use
     (set 0) is clobbered.  */
  if ((loop->clobber_loop0 && loop->clobber_loop1)
      || (loop->depth == MAX_LOOP_DEPTH && loop->clobber_loop0))
    {
      loop->depth = MAX_LOOP_DEPTH + 1;
      if (dump_file)
        fprintf (dump_file, ";; loop %d no loop reg available\n",
                 loop->loop_no);
      goto bad_loop;
    }

  /* There should be an instruction before the loop_end instruction
     in the same basic block. And the instruction must not be
     - JUMP
     - CONDITIONAL BRANCH
     - CALL
     - CSYNC
     - SSYNC
     - Returns (RTS, RTN, etc.)  */

  bb = loop->tail;
  last_insn = PREV_INSN (loop->loop_end);

  /* Walk backwards (possibly through single-predecessor blocks) until a
     real insn is found to serve as the hardware loop's last insn.  */
  while (1)
    {
      for (; last_insn != PREV_INSN (BB_HEAD (bb));
           last_insn = PREV_INSN (last_insn))
        if (INSN_P (last_insn))
          break;

      if (last_insn != PREV_INSN (BB_HEAD (bb)))
        break;

      if (single_pred_p (bb)
          && single_pred (bb) != ENTRY_BLOCK_PTR)
        {
          bb = single_pred (bb);
          last_insn = BB_END (bb);
          continue;
        }
      else
        {
          last_insn = NULL_RTX;
          break;
        }
    }

  if (!last_insn)
    {
      if (dump_file)
        fprintf (dump_file, ";; loop %d has no last instruction\n",
                 loop->loop_no);
      goto bad_loop;
    }

  if (JUMP_P (last_insn))
    {
      loop_info inner = bb->aux;
      if (inner
          && inner->outer == loop
          && inner->loop_end == last_insn
          && inner->depth == 1)
        /* This jump_insn is the exact loop_end of an inner loop
           and to be optimized away. So use the inner's last_insn.  */
        last_insn = inner->last_insn;
      else
        {
          if (dump_file)
            fprintf (dump_file, ";; loop %d has bad last instruction\n",
                     loop->loop_no);
          goto bad_loop;
        }
    }
  else if (CALL_P (last_insn)
           || get_attr_type (last_insn) == TYPE_SYNC
           || recog_memoized (last_insn) == CODE_FOR_return_internal)
    {
      if (dump_file)
        fprintf (dump_file, ";; loop %d has bad last instruction\n",
                 loop->loop_no);
      goto bad_loop;
    }

  /* For asm statements and multi-insn sequences we cannot tell what the
     final machine insn is, so pad with a NOP and use that as the loop's
     last insn instead.  */
  if (GET_CODE (PATTERN (last_insn)) == ASM_INPUT
      || asm_noperands (PATTERN (last_insn)) >= 0
      || get_attr_seq_insns (last_insn) == SEQ_INSNS_MULTI)
    {
      nop_insn = emit_insn_after (gen_nop (), last_insn);
      last_insn = nop_insn;
    }

  loop->last_insn = last_insn;

  /* The loop is good for replacement.  */
  start_label = loop->start_label;
  end_label = gen_label_rtx ();
  iter_reg = loop->iter_reg;

  /* Prefer register set 1 for innermost loops; otherwise fall back to
     set 0 (whose availability was verified above).  */
  if (loop->depth == 1 && !loop->clobber_loop1)
    {
      lc_reg = reg_lc1;
      lt_reg = reg_lt1;
      lb_reg = reg_lb1;
      loop->clobber_loop1 = 1;
    }
  else
    {
      lc_reg = reg_lc0;
      lt_reg = reg_lt0;
      lb_reg = reg_lb0;
      loop->clobber_loop0 = 1;
    }

  /* If iter_reg is a DREG, we need generate an instruction to load
     the loop count into LC register. */
  if (D_REGNO_P (REGNO (iter_reg)))
    {
      init_insn = gen_movsi (lc_reg, iter_reg);
      loop_init = gen_lsetup_without_autoinit (lt_reg, start_label,
                                               lb_reg, end_label,
                                               lc_reg);
    }
  else if (P_REGNO_P (REGNO (iter_reg)))
    {
      init_insn = NULL_RTX;
      loop_init = gen_lsetup_with_autoinit (lt_reg, start_label,
                                            lb_reg, end_label,
                                            lc_reg, iter_reg);
    }
  else
    gcc_unreachable ();

  loop->init = init_insn;
  loop->end_label = end_label;
  loop->loop_init = loop_init;

  if (dump_file)
    {
      fprintf (dump_file, ";; replacing loop %d initializer with\n",
               loop->loop_no);
      print_rtl_single (dump_file, loop->loop_init);
      fprintf (dump_file, ";; replacing loop %d terminator with\n",
               loop->loop_no);
      print_rtl_single (dump_file, loop->loop_end);
    }

  /* Emit (optional LC move +) LSETUP + start label after the loop's
     predecessor block, then delete the old loop_end branch.  */
  start_sequence ();

  if (loop->init != NULL_RTX)
    emit_insn (loop->init);
  emit_insn(loop->loop_init);
  emit_label (loop->start_label);

  seq = get_insns ();
  end_sequence ();

  emit_insn_after (seq, BB_END (loop->predecessor));
  delete_insn (loop->loop_end);

  /* Insert the loop end label before the last instruction of the loop.  */
  emit_label_before (loop->end_label, loop->last_insn);

  return;

bad_loop:

  if (dump_file)
    fprintf (dump_file, ";; loop %d is bad\n", loop->loop_no);

  loop->bad = 1;

  if (DPREG_P (loop->iter_reg))
    {
      /* If loop->iter_reg is a DREG or PREG, we can split it here
         without scratch register.  Replace the loop_end pattern by an
         explicit decrement / compare-with-zero / branch-if-not-equal.  */
      rtx insn;

      emit_insn_before (gen_addsi3 (loop->iter_reg,
                                    loop->iter_reg,
                                    constm1_rtx),
                        loop->loop_end);

      emit_insn_before (gen_cmpsi (loop->iter_reg, const0_rtx),
                        loop->loop_end);

      insn = emit_jump_insn_before (gen_bne (loop->start_label),
                                    loop->loop_end);

      JUMP_LABEL (insn) = loop->start_label;
      LABEL_NUSES (loop->start_label)++;
      delete_insn (loop->loop_end);
    }
}
3236
 
3237
/* Called from bfin_reorg_loops when a potential loop end is found.  LOOP is
   a newly set up structure describing the loop, it is this function's
   responsibility to fill most of it.  TAIL_BB and TAIL_INSN point to the
   loop_end insn and its enclosing basic block.  */

static void
bfin_discover_loop (loop_info loop, basic_block tail_bb, rtx tail_insn)
{
  unsigned dwork = 0;
  basic_block bb;
  /* Work list of blocks still to scan; DWORK indexes its next entry.  */
  VEC (basic_block,heap) *works = VEC_alloc (basic_block,heap,20);

  /* The loop_end insn branches (backwards) to the loop head; its
     fallthrough edge leads out of the loop.  */
  loop->tail = tail_bb;
  loop->head = BRANCH_EDGE (tail_bb)->dest;
  loop->successor = FALLTHRU_EDGE (tail_bb)->dest;
  loop->predecessor = NULL;
  loop->loop_end = tail_insn;
  loop->last_insn = NULL_RTX;
  /* Pull the iteration register and the start label out of the
     loop_end insn's PARALLEL pattern.  */
  loop->iter_reg = SET_DEST (XVECEXP (PATTERN (tail_insn), 0, 1));
  loop->depth = loop->length = 0;
  loop->visited = 0;
  loop->clobber_loop0 = loop->clobber_loop1 = 0;
  loop->outer = NULL;
  loop->loops = NULL;

  loop->init = loop->loop_init = NULL_RTX;
  loop->start_label = XEXP (XEXP (SET_SRC (XVECEXP (PATTERN (tail_insn), 0, 0)), 1), 0);
  loop->end_label = NULL_RTX;
  loop->bad = 0;

  VEC_safe_push (basic_block, heap, works, loop->head);

  /* Breadth-first scan from the head, following successors on which the
     iteration register is still live, until the tail is reached.  */
  while (VEC_iterate (basic_block, works, dwork++, bb))
    {
      edge e;
      edge_iterator ei;
      if (bb == EXIT_BLOCK_PTR)
        {
          /* We've reached the exit block.  The loop must be bad. */
          if (dump_file)
            fprintf (dump_file,
                     ";; Loop is bad - reached exit block while scanning\n");
          loop->bad = 1;
          break;
        }

      if (bitmap_bit_p (loop->block_bitmap, bb->index))
        continue;

      /* We've not seen this block before.  Add it to the loop's
         list and then add each successor to the work list.  */

      VEC_safe_push (basic_block, heap, loop->blocks, bb);
      bitmap_set_bit (loop->block_bitmap, bb->index);

      if (bb != tail_bb)
        {
          FOR_EACH_EDGE (e, ei, bb->succs)
            {
              basic_block succ = EDGE_SUCC (bb, ei.index)->dest;
              /* Successors where the iteration count is dead cannot be
                 on a path back to the loop_end insn.  */
              if (!REGNO_REG_SET_P (succ->il.rtl->global_live_at_start,
                                    REGNO (loop->iter_reg)))
                continue;
              /* Keep the already-consumed prefix of the work list from
                 growing the vector: compact it in place before letting
                 the vector reallocate.  */
              if (!VEC_space (basic_block, works, 1))
                {
                  if (dwork)
                    {
                      VEC_block_remove (basic_block, works, 0, dwork);
                      dwork = 0;
                    }
                  else
                    VEC_reserve (basic_block, heap, works, 1);
                }
              VEC_quick_push (basic_block, works, succ);
            }
        }
    }

  if (!loop->bad)
    {
      /* Make sure we only have one entry point.  */
      if (EDGE_COUNT (loop->head->preds) == 2)
        {
          loop->predecessor = EDGE_PRED (loop->head, 0)->src;
          if (loop->predecessor == loop->tail)
            /* We wanted the other predecessor.  */
            loop->predecessor = EDGE_PRED (loop->head, 1)->src;

          /* We can only place a loop insn on a fall through edge of a
             single exit block.  */
          if (EDGE_COUNT (loop->predecessor->succs) != 1
              || !(EDGE_SUCC (loop->predecessor, 0)->flags & EDGE_FALLTHRU)
              /* If loop->predecessor is in loop, loop->head is not really
                 the head of the loop.  */
              || bfin_bb_in_loop (loop, loop->predecessor))
            loop->predecessor = NULL;
        }

      if (loop->predecessor == NULL)
        {
          if (dump_file)
            fprintf (dump_file, ";; loop has bad predecessor\n");
          loop->bad = 1;
        }
    }

#ifdef ENABLE_CHECKING
  /* Make sure nothing jumps into this loop.  This shouldn't happen as we
     wouldn't have generated the counted loop patterns in such a case.
     However, this test must be done after the test above to detect loops
     with invalid headers.  */
  if (!loop->bad)
    for (dwork = 0; VEC_iterate (basic_block, loop->blocks, dwork, bb); dwork++)
      {
        edge e;
        edge_iterator ei;
        if (bb == loop->head)
          continue;
        FOR_EACH_EDGE (e, ei, bb->preds)
          {
            basic_block pred = EDGE_PRED (bb, ei.index)->src;
            if (!bfin_bb_in_loop (loop, pred))
              abort ();
          }
      }
#endif
  VEC_free (basic_block, heap, works);
}
3365
 
3366
/* Run the hardware-loop optimization over the whole function: discover
   every loop ended by a loop_end pattern, compute their nesting, then
   try to convert each one (see bfin_optimize_loop).  DUMP_FILE receives
   the diagnostic dump, or is NULL.  */

static void
bfin_reorg_loops (FILE *dump_file)
{
  bitmap_obstack stack;
  bitmap tmp_bitmap;
  basic_block bb;
  loop_info loops = NULL;
  loop_info loop;
  int nloops = 0;

  /* NOTE(review): this obstack is initialized but never released; the
     per-loop bitmaps are freed below but the obstack itself appears to
     leak — confirm against bitmap_obstack_release usage elsewhere.  */
  bitmap_obstack_initialize (&stack);

  /* Find all the possible loop tails.  This means searching for every
     loop_end instruction.  For each one found, create a loop_info
     structure and add the head block to the work list. */
  FOR_EACH_BB (bb)
    {
      rtx tail = BB_END (bb);

      /* Skip trailing notes to find the block's real last insn.  */
      while (GET_CODE (tail) == NOTE)
        tail = PREV_INSN (tail);

      bb->aux = NULL;

      if (INSN_P (tail) && recog_memoized (tail) == CODE_FOR_loop_end)
        {
          /* A possible loop end */

          loop = XNEW (struct loop_info);
          loop->next = loops;
          loops = loop;
          loop->loop_no = nloops++;
          loop->blocks = VEC_alloc (basic_block, heap, 20);
          loop->block_bitmap = BITMAP_ALLOC (&stack);
          /* Let the tail block point back at its loop so inner loop_end
             insns can be recognized in bfin_optimize_loop.  */
          bb->aux = loop;

          if (dump_file)
            {
              fprintf (dump_file, ";; potential loop %d ending at\n",
                       loop->loop_no);
              print_rtl_single (dump_file, tail);
            }

          bfin_discover_loop (loop, bb, tail);
        }
    }

  tmp_bitmap = BITMAP_ALLOC (&stack);
  /* Compute loop nestings.  */
  for (loop = loops; loop; loop = loop->next)
    {
      loop_info other;
      if (loop->bad)
        continue;

      for (other = loop->next; other; other = other->next)
        {
          if (other->bad)
            continue;

          /* Intersect the two block sets: disjoint loops are unrelated,
             a subset relation means nesting, and any partial overlap is
             an irreducible tangle we refuse to optimize.  */
          bitmap_and (tmp_bitmap, other->block_bitmap, loop->block_bitmap);
          if (bitmap_empty_p (tmp_bitmap))
            continue;
          if (bitmap_equal_p (tmp_bitmap, other->block_bitmap))
            {
              other->outer = loop;
              VEC_safe_push (loop_info, heap, loop->loops, other);
            }
          else if (bitmap_equal_p (tmp_bitmap, loop->block_bitmap))
            {
              loop->outer = other;
              VEC_safe_push (loop_info, heap, other->loops, loop);
            }
          else
            {
              loop->bad = other->bad = 1;
            }
        }
    }
  BITMAP_FREE (tmp_bitmap);

  if (dump_file)
    {
      fprintf (dump_file, ";; All loops found:\n\n");
      bfin_dump_loops (loops);
    }

  /* Now apply the optimizations.  */
  for (loop = loops; loop; loop = loop->next)
    bfin_optimize_loop (loop);

  if (dump_file)
    {
      fprintf (dump_file, ";; After hardware loops optimization:\n\n");
      bfin_dump_loops (loops);
    }

  /* Free up the loop structures */
  while (loops)
    {
      loop = loops;
      loops = loop->next;
      VEC_free (loop_info, heap, loop->loops);
      VEC_free (basic_block, heap, loop->blocks);
      BITMAP_FREE (loop->block_bitmap);
      XDELETE (loop);
    }

  if (dump_file)
    print_rtl (dump_file, get_insns ());
}
3477
 
3478
 
3479
/* We use the machine specific reorg pass for emitting CSYNC instructions
3480
   after conditional branches as needed.
3481
 
3482
   The Blackfin is unusual in that a code sequence like
3483
     if cc jump label
3484
     r0 = (p0)
3485
   may speculatively perform the load even if the condition isn't true.  This
3486
   happens for a branch that is predicted not taken, because the pipeline
3487
   isn't flushed or stalled, so the early stages of the following instructions,
3488
   which perform the memory reference, are allowed to execute before the
3489
   jump condition is evaluated.
3490
   Therefore, we must insert additional instructions in all places where this
3491
   could lead to incorrect behavior.  The manual recommends CSYNC, while
3492
   VDSP seems to use NOPs (even though its corresponding compiler option is
3493
   named CSYNC).
3494
 
3495
   When optimizing for speed, we emit NOPs, which seems faster than a CSYNC.
3496
   When optimizing for size, we turn the branch into a predicted taken one.
3497
   This may be slower due to mispredicts, but saves code size.  */
3498
 
3499
/* Implement the machine-dependent reorg pass: run the hardware-loop
   transformation, then work around the speculative-load and CSYNC
   hardware anomalies by padding predicted-not-taken conditional
   branches (first pass) and predicted-taken branch targets (second
   pass) with NOPs, or by flipping the branch prediction when
   optimizing for size.  See the comment block above for details.  */

static void
bfin_reorg (void)
{
  rtx insn, last_condjump = NULL_RTX;
  /* Number of insns seen since the last predicted-not-taken branch;
     INT_MAX means "no such branch is in range".  */
  int cycles_since_jump = INT_MAX;

  /* Doloop optimization */
  if (cfun->machine->has_hardware_loops)
    bfin_reorg_loops (dump_file);

  if (! TARGET_SPECLD_ANOMALY && ! TARGET_CSYNC_ANOMALY)
    return;

  /* First pass: find predicted-false branches; if something after them
     needs nops, insert them or change the branch to predict true.  */
  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      rtx pat;

      if (NOTE_P (insn) || BARRIER_P (insn) || LABEL_P (insn))
        continue;

      /* Ignore pseudo-insns and asms, which have no fixed length.  */
      pat = PATTERN (insn);
      if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER
          || GET_CODE (pat) == ASM_INPUT || GET_CODE (pat) == ADDR_VEC
          || GET_CODE (pat) == ADDR_DIFF_VEC || asm_noperands (pat) >= 0)
        continue;

      if (JUMP_P (insn))
        {
          if (any_condjump_p (insn)
              && ! cbranch_predicted_taken_p (insn))
            {
              last_condjump = insn;
              cycles_since_jump = 0;
            }
          else
            cycles_since_jump = INT_MAX;
        }
      else if (INSN_P (insn))
        {
          enum attr_type type = get_attr_type (insn);
          int delay_needed = 0;
          if (cycles_since_jump < INT_MAX)
            cycles_since_jump++;

          /* A possibly-trapping load shortly after a predicted-false
             branch may be executed speculatively; a SYNC insn must not
             follow such a branch too closely.  */
          if (type == TYPE_MCLD && TARGET_SPECLD_ANOMALY)
            {
              rtx pat = single_set (insn);
              if (may_trap_p (SET_SRC (pat)))
                delay_needed = 3;
            }
          else if (type == TYPE_SYNC && TARGET_CSYNC_ANOMALY)
            delay_needed = 4;

          if (delay_needed > cycles_since_jump)
            {
              rtx pat;
              int num_clobbers;
              rtx *op = recog_data.operand;

              delay_needed -= cycles_since_jump;

              /* Rewrite the branch in place: either predict it taken
                 (smaller) or attach the required number of NOPs.  */
              extract_insn (last_condjump);
              if (optimize_size)
                {
                  pat = gen_cbranch_predicted_taken (op[0], op[1], op[2],
                                                     op[3]);
                  cycles_since_jump = INT_MAX;
                }
              else
                /* Do not adjust cycles_since_jump in this case, so that
                   we'll increase the number of NOPs for a subsequent insn
                   if necessary.  */
                pat = gen_cbranch_with_nops (op[0], op[1], op[2], op[3],
                                             GEN_INT (delay_needed));
              PATTERN (last_condjump) = pat;
              INSN_CODE (last_condjump) = recog (pat, insn, &num_clobbers);
            }
        }
    }
  /* Second pass: for predicted-true branches, see if anything at the
     branch destination needs extra nops.  */
  if (! TARGET_CSYNC_ANOMALY)
    return;

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      if (JUMP_P (insn)
          && any_condjump_p (insn)
          && (INSN_CODE (insn) == CODE_FOR_cbranch_predicted_taken
              || cbranch_predicted_taken_p (insn)))
        {
          rtx target = JUMP_LABEL (insn);
          rtx label = target;
          cycles_since_jump = 0;
          /* Scan a few insns past the branch target for a SYNC insn.  */
          for (; target && cycles_since_jump < 3; target = NEXT_INSN (target))
            {
              rtx pat;

              if (NOTE_P (target) || BARRIER_P (target) || LABEL_P (target))
                continue;

              pat = PATTERN (target);
              if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER
                  || GET_CODE (pat) == ASM_INPUT || GET_CODE (pat) == ADDR_VEC
                  || GET_CODE (pat) == ADDR_DIFF_VEC || asm_noperands (pat) >= 0)
                continue;

              if (INSN_P (target))
                {
                  enum attr_type type = get_attr_type (target);
                  int delay_needed = 0;
                  if (cycles_since_jump < INT_MAX)
                    cycles_since_jump++;

                  if (type == TYPE_SYNC && TARGET_CSYNC_ANOMALY)
                    delay_needed = 2;

                  if (delay_needed > cycles_since_jump)
                    {
                      /* NOTE(review): prev_real_insn can return NULL_RTX
                         when no real insn precedes LABEL; JUMP_P (prev)
                         would then dereference NULL — confirm whether
                         that situation can arise here.  */
                      rtx prev = prev_real_insn (label);
                      delay_needed -= cycles_since_jump;
                      if (dump_file)
                        fprintf (dump_file, "Adding %d nops after %d\n",
                                 delay_needed, INSN_UID (label));
                      /* If the insn falling through to LABEL already
                         carries NOPs from the first pass, the new NOPs
                         emitted after LABEL cover part of its delay, so
                         shrink its NOP count accordingly.  */
                      if (JUMP_P (prev)
                          && INSN_CODE (prev) == CODE_FOR_cbranch_with_nops)
                        {
                          rtx x;
                          HOST_WIDE_INT v;

                          if (dump_file)
                            fprintf (dump_file,
                                     "Reducing nops on insn %d.\n",
                                     INSN_UID (prev));
                          x = PATTERN (prev);
                          x = XVECEXP (x, 0, 1);
                          v = INTVAL (XVECEXP (x, 0, 0)) - delay_needed;
                          XVECEXP (x, 0, 0) = GEN_INT (v);
                        }
                      while (delay_needed-- > 0)
                        emit_insn_after (gen_nop (), label);
                      break;
                    }
                }
            }
        }
    }
}
3649
 
3650
/* Handle interrupt_handler, exception_handler and nmi_handler function
3651
   attributes; arguments as in struct attribute_spec.handler.  */
3652
 
3653
static tree
3654
handle_int_attribute (tree *node, tree name,
3655
                      tree args ATTRIBUTE_UNUSED,
3656
                      int flags ATTRIBUTE_UNUSED,
3657
                      bool *no_add_attrs)
3658
{
3659
  tree x = *node;
3660
  if (TREE_CODE (x) == FUNCTION_DECL)
3661
    x = TREE_TYPE (x);
3662
 
3663
  if (TREE_CODE (x) != FUNCTION_TYPE)
3664
    {
3665
      warning (OPT_Wattributes, "%qs attribute only applies to functions",
3666
               IDENTIFIER_POINTER (name));
3667
      *no_add_attrs = true;
3668
    }
3669
  else if (funkind (x) != SUBROUTINE)
3670
    error ("multiple function type attributes specified");
3671
 
3672
  return NULL_TREE;
3673
}
3674
 
3675
/* Return 0 if the attributes for two types are incompatible, 1 if they
3676
   are compatible, and 2 if they are nearly compatible (which causes a
3677
   warning to be generated).  */
3678
 
3679
static int
3680
bfin_comp_type_attributes (tree type1, tree type2)
3681
{
3682
  e_funkind kind1, kind2;
3683
 
3684
  if (TREE_CODE (type1) != FUNCTION_TYPE)
3685
    return 1;
3686
 
3687
  kind1 = funkind (type1);
3688
  kind2 = funkind (type2);
3689
 
3690
  if (kind1 != kind2)
3691
    return 0;
3692
 
3693
  /*  Check for mismatched modifiers */
3694
  if (!lookup_attribute ("nesting", TYPE_ATTRIBUTES (type1))
3695
      != !lookup_attribute ("nesting", TYPE_ATTRIBUTES (type2)))
3696
    return 0;
3697
 
3698
  if (!lookup_attribute ("saveall", TYPE_ATTRIBUTES (type1))
3699
      != !lookup_attribute ("saveall", TYPE_ATTRIBUTES (type2)))
3700
    return 0;
3701
 
3702
  if (!lookup_attribute ("kspisusp", TYPE_ATTRIBUTES (type1))
3703
      != !lookup_attribute ("kspisusp", TYPE_ATTRIBUTES (type2)))
3704
    return 0;
3705
 
3706
  if (!lookup_attribute ("longcall", TYPE_ATTRIBUTES (type1))
3707
      != !lookup_attribute ("longcall", TYPE_ATTRIBUTES (type2)))
3708
    return 0;
3709
 
3710
  return 1;
3711
}
3712
 
3713
/* Handle a "longcall" or "shortcall" attribute; arguments as in
   struct attribute_spec.handler.  */

static tree
bfin_handle_longcall_attribute (tree *node, tree name,
				tree args ATTRIBUTE_UNUSED,
				int flags ATTRIBUTE_UNUSED,
				bool *no_add_attrs)
{
  /* The attribute is only accepted on function types and on the decl
     kinds listed; anything else gets a warning and the attribute is
     dropped.  */
  if (TREE_CODE (*node) != FUNCTION_TYPE
      && TREE_CODE (*node) != FIELD_DECL
      && TREE_CODE (*node) != TYPE_DECL)
    {
      warning (OPT_Wattributes, "`%s' attribute only applies to functions",
	       IDENTIFIER_POINTER (name));
      *no_add_attrs = true;
    }

  /* "longcall" and "shortcall" are mutually exclusive: refuse to add
     one when the other is already on the attribute list.
     NOTE(review): TYPE_ATTRIBUTES is applied to *node even when *node
     is a FIELD_DECL or TYPE_DECL rather than a type; confirm that is
     intended for those decl cases.  */
  if ((strcmp (IDENTIFIER_POINTER (name), "longcall") == 0
       && lookup_attribute ("shortcall", TYPE_ATTRIBUTES (*node)))
      || (strcmp (IDENTIFIER_POINTER (name), "shortcall") == 0
	  && lookup_attribute ("longcall", TYPE_ATTRIBUTES (*node))))
    {
      warning (OPT_Wattributes,
	       "can't apply both longcall and shortcall attributes to the same function");
      *no_add_attrs = true;
    }

  return NULL_TREE;
}
3743
 
3744
/* Table of valid machine attributes.  Wired to the middle end via
   TARGET_ATTRIBUTE_TABLE below.  The three handler attributes share
   handle_int_attribute, which also enforces their mutual exclusion;
   longcall/shortcall share bfin_handle_longcall_attribute.  The
   modifier attributes (nesting, kspisusp, saveall) need no handler —
   they are only queried via lookup_attribute elsewhere.  */
const struct attribute_spec bfin_attribute_table[] =
{
  /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
  { "interrupt_handler", 0, 0, false, true,  true, handle_int_attribute },
  { "exception_handler", 0, 0, false, true,  true, handle_int_attribute },
  { "nmi_handler", 0, 0, false, true,  true, handle_int_attribute },
  { "nesting", 0, 0, false, true,  true, NULL },
  { "kspisusp", 0, 0, false, true,  true, NULL },
  { "saveall", 0, 0, false, true,  true, NULL },
  { "longcall",  0, 0, false, true,  true,  bfin_handle_longcall_attribute },
  { "shortcall", 0, 0, false, true,  true,  bfin_handle_longcall_attribute },
  /* Terminating sentinel.  */
  { NULL, 0, 0, false, false, false, NULL }
};
3758
 
3759
/* Implementation of TARGET_ASM_INTEGER.  When using FD-PIC, we need to
   tell the assembler to generate pointers to function descriptors in
   some cases.  Returns true if the integer was emitted here, false to
   fall back to the default handling.  */

static bool
bfin_assemble_integer (rtx value, unsigned int size, int aligned_p)
{
  if (TARGET_FDPIC && size == UNITS_PER_WORD)
    {
      /* A word-sized pointer to a function must be emitted as a
	 function-descriptor reference under FD-PIC.  */
      if (GET_CODE (value) == SYMBOL_REF
	  && SYMBOL_REF_FUNCTION_P (value))
	{
	  fputs ("\t.picptr\tfuncdesc(", asm_out_file);
	  output_addr_const (asm_out_file, value);
	  fputs (")\n", asm_out_file);
	  return true;
	}
      if (!aligned_p)
	{
	  /* We've set the unaligned SI op to NULL, so we always have to
	     handle the unaligned case here.  */
	  assemble_integer_with_op ("\t.4byte\t", value);
	  return true;
	}
    }
  return default_assemble_integer (value, size, aligned_p);
}
3786
 
3787
/* Output the assembler code for a thunk function.  THUNK_DECL is the
   declaration for the thunk function itself, FUNCTION is the decl for
   the target function.  DELTA is an immediate constant offset to be
   added to THIS.  If VCALL_OFFSET is nonzero, the word at
   *(*this + vcall_offset) should be added to THIS.  */

static void
bfin_output_mi_thunk (FILE *file ATTRIBUTE_UNUSED,
		      tree thunk ATTRIBUTE_UNUSED, HOST_WIDE_INT delta,
		      HOST_WIDE_INT vcall_offset, tree function)
{
  rtx xops[3];
  /* The this parameter is passed as the first argument.
     NOTE(review): "this" as an identifier is valid C but would break if
     this file were ever compiled as C++.  */
  rtx this = gen_rtx_REG (Pmode, REG_R0);

  /* Adjust the this parameter by a fixed constant.  */
  if (delta)
    {
      xops[1] = this;
      /* The cases below split DELTA by what fits in a single add
	 immediate; presumably [-64, 63] is the add-immediate range on
	 this target — the two middle cases reach [-128, 126] with two
	 adds, and anything larger is materialized in r3.  */
      if (delta >= -64 && delta <= 63)
	{
	  xops[0] = GEN_INT (delta);
	  output_asm_insn ("%1 += %0;", xops);
	}
      else if (delta >= -128 && delta < -64)
	{
	  xops[0] = GEN_INT (delta + 64);
	  output_asm_insn ("%1 += -64; %1 += %0;", xops);
	}
      else if (delta > 63 && delta <= 126)
	{
	  xops[0] = GEN_INT (delta - 63);
	  output_asm_insn ("%1 += 63; %1 += %0;", xops);
	}
      else
	{
	  /* Load the full-width constant into r3 halfword by halfword,
	     then add.  */
	  xops[0] = GEN_INT (delta);
	  output_asm_insn ("r3.l = %h0; r3.h = %d0; %1 = %1 + r3;", xops);
	}
    }

  /* Adjust the this parameter by a value stored in the vtable.  */
  if (vcall_offset)
    {
      rtx p2tmp = gen_rtx_REG (Pmode, REG_P2);
      rtx tmp = gen_rtx_REG (Pmode, REG_R2);

      /* p2 = *this (the vtable pointer).  */
      xops[1] = tmp;
      xops[2] = p2tmp;
      output_asm_insn ("%2 = r0; %2 = [%2];", xops);

      /* Adjust the this parameter.  */
      xops[0] = gen_rtx_MEM (Pmode, plus_constant (p2tmp, vcall_offset));
      if (!memory_operand (xops[0], Pmode))
	{
	  /* VCALL_OFFSET doesn't fit a legitimate address offset;
	     materialize it in p1 and add it to p2 explicitly.  */
	  rtx tmp2 = gen_rtx_REG (Pmode, REG_P1);
	  xops[0] = GEN_INT (vcall_offset);
	  xops[1] = tmp2;
	  output_asm_insn ("%h1 = %h0; %d1 = %d0; %2 = %2 + %1", xops);
	  xops[0] = gen_rtx_MEM (Pmode, p2tmp);
	}
      /* this += *(vtable + vcall_offset).  */
      xops[2] = this;
      output_asm_insn ("%1 = %0; %2 = %2 + %1;", xops);
    }

  /* Tail-jump to the real function.
     NOTE(review): the "1 ||" makes this condition always true, so the
     PIC-indirect path is never taken; presumably intentional while
     jump.l suffices, but worth confirming.  */
  xops[0] = XEXP (DECL_RTL (function), 0);
  if (1 || !flag_pic || (*targetm.binds_local_p) (function))
    output_asm_insn ("jump.l\t%P0", xops);
}
3856
 
3857
/* Codes for all the Blackfin builtins.  The _1X16/_2X16/_1X32 suffixes
   name the fractional operand layout the intrinsic works on (one or two
   16-bit halves, or one 32-bit value), matching the __builtin_bfin_*
   names registered in bfin_init_builtins.  */
enum bfin_builtins
{
  BFIN_BUILTIN_CSYNC,
  BFIN_BUILTIN_SSYNC,
  BFIN_BUILTIN_COMPOSE_2X16,
  BFIN_BUILTIN_EXTRACTLO,
  BFIN_BUILTIN_EXTRACTHI,

  /* Pairwise 2x16 arithmetic.  */
  BFIN_BUILTIN_SSADD_2X16,
  BFIN_BUILTIN_SSSUB_2X16,
  BFIN_BUILTIN_SSADDSUB_2X16,
  BFIN_BUILTIN_SSSUBADD_2X16,
  BFIN_BUILTIN_MULT_2X16,
  BFIN_BUILTIN_MULTR_2X16,
  BFIN_BUILTIN_NEG_2X16,
  BFIN_BUILTIN_ABS_2X16,
  BFIN_BUILTIN_MIN_2X16,
  BFIN_BUILTIN_MAX_2X16,

  /* Scalar 16-bit fractional arithmetic.  */
  BFIN_BUILTIN_SSADD_1X16,
  BFIN_BUILTIN_SSSUB_1X16,
  BFIN_BUILTIN_MULT_1X16,
  BFIN_BUILTIN_MULTR_1X16,
  BFIN_BUILTIN_NORM_1X16,
  BFIN_BUILTIN_NEG_1X16,
  BFIN_BUILTIN_ABS_1X16,
  BFIN_BUILTIN_MIN_1X16,
  BFIN_BUILTIN_MAX_1X16,

  BFIN_BUILTIN_DIFFHL_2X16,
  BFIN_BUILTIN_DIFFLH_2X16,

  /* Scalar 32-bit fractional arithmetic.  */
  BFIN_BUILTIN_SSADD_1X32,
  BFIN_BUILTIN_SSSUB_1X32,
  BFIN_BUILTIN_NORM_1X32,
  BFIN_BUILTIN_NEG_1X32,
  BFIN_BUILTIN_MIN_1X32,
  BFIN_BUILTIN_MAX_1X32,
  BFIN_BUILTIN_MULT_1X32,

  /* 16x16 -> 32 multiplies selecting low/high halves of each input.  */
  BFIN_BUILTIN_MULHISILL,
  BFIN_BUILTIN_MULHISILH,
  BFIN_BUILTIN_MULHISIHL,
  BFIN_BUILTIN_MULHISIHH,

  /* Shifts (logical and saturating-arithmetic).  */
  BFIN_BUILTIN_LSHIFT_1X16,
  BFIN_BUILTIN_LSHIFT_2X16,
  BFIN_BUILTIN_SSASHIFT_1X16,
  BFIN_BUILTIN_SSASHIFT_2X16,

  /* Complex 16-bit multiply / multiply-accumulate / multiply-subtract.  */
  BFIN_BUILTIN_CPLX_MUL_16,
  BFIN_BUILTIN_CPLX_MAC_16,
  BFIN_BUILTIN_CPLX_MSU_16,

  /* Sentinel: number of builtins.  */
  BFIN_BUILTIN_MAX
};
3914
 
3915
/* Register one builtin NAME with function type TYPE and enum
   bfin_builtins code CODE via the language hook.  */
#define def_builtin(NAME, TYPE, CODE)					\
do {									\
  lang_hooks.builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD,	\
			       NULL, NULL_TREE);			\
} while (0)
3920
 
3921
/* Set up all builtin functions for this target.  Builds the function
   type nodes needed by the __builtin_bfin_* intrinsics and registers
   each intrinsic with its bfin_builtins code; expansion happens later
   in bfin_expand_builtin.  */
static void
bfin_init_builtins (void)
{
  tree V2HI_type_node = build_vector_type_for_mode (intHI_type_node, V2HImode);
  tree void_ftype_void
    = build_function_type (void_type_node, void_list_node);
  tree short_ftype_short
    = build_function_type_list (short_integer_type_node, short_integer_type_node,
				NULL_TREE);
  tree short_ftype_int_int
    = build_function_type_list (short_integer_type_node, integer_type_node,
				integer_type_node, NULL_TREE);
  tree int_ftype_int_int
    = build_function_type_list (integer_type_node, integer_type_node,
				integer_type_node, NULL_TREE);
  tree int_ftype_int
    = build_function_type_list (integer_type_node, integer_type_node,
				NULL_TREE);
  tree short_ftype_int
    = build_function_type_list (short_integer_type_node, integer_type_node,
				NULL_TREE);
  tree int_ftype_v2hi_v2hi
    = build_function_type_list (integer_type_node, V2HI_type_node,
				V2HI_type_node, NULL_TREE);
  tree v2hi_ftype_v2hi_v2hi
    = build_function_type_list (V2HI_type_node, V2HI_type_node,
				V2HI_type_node, NULL_TREE);
  tree v2hi_ftype_v2hi_v2hi_v2hi
    = build_function_type_list (V2HI_type_node, V2HI_type_node,
				V2HI_type_node, V2HI_type_node, NULL_TREE);
  tree v2hi_ftype_int_int
    = build_function_type_list (V2HI_type_node, integer_type_node,
				integer_type_node, NULL_TREE);
  tree v2hi_ftype_v2hi_int
    = build_function_type_list (V2HI_type_node, V2HI_type_node,
				integer_type_node, NULL_TREE);
  tree int_ftype_short_short
    = build_function_type_list (integer_type_node, short_integer_type_node,
				short_integer_type_node, NULL_TREE);
  tree v2hi_ftype_v2hi
    = build_function_type_list (V2HI_type_node, V2HI_type_node, NULL_TREE);
  tree short_ftype_v2hi
    = build_function_type_list (short_integer_type_node, V2HI_type_node,
				NULL_TREE);

  /* Synchronization and 2x16 compose/extract intrinsics.  */
  def_builtin ("__builtin_bfin_csync", void_ftype_void, BFIN_BUILTIN_CSYNC);
  def_builtin ("__builtin_bfin_ssync", void_ftype_void, BFIN_BUILTIN_SSYNC);

  def_builtin ("__builtin_bfin_compose_2x16", v2hi_ftype_int_int,
	       BFIN_BUILTIN_COMPOSE_2X16);
  def_builtin ("__builtin_bfin_extract_hi", short_ftype_v2hi,
	       BFIN_BUILTIN_EXTRACTHI);
  def_builtin ("__builtin_bfin_extract_lo", short_ftype_v2hi,
	       BFIN_BUILTIN_EXTRACTLO);

  def_builtin ("__builtin_bfin_min_fr2x16", v2hi_ftype_v2hi_v2hi,
	       BFIN_BUILTIN_MIN_2X16);
  def_builtin ("__builtin_bfin_max_fr2x16", v2hi_ftype_v2hi_v2hi,
	       BFIN_BUILTIN_MAX_2X16);

  def_builtin ("__builtin_bfin_add_fr2x16", v2hi_ftype_v2hi_v2hi,
	       BFIN_BUILTIN_SSADD_2X16);
  def_builtin ("__builtin_bfin_sub_fr2x16", v2hi_ftype_v2hi_v2hi,
	       BFIN_BUILTIN_SSSUB_2X16);
  def_builtin ("__builtin_bfin_dspaddsubsat", v2hi_ftype_v2hi_v2hi,
	       BFIN_BUILTIN_SSADDSUB_2X16);
  def_builtin ("__builtin_bfin_dspsubaddsat", v2hi_ftype_v2hi_v2hi,
	       BFIN_BUILTIN_SSSUBADD_2X16);
  def_builtin ("__builtin_bfin_mult_fr2x16", v2hi_ftype_v2hi_v2hi,
	       BFIN_BUILTIN_MULT_2X16);
  def_builtin ("__builtin_bfin_multr_fr2x16", v2hi_ftype_v2hi_v2hi,
	       BFIN_BUILTIN_MULTR_2X16);
  def_builtin ("__builtin_bfin_negate_fr2x16", v2hi_ftype_v2hi,
	       BFIN_BUILTIN_NEG_2X16);
  def_builtin ("__builtin_bfin_abs_fr2x16", v2hi_ftype_v2hi,
	       BFIN_BUILTIN_ABS_2X16);

  /* Scalar 16-bit fractional intrinsics.
     NOTE(review): no min_fr1x16/max_fr1x16 (or min/max_fr1x32) builtins
     are registered here although bdesc_2arg has expansion entries for
     their codes — confirm whether those entries are dead or the
     registrations are missing.  */
  def_builtin ("__builtin_bfin_add_fr1x16", short_ftype_int_int,
	       BFIN_BUILTIN_SSADD_1X16);
  def_builtin ("__builtin_bfin_sub_fr1x16", short_ftype_int_int,
	       BFIN_BUILTIN_SSSUB_1X16);
  def_builtin ("__builtin_bfin_mult_fr1x16", short_ftype_int_int,
	       BFIN_BUILTIN_MULT_1X16);
  def_builtin ("__builtin_bfin_multr_fr1x16", short_ftype_int_int,
	       BFIN_BUILTIN_MULTR_1X16);
  def_builtin ("__builtin_bfin_negate_fr1x16", short_ftype_short,
	       BFIN_BUILTIN_NEG_1X16);
  def_builtin ("__builtin_bfin_abs_fr1x16", short_ftype_short,
	       BFIN_BUILTIN_ABS_1X16);
  def_builtin ("__builtin_bfin_norm_fr1x16", short_ftype_int,
	       BFIN_BUILTIN_NORM_1X16);

  def_builtin ("__builtin_bfin_diff_hl_fr2x16", short_ftype_v2hi,
	       BFIN_BUILTIN_DIFFHL_2X16);
  def_builtin ("__builtin_bfin_diff_lh_fr2x16", short_ftype_v2hi,
	       BFIN_BUILTIN_DIFFLH_2X16);

  /* 16x16 -> 32 half-selecting multiplies.  */
  def_builtin ("__builtin_bfin_mulhisill", int_ftype_v2hi_v2hi,
	       BFIN_BUILTIN_MULHISILL);
  def_builtin ("__builtin_bfin_mulhisihl", int_ftype_v2hi_v2hi,
	       BFIN_BUILTIN_MULHISIHL);
  def_builtin ("__builtin_bfin_mulhisilh", int_ftype_v2hi_v2hi,
	       BFIN_BUILTIN_MULHISILH);
  def_builtin ("__builtin_bfin_mulhisihh", int_ftype_v2hi_v2hi,
	       BFIN_BUILTIN_MULHISIHH);

  /* Scalar 32-bit fractional intrinsics.  */
  def_builtin ("__builtin_bfin_add_fr1x32", int_ftype_int_int,
	       BFIN_BUILTIN_SSADD_1X32);
  def_builtin ("__builtin_bfin_sub_fr1x32", int_ftype_int_int,
	       BFIN_BUILTIN_SSSUB_1X32);
  def_builtin ("__builtin_bfin_negate_fr1x32", int_ftype_int,
	       BFIN_BUILTIN_NEG_1X32);
  def_builtin ("__builtin_bfin_norm_fr1x32", short_ftype_int,
	       BFIN_BUILTIN_NORM_1X32);
  def_builtin ("__builtin_bfin_mult_fr1x32", int_ftype_short_short,
	       BFIN_BUILTIN_MULT_1X32);

  /* Shifts.  */
  def_builtin ("__builtin_bfin_shl_fr1x16", short_ftype_int_int,
	       BFIN_BUILTIN_SSASHIFT_1X16);
  def_builtin ("__builtin_bfin_shl_fr2x16", v2hi_ftype_v2hi_int,
	       BFIN_BUILTIN_SSASHIFT_2X16);
  def_builtin ("__builtin_bfin_lshl_fr1x16", short_ftype_int_int,
	       BFIN_BUILTIN_LSHIFT_1X16);
  def_builtin ("__builtin_bfin_lshl_fr2x16", v2hi_ftype_v2hi_int,
	       BFIN_BUILTIN_LSHIFT_2X16);

  /* Complex numbers.  */
  def_builtin ("__builtin_bfin_cmplx_mul", v2hi_ftype_v2hi_v2hi,
	       BFIN_BUILTIN_CPLX_MUL_16);
  def_builtin ("__builtin_bfin_cmplx_mac", v2hi_ftype_v2hi_v2hi_v2hi,
	       BFIN_BUILTIN_CPLX_MAC_16);
  def_builtin ("__builtin_bfin_cmplx_msu", v2hi_ftype_v2hi_v2hi_v2hi,
	       BFIN_BUILTIN_CPLX_MSU_16);
}
4058
 
4059
 
4060
/* Maps one builtin code to the insn pattern that expands it.
   MACFLAG is -1 for ordinary patterns, or one of the MACFLAG_xxx
   constants passed as an extra operand to multiply/MAC patterns.  */
struct builtin_description
{
  const enum insn_code icode;
  const char *const name;        /* __builtin_bfin_* name, for reference.  */
  const enum bfin_builtins code;
  int macflag;
};
4067
 
4068
/* Two-operand builtins, expanded generically by
   bfin_expand_binop_builtin.  MACFLAG_T vs MACFLAG_NONE selects
   truncating vs rounding multiply variants.  */
static const struct builtin_description bdesc_2arg[] =
{
  { CODE_FOR_composev2hi, "__builtin_bfin_compose_2x16", BFIN_BUILTIN_COMPOSE_2X16, -1 },

  { CODE_FOR_ssashiftv2hi3, "__builtin_bfin_shl_fr2x16", BFIN_BUILTIN_SSASHIFT_2X16, -1 },
  { CODE_FOR_ssashifthi3, "__builtin_bfin_shl_fr1x16", BFIN_BUILTIN_SSASHIFT_1X16, -1 },
  { CODE_FOR_lshiftv2hi3, "__builtin_bfin_lshl_fr2x16", BFIN_BUILTIN_LSHIFT_2X16, -1 },
  { CODE_FOR_lshifthi3, "__builtin_bfin_lshl_fr1x16", BFIN_BUILTIN_LSHIFT_1X16, -1 },

  { CODE_FOR_sminhi3, "__builtin_bfin_min_fr1x16", BFIN_BUILTIN_MIN_1X16, -1 },
  { CODE_FOR_smaxhi3, "__builtin_bfin_max_fr1x16", BFIN_BUILTIN_MAX_1X16, -1 },
  { CODE_FOR_ssaddhi3, "__builtin_bfin_add_fr1x16", BFIN_BUILTIN_SSADD_1X16, -1 },
  { CODE_FOR_sssubhi3, "__builtin_bfin_sub_fr1x16", BFIN_BUILTIN_SSSUB_1X16, -1 },

  { CODE_FOR_sminsi3, "__builtin_bfin_min_fr1x32", BFIN_BUILTIN_MIN_1X32, -1 },
  { CODE_FOR_smaxsi3, "__builtin_bfin_max_fr1x32", BFIN_BUILTIN_MAX_1X32, -1 },
  { CODE_FOR_ssaddsi3, "__builtin_bfin_add_fr1x32", BFIN_BUILTIN_SSADD_1X32, -1 },
  { CODE_FOR_sssubsi3, "__builtin_bfin_sub_fr1x32", BFIN_BUILTIN_SSSUB_1X32, -1 },

  { CODE_FOR_sminv2hi3, "__builtin_bfin_min_fr2x16", BFIN_BUILTIN_MIN_2X16, -1 },
  { CODE_FOR_smaxv2hi3, "__builtin_bfin_max_fr2x16", BFIN_BUILTIN_MAX_2X16, -1 },
  { CODE_FOR_ssaddv2hi3, "__builtin_bfin_add_fr2x16", BFIN_BUILTIN_SSADD_2X16, -1 },
  { CODE_FOR_sssubv2hi3, "__builtin_bfin_sub_fr2x16", BFIN_BUILTIN_SSSUB_2X16, -1 },
  { CODE_FOR_ssaddsubv2hi3, "__builtin_bfin_dspaddsubsat", BFIN_BUILTIN_SSADDSUB_2X16, -1 },
  { CODE_FOR_sssubaddv2hi3, "__builtin_bfin_dspsubaddsat", BFIN_BUILTIN_SSSUBADD_2X16, -1 },

  { CODE_FOR_flag_mulhisi, "__builtin_bfin_mult_fr1x32", BFIN_BUILTIN_MULT_1X32, MACFLAG_NONE },
  { CODE_FOR_flag_mulhi, "__builtin_bfin_mult_fr1x16", BFIN_BUILTIN_MULT_1X16, MACFLAG_T },
  { CODE_FOR_flag_mulhi, "__builtin_bfin_multr_fr1x16", BFIN_BUILTIN_MULTR_1X16, MACFLAG_NONE },
  { CODE_FOR_flag_mulv2hi, "__builtin_bfin_mult_fr2x16", BFIN_BUILTIN_MULT_2X16, MACFLAG_T },
  { CODE_FOR_flag_mulv2hi, "__builtin_bfin_multr_fr2x16", BFIN_BUILTIN_MULTR_2X16, MACFLAG_NONE }
};
4100
 
4101
/* One-operand builtins, expanded generically by
   bfin_expand_unop_builtin (macflag is unused here).  */
static const struct builtin_description bdesc_1arg[] =
{
  { CODE_FOR_signbitshi2, "__builtin_bfin_norm_fr1x16", BFIN_BUILTIN_NORM_1X16, 0 },
  { CODE_FOR_ssneghi2, "__builtin_bfin_negate_fr1x16", BFIN_BUILTIN_NEG_1X16, 0 },
  { CODE_FOR_abshi2, "__builtin_bfin_abs_fr1x16", BFIN_BUILTIN_ABS_1X16, 0 },

  { CODE_FOR_signbitssi2, "__builtin_bfin_norm_fr1x32", BFIN_BUILTIN_NORM_1X32, 0 },
  { CODE_FOR_ssnegsi2, "__builtin_bfin_negate_fr1x32", BFIN_BUILTIN_NEG_1X32, 0 },

  { CODE_FOR_movv2hi_hi_low, "__builtin_bfin_extract_lo", BFIN_BUILTIN_EXTRACTLO, 0 },
  { CODE_FOR_movv2hi_hi_high, "__builtin_bfin_extract_hi", BFIN_BUILTIN_EXTRACTHI, 0 },
  { CODE_FOR_ssnegv2hi2, "__builtin_bfin_negate_fr2x16", BFIN_BUILTIN_NEG_2X16, 0 },
  { CODE_FOR_absv2hi2, "__builtin_bfin_abs_fr2x16", BFIN_BUILTIN_ABS_2X16, 0 }
};
4115
 
4116
/* Errors in the source file can cause expand_expr to return const0_rtx
4117
   where we expect a vector.  To avoid crashing, use one of the vector
4118
   clear instructions.  */
4119
static rtx
4120
safe_vector_operand (rtx x, enum machine_mode mode)
4121
{
4122
  if (x != const0_rtx)
4123
    return x;
4124
  x = gen_reg_rtx (SImode);
4125
 
4126
  emit_insn (gen_movsi (x, CONST0_RTX (SImode)));
4127
  return gen_lowpart (mode, x);
4128
}
4129
 
4130
/* Subroutine of bfin_expand_builtin to take care of binop insns.  MACFLAG is -1
   if this is a normal binary op, or one of the MACFLAG_xxx constants.
   ARGLIST holds the two call arguments; the result goes to TARGET when
   it is suitable, otherwise to a fresh register.  Returns the result
   rtx, or 0 if the pattern failed to generate.  */

static rtx
bfin_expand_binop_builtin (enum insn_code icode, tree arglist, rtx target,
			   int macflag)
{
  rtx pat;
  tree arg0 = TREE_VALUE (arglist);
  tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
  rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
  enum machine_mode op0mode = GET_MODE (op0);
  enum machine_mode op1mode = GET_MODE (op1);
  enum machine_mode tmode = insn_data[icode].operand[0].mode;
  enum machine_mode mode0 = insn_data[icode].operand[1].mode;
  enum machine_mode mode1 = insn_data[icode].operand[2].mode;

  /* Guard against error-recovery const0_rtx in vector positions.  */
  if (VECTOR_MODE_P (mode0))
    op0 = safe_vector_operand (op0, mode0);
  if (VECTOR_MODE_P (mode1))
    op1 = safe_vector_operand (op1, mode1);

  if (! target
      || GET_MODE (target) != tmode
      || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
    target = gen_reg_rtx (tmode);

  /* The C-level intrinsics take int arguments even for HImode insns;
     narrow SImode (or constant VOIDmode) operands to HImode here.  */
  if ((op0mode == SImode || op0mode == VOIDmode) && mode0 == HImode)
    {
      op0mode = HImode;
      op0 = gen_lowpart (HImode, op0);
    }
  if ((op1mode == SImode || op1mode == VOIDmode) && mode1 == HImode)
    {
      op1mode = HImode;
      op1 = gen_lowpart (HImode, op1);
    }
  /* In case the insn wants input operands in modes different from
     the result, abort.  */
  gcc_assert ((op0mode == mode0 || op0mode == VOIDmode)
	      && (op1mode == mode1 || op1mode == VOIDmode));

  /* Force operands that the pattern's predicates reject into registers.  */
  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
    op0 = copy_to_mode_reg (mode0, op0);
  if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
    op1 = copy_to_mode_reg (mode1, op1);

  /* MAC-style patterns carry the flag as a fourth operand.  */
  if (macflag == -1)
    pat = GEN_FCN (icode) (target, op0, op1);
  else
    pat = GEN_FCN (icode) (target, op0, op1, GEN_INT (macflag));
  if (! pat)
    return 0;

  emit_insn (pat);
  return target;
}
4188
 
4189
/* Subroutine of bfin_expand_builtin to take care of unop insns.
   Expands the single argument in ARGLIST through pattern ICODE;
   returns the result rtx or 0 if the pattern failed to generate.  */

static rtx
bfin_expand_unop_builtin (enum insn_code icode, tree arglist,
			  rtx target)
{
  rtx pat;
  tree arg0 = TREE_VALUE (arglist);
  rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
  enum machine_mode op0mode = GET_MODE (op0);
  enum machine_mode tmode = insn_data[icode].operand[0].mode;
  enum machine_mode mode0 = insn_data[icode].operand[1].mode;

  if (! target
      || GET_MODE (target) != tmode
      || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
    target = gen_reg_rtx (tmode);

  /* Guard against error-recovery const0_rtx in a vector position.  */
  if (VECTOR_MODE_P (mode0))
    op0 = safe_vector_operand (op0, mode0);

  /* Intrinsics take int arguments even for HImode insns; narrow.  */
  if (op0mode == SImode && mode0 == HImode)
    {
      op0mode = HImode;
      op0 = gen_lowpart (HImode, op0);
    }
  gcc_assert (op0mode == mode0 || op0mode == VOIDmode);

  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
    op0 = copy_to_mode_reg (mode0, op0);

  pat = GEN_FCN (icode) (target, op0);
  if (! pat)
    return 0;
  emit_insn (pat);
  return target;
}
4226
 
4227
/* Expand an expression EXP that calls a built-in function,
4228
   with result going to TARGET if that's convenient
4229
   (and in mode MODE if that's convenient).
4230
   SUBTARGET may be used as the target for computing one of EXP's operands.
4231
   IGNORE is nonzero if the value is to be ignored.  */
4232
 
4233
static rtx
4234
bfin_expand_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
4235
                     rtx subtarget ATTRIBUTE_UNUSED,
4236
                     enum machine_mode mode ATTRIBUTE_UNUSED,
4237
                     int ignore ATTRIBUTE_UNUSED)
4238
{
4239
  size_t i;
4240
  enum insn_code icode;
4241
  const struct builtin_description *d;
4242
  tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4243
  tree arglist = TREE_OPERAND (exp, 1);
4244
  unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
4245
  tree arg0, arg1, arg2;
4246
  rtx op0, op1, op2, accvec, pat, tmp1, tmp2;
4247
  enum machine_mode tmode, mode0;
4248
 
4249
  switch (fcode)
4250
    {
4251
    case BFIN_BUILTIN_CSYNC:
4252
      emit_insn (gen_csync ());
4253
      return 0;
4254
    case BFIN_BUILTIN_SSYNC:
4255
      emit_insn (gen_ssync ());
4256
      return 0;
4257
 
4258
    case BFIN_BUILTIN_DIFFHL_2X16:
4259
    case BFIN_BUILTIN_DIFFLH_2X16:
4260
      arg0 = TREE_VALUE (arglist);
4261
      op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4262
      icode = (fcode == BFIN_BUILTIN_DIFFHL_2X16
4263
               ? CODE_FOR_subhilov2hi3 : CODE_FOR_sublohiv2hi3);
4264
      tmode = insn_data[icode].operand[0].mode;
4265
      mode0 = insn_data[icode].operand[1].mode;
4266
 
4267
      if (! target
4268
          || GET_MODE (target) != tmode
4269
          || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
4270
        target = gen_reg_rtx (tmode);
4271
 
4272
      if (VECTOR_MODE_P (mode0))
4273
        op0 = safe_vector_operand (op0, mode0);
4274
 
4275
      if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
4276
        op0 = copy_to_mode_reg (mode0, op0);
4277
 
4278
      pat = GEN_FCN (icode) (target, op0, op0);
4279
      if (! pat)
4280
        return 0;
4281
      emit_insn (pat);
4282
      return target;
4283
 
4284
    case BFIN_BUILTIN_CPLX_MUL_16:
4285
      arg0 = TREE_VALUE (arglist);
4286
      arg1 = TREE_VALUE (TREE_CHAIN (arglist));
4287
      op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4288
      op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
4289
      accvec = gen_reg_rtx (V2PDImode);
4290
 
4291
      if (! target
4292
          || GET_MODE (target) != V2HImode
4293
          || ! (*insn_data[icode].operand[0].predicate) (target, V2HImode))
4294
        target = gen_reg_rtx (tmode);
4295
      if (! register_operand (op0, GET_MODE (op0)))
4296
        op0 = copy_to_mode_reg (GET_MODE (op0), op0);
4297
      if (! register_operand (op1, GET_MODE (op1)))
4298
        op1 = copy_to_mode_reg (GET_MODE (op1), op1);
4299
 
4300
      emit_insn (gen_flag_macinit1v2hi_parts (accvec, op0, op1, const0_rtx,
4301
                                              const0_rtx, const0_rtx,
4302
                                              const1_rtx, GEN_INT (MACFLAG_NONE)));
4303
      emit_insn (gen_flag_macv2hi_parts (target, op0, op1, const1_rtx,
4304
                                         const1_rtx, const1_rtx,
4305
                                         const0_rtx, accvec, const1_rtx, const0_rtx,
4306
                                         GEN_INT (MACFLAG_NONE), accvec));
4307
 
4308
      return target;
4309
 
4310
    case BFIN_BUILTIN_CPLX_MAC_16:
4311
    case BFIN_BUILTIN_CPLX_MSU_16:
4312
      arg0 = TREE_VALUE (arglist);
4313
      arg1 = TREE_VALUE (TREE_CHAIN (arglist));
4314
      arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
4315
      op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4316
      op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
4317
      op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
4318
      accvec = gen_reg_rtx (V2PDImode);
4319
 
4320
      if (! target
4321
          || GET_MODE (target) != V2HImode
4322
          || ! (*insn_data[icode].operand[0].predicate) (target, V2HImode))
4323
        target = gen_reg_rtx (tmode);
4324
      if (! register_operand (op0, GET_MODE (op0)))
4325
        op0 = copy_to_mode_reg (GET_MODE (op0), op0);
4326
      if (! register_operand (op1, GET_MODE (op1)))
4327
        op1 = copy_to_mode_reg (GET_MODE (op1), op1);
4328
 
4329
      tmp1 = gen_reg_rtx (SImode);
4330
      tmp2 = gen_reg_rtx (SImode);
4331
      emit_insn (gen_ashlsi3 (tmp1, gen_lowpart (SImode, op2), GEN_INT (16)));
4332
      emit_move_insn (tmp2, gen_lowpart (SImode, op2));
4333
      emit_insn (gen_movstricthi_1 (gen_lowpart (HImode, tmp2), const0_rtx));
4334
      emit_insn (gen_load_accumulator_pair (accvec, tmp1, tmp2));
4335
      emit_insn (gen_flag_macv2hi_parts_acconly (accvec, op0, op1, const0_rtx,
4336
                                                 const0_rtx, const0_rtx,
4337
                                                 const1_rtx, accvec, const0_rtx,
4338
                                                 const0_rtx,
4339
                                                 GEN_INT (MACFLAG_W32)));
4340
      tmp1 = (fcode == BFIN_BUILTIN_CPLX_MAC_16 ? const1_rtx : const0_rtx);
4341
      tmp2 = (fcode == BFIN_BUILTIN_CPLX_MAC_16 ? const0_rtx : const1_rtx);
4342
      emit_insn (gen_flag_macv2hi_parts (target, op0, op1, const1_rtx,
4343
                                         const1_rtx, const1_rtx,
4344
                                         const0_rtx, accvec, tmp1, tmp2,
4345
                                         GEN_INT (MACFLAG_NONE), accvec));
4346
 
4347
      return target;
4348
 
4349
    default:
4350
      break;
4351
    }
4352
 
4353
  for (i = 0, d = bdesc_2arg; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
4354
    if (d->code == fcode)
4355
      return bfin_expand_binop_builtin (d->icode, arglist, target,
4356
                                        d->macflag);
4357
 
4358
  for (i = 0, d = bdesc_1arg; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
4359
    if (d->code == fcode)
4360
      return bfin_expand_unop_builtin (d->icode, arglist, target);
4361
 
4362
  gcc_unreachable ();
4363
}
4364
 
4365
/* Target hook wiring: override the hooks implemented in this file,
   then instantiate the target vector.  */

#undef TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS bfin_init_builtins

#undef TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN bfin_expand_builtin

#undef TARGET_ASM_GLOBALIZE_LABEL
#define TARGET_ASM_GLOBALIZE_LABEL bfin_globalize_label 

#undef TARGET_ASM_FILE_START
#define TARGET_ASM_FILE_START output_file_start

#undef TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE bfin_attribute_table

#undef TARGET_COMP_TYPE_ATTRIBUTES
#define TARGET_COMP_TYPE_ATTRIBUTES bfin_comp_type_attributes

#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS bfin_rtx_costs

#undef  TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST bfin_address_cost

#undef TARGET_ASM_INTERNAL_LABEL
#define TARGET_ASM_INTERNAL_LABEL bfin_internal_label

#undef  TARGET_ASM_INTEGER
#define TARGET_ASM_INTEGER bfin_assemble_integer

#undef TARGET_MACHINE_DEPENDENT_REORG
#define TARGET_MACHINE_DEPENDENT_REORG bfin_reorg

#undef TARGET_FUNCTION_OK_FOR_SIBCALL
#define TARGET_FUNCTION_OK_FOR_SIBCALL bfin_function_ok_for_sibcall

#undef TARGET_ASM_OUTPUT_MI_THUNK
#define TARGET_ASM_OUTPUT_MI_THUNK bfin_output_mi_thunk
#undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
#define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_tree_hwi_hwi_tree_true

#undef TARGET_SCHED_ADJUST_COST
#define TARGET_SCHED_ADJUST_COST bfin_adjust_cost

#undef TARGET_PROMOTE_PROTOTYPES
#define TARGET_PROMOTE_PROTOTYPES hook_bool_tree_true
#undef TARGET_PROMOTE_FUNCTION_ARGS
#define TARGET_PROMOTE_FUNCTION_ARGS hook_bool_tree_true
#undef TARGET_PROMOTE_FUNCTION_RETURN
#define TARGET_PROMOTE_FUNCTION_RETURN hook_bool_tree_true

#undef TARGET_ARG_PARTIAL_BYTES
#define TARGET_ARG_PARTIAL_BYTES bfin_arg_partial_bytes

#undef TARGET_PASS_BY_REFERENCE
#define TARGET_PASS_BY_REFERENCE bfin_pass_by_reference

#undef TARGET_SETUP_INCOMING_VARARGS
#define TARGET_SETUP_INCOMING_VARARGS setup_incoming_varargs

#undef TARGET_STRUCT_VALUE_RTX
#define TARGET_STRUCT_VALUE_RTX bfin_struct_value_rtx

#undef TARGET_VECTOR_MODE_SUPPORTED_P
#define TARGET_VECTOR_MODE_SUPPORTED_P bfin_vector_mode_supported_p

#undef TARGET_HANDLE_OPTION
#define TARGET_HANDLE_OPTION bfin_handle_option

#undef TARGET_DEFAULT_TARGET_FLAGS
#define TARGET_DEFAULT_TARGET_FLAGS TARGET_DEFAULT

#undef TARGET_SECONDARY_RELOAD
#define TARGET_SECONDARY_RELOAD bfin_secondary_reload

#undef TARGET_DELEGITIMIZE_ADDRESS
#define TARGET_DELEGITIMIZE_ADDRESS bfin_delegitimize_address

/* The one target vector definition for this backend.  */
struct gcc_target targetm = TARGET_INITIALIZER;

powered by: WebSVN 2.1.0

© copyright 1999-2024 OpenCores.org, equivalent to Oliscience, all rights reserved. OpenCores®, registered trademark.