/* The Blackfin code generation auxiliary output file.
   Copyright (C) 2005, 2006, 2007, 2008, 2009, 2010, 2011
   Free Software Foundation, Inc.
   Contributed by Analog Devices.

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it
   under the terms of the GNU General Public License as published
   by the Free Software Foundation; either version 3, or (at your
   option) any later version.

   GCC is distributed in the hope that it will be useful, but WITHOUT
   ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
   or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public
   License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "insn-codes.h"
#include "conditions.h"
#include "insn-flags.h"
#include "output.h"
#include "insn-attr.h"
#include "tree.h"
#include "flags.h"
#include "except.h"
#include "function.h"
#include "input.h"
#include "target.h"
#include "target-def.h"
#include "expr.h"
#include "diagnostic-core.h"
#include "recog.h"
#include "optabs.h"
#include "ggc.h"
#include "integrate.h"
#include "cgraph.h"
#include "langhooks.h"
#include "bfin-protos.h"
#include "tm-preds.h"
#include "tm-constrs.h"
#include "gt-bfin.h"
#include "basic-block.h"
#include "cfglayout.h"
#include "timevar.h"
#include "df.h"
#include "sel-sched.h"
#include "hw-doloop.h"
#include "opts.h"

/* A C structure for machine-specific, per-function data.
   This is added to the cfun structure.  */
struct GTY(()) machine_function
{
  /* Set if we are notified by the doloop pass that a hardware loop
     was created.  */
  int has_hardware_loops;

  /* Set if we create a memcpy pattern that uses loop registers.  */
  int has_loopreg_clobber;
};

/* RTX for condition code flag register and RETS register.  */
extern GTY(()) rtx bfin_cc_rtx;
extern GTY(()) rtx bfin_rets_rtx;
rtx bfin_cc_rtx, bfin_rets_rtx;

int max_arg_registers = 0;

/* Arrays used when emitting register names.  */
const char *short_reg_names[]  =  SHORT_REGISTER_NAMES;
const char *high_reg_names[]   =  HIGH_REGISTER_NAMES;
const char *dregs_pair_names[] =  DREGS_PAIR_NAMES;
const char *byte_reg_names[]   =  BYTE_REGISTER_NAMES;

static int arg_regs[] = FUNCTION_ARG_REGISTERS;
static int ret_regs[] = FUNCTION_RETURN_REGISTERS;

int splitting_for_sched, splitting_loops;

/* Output directives to STREAM marking the symbol NAME as global.  */

static void
bfin_globalize_label (FILE *stream, const char *name)
{
  fputs (".global ", stream);
  assemble_name (stream, name);
  fputc (';', stream);
  fputc ('\n', stream);
}

/* Output the .file directive at the start of assembly output and count
   the argument registers available for passing parameters.  */

static void
output_file_start (void)
{
  FILE *file = asm_out_file;
  int i;

  fprintf (file, ".file \"%s\";\n", input_filename);

  for (i = 0; arg_regs[i] >= 0; i++)
    ;
  max_arg_registers = i;	/* Number of argument registers used.  */
}

/* Examine machine-dependent attributes of function type FUNTYPE and return
   its kind.  See the definition of E_FUNKIND.  */

static e_funkind
funkind (const_tree funtype)
{
  tree attrs = TYPE_ATTRIBUTES (funtype);
  if (lookup_attribute ("interrupt_handler", attrs))
    return INTERRUPT_HANDLER;
  else if (lookup_attribute ("exception_handler", attrs))
    return EXCPT_HANDLER;
  else if (lookup_attribute ("nmi_handler", attrs))
    return NMI_HANDLER;
  else
    return SUBROUTINE;
}

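/* Illustrative note (not part of the original sources): these kinds are
   selected with attributes on the function declaration, e.g.

     void isr (void) __attribute__ ((interrupt_handler));

   using the attribute names tested above.  */
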
/* Legitimize PIC addresses.  If the address is already position-independent,
   we return ORIG.  Newly generated position-independent addresses go into a
   reg.  This is REG if nonzero, otherwise we allocate register(s) as
   necessary.  PICREG is the register holding the pointer to the PIC offset
   table.  */

static rtx
legitimize_pic_address (rtx orig, rtx reg, rtx picreg)
{
  rtx addr = orig;
  rtx new_rtx = orig;

  if (GET_CODE (addr) == SYMBOL_REF || GET_CODE (addr) == LABEL_REF)
    {
      int unspec;
      rtx tmp;

      if (TARGET_ID_SHARED_LIBRARY)
        unspec = UNSPEC_MOVE_PIC;
      else if (GET_CODE (addr) == SYMBOL_REF
               && SYMBOL_REF_FUNCTION_P (addr))
        unspec = UNSPEC_FUNCDESC_GOT17M4;
      else
        unspec = UNSPEC_MOVE_FDPIC;

      if (reg == 0)
        {
          gcc_assert (can_create_pseudo_p ());
          reg = gen_reg_rtx (Pmode);
        }

      tmp = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), unspec);
      new_rtx = gen_const_mem (Pmode, gen_rtx_PLUS (Pmode, picreg, tmp));

      emit_move_insn (reg, new_rtx);
      if (picreg == pic_offset_table_rtx)
        crtl->uses_pic_offset_table = 1;
      return reg;
    }

  else if (GET_CODE (addr) == CONST || GET_CODE (addr) == PLUS)
    {
      rtx base;

      if (GET_CODE (addr) == CONST)
        {
          addr = XEXP (addr, 0);
          gcc_assert (GET_CODE (addr) == PLUS);
        }

      if (XEXP (addr, 0) == picreg)
        return orig;

      if (reg == 0)
        {
          gcc_assert (can_create_pseudo_p ());
          reg = gen_reg_rtx (Pmode);
        }

      base = legitimize_pic_address (XEXP (addr, 0), reg, picreg);
      addr = legitimize_pic_address (XEXP (addr, 1),
                                     base == reg ? NULL_RTX : reg,
                                     picreg);

      if (GET_CODE (addr) == CONST_INT)
        {
          gcc_assert (! reload_in_progress && ! reload_completed);
          addr = force_reg (Pmode, addr);
        }

      if (GET_CODE (addr) == PLUS && CONSTANT_P (XEXP (addr, 1)))
        {
          base = gen_rtx_PLUS (Pmode, base, XEXP (addr, 0));
          addr = XEXP (addr, 1);
        }

      return gen_rtx_PLUS (Pmode, base, addr);
    }

  return new_rtx;
}

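/* Sketch (an illustration, not taken from the sources): for a SYMBOL_REF
   `sym', the code above produces

     (set reg (mem (plus picreg (unspec [sym] ...))))

   i.e. a GOT load, which the output routines later print as something
   like "P1 = [P5 + sym@GOT];" in -mid-shared-library code.  */
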
/* Stack frame layout.  */

/* For a given REGNO, determine whether it must be saved in the function
   prologue.  IS_INTHANDLER specifies whether we're generating a normal
   prologue or an interrupt/exception one.  */
static bool
must_save_p (bool is_inthandler, unsigned regno)
{
  if (D_REGNO_P (regno))
    {
      bool is_eh_return_reg = false;
      if (crtl->calls_eh_return)
        {
          unsigned j;
          for (j = 0; ; j++)
            {
              unsigned test = EH_RETURN_DATA_REGNO (j);
              if (test == INVALID_REGNUM)
                break;
              if (test == regno)
                is_eh_return_reg = true;
            }
        }

      return (is_eh_return_reg
              || (df_regs_ever_live_p (regno)
                  && !fixed_regs[regno]
                  && (is_inthandler || !call_used_regs[regno])));
    }
  else if (P_REGNO_P (regno))
    {
      return ((df_regs_ever_live_p (regno)
               && !fixed_regs[regno]
               && (is_inthandler || !call_used_regs[regno]))
              || (is_inthandler
                  && (ENABLE_WA_05000283 || ENABLE_WA_05000315)
                  && regno == REG_P5)
              || (!TARGET_FDPIC
                  && regno == PIC_OFFSET_TABLE_REGNUM
                  && (crtl->uses_pic_offset_table
                      || (TARGET_ID_SHARED_LIBRARY && !current_function_is_leaf))));
    }
  else
    return ((is_inthandler || !call_used_regs[regno])
            && (df_regs_ever_live_p (regno)
                || (!leaf_function_p () && call_used_regs[regno])));
}

/* Compute the number of DREGS to save with a push_multiple operation.
   This could include registers that aren't modified in the function,
   since push_multiple only takes a range of registers.
   If IS_INTHANDLER, then everything that is live must be saved, even
   if normally call-clobbered.
   If CONSECUTIVE, return the number of registers we can save in one
   instruction with a push/pop multiple instruction.  */

static int
n_dregs_to_save (bool is_inthandler, bool consecutive)
{
  int count = 0;
  unsigned i;

  for (i = REG_R7 + 1; i-- != REG_R0;)
    {
      if (must_save_p (is_inthandler, i))
        count++;
      else if (consecutive)
        return count;
    }
  return count;
}

/* Like n_dregs_to_save, but compute number of PREGS to save.  */

static int
n_pregs_to_save (bool is_inthandler, bool consecutive)
{
  int count = 0;
  unsigned i;

  for (i = REG_P5 + 1; i-- != REG_P0;)
    if (must_save_p (is_inthandler, i))
      count++;
    else if (consecutive)
      return count;
  return count;
}

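/* Illustrative note (not from the original sources): the machine's
   push/pop multiple instructions operate on register ranges that end at
   R7 and P5, e.g.

     [--SP] = (R7:5, P5:3);

   which pushes R5-R7 and P3-P5.  This is why the loops above scan
   downward from R7/P5 and, in the CONSECUTIVE case, stop at the first
   register that need not be saved.  */
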
/* Determine if we are going to save the frame pointer in the prologue.  */

static bool
must_save_fp_p (void)
{
  return df_regs_ever_live_p (REG_FP);
}

/* Determine if we are going to save the RETS register.  */
static bool
must_save_rets_p (void)
{
  return df_regs_ever_live_p (REG_RETS);
}

/* Determine whether the current function needs a full stack frame.  */
static bool
stack_frame_needed_p (void)
{
  /* EH return puts a new return address into the frame using an
     address relative to the frame pointer.  */
  if (crtl->calls_eh_return)
    return true;
  return frame_pointer_needed;
}

/* Emit code to save registers in the prologue.  SAVEALL is nonzero if we
   must save all registers; this is used for interrupt handlers.
   SPREG contains (reg:SI REG_SP).  IS_INTHANDLER is true if we're doing
   this for an interrupt (or exception) handler.  */

static void
expand_prologue_reg_save (rtx spreg, int saveall, bool is_inthandler)
{
  rtx predec1 = gen_rtx_PRE_DEC (SImode, spreg);
  rtx predec = gen_rtx_MEM (SImode, predec1);
  int ndregs = saveall ? 8 : n_dregs_to_save (is_inthandler, false);
  int npregs = saveall ? 6 : n_pregs_to_save (is_inthandler, false);
  int ndregs_consec = saveall ? 8 : n_dregs_to_save (is_inthandler, true);
  int npregs_consec = saveall ? 6 : n_pregs_to_save (is_inthandler, true);
  int dregno, pregno;
  int total_consec = ndregs_consec + npregs_consec;
  int i, d_to_save;

  if (saveall || is_inthandler)
    {
      rtx insn = emit_move_insn (predec, gen_rtx_REG (SImode, REG_ASTAT));

      RTX_FRAME_RELATED_P (insn) = 1;
      for (dregno = REG_LT0; dregno <= REG_LB1; dregno++)
        if (! current_function_is_leaf
            || cfun->machine->has_hardware_loops
            || cfun->machine->has_loopreg_clobber
            || (ENABLE_WA_05000257
                && (dregno == REG_LC0 || dregno == REG_LC1)))
          {
            insn = emit_move_insn (predec, gen_rtx_REG (SImode, dregno));
            RTX_FRAME_RELATED_P (insn) = 1;
          }
    }

  if (total_consec != 0)
    {
      rtx insn;
      rtx val = GEN_INT (-total_consec * 4);
      rtx pat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (total_consec + 2));

      XVECEXP (pat, 0, 0) = gen_rtx_UNSPEC (VOIDmode, gen_rtvec (1, val),
                                            UNSPEC_PUSH_MULTIPLE);
      XVECEXP (pat, 0, total_consec + 1) = gen_rtx_SET (VOIDmode, spreg,
                                                        gen_rtx_PLUS (Pmode,
                                                                      spreg,
                                                                      val));
      RTX_FRAME_RELATED_P (XVECEXP (pat, 0, total_consec + 1)) = 1;
      d_to_save = ndregs_consec;
      dregno = REG_R7 + 1 - ndregs_consec;
      pregno = REG_P5 + 1 - npregs_consec;
      for (i = 0; i < total_consec; i++)
        {
          rtx memref = gen_rtx_MEM (word_mode,
                                    gen_rtx_PLUS (Pmode, spreg,
                                                  GEN_INT (- i * 4 - 4)));
          rtx subpat;
          if (d_to_save > 0)
            {
              subpat = gen_rtx_SET (VOIDmode, memref, gen_rtx_REG (word_mode,
                                                                   dregno++));
              d_to_save--;
            }
          else
            {
              subpat = gen_rtx_SET (VOIDmode, memref, gen_rtx_REG (word_mode,
                                                                   pregno++));
            }
          XVECEXP (pat, 0, i + 1) = subpat;
          RTX_FRAME_RELATED_P (subpat) = 1;
        }
      insn = emit_insn (pat);
      RTX_FRAME_RELATED_P (insn) = 1;
    }

  for (dregno = REG_R0; ndregs != ndregs_consec; dregno++)
    {
      if (must_save_p (is_inthandler, dregno))
        {
          rtx insn = emit_move_insn (predec, gen_rtx_REG (word_mode, dregno));
          RTX_FRAME_RELATED_P (insn) = 1;
          ndregs--;
        }
    }
  for (pregno = REG_P0; npregs != npregs_consec; pregno++)
    {
      if (must_save_p (is_inthandler, pregno))
        {
          rtx insn = emit_move_insn (predec, gen_rtx_REG (word_mode, pregno));
          RTX_FRAME_RELATED_P (insn) = 1;
          npregs--;
        }
    }
  for (i = REG_P7 + 1; i < REG_CC; i++)
    if (saveall
        || (is_inthandler
            && (df_regs_ever_live_p (i)
                || (!leaf_function_p () && call_used_regs[i]))))
      {
        rtx insn;
        if (i == REG_A0 || i == REG_A1)
          insn = emit_move_insn (gen_rtx_MEM (PDImode, predec1),
                                 gen_rtx_REG (PDImode, i));
        else
          insn = emit_move_insn (predec, gen_rtx_REG (SImode, i));
        RTX_FRAME_RELATED_P (insn) = 1;
      }
}

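/* Sketch (derived by reading the code above, not taken from the sources):
   with ndregs_consec == 2 and npregs_consec == 1, the PARALLEL built
   above has the shape

     (parallel [(unspec [(const_int -12)] UNSPEC_PUSH_MULTIPLE)
                (set (mem (plus SP (const_int -4)))  (reg R6))
                (set (mem (plus SP (const_int -8)))  (reg R7))
                (set (mem (plus SP (const_int -12))) (reg P5))
                (set SP (plus SP (const_int -12)))])

   which the push-multiple patterns emit as a single instruction.  */
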
/* Emit code to restore registers in the epilogue.  SAVEALL is nonzero if we
   must restore all registers; this is used for interrupt handlers.
   SPREG contains (reg:SI REG_SP).  IS_INTHANDLER is true if we're doing
   this for an interrupt (or exception) handler.  */

static void
expand_epilogue_reg_restore (rtx spreg, bool saveall, bool is_inthandler)
{
  rtx postinc1 = gen_rtx_POST_INC (SImode, spreg);
  rtx postinc = gen_rtx_MEM (SImode, postinc1);

  int ndregs = saveall ? 8 : n_dregs_to_save (is_inthandler, false);
  int npregs = saveall ? 6 : n_pregs_to_save (is_inthandler, false);
  int ndregs_consec = saveall ? 8 : n_dregs_to_save (is_inthandler, true);
  int npregs_consec = saveall ? 6 : n_pregs_to_save (is_inthandler, true);
  int total_consec = ndregs_consec + npregs_consec;
  int i, regno;
  rtx insn;

  /* A slightly crude technique to stop flow from trying to delete "dead"
     insns.  */
  MEM_VOLATILE_P (postinc) = 1;

  for (i = REG_CC - 1; i > REG_P7; i--)
    if (saveall
        || (is_inthandler
            && (df_regs_ever_live_p (i)
                || (!leaf_function_p () && call_used_regs[i]))))
      {
        if (i == REG_A0 || i == REG_A1)
          {
            rtx mem = gen_rtx_MEM (PDImode, postinc1);
            MEM_VOLATILE_P (mem) = 1;
            emit_move_insn (gen_rtx_REG (PDImode, i), mem);
          }
        else
          emit_move_insn (gen_rtx_REG (SImode, i), postinc);
      }

  regno = REG_P5 - npregs_consec;
  for (; npregs != npregs_consec; regno--)
    {
      if (must_save_p (is_inthandler, regno))
        {
          emit_move_insn (gen_rtx_REG (word_mode, regno), postinc);
          npregs--;
        }
    }
  regno = REG_R7 - ndregs_consec;
  for (; ndregs != ndregs_consec; regno--)
    {
      if (must_save_p (is_inthandler, regno))
        {
          emit_move_insn (gen_rtx_REG (word_mode, regno), postinc);
          ndregs--;
        }
    }

  if (total_consec != 0)
    {
      rtx pat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (total_consec + 1));
      XVECEXP (pat, 0, 0)
        = gen_rtx_SET (VOIDmode, spreg,
                       gen_rtx_PLUS (Pmode, spreg,
                                     GEN_INT (total_consec * 4)));

      if (npregs_consec > 0)
        regno = REG_P5 + 1;
      else
        regno = REG_R7 + 1;

      for (i = 0; i < total_consec; i++)
        {
          rtx addr = (i > 0
                      ? gen_rtx_PLUS (Pmode, spreg, GEN_INT (i * 4))
                      : spreg);
          rtx memref = gen_rtx_MEM (word_mode, addr);

          regno--;
          XVECEXP (pat, 0, i + 1)
            = gen_rtx_SET (VOIDmode, gen_rtx_REG (word_mode, regno), memref);

          if (npregs_consec > 0)
            {
              if (--npregs_consec == 0)
                regno = REG_R7 + 1;
            }
        }

      insn = emit_insn (pat);
      RTX_FRAME_RELATED_P (insn) = 1;
    }
  if (saveall || is_inthandler)
    {
      for (regno = REG_LB1; regno >= REG_LT0; regno--)
        if (! current_function_is_leaf
            || cfun->machine->has_hardware_loops
            || cfun->machine->has_loopreg_clobber
            || (ENABLE_WA_05000257 && (regno == REG_LC0 || regno == REG_LC1)))
          emit_move_insn (gen_rtx_REG (SImode, regno), postinc);

      emit_move_insn (gen_rtx_REG (SImode, REG_ASTAT), postinc);
    }
}

/* Perform any actions needed for a function that is receiving a
   variable number of arguments.

   CUM is as above.

   MODE and TYPE are the mode and type of the current parameter.

   PRETEND_SIZE is a variable that should be set to the amount of stack
   that must be pushed by the prologue to pretend that our caller pushed
   it.

   Normally, this hook will push all remaining incoming registers on the
   stack and set PRETEND_SIZE to the length of the registers pushed.

   Blackfin specific:
   - The VDSP C compiler manual (our ABI) says that a variable args function
     should save the R0, R1 and R2 registers in the stack.
   - The caller will always leave space on the stack for the
     arguments that are passed in registers, so we don't have
     to leave any extra space.
   - Now, the va_start pointer can access all arguments from the stack.  */

static void
setup_incoming_varargs (cumulative_args_t cum,
                        enum machine_mode mode ATTRIBUTE_UNUSED,
                        tree type ATTRIBUTE_UNUSED, int *pretend_size,
                        int no_rtl)
{
  rtx mem;
  int i;

  if (no_rtl)
    return;

  /* The move for named arguments will be generated automatically by the
     compiler.  We need to generate the move rtx for the unnamed arguments
     if they are in the first 3 words.  We assume at least 1 named argument
     exists, so we never generate [ARGP] = R0 here.  */

  for (i = get_cumulative_args (cum)->words + 1; i < max_arg_registers; i++)
    {
      mem = gen_rtx_MEM (Pmode,
                         plus_constant (arg_pointer_rtx, (i * UNITS_PER_WORD)));
      emit_move_insn (mem, gen_rtx_REG (Pmode, i));
    }

  *pretend_size = 0;
}

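/* Illustration (hypothetical prototype, not from the sources): for

     int f (int n, ...);

   the named N arrives in R0, and the loop above stores the remaining
   argument registers R1 and R2 to their caller-allocated slots at
   [ARGP+4] and [ARGP+8], so va_arg can walk every argument in memory.
   This assumes the last named argument occupies a single word.  */
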
/* Value should be nonzero if functions must have frame pointers.
   Zero means the frame pointer need not be set up (and parms may
   be accessed via the stack pointer) in functions that seem suitable.  */

static bool
bfin_frame_pointer_required (void)
{
  e_funkind fkind = funkind (TREE_TYPE (current_function_decl));

  if (fkind != SUBROUTINE)
    return true;

  /* We turn on -fomit-frame-pointer if -momit-leaf-frame-pointer is used,
     so we have to override it for non-leaf functions.  */
  if (TARGET_OMIT_LEAF_FRAME_POINTER && ! current_function_is_leaf)
    return true;

  return false;
}

/* Return the number of registers pushed during the prologue.  */

static int
n_regs_saved_by_prologue (void)
{
  e_funkind fkind = funkind (TREE_TYPE (current_function_decl));
  bool is_inthandler = fkind != SUBROUTINE;
  tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl));
  bool all = (lookup_attribute ("saveall", attrs) != NULL_TREE
              || (is_inthandler && !current_function_is_leaf));
  int ndregs = all ? 8 : n_dregs_to_save (is_inthandler, false);
  int npregs = all ? 6 : n_pregs_to_save (is_inthandler, false);
  int n = ndregs + npregs;
  int i;

  if (all || stack_frame_needed_p ())
    n += 2;
  else
    {
      if (must_save_fp_p ())
        n++;
      if (must_save_rets_p ())
        n++;
    }

  if (fkind != SUBROUTINE || all)
    {
      /* Increment once for ASTAT.  */
      n++;
      if (! current_function_is_leaf
          || cfun->machine->has_hardware_loops
          || cfun->machine->has_loopreg_clobber)
        {
          n += 6;
        }
    }

  if (fkind != SUBROUTINE)
    {
      /* RETE/X/N.  */
      if (lookup_attribute ("nesting", attrs))
        n++;
    }

  for (i = REG_P7 + 1; i < REG_CC; i++)
    if (all
        || (fkind != SUBROUTINE
            && (df_regs_ever_live_p (i)
                || (!leaf_function_p () && call_used_regs[i]))))
      n += i == REG_A0 || i == REG_A1 ? 2 : 1;

  return n;
}

/* Given FROM and TO register numbers, say whether this elimination is
   allowed.  Frame pointer elimination is automatically handled.

   All other eliminations are valid.  */

static bool
bfin_can_eliminate (const int from ATTRIBUTE_UNUSED, const int to)
{
  return (to == STACK_POINTER_REGNUM ? ! frame_pointer_needed : true);
}

/* Return the offset between two registers, one to be eliminated, and the other
   its replacement, at the start of a routine.  */

HOST_WIDE_INT
bfin_initial_elimination_offset (int from, int to)
{
  HOST_WIDE_INT offset = 0;

  if (from == ARG_POINTER_REGNUM)
    offset = n_regs_saved_by_prologue () * 4;

  if (to == STACK_POINTER_REGNUM)
    {
      if (crtl->outgoing_args_size >= FIXED_STACK_AREA)
        offset += crtl->outgoing_args_size;
      else if (crtl->outgoing_args_size)
        offset += FIXED_STACK_AREA;

      offset += get_frame_size ();
    }

  return offset;
}

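/* Rough picture of the offsets computed above (a sketch, not from the
   original sources); higher addresses first:

     incoming arguments     <- ARG_POINTER
     saved registers        n_regs_saved_by_prologue () * 4 bytes
     local variables        get_frame_size () bytes
     outgoing arguments     at least FIXED_STACK_AREA when nonempty
                            <- STACK_POINTER  */
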
/* Emit code to load a constant CONSTANT into register REG, setting
   RTX_FRAME_RELATED_P on all insns we generate if RELATED is true.
   Make sure that the insns we generate need not be split.  */

static void
frame_related_constant_load (rtx reg, HOST_WIDE_INT constant, bool related)
{
  rtx insn;
  rtx cst = GEN_INT (constant);

  if (constant >= -32768 && constant < 65536)
    insn = emit_move_insn (reg, cst);
  else
    {
      /* We don't call split_load_immediate here, since dwarf2out.c can get
         confused about some of the more clever sequences it can generate.  */
      insn = emit_insn (gen_movsi_high (reg, cst));
      if (related)
        RTX_FRAME_RELATED_P (insn) = 1;
      insn = emit_insn (gen_movsi_low (reg, reg, cst));
    }
  if (related)
    RTX_FRAME_RELATED_P (insn) = 1;
}

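/* For illustration (hypothetical constant, not from the sources):
   loading 0x12345678 through the movsi_high/movsi_low pair above would
   print roughly as

     P1.H = 0x1234;
     P1.L = 0x5678;

   while constants in [-32768, 65536) are loaded with a single move.  */
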
/* Generate efficient code to add a value to a P register.
   Set RTX_FRAME_RELATED_P on the generated insns if FRAME is nonzero.
   EPILOGUE_P is zero if this function is called for the prologue;
   otherwise it's nonzero, and negative if this is for a sibcall
   epilogue.  */

static void
add_to_reg (rtx reg, HOST_WIDE_INT value, int frame, int epilogue_p)
{
  if (value == 0)
    return;

  /* Choose whether to use a sequence using a temporary register, or
     a sequence with multiple adds.  We can add a signed 7-bit value
     in one instruction.  */
  if (value > 120 || value < -120)
    {
      rtx tmpreg;
      rtx tmpreg2;
      rtx insn;

      tmpreg2 = NULL_RTX;

      /* For the prologue or a normal epilogue, P1 can be safely used
         as the temporary register.  For a sibcall epilogue, we try to find
         a call-used P register, which will be restored in the epilogue.
         If we cannot find such a P register, we have to use one I register
         to help us.  */

      if (epilogue_p >= 0)
        tmpreg = gen_rtx_REG (SImode, REG_P1);
      else
        {
          int i;
          for (i = REG_P0; i <= REG_P5; i++)
            if ((df_regs_ever_live_p (i) && ! call_used_regs[i])
                || (!TARGET_FDPIC
                    && i == PIC_OFFSET_TABLE_REGNUM
                    && (crtl->uses_pic_offset_table
                        || (TARGET_ID_SHARED_LIBRARY
                            && ! current_function_is_leaf))))
              break;
          if (i <= REG_P5)
            tmpreg = gen_rtx_REG (SImode, i);
          else
            {
              tmpreg = gen_rtx_REG (SImode, REG_P1);
              tmpreg2 = gen_rtx_REG (SImode, REG_I0);
              emit_move_insn (tmpreg2, tmpreg);
            }
        }

      if (frame)
        frame_related_constant_load (tmpreg, value, TRUE);
      else
        insn = emit_move_insn (tmpreg, GEN_INT (value));

      insn = emit_insn (gen_addsi3 (reg, reg, tmpreg));
      if (frame)
        RTX_FRAME_RELATED_P (insn) = 1;

      if (tmpreg2 != NULL_RTX)
        emit_move_insn (tmpreg, tmpreg2);
    }
  else
    do
      {
        int size = value;
        rtx insn;

        if (size > 60)
          size = 60;
        else if (size < -60)
          /* We could use -62, but that would leave the stack unaligned, so
             it's no good.  */
          size = -60;

        insn = emit_insn (gen_addsi3 (reg, reg, GEN_INT (size)));
        if (frame)
          RTX_FRAME_RELATED_P (insn) = 1;
        value -= size;
      }
    while (value != 0);
}

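/* Worked example (illustrative): add_to_reg (reg, 100, 1, 0) takes the
   multiple-add path and emits two adds, +60 and +40, both marked
   frame-related; a value such as 400 exceeds 120, so it is loaded into
   the temporary (P1 here, since EPILOGUE_P >= 0) and added in a single
   register add.  */
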
/* Generate a LINK insn for a frame of size FRAME_SIZE.  If this constant
   is too large, generate a sequence of insns that has the same effect.
   SPREG contains (reg:SI REG_SP).  */

static void
emit_link_insn (rtx spreg, HOST_WIDE_INT frame_size)
{
  HOST_WIDE_INT link_size = frame_size;
  rtx insn;
  int i;

  if (link_size > 262140)
    link_size = 262140;

  /* Use a LINK insn with as big a constant as possible, then subtract
     any remaining size from the SP.  */
  insn = emit_insn (gen_link (GEN_INT (-8 - link_size)));
  RTX_FRAME_RELATED_P (insn) = 1;

  for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
    {
      rtx set = XVECEXP (PATTERN (insn), 0, i);
      gcc_assert (GET_CODE (set) == SET);
      RTX_FRAME_RELATED_P (set) = 1;
    }

  frame_size -= link_size;

  if (frame_size > 0)
    {
      /* Must use a call-clobbered PREG that isn't the static chain.  */
      rtx tmpreg = gen_rtx_REG (Pmode, REG_P1);

      frame_related_constant_load (tmpreg, -frame_size, TRUE);
      insn = emit_insn (gen_addsi3 (spreg, spreg, tmpreg));
      RTX_FRAME_RELATED_P (insn) = 1;
    }
}

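/* Background note (summarized from the Blackfin ISA as we understand it,
   not from this file): LINK saves RETS and FP, sets FP = SP, and then
   subtracts its operand from SP, which is why 8 bytes are added to
   LINK_SIZE above; 262140 (0x3FFFC) is the largest multiple-of-4
   constant the code allows in a single LINK.  */
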
/* Return the number of bytes we must reserve for outgoing arguments
   in the current function's stack frame.  */

static HOST_WIDE_INT
arg_area_size (void)
{
  if (crtl->outgoing_args_size)
    {
      if (crtl->outgoing_args_size >= FIXED_STACK_AREA)
        return crtl->outgoing_args_size;
      else
        return FIXED_STACK_AREA;
    }
  return 0;
}

/* Save RETS and FP, and allocate a stack frame.  ALL is true if the
   function must save all its registers (true only for certain interrupt
   handlers).  */

static void
do_link (rtx spreg, HOST_WIDE_INT frame_size, bool all)
{
  frame_size += arg_area_size ();

  if (all
      || stack_frame_needed_p ()
      || (must_save_rets_p () && must_save_fp_p ()))
    emit_link_insn (spreg, frame_size);
  else
    {
      if (must_save_rets_p ())
        {
          rtx pat = gen_movsi (gen_rtx_MEM (Pmode,
                                            gen_rtx_PRE_DEC (Pmode, spreg)),
                               bfin_rets_rtx);
          rtx insn = emit_insn (pat);
          RTX_FRAME_RELATED_P (insn) = 1;
        }
      if (must_save_fp_p ())
        {
          rtx pat = gen_movsi (gen_rtx_MEM (Pmode,
                                            gen_rtx_PRE_DEC (Pmode, spreg)),
                               gen_rtx_REG (Pmode, REG_FP));
          rtx insn = emit_insn (pat);
          RTX_FRAME_RELATED_P (insn) = 1;
        }
      add_to_reg (spreg, -frame_size, 1, 0);
    }
}

/* Like do_link, but used for epilogues to deallocate the stack frame.
   EPILOGUE_P is passed on to add_to_reg; it is nonzero here, and
   negative if this is for a sibcall epilogue.  */

static void
do_unlink (rtx spreg, HOST_WIDE_INT frame_size, bool all, int epilogue_p)
{
  frame_size += arg_area_size ();

  if (stack_frame_needed_p ())
    emit_insn (gen_unlink ());
  else
    {
      rtx postinc = gen_rtx_MEM (Pmode, gen_rtx_POST_INC (Pmode, spreg));

      add_to_reg (spreg, frame_size, 0, epilogue_p);
      if (all || must_save_fp_p ())
        {
          rtx fpreg = gen_rtx_REG (Pmode, REG_FP);
          emit_move_insn (fpreg, postinc);
          emit_use (fpreg);
        }
      if (all || must_save_rets_p ())
        {
          emit_move_insn (bfin_rets_rtx, postinc);
          emit_use (bfin_rets_rtx);
        }
    }
}

/* Generate a prologue suitable for a function of kind FKIND.  This is
   called for interrupt and exception handler prologues.
   SPREG contains (reg:SI REG_SP).  */

static void
expand_interrupt_handler_prologue (rtx spreg, e_funkind fkind, bool all)
{
  HOST_WIDE_INT frame_size = get_frame_size ();
  rtx predec1 = gen_rtx_PRE_DEC (SImode, spreg);
  rtx predec = gen_rtx_MEM (SImode, predec1);
  rtx insn;
  tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl));
  tree kspisusp = lookup_attribute ("kspisusp", attrs);

  if (kspisusp)
    {
      insn = emit_move_insn (spreg, gen_rtx_REG (Pmode, REG_USP));
      RTX_FRAME_RELATED_P (insn) = 1;
    }

  /* We need space on the stack in case we need to save the argument
     registers.  */
  if (fkind == EXCPT_HANDLER)
    {
      insn = emit_insn (gen_addsi3 (spreg, spreg, GEN_INT (-12)));
      RTX_FRAME_RELATED_P (insn) = 1;
    }

  /* If we're calling other functions, they won't save their call-clobbered
     registers, so we must save everything here.  */
  if (!current_function_is_leaf)
    all = true;
  expand_prologue_reg_save (spreg, all, true);

  if (ENABLE_WA_05000283 || ENABLE_WA_05000315)
    {
      rtx chipid = GEN_INT (trunc_int_for_mode (0xFFC00014, SImode));
      rtx p5reg = gen_rtx_REG (Pmode, REG_P5);
      emit_insn (gen_movbi (bfin_cc_rtx, const1_rtx));
      emit_insn (gen_movsi_high (p5reg, chipid));
      emit_insn (gen_movsi_low (p5reg, p5reg, chipid));
      emit_insn (gen_dummy_load (p5reg, bfin_cc_rtx));
    }

  if (lookup_attribute ("nesting", attrs))
    {
      rtx srcreg = gen_rtx_REG (Pmode, ret_regs[fkind]);
      insn = emit_move_insn (predec, srcreg);
      RTX_FRAME_RELATED_P (insn) = 1;
    }

  do_link (spreg, frame_size, all);

  if (fkind == EXCPT_HANDLER)
    {
      rtx r0reg = gen_rtx_REG (SImode, REG_R0);
      rtx r1reg = gen_rtx_REG (SImode, REG_R1);
      rtx r2reg = gen_rtx_REG (SImode, REG_R2);

      emit_move_insn (r0reg, gen_rtx_REG (SImode, REG_SEQSTAT));
      emit_insn (gen_ashrsi3 (r0reg, r0reg, GEN_INT (26)));
      emit_insn (gen_ashlsi3 (r0reg, r0reg, GEN_INT (26)));
      emit_move_insn (r1reg, spreg);
      emit_move_insn (r2reg, gen_rtx_REG (Pmode, REG_FP));
      emit_insn (gen_addsi3 (r2reg, r2reg, GEN_INT (8)));
    }
}

/* Generate an epilogue suitable for a function of kind FKIND.  This is
   called for interrupt and exception handler epilogues.
   SPREG contains (reg:SI REG_SP).  */

static void
expand_interrupt_handler_epilogue (rtx spreg, e_funkind fkind, bool all)
{
  tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl));
  rtx postinc1 = gen_rtx_POST_INC (SImode, spreg);
  rtx postinc = gen_rtx_MEM (SImode, postinc1);

  /* A slightly crude technique to stop flow from trying to delete "dead"
     insns.  */
  MEM_VOLATILE_P (postinc) = 1;

  do_unlink (spreg, get_frame_size (), all, 1);

  if (lookup_attribute ("nesting", attrs))
    {
      rtx srcreg = gen_rtx_REG (Pmode, ret_regs[fkind]);
      emit_move_insn (srcreg, postinc);
    }

  /* If we're calling other functions, they won't save their call-clobbered
     registers, so we must save (and restore) everything here.  */
  if (!current_function_is_leaf)
    all = true;

  expand_epilogue_reg_restore (spreg, all, true);

  /* Deallocate any space we left on the stack in case we needed to save the
     argument registers.  */
  if (fkind == EXCPT_HANDLER)
    emit_insn (gen_addsi3 (spreg, spreg, GEN_INT (12)));

  emit_jump_insn (gen_return_internal (gen_rtx_REG (Pmode, ret_regs[fkind])));
}

/* Used while emitting the prologue to generate code to load the correct value
   into the PIC register, which is passed in DEST.  */

static rtx
bfin_load_pic_reg (rtx dest)
{
  struct cgraph_local_info *i = NULL;
  rtx addr;

  i = cgraph_local_info (current_function_decl);

  /* Functions local to the translation unit don't need to reload the
     pic reg, since the caller always passes a usable one.  */
  if (i && i->local)
    return pic_offset_table_rtx;

  if (global_options_set.x_bfin_library_id)
    addr = plus_constant (pic_offset_table_rtx, -4 - bfin_library_id * 4);
  else
    addr = gen_rtx_PLUS (Pmode, pic_offset_table_rtx,
                         gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx),
                                         UNSPEC_LIBRARY_OFFSET));
  emit_insn (gen_movsi (dest, gen_rtx_MEM (Pmode, addr)));
  return dest;
}

/* Generate RTL for the prologue of the current function.  */

void
bfin_expand_prologue (void)
{
  HOST_WIDE_INT frame_size = get_frame_size ();
  rtx spreg = gen_rtx_REG (Pmode, REG_SP);
  e_funkind fkind = funkind (TREE_TYPE (current_function_decl));
  rtx pic_reg_loaded = NULL_RTX;
  tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl));
  bool all = lookup_attribute ("saveall", attrs) != NULL_TREE;

  if (fkind != SUBROUTINE)
    {
      expand_interrupt_handler_prologue (spreg, fkind, all);
      return;
    }

  if (crtl->limit_stack
      || (TARGET_STACK_CHECK_L1
          && !DECL_NO_LIMIT_STACK (current_function_decl)))
    {
      HOST_WIDE_INT offset
        = bfin_initial_elimination_offset (ARG_POINTER_REGNUM,
                                           STACK_POINTER_REGNUM);
      rtx lim = crtl->limit_stack ? stack_limit_rtx : NULL_RTX;
      rtx tmp = gen_rtx_REG (Pmode, REG_R3);
      rtx p2reg = gen_rtx_REG (Pmode, REG_P2);

      emit_move_insn (tmp, p2reg);
      if (!lim)
        {
          emit_move_insn (p2reg, gen_int_mode (0xFFB00000, SImode));
          emit_move_insn (p2reg, gen_rtx_MEM (Pmode, p2reg));
          lim = p2reg;
        }
      if (GET_CODE (lim) == SYMBOL_REF)
        {
          if (TARGET_ID_SHARED_LIBRARY)
            {
              rtx p1reg = gen_rtx_REG (Pmode, REG_P1);
              rtx val;
              pic_reg_loaded = bfin_load_pic_reg (p2reg);
              val = legitimize_pic_address (stack_limit_rtx, p1reg,
                                            pic_reg_loaded);
              emit_move_insn (p1reg, val);
              frame_related_constant_load (p2reg, offset, FALSE);
              emit_insn (gen_addsi3 (p2reg, p2reg, p1reg));
              lim = p2reg;
            }
          else
            {
              rtx limit = plus_constant (lim, offset);
              emit_move_insn (p2reg, limit);
              lim = p2reg;
            }
        }
      else
        {
          if (lim != p2reg)
            emit_move_insn (p2reg, lim);
          add_to_reg (p2reg, offset, 0, 0);
          lim = p2reg;
        }
      emit_insn (gen_compare_lt (bfin_cc_rtx, spreg, lim));
      emit_insn (gen_trapifcc ());
      emit_move_insn (p2reg, tmp);
    }
  expand_prologue_reg_save (spreg, all, false);

  do_link (spreg, frame_size, all);

  if (TARGET_ID_SHARED_LIBRARY
      && !TARGET_SEP_DATA
      && (crtl->uses_pic_offset_table
          || !current_function_is_leaf))
    bfin_load_pic_reg (pic_offset_table_rtx);
}

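/* Informal summary (derived from the code above, not an authoritative
   description): for an ordinary subroutine the prologue is emitted in
   this order:
     1. optional stack-limit check (compare SP with the limit, then
        trap via gen_trapifcc when SP falls below it);
     2. register saves (expand_prologue_reg_save);
     3. frame allocation (do_link);
     4. PIC register reload when -mid-shared-library requires it.  */
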
/* Generate RTL for the epilogue of the current function.  NEED_RETURN is zero
   if this is for a sibcall.  EH_RETURN is nonzero if we're expanding an
   eh_return pattern.  SIBCALL_P is true if this is a sibcall epilogue,
   false otherwise.  */

void
bfin_expand_epilogue (int need_return, int eh_return, bool sibcall_p)
{
  rtx spreg = gen_rtx_REG (Pmode, REG_SP);
  e_funkind fkind = funkind (TREE_TYPE (current_function_decl));
  int e = sibcall_p ? -1 : 1;
  tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl));
  bool all = lookup_attribute ("saveall", attrs) != NULL_TREE;

  if (fkind != SUBROUTINE)
    {
      expand_interrupt_handler_epilogue (spreg, fkind, all);
      return;
    }

  do_unlink (spreg, get_frame_size (), all, e);

  expand_epilogue_reg_restore (spreg, all, false);

  /* Omit the return insn if this is for a sibcall.  */
  if (! need_return)
    return;

  if (eh_return)
    emit_insn (gen_addsi3 (spreg, spreg, gen_rtx_REG (Pmode, REG_P2)));

  emit_jump_insn (gen_return_internal (gen_rtx_REG (Pmode, REG_RETS)));
}

/* Return nonzero if register OLD_REG can be renamed to register NEW_REG.  */

int
bfin_hard_regno_rename_ok (unsigned int old_reg ATTRIBUTE_UNUSED,
                           unsigned int new_reg)
{
  /* Interrupt functions can only use registers that have already been
     saved by the prologue, even if they would normally be
     call-clobbered.  */

  if (funkind (TREE_TYPE (current_function_decl)) != SUBROUTINE
      && !df_regs_ever_live_p (new_reg))
    return 0;

  return 1;
}

/* Implement TARGET_EXTRA_LIVE_ON_ENTRY.  */
static void
bfin_extra_live_on_entry (bitmap regs)
{
  if (TARGET_FDPIC)
    bitmap_set_bit (regs, FDPIC_REGNO);
}

/* Return the value of the return address for the frame COUNT steps up
   from the current frame, after the prologue.
   We punt for everything but the current frame by returning const0_rtx.  */

rtx
bfin_return_addr_rtx (int count)
{
  if (count != 0)
    return const0_rtx;

  return get_hard_reg_initial_val (Pmode, REG_RETS);
}

/* Implement TARGET_DELEGITIMIZE_ADDRESS.  Undo the effect of
   legitimize_pic_address on a MEM so the original symbol can be
   recovered.  */

static rtx
bfin_delegitimize_address (rtx orig_x)
{
  rtx x = orig_x;

  if (GET_CODE (x) != MEM)
    return orig_x;

  x = XEXP (x, 0);
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 1)) == UNSPEC
      && XINT (XEXP (x, 1), 1) == UNSPEC_MOVE_PIC
      && GET_CODE (XEXP (x, 0)) == REG
      && REGNO (XEXP (x, 0)) == PIC_OFFSET_TABLE_REGNUM)
    return XVECEXP (XEXP (x, 1), 0, 0);

  return orig_x;
}

/* This predicate is used to compute the length of a load/store insn.
   OP is a MEM rtx; we return nonzero if its addressing mode requires a
   32-bit instruction.  */

int
effective_address_32bit_p (rtx op, enum machine_mode mode)
{
  HOST_WIDE_INT offset;

  mode = GET_MODE (op);
  op = XEXP (op, 0);

  if (GET_CODE (op) != PLUS)
    {
      gcc_assert (REG_P (op) || GET_CODE (op) == POST_INC
                  || GET_CODE (op) == PRE_DEC || GET_CODE (op) == POST_DEC);
      return 0;
    }

  if (GET_CODE (XEXP (op, 1)) == UNSPEC)
    return 1;

  offset = INTVAL (XEXP (op, 1));

  /* All byte loads use a 16-bit offset.  */
  if (GET_MODE_SIZE (mode) == 1)
    return 1;

  if (GET_MODE_SIZE (mode) == 4)
    {
      /* Frame pointer relative loads can use a negative offset, all others
         are restricted to a small positive one.  */
      if (XEXP (op, 0) == frame_pointer_rtx)
        return offset < -128 || offset > 60;
      return offset < 0 || offset > 60;
    }

  /* Must be HImode now.  */
  return offset < 0 || offset > 30;
}

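/* Quick reference (an informal restatement of the checks above): a
   16-bit encoding is possible for word accesses with offsets 0..60
   (or -128..60 when FP-relative) and for halfword accesses with
   offsets 0..30; byte accesses and UNSPEC offsets always need the
   32-bit form.  */
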
/* Returns true if X is a memory reference using an I register.  */
bool
bfin_dsp_memref_p (rtx x)
{
  if (! MEM_P (x))
    return false;
  x = XEXP (x, 0);
  if (GET_CODE (x) == POST_INC || GET_CODE (x) == PRE_INC
      || GET_CODE (x) == POST_DEC || GET_CODE (x) == PRE_DEC)
    x = XEXP (x, 0);
  return IREG_P (x);
}

/* Return cost of the memory address ADDR.
   All addressing modes are equally cheap on the Blackfin.  */

static int
bfin_address_cost (rtx addr ATTRIBUTE_UNUSED, bool speed ATTRIBUTE_UNUSED)
{
  return 1;
}

/* Subroutine of print_operand; used to print a memory reference X to FILE.  */

void
print_address_operand (FILE *file, rtx x)
{
  switch (GET_CODE (x))
    {
    case PLUS:
      output_address (XEXP (x, 0));
      fprintf (file, "+");
      output_address (XEXP (x, 1));
      break;

    case PRE_DEC:
      fprintf (file, "--");
      output_address (XEXP (x, 0));
      break;
    case POST_INC:
      output_address (XEXP (x, 0));
      fprintf (file, "++");
      break;
    case POST_DEC:
      output_address (XEXP (x, 0));
      fprintf (file, "--");
      break;

    default:
      gcc_assert (GET_CODE (x) != MEM);
      print_operand (file, x, 0);
      break;
    }
}

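/* Example output (restating the cases above): a POST_INC address
   prints as "P0++" and a PRE_DEC address as "--SP", so the MEM case in
   print_operand produces operands such as "[P0++]" or "[--SP]".  */
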
/* Adding intp DImode support by Tony
 * -- Q: (low  word)
 * -- R: (high word)
 */

void
print_operand (FILE *file, rtx x, char code)
{
  enum machine_mode mode;

  if (code == '!')
    {
      if (GET_MODE (current_output_insn) == SImode)
        fprintf (file, " ||");
      else
        fprintf (file, ";");
      return;
    }

  mode = GET_MODE (x);

  switch (code)
    {
    case 'j':
      switch (GET_CODE (x))
        {
        case EQ:
          fprintf (file, "e");
          break;
        case NE:
          fprintf (file, "ne");
          break;
        case GT:
          fprintf (file, "g");
          break;
        case LT:
          fprintf (file, "l");
          break;
        case GE:
          fprintf (file, "ge");
          break;
        case LE:
          fprintf (file, "le");
          break;
        case GTU:
          fprintf (file, "g");
          break;
        case LTU:
          fprintf (file, "l");
          break;
        case GEU:
          fprintf (file, "ge");
          break;
        case LEU:
          fprintf (file, "le");
          break;
        default:
          output_operand_lossage ("invalid %%j value");
        }
      break;

    case 'J':                                    /* reverse logic */
      switch (GET_CODE (x))
        {
        case EQ:
          fprintf (file, "ne");
          break;
        case NE:
          fprintf (file, "e");
          break;
        case GT:
          fprintf (file, "le");
          break;
        case LT:
          fprintf (file, "ge");
          break;
        case GE:
          fprintf (file, "l");
          break;
        case LE:
          fprintf (file, "g");
          break;
        case GTU:
          fprintf (file, "le");
          break;
        case LTU:
          fprintf (file, "ge");
          break;
        case GEU:
          fprintf (file, "l");
          break;
        case LEU:
          fprintf (file, "g");
          break;
        default:
          output_operand_lossage ("invalid %%J value");
        }
      break;

    default:
      switch (GET_CODE (x))
        {
        case REG:
          if (code == 'h')
            {
              if (REGNO (x) < 32)
                fprintf (file, "%s", short_reg_names[REGNO (x)]);
              else
                output_operand_lossage ("invalid operand for code '%c'", code);
            }
          else if (code == 'd')
            {
              if (REGNO (x) < 32)
                fprintf (file, "%s", high_reg_names[REGNO (x)]);
              else
                output_operand_lossage ("invalid operand for code '%c'", code);
            }
          else if (code == 'w')
            {
              if (REGNO (x) == REG_A0 || REGNO (x) == REG_A1)
                fprintf (file, "%s.w", reg_names[REGNO (x)]);
              else
                output_operand_lossage ("invalid operand for code '%c'", code);
            }
          else if (code == 'x')
            {
              if (REGNO (x) == REG_A0 || REGNO (x) == REG_A1)
                fprintf (file, "%s.x", reg_names[REGNO (x)]);
              else
                output_operand_lossage ("invalid operand for code '%c'", code);
            }
          else if (code == 'v')
            {
              if (REGNO (x) == REG_A0)
                fprintf (file, "AV0");
              else if (REGNO (x) == REG_A1)
                fprintf (file, "AV1");
              else
                output_operand_lossage ("invalid operand for code '%c'", code);
            }
          else if (code == 'D')
            {
              if (D_REGNO_P (REGNO (x)))
                fprintf (file, "%s", dregs_pair_names[REGNO (x)]);
              else
                output_operand_lossage ("invalid operand for code '%c'", code);
            }
          else if (code == 'H')
            {
              if ((mode == DImode || mode == DFmode) && REG_P (x))
                fprintf (file, "%s", reg_names[REGNO (x) + 1]);
              else
                output_operand_lossage ("invalid operand for code '%c'", code);
            }
          else if (code == 'T')
            {
              if (D_REGNO_P (REGNO (x)))
                fprintf (file, "%s", byte_reg_names[REGNO (x)]);
              else
                output_operand_lossage ("invalid operand for code '%c'", code);
            }
          else
            fprintf (file, "%s", reg_names[REGNO (x)]);
          break;

        case MEM:
          fputc ('[', file);
          x = XEXP (x, 0);
          print_address_operand (file, x);
          fputc (']', file);
          break;

        case CONST_INT:
          if (code == 'M')
            {
              switch (INTVAL (x))
                {
                case MACFLAG_NONE:
                  break;
                case MACFLAG_FU:
                  fputs ("(FU)", file);
                  break;
                case MACFLAG_T:
                  fputs ("(T)", file);
                  break;
                case MACFLAG_TFU:
                  fputs ("(TFU)", file);
                  break;
                case MACFLAG_W32:
                  fputs ("(W32)", file);
                  break;
                case MACFLAG_IS:
                  fputs ("(IS)", file);
                  break;
                case MACFLAG_IU:
                  fputs ("(IU)", file);
                  break;
                case MACFLAG_IH:
                  fputs ("(IH)", file);
                  break;
                case MACFLAG_M:
                  fputs ("(M)", file);
                  break;
                case MACFLAG_IS_M:
                  fputs ("(IS,M)", file);
                  break;
                case MACFLAG_ISS2:
                  fputs ("(ISS2)", file);
                  break;
                case MACFLAG_S2RND:
                  fputs ("(S2RND)", file);
                  break;
                default:
                  gcc_unreachable ();
                }
              break;
            }
          else if (code == 'b')
            {
              if (INTVAL (x) == 0)
                fputs ("+=", file);
              else if (INTVAL (x) == 1)
                fputs ("-=", file);
              else
                gcc_unreachable ();
              break;
            }
          /* Moves to half registers with d or h modifiers always use unsigned
             constants.  */
          else if (code == 'd')
            x = GEN_INT ((INTVAL (x) >> 16) & 0xffff);
          else if (code == 'h')
            x = GEN_INT (INTVAL (x) & 0xffff);
          else if (code == 'N')
            x = GEN_INT (-INTVAL (x));
          else if (code == 'X')
            x = GEN_INT (exact_log2 (0xffffffff & INTVAL (x)));
          else if (code == 'Y')
            x = GEN_INT (exact_log2 (0xffffffff & ~INTVAL (x)));
          else if (code == 'Z')
            /* Used for LINK insns.  */
            x = GEN_INT (-8 - INTVAL (x));

          /* fall through */

        case SYMBOL_REF:
          output_addr_const (file, x);
          break;

        case CONST_DOUBLE:
          output_operand_lossage ("invalid const_double operand");
          break;

        case UNSPEC:
          switch (XINT (x, 1))
            {
            case UNSPEC_MOVE_PIC:
              output_addr_const (file, XVECEXP (x, 0, 0));
              fprintf (file, "@GOT");
              break;

            case UNSPEC_MOVE_FDPIC:
              output_addr_const (file, XVECEXP (x, 0, 0));
              fprintf (file, "@GOT17M4");
              break;

            case UNSPEC_FUNCDESC_GOT17M4:
              output_addr_const (file, XVECEXP (x, 0, 0));
              fprintf (file, "@FUNCDESC_GOT17M4");
              break;

            case UNSPEC_LIBRARY_OFFSET:
              fprintf (file, "_current_shared_library_p5_offset_");
              break;

            default:
              gcc_unreachable ();
            }
          break;

        default:
          output_addr_const (file, x);
        }
    }
}

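/* Informal summary of the operand-modifier codes handled above (derived
   by reading the code, not an official table): '!' separates parallel
   instructions; 'j'/'J' print a (reversed) condition suffix; 'h', 'd',
   'w', 'x', 'v', 'D', 'H' and 'T' select alternate register names or
   register halves; 'M' prints multiplier mode flags; 'b', 'N', 'X',
   'Y' and 'Z' transform integer constants before printing.  */
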
/* Argument support functions.  */

/* Initialize a variable CUM of type CUMULATIVE_ARGS
   for a call to a function whose data type is FNTYPE.
   For a library call, FNTYPE is 0.
   Per the VDSP C Compiler manual (our ABI), the first 3 words of
   arguments are passed in R0, R1 and R2.  */

void
init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype,
                      rtx libname ATTRIBUTE_UNUSED)
{
  static CUMULATIVE_ARGS zero_cum;

  *cum = zero_cum;

  /* Set up the number of registers to use for passing arguments.  */

  cum->nregs = max_arg_registers;
  cum->arg_regs = arg_regs;

  cum->call_cookie = CALL_NORMAL;
  /* Check for a shortcall or longcall attribute.  */
  if (fntype && lookup_attribute ("shortcall", TYPE_ATTRIBUTES (fntype)))
    cum->call_cookie |= CALL_SHORT;
  else if (fntype && lookup_attribute ("longcall", TYPE_ATTRIBUTES (fntype)))
    cum->call_cookie |= CALL_LONG;

  return;
}

/* Update the data in CUM to advance over an argument
1647
   of mode MODE and data type TYPE.
1648
   (TYPE is null for libcalls where that information may not be available.)  */
1649
 
1650
static void
1651
bfin_function_arg_advance (cumulative_args_t cum_v, enum machine_mode mode,
1652
                           const_tree type, bool named ATTRIBUTE_UNUSED)
1653
{
1654
  CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
1655
  int count, bytes, words;
1656
 
1657
  bytes = (mode == BLKmode) ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
1658
  words = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
1659
 
1660
  cum->words += words;
1661
  cum->nregs -= words;
1662
 
1663
  if (cum->nregs <= 0)
1664
    {
1665
      cum->nregs = 0;
1666
      cum->arg_regs = NULL;
1667
    }
1668
  else
1669
    {
1670
      for (count = 1; count <= words; count++)
1671
        cum->arg_regs++;
1672
    }
1673
 
1674
  return;
1675
}
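
/* A worked example (illustrative): for a call f (int a, long long b),
   the int advances CUM by one word (R0 is used, nregs drops from 3 to 2),
   and the long long then advances it by two words, using R1 and R2 and
   leaving nregs at 0.  */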

/* Define where to put the arguments to a function.
   Value is zero to push the argument on the stack,
   or a hard register in which to store the argument.

   MODE is the argument's machine mode.
   TYPE is the data type of the argument (as a tree).
    This is null for libcalls where that information may
    not be available.
   CUM is a variable of type CUMULATIVE_ARGS which gives info about
    the preceding args and about the function being called.
   NAMED is nonzero if this argument is a named parameter
    (otherwise it is an extra parameter matching an ellipsis).  */

static rtx
bfin_function_arg (cumulative_args_t cum_v, enum machine_mode mode,
                   const_tree type, bool named ATTRIBUTE_UNUSED)
{
  CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
  int bytes
    = (mode == BLKmode) ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);

  if (mode == VOIDmode)
    /* Compute operand 2 of the call insn.  */
    return GEN_INT (cum->call_cookie);

  if (bytes == -1)
    return NULL_RTX;

  if (cum->nregs)
    return gen_rtx_REG (mode, *(cum->arg_regs));

  return NULL_RTX;
}

/* For an arg passed partly in registers and partly in memory,
   this is the number of bytes passed in registers.
   For args passed entirely in registers or entirely in memory, zero.

   Refer to the VDSP C Compiler manual, our ABI: the first 3 words are
   passed in registers.  So, if an argument is larger than the registers
   available, it will span both registers and the stack.  */

static int
bfin_arg_partial_bytes (cumulative_args_t cum, enum machine_mode mode,
                        tree type ATTRIBUTE_UNUSED,
                        bool named ATTRIBUTE_UNUSED)
{
  int bytes
    = (mode == BLKmode) ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
  int bytes_left = get_cumulative_args (cum)->nregs * UNITS_PER_WORD;

  if (bytes == -1)
    return 0;

  if (bytes_left == 0)
    return 0;
  if (bytes > bytes_left)
    return bytes_left;
  return 0;
}
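
/* For example: if two int arguments have already used up R0 and R1, a
   following 8-byte argument sees bytes_left == 4, so 4 of its bytes go
   in R2 and the remaining 4 go on the stack.  */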

/* Variable sized types are passed by reference.  */

static bool
bfin_pass_by_reference (cumulative_args_t cum ATTRIBUTE_UNUSED,
                        enum machine_mode mode ATTRIBUTE_UNUSED,
                        const_tree type, bool named ATTRIBUTE_UNUSED)
{
  return type && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST;
}

/* Decide whether a type should be returned in memory (true)
   or in a register (false).  This implements the
   TARGET_RETURN_IN_MEMORY hook.  */

static bool
bfin_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
{
  int size = int_size_in_bytes (type);
  return size > 2 * UNITS_PER_WORD || size == -1;
}

/* Return the register in which the address of a structure value
   is passed to a function.  */
static rtx
bfin_struct_value_rtx (tree fntype ATTRIBUTE_UNUSED,
                      int incoming ATTRIBUTE_UNUSED)
{
  return gen_rtx_REG (Pmode, REG_P0);
}

/* Return true when a register may be used to pass function parameters.  */

bool
function_arg_regno_p (int n)
{
  int i;
  for (i = 0; arg_regs[i] != -1; i++)
    if (n == arg_regs[i])
      return true;
  return false;
}

/* Returns 1 if OP contains a symbol reference.  */

int
symbolic_reference_mentioned_p (rtx op)
{
  register const char *fmt;
  register int i;

  if (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF)
    return 1;

  fmt = GET_RTX_FORMAT (GET_CODE (op));
  for (i = GET_RTX_LENGTH (GET_CODE (op)) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'E')
        {
          register int j;

          for (j = XVECLEN (op, i) - 1; j >= 0; j--)
            if (symbolic_reference_mentioned_p (XVECEXP (op, i, j)))
              return 1;
        }

      else if (fmt[i] == 'e' && symbolic_reference_mentioned_p (XEXP (op, i)))
        return 1;
    }

  return 0;
}

/* Decide whether we can make a sibling call to a function.  DECL is the
   declaration of the function being targeted by the call and EXP is the
   CALL_EXPR representing the call.  */

static bool
bfin_function_ok_for_sibcall (tree decl ATTRIBUTE_UNUSED,
                              tree exp ATTRIBUTE_UNUSED)
{
  struct cgraph_local_info *this_func, *called_func;
  e_funkind fkind = funkind (TREE_TYPE (current_function_decl));
  if (fkind != SUBROUTINE)
    return false;
  if (!TARGET_ID_SHARED_LIBRARY || TARGET_SEP_DATA)
    return true;

  /* When compiling for ID shared libraries, we can't sibcall a local
     function from a non-local function, because the local function thinks
     it does not need to reload P5 in the prologue, but the sibcall will
     pop P5 in the sibcall epilogue, and we end up with the wrong value
     in P5.  */

  if (!decl)
    /* Not enough information.  */
    return false;

  this_func = cgraph_local_info (current_function_decl);
  called_func = cgraph_local_info (decl);
  if (!called_func)
    return false;
  return !called_func->local || this_func->local;
}

/* Write a template for a trampoline to F.  */

static void
bfin_asm_trampoline_template (FILE *f)
{
  if (TARGET_FDPIC)
    {
      fprintf (f, "\t.dd\t0x00000000\n");       /* 0 */
      fprintf (f, "\t.dd\t0x00000000\n");       /* 0 */
      fprintf (f, "\t.dd\t0x0000e109\n");       /* p1.l = fn low */
      fprintf (f, "\t.dd\t0x0000e149\n");       /* p1.h = fn high */
      fprintf (f, "\t.dd\t0x0000e10a\n");       /* p2.l = sc low */
      fprintf (f, "\t.dd\t0x0000e14a\n");       /* p2.h = sc high */
      fprintf (f, "\t.dw\t0xac4b\n");           /* p3 = [p1 + 4] */
      fprintf (f, "\t.dw\t0x9149\n");           /* p1 = [p1] */
      fprintf (f, "\t.dw\t0x0051\n");           /* jump (p1)*/
    }
  else
    {
      fprintf (f, "\t.dd\t0x0000e109\n");       /* p1.l = fn low */
      fprintf (f, "\t.dd\t0x0000e149\n");       /* p1.h = fn high */
      fprintf (f, "\t.dd\t0x0000e10a\n");       /* p2.l = sc low */
      fprintf (f, "\t.dd\t0x0000e14a\n");       /* p2.h = sc high */
      fprintf (f, "\t.dw\t0x0051\n");           /* jump (p1)*/
    }
}

/* Emit RTL insns to initialize the variable parts of a trampoline at
   M_TRAMP. FNDECL is the target function.  CHAIN_VALUE is an RTX for
   the static chain value for the function.  */

static void
bfin_trampoline_init (rtx m_tramp, tree fndecl, rtx chain_value)
{
  rtx t1 = copy_to_reg (XEXP (DECL_RTL (fndecl), 0));
  rtx t2 = copy_to_reg (chain_value);
  rtx mem;
  int i = 0;

  emit_block_move (m_tramp, assemble_trampoline_template (),
                   GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);

  if (TARGET_FDPIC)
    {
      rtx a = force_reg (Pmode, plus_constant (XEXP (m_tramp, 0), 8));
      mem = adjust_address (m_tramp, Pmode, 0);
      emit_move_insn (mem, a);
      i = 8;
    }

  mem = adjust_address (m_tramp, HImode, i + 2);
  emit_move_insn (mem, gen_lowpart (HImode, t1));
  emit_insn (gen_ashrsi3 (t1, t1, GEN_INT (16)));
  mem = adjust_address (m_tramp, HImode, i + 6);
  emit_move_insn (mem, gen_lowpart (HImode, t1));

  mem = adjust_address (m_tramp, HImode, i + 10);
  emit_move_insn (mem, gen_lowpart (HImode, t2));
  emit_insn (gen_ashrsi3 (t2, t2, GEN_INT (16)));
  mem = adjust_address (m_tramp, HImode, i + 14);
  emit_move_insn (mem, gen_lowpart (HImode, t2));
}
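
/* Illustration (non-FDPIC case, i == 0): the template emitted by
   bfin_asm_trampoline_template keeps the 16-bit immediate field of each
   "p1.l/p1.h/p2.l/p2.h = imm" insn at byte offsets 2, 6, 10 and 14,
   which is exactly where the halfword stores above patch in the low and
   high halves of the function address (t1) and the static chain (t2).  */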

/* Emit insns to move operands[1] into operands[0].  */

void
emit_pic_move (rtx *operands, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  rtx temp = reload_in_progress ? operands[0] : gen_reg_rtx (Pmode);

  gcc_assert (!TARGET_FDPIC || !(reload_in_progress || reload_completed));
  if (GET_CODE (operands[0]) == MEM && SYMBOLIC_CONST (operands[1]))
    operands[1] = force_reg (SImode, operands[1]);
  else
    operands[1] = legitimize_pic_address (operands[1], temp,
                                          TARGET_FDPIC ? OUR_FDPIC_REG
                                          : pic_offset_table_rtx);
}

/* Expand a move operation in mode MODE.  The operands are in OPERANDS.
   Returns true if no further code must be generated, false if the caller
   should generate an insn to move OPERANDS[1] to OPERANDS[0].  */

bool
expand_move (rtx *operands, enum machine_mode mode)
{
  rtx op = operands[1];
  if ((TARGET_ID_SHARED_LIBRARY || TARGET_FDPIC)
      && SYMBOLIC_CONST (op))
    emit_pic_move (operands, mode);
  else if (mode == SImode && GET_CODE (op) == CONST
           && GET_CODE (XEXP (op, 0)) == PLUS
           && GET_CODE (XEXP (XEXP (op, 0), 0)) == SYMBOL_REF
           && !targetm.legitimate_constant_p (mode, op))
    {
      rtx dest = operands[0];
      rtx op0, op1;
      gcc_assert (!reload_in_progress && !reload_completed);
      op = XEXP (op, 0);
      op0 = force_reg (mode, XEXP (op, 0));
      op1 = XEXP (op, 1);
      if (!insn_data[CODE_FOR_addsi3].operand[2].predicate (op1, mode))
        op1 = force_reg (mode, op1);
      if (GET_CODE (dest) == MEM)
        dest = gen_reg_rtx (mode);
      emit_insn (gen_addsi3 (dest, op0, op1));
      if (dest == operands[0])
        return true;
      operands[1] = dest;
    }
  /* Don't generate memory->memory or constant->memory moves; go through
     a register instead.  */
  else if ((reload_in_progress | reload_completed) == 0
           && GET_CODE (operands[0]) == MEM
           && GET_CODE (operands[1]) != REG)
    operands[1] = force_reg (mode, operands[1]);
  return false;
}

/* Split one or more DImode RTL references into pairs of SImode
   references.  The RTL can be REG, offsettable MEM, integer constant, or
   CONST_DOUBLE.  "operands" is a pointer to an array of DImode RTL to
   split and "num" is its length.  lo_half and hi_half are output arrays
   that parallel "operands".  */

void
split_di (rtx operands[], int num, rtx lo_half[], rtx hi_half[])
{
  while (num--)
    {
      rtx op = operands[num];

      /* simplify_subreg refuses to split volatile memory addresses,
         but we still have to handle them.  */
      if (GET_CODE (op) == MEM)
        {
          lo_half[num] = adjust_address (op, SImode, 0);
          hi_half[num] = adjust_address (op, SImode, 4);
        }
      else
        {
          lo_half[num] = simplify_gen_subreg (SImode, op,
                                              GET_MODE (op) == VOIDmode
                                              ? DImode : GET_MODE (op), 0);
          hi_half[num] = simplify_gen_subreg (SImode, op,
                                              GET_MODE (op) == VOIDmode
                                              ? DImode : GET_MODE (op), 4);
        }
    }
}

/* Return nonzero if a call to the SYMBOL_REF OP, whose call cookie is
   CALL_COOKIE, must use the long form of the call instruction.  */

bool
bfin_longcall_p (rtx op, int call_cookie)
{
  gcc_assert (GET_CODE (op) == SYMBOL_REF);
  if (SYMBOL_REF_WEAK (op))
    return 1;
  if (call_cookie & CALL_SHORT)
    return 0;
  if (call_cookie & CALL_LONG)
    return 1;
  if (TARGET_LONG_CALLS)
    return 1;
  return 0;
}

/* Expand a call instruction.  FNADDR is the call target, RETVAL the
   return value.  COOKIE is a CONST_INT holding the call_cookie prepared
   by init_cumulative_args.  SIBCALL is nonzero if this is a sibling
   call.  */

void
bfin_expand_call (rtx retval, rtx fnaddr, rtx callarg1, rtx cookie, int sibcall)
{
  rtx use = NULL, call;
  rtx callee = XEXP (fnaddr, 0);
  int nelts = 3;
  rtx pat;
  rtx picreg = get_hard_reg_initial_val (SImode, FDPIC_REGNO);
  rtx retsreg = gen_rtx_REG (Pmode, REG_RETS);
  int n;

  /* In an untyped call, we can get NULL for operand 2.  */
  if (cookie == NULL_RTX)
    cookie = const0_rtx;

  /* Static functions and indirect calls don't need the pic register.  */
  if (!TARGET_FDPIC && flag_pic
      && GET_CODE (callee) == SYMBOL_REF
      && !SYMBOL_REF_LOCAL_P (callee))
    use_reg (&use, pic_offset_table_rtx);

  if (TARGET_FDPIC)
    {
      int caller_in_sram, callee_in_sram;

      /* 0 is not in sram, 1 is in L1 sram, 2 is in L2 sram.  */
      caller_in_sram = callee_in_sram = 0;

      if (lookup_attribute ("l1_text",
                            DECL_ATTRIBUTES (cfun->decl)) != NULL_TREE)
        caller_in_sram = 1;
      else if (lookup_attribute ("l2",
                                 DECL_ATTRIBUTES (cfun->decl)) != NULL_TREE)
        caller_in_sram = 2;

      if (GET_CODE (callee) == SYMBOL_REF
          && SYMBOL_REF_DECL (callee) && DECL_P (SYMBOL_REF_DECL (callee)))
        {
          if (lookup_attribute
              ("l1_text",
               DECL_ATTRIBUTES (SYMBOL_REF_DECL (callee))) != NULL_TREE)
            callee_in_sram = 1;
          else if (lookup_attribute
                   ("l2",
                    DECL_ATTRIBUTES (SYMBOL_REF_DECL (callee))) != NULL_TREE)
            callee_in_sram = 2;
        }

      if (GET_CODE (callee) != SYMBOL_REF
          || bfin_longcall_p (callee, INTVAL (cookie))
          || (GET_CODE (callee) == SYMBOL_REF
              && !SYMBOL_REF_LOCAL_P (callee)
              && TARGET_INLINE_PLT)
          || caller_in_sram != callee_in_sram
          || (caller_in_sram && callee_in_sram
              && (GET_CODE (callee) != SYMBOL_REF
                  || !SYMBOL_REF_LOCAL_P (callee))))
        {
          rtx addr = callee;
          if (! address_operand (addr, Pmode))
            addr = force_reg (Pmode, addr);

          fnaddr = gen_reg_rtx (SImode);
          emit_insn (gen_load_funcdescsi (fnaddr, addr));
          fnaddr = gen_rtx_MEM (Pmode, fnaddr);

          picreg = gen_reg_rtx (SImode);
          emit_insn (gen_load_funcdescsi (picreg,
                                          plus_constant (addr, 4)));
        }

      nelts++;
    }
  else if ((!register_no_elim_operand (callee, Pmode)
            && GET_CODE (callee) != SYMBOL_REF)
           || (GET_CODE (callee) == SYMBOL_REF
               && ((TARGET_ID_SHARED_LIBRARY && !TARGET_LEAF_ID_SHARED_LIBRARY)
                   || bfin_longcall_p (callee, INTVAL (cookie)))))
    {
      callee = copy_to_mode_reg (Pmode, callee);
      fnaddr = gen_rtx_MEM (Pmode, callee);
    }
  call = gen_rtx_CALL (VOIDmode, fnaddr, callarg1);

  if (retval)
    call = gen_rtx_SET (VOIDmode, retval, call);

  pat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nelts));
  n = 0;
  XVECEXP (pat, 0, n++) = call;
  if (TARGET_FDPIC)
    XVECEXP (pat, 0, n++) = gen_rtx_USE (VOIDmode, picreg);
  XVECEXP (pat, 0, n++) = gen_rtx_USE (VOIDmode, cookie);
  if (sibcall)
    XVECEXP (pat, 0, n++) = ret_rtx;
  else
    XVECEXP (pat, 0, n++) = gen_rtx_CLOBBER (VOIDmode, retsreg);
  call = emit_call_insn (pat);
  if (use)
    CALL_INSN_FUNCTION_USAGE (call) = use;
}
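
/* Illustration: a normal call thus becomes a PARALLEL roughly like
     (parallel [(set (reg) (call (mem (symbol_ref)) ...))
                (use (const_int cookie))
                (clobber (reg:SI RETS))])
   with an extra (use picreg) inserted for FDPIC, and with the clobber
   of RETS replaced by a return rtx for sibling calls.  */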

/* Return 1 if hard register REGNO can hold a value of machine-mode MODE.  */

int
hard_regno_mode_ok (int regno, enum machine_mode mode)
{
  /* Allow only dregs to store values of mode HImode or QImode.  */
  enum reg_class rclass = REGNO_REG_CLASS (regno);

  if (mode == CCmode)
    return 0;

  if (mode == V2HImode)
    return D_REGNO_P (regno);
  if (rclass == CCREGS)
    return mode == BImode;
  if (mode == PDImode || mode == V2PDImode)
    return regno == REG_A0 || regno == REG_A1;

  /* Allow all normal 32-bit regs, except REG_M3, in case regclass ever comes
     up with a bad register class (such as ALL_REGS) for DImode.  */
  if (mode == DImode)
    return regno < REG_M3;

  if (mode == SImode
      && TEST_HARD_REG_BIT (reg_class_contents[PROLOGUE_REGS], regno))
    return 1;

  return TEST_HARD_REG_BIT (reg_class_contents[MOST_REGS], regno);
}

/* Implements target hook vector_mode_supported_p.  */

static bool
bfin_vector_mode_supported_p (enum machine_mode mode)
{
  return mode == V2HImode;
}

/* Worker function for TARGET_REGISTER_MOVE_COST.  */

static int
bfin_register_move_cost (enum machine_mode mode,
                         reg_class_t class1, reg_class_t class2)
{
  /* These need secondary reloads, so they're more expensive.  */
  if ((class1 == CCREGS && !reg_class_subset_p (class2, DREGS))
      || (class2 == CCREGS && !reg_class_subset_p (class1, DREGS)))
    return 4;

  /* If optimizing for size, always prefer reg-reg over reg-memory moves.  */
  if (optimize_size)
    return 2;

  if (GET_MODE_CLASS (mode) == MODE_INT)
    {
      /* Discourage trying to use the accumulators.  */
      if (TEST_HARD_REG_BIT (reg_class_contents[class1], REG_A0)
          || TEST_HARD_REG_BIT (reg_class_contents[class1], REG_A1)
          || TEST_HARD_REG_BIT (reg_class_contents[class2], REG_A0)
          || TEST_HARD_REG_BIT (reg_class_contents[class2], REG_A1))
        return 20;
    }
  return 2;
}

/* Worker function for TARGET_MEMORY_MOVE_COST.

   ??? In theory L1 memory has single-cycle latency.  We should add a switch
   that tells the compiler whether we expect to use only L1 memory for the
   program; it'll make the costs more accurate.  */

static int
bfin_memory_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED,
                       reg_class_t rclass,
                       bool in ATTRIBUTE_UNUSED)
{
  /* Make memory accesses slightly more expensive than any register-register
     move.  Also, penalize non-DP registers, since they need secondary
     reloads to load and store.  */
  if (! reg_class_subset_p (rclass, DPREGS))
    return 10;

  return 8;
}

/* Inform reload about cases where moving X with a mode MODE to a register in
   RCLASS requires an extra scratch register.  Return the class needed for the
   scratch register.  */

static reg_class_t
bfin_secondary_reload (bool in_p, rtx x, reg_class_t rclass_i,
                       enum machine_mode mode, secondary_reload_info *sri)
{
  /* If we have HImode or QImode, we can only use DREGS as secondary registers;
     in most other cases we can also use PREGS.  */
  enum reg_class default_class = GET_MODE_SIZE (mode) >= 4 ? DPREGS : DREGS;
  enum reg_class x_class = NO_REGS;
  enum rtx_code code = GET_CODE (x);
  enum reg_class rclass = (enum reg_class) rclass_i;

  if (code == SUBREG)
    x = SUBREG_REG (x), code = GET_CODE (x);
  if (REG_P (x))
    {
      int regno = REGNO (x);
      if (regno >= FIRST_PSEUDO_REGISTER)
        regno = reg_renumber[regno];

      if (regno == -1)
        code = MEM;
      else
        x_class = REGNO_REG_CLASS (regno);
    }

  /* We can be asked to reload (plus (FP) (large_constant)) into a DREG.
     This happens as a side effect of register elimination, and we need
     a scratch register to do it.  */
  if (fp_plus_const_operand (x, mode))
    {
      rtx op2 = XEXP (x, 1);
      int large_constant_p = ! satisfies_constraint_Ks7 (op2);

      if (rclass == PREGS || rclass == PREGS_CLOBBERED)
        return NO_REGS;
      /* If destination is a DREG, we can do this without a scratch register
         if the constant is valid for an add instruction.  */
      if ((rclass == DREGS || rclass == DPREGS)
          && ! large_constant_p)
        return NO_REGS;
      /* Reloading to anything other than a DREG?  Use a PREG scratch
         register.  */
      sri->icode = CODE_FOR_reload_insi;
      return NO_REGS;
    }

  /* Data can usually be moved freely between registers of most classes.
     AREGS are an exception; they can only move to or from another register
     in AREGS or one in DREGS.  They can also be assigned the constant 0.  */
  if (x_class == AREGS || x_class == EVEN_AREGS || x_class == ODD_AREGS)
    return (rclass == DREGS || rclass == AREGS || rclass == EVEN_AREGS
            || rclass == ODD_AREGS
            ? NO_REGS : DREGS);

  if (rclass == AREGS || rclass == EVEN_AREGS || rclass == ODD_AREGS)
    {
      if (code == MEM)
        {
          sri->icode = in_p ? CODE_FOR_reload_inpdi : CODE_FOR_reload_outpdi;
          return NO_REGS;
        }

      if (x != const0_rtx && x_class != DREGS)
        {
          return DREGS;
        }
      else
        return NO_REGS;
    }

  /* CCREGS can only be moved from/to DREGS.  */
  if (rclass == CCREGS && x_class != DREGS)
    return DREGS;
  if (x_class == CCREGS && rclass != DREGS)
    return DREGS;

  /* All registers other than AREGS can load arbitrary constants.  The only
     case that remains is MEM.  */
  if (code == MEM)
    if (! reg_class_subset_p (rclass, default_class))
      return default_class;

  return NO_REGS;
}

/* Implement TARGET_CLASS_LIKELY_SPILLED_P.  */

static bool
bfin_class_likely_spilled_p (reg_class_t rclass)
{
  switch (rclass)
    {
      case PREGS_CLOBBERED:
      case PROLOGUE_REGS:
      case P0REGS:
      case D0REGS:
      case D1REGS:
      case D2REGS:
      case CCREGS:
        return true;

      default:
        break;
    }

  return false;
}

static struct machine_function *
bfin_init_machine_status (void)
{
  return ggc_alloc_cleared_machine_function ();
}

/* Implement the TARGET_OPTION_OVERRIDE hook.  */

static void
bfin_option_override (void)
{
  /* If processor type is not specified, enable all workarounds.  */
  if (bfin_cpu_type == BFIN_CPU_UNKNOWN)
    {
      int i;

      for (i = 0; bfin_cpus[i].name != NULL; i++)
        bfin_workarounds |= bfin_cpus[i].workarounds;

      bfin_si_revision = 0xffff;
    }

  if (bfin_csync_anomaly == 1)
    bfin_workarounds |= WA_SPECULATIVE_SYNCS;
  else if (bfin_csync_anomaly == 0)
    bfin_workarounds &= ~WA_SPECULATIVE_SYNCS;

  if (bfin_specld_anomaly == 1)
    bfin_workarounds |= WA_SPECULATIVE_LOADS;
  else if (bfin_specld_anomaly == 0)
    bfin_workarounds &= ~WA_SPECULATIVE_LOADS;

  if (TARGET_OMIT_LEAF_FRAME_POINTER)
    flag_omit_frame_pointer = 1;

#ifdef SUBTARGET_FDPIC_NOT_SUPPORTED
  if (TARGET_FDPIC)
    error ("-mfdpic is not supported, please use a bfin-linux-uclibc target");
#endif

  /* Library identification.  */
  if (global_options_set.x_bfin_library_id && ! TARGET_ID_SHARED_LIBRARY)
    error ("-mshared-library-id= specified without -mid-shared-library");

  if (stack_limit_rtx && TARGET_FDPIC)
    {
      warning (0, "-fstack-limit- options are ignored with -mfdpic; use -mstack-check-l1");
      stack_limit_rtx = NULL_RTX;
    }

  if (stack_limit_rtx && TARGET_STACK_CHECK_L1)
    error ("can%'t use multiple stack checking methods together");

  if (TARGET_ID_SHARED_LIBRARY && TARGET_FDPIC)
    error ("ID shared libraries and FD-PIC mode can%'t be used together");

  /* Don't allow the user to specify -mid-shared-library and -msep-data
     together, as it makes little sense from a user's point of view...  */
  if (TARGET_SEP_DATA && TARGET_ID_SHARED_LIBRARY)
    error ("cannot specify both -msep-data and -mid-shared-library");
  /* ... internally, however, it's nearly the same.  */
  if (TARGET_SEP_DATA)
    target_flags |= MASK_ID_SHARED_LIBRARY | MASK_LEAF_ID_SHARED_LIBRARY;

  if (TARGET_ID_SHARED_LIBRARY && flag_pic == 0)
    flag_pic = 1;

  /* There is no single unaligned SI op for PIC code.  Sometimes we
     need to use ".4byte" and sometimes we need to use ".picptr".
     See bfin_assemble_integer for details.  */
  if (TARGET_FDPIC)
    targetm.asm_out.unaligned_op.si = 0;

  /* Silently turn off flag_pic if not doing FDPIC or ID shared libraries,
     since we don't support it and it'll just break.  */
  if (flag_pic && !TARGET_FDPIC && !TARGET_ID_SHARED_LIBRARY)
    flag_pic = 0;

  if (TARGET_MULTICORE && bfin_cpu_type != BFIN_CPU_BF561)
    error ("-mmulticore can only be used with BF561");

  if (TARGET_COREA && !TARGET_MULTICORE)
    error ("-mcorea should be used with -mmulticore");

  if (TARGET_COREB && !TARGET_MULTICORE)
    error ("-mcoreb should be used with -mmulticore");

  if (TARGET_COREA && TARGET_COREB)
    error ("-mcorea and -mcoreb can%'t be used together");

  flag_schedule_insns = 0;

  init_machine_status = bfin_init_machine_status;
}

/* Return the destination address of BRANCH.
   We need to use this instead of get_attr_length, because the
   cbranch_with_nops pattern conservatively sets its length to 6, and
   we still prefer to use shorter sequences.  */

static int
branch_dest (rtx branch)
{
  rtx dest;
  int dest_uid;
  rtx pat = PATTERN (branch);
  if (GET_CODE (pat) == PARALLEL)
    pat = XVECEXP (pat, 0, 0);
  dest = SET_SRC (pat);
  if (GET_CODE (dest) == IF_THEN_ELSE)
    dest = XEXP (dest, 1);
  dest = XEXP (dest, 0);
  dest_uid = INSN_UID (dest);
  return INSN_ADDRESSES (dest_uid);
}

/* Return nonzero if INSN is annotated with a REG_BR_PROB note that indicates
   it's a branch that's predicted taken.  */

static int
cbranch_predicted_taken_p (rtx insn)
{
  rtx x = find_reg_note (insn, REG_BR_PROB, 0);

  if (x)
    {
      int pred_val = INTVAL (XEXP (x, 0));

      return pred_val >= REG_BR_PROB_BASE / 2;
    }

  return 0;
}

/* Templates for use by asm_conditional_branch.  */

static const char *ccbranch_templates[][3] = {
  { "if !cc jump %3;",  "if cc jump 4 (bp); jump.s %3;",  "if cc jump 6 (bp); jump.l %3;" },
  { "if cc jump %3;",   "if !cc jump 4 (bp); jump.s %3;", "if !cc jump 6 (bp); jump.l %3;" },
  { "if !cc jump %3 (bp);",  "if cc jump 4; jump.s %3;",  "if cc jump 6; jump.l %3;" },
  { "if cc jump %3 (bp);",  "if !cc jump 4; jump.s %3;",  "if !cc jump 6; jump.l %3;" },
};

/* Output INSN, which is a conditional branch instruction with operands
   OPERANDS.

   We deal with the various forms of conditional branches that can be generated
   by bfin_reorg to prevent the hardware from doing speculative loads, by
   - emitting a sufficient number of nops, if N_NOPS is nonzero, or
   - always emitting the branch as predicted taken, if PREDICT_TAKEN is true.
   Either of these is only necessary if the branch is short, otherwise the
   template we use ends in an unconditional jump which flushes the pipeline
   anyway.  */

void
asm_conditional_branch (rtx insn, rtx *operands, int n_nops, int predict_taken)
{
  int offset = branch_dest (insn) - INSN_ADDRESSES (INSN_UID (insn));
  /* Note: the offset for instructions like "if cc jmp; jump.[sl] offset"
     is measured from the start of the "if cc" rather than from the jump,
     so the range for jump.s is (-4094, 4096) instead of (-4096, 4094).  */
  int len = (offset >= -1024 && offset <= 1022 ? 0
             : offset >= -4094 && offset <= 4096 ? 1
             : 2);
  int bp = predict_taken && len == 0 ? 1 : cbranch_predicted_taken_p (insn);
  int idx = (bp << 1) | (GET_CODE (operands[0]) == EQ ? BRF : BRT);
  output_asm_insn (ccbranch_templates[idx][len], operands);
  gcc_assert (n_nops == 0 || !bp);
  if (len == 0)
    while (n_nops-- > 0)
      output_asm_insn ("nop;", NULL);
}
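
/* Example: with a branch offset of +2000, the code above picks len == 1
   and emits one of the jump.s templates, e.g.
       if cc jump 4 (bp); jump.s %3;
   i.e. a short conditional hop over an unconditional jump.s to the
   target.  */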

/* Emit rtl for a comparison operation CMP in mode MODE.  The operands
   are taken from the two arms of CMP.  */

rtx
bfin_gen_compare (rtx cmp, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  enum rtx_code code1, code2;
  rtx op0 = XEXP (cmp, 0), op1 = XEXP (cmp, 1);
  rtx tem = bfin_cc_rtx;
  enum rtx_code code = GET_CODE (cmp);

  /* If we have a BImode input, then we already have a compare result, and
     do not need to emit another comparison.  */
  if (GET_MODE (op0) == BImode)
    {
      gcc_assert ((code == NE || code == EQ) && op1 == const0_rtx);
      tem = op0, code2 = code;
    }
  else
    {
      switch (code) {
        /* bfin has these conditions */
      case EQ:
      case LT:
      case LE:
      case LEU:
      case LTU:
        code1 = code;
        code2 = NE;
        break;
      default:
        code1 = reverse_condition (code);
        code2 = EQ;
        break;
      }
      emit_insn (gen_rtx_SET (VOIDmode, tem,
                              gen_rtx_fmt_ee (code1, BImode, op0, op1)));
    }

  return gen_rtx_fmt_ee (code2, BImode, tem, CONST0_RTX (BImode));
}
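
/* For example: GT is not one of the directly available conditions, so
   the code above emits CC = op0 <= op1 (the reversed condition) and
   returns an EQ test of CC against zero for the caller to use.  */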

/* Return nonzero iff C has exactly one bit set if it is interpreted
   as a 32-bit constant.  */

int
log2constp (unsigned HOST_WIDE_INT c)
{
  c &= 0xFFFFFFFF;
  return c != 0 && (c & (c-1)) == 0;
}

/* Returns the number of consecutive least significant zeros in the binary
   representation of *V.
   We modify *V to contain the original value arithmetically shifted right by
   the number of zeroes.  */

static int
shiftr_zero (HOST_WIDE_INT *v)
{
  unsigned HOST_WIDE_INT tmp = *v;
  unsigned HOST_WIDE_INT sgn;
  int n = 0;

  if (tmp == 0)
    return 0;

  sgn = tmp & ((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1));
  while ((tmp & 0x1) == 0 && n <= 32)
    {
      tmp = (tmp >> 1) | sgn;
      n++;
    }
  *v = tmp;
  return n;
}
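
/* For example: for *v == 0x50 (binary 1010000), shiftr_zero returns 4
   and leaves *v == 0x5.  */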

/* After reload, split the load of an immediate constant.  OPERANDS are the
   operands of the movsi_insn pattern which we are splitting.  We return
   nonzero if we emitted a sequence to load the constant, zero if we emitted
   nothing because we want to use the splitter's default sequence.  */

int
split_load_immediate (rtx operands[])
{
  HOST_WIDE_INT val = INTVAL (operands[1]);
  HOST_WIDE_INT tmp;
  HOST_WIDE_INT shifted = val;
  HOST_WIDE_INT shifted_compl = ~val;
  int num_zero = shiftr_zero (&shifted);
  int num_compl_zero = shiftr_zero (&shifted_compl);
  unsigned int regno = REGNO (operands[0]);

  /* This case takes care of single-bit set/clear constants, which we could
     also implement with BITSET/BITCLR.  */
  if (num_zero
      && shifted >= -32768 && shifted < 65536
      && (D_REGNO_P (regno)
          || (regno >= REG_P0 && regno <= REG_P7 && num_zero <= 2)))
    {
      emit_insn (gen_movsi (operands[0], GEN_INT (shifted)));
      emit_insn (gen_ashlsi3 (operands[0], operands[0], GEN_INT (num_zero)));
      return 1;
    }

  tmp = val & 0xFFFF;
  tmp |= -(tmp & 0x8000);

  /* If high word has one bit set or clear, try to use a bit operation.  */
  if (D_REGNO_P (regno))
    {
      if (log2constp (val & 0xFFFF0000))
        {
          emit_insn (gen_movsi (operands[0], GEN_INT (val & 0xFFFF)));
          emit_insn (gen_iorsi3 (operands[0], operands[0], GEN_INT (val & 0xFFFF0000)));
          return 1;
        }
      else if (log2constp (val | 0xFFFF) && (val & 0x8000) != 0)
        {
          emit_insn (gen_movsi (operands[0], GEN_INT (tmp)));
          emit_insn (gen_andsi3 (operands[0], operands[0], GEN_INT (val | 0xFFFF)));
        }
    }

  if (D_REGNO_P (regno))
    {
      if (tmp >= -64 && tmp <= 63)
        {
          emit_insn (gen_movsi (operands[0], GEN_INT (tmp)));
          emit_insn (gen_movstricthi_high (operands[0], GEN_INT (val & -65536)));
          return 1;
        }

      if ((val & 0xFFFF0000) == 0)
        {
          emit_insn (gen_movsi (operands[0], const0_rtx));
          emit_insn (gen_movsi_low (operands[0], operands[0], operands[1]));
          return 1;
        }

      if ((val & 0xFFFF0000) == 0xFFFF0000)
        {
          emit_insn (gen_movsi (operands[0], constm1_rtx));
          emit_insn (gen_movsi_low (operands[0], operands[0], operands[1]));
          return 1;
        }
    }

  /* Need DREGs for the remaining case.  */
  if (regno > REG_R7)
    return 0;

  if (optimize_size
      && num_compl_zero && shifted_compl >= -64 && shifted_compl <= 63)
    {
      /* If optimizing for size, generate a sequence that has more instructions
         but is shorter.  */
      emit_insn (gen_movsi (operands[0], GEN_INT (shifted_compl)));
      emit_insn (gen_ashlsi3 (operands[0], operands[0],
                              GEN_INT (num_compl_zero)));
      emit_insn (gen_one_cmplsi2 (operands[0], operands[0]));
      return 1;
    }
  return 0;
}
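
/* A worked example (illustrative): loading 0x30000 into a D register
   takes the first path above with shifted == 3 and num_zero == 16, so
   the splitter emits the two-insn sequence
       R0 = 3;
       R0 <<= 16;
   rather than loading the constant in two halfword moves.  */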

/* Return true if VALUE is a valid constant offset for a memory access
   of mode MODE.  Return false if not.  */

static bool
bfin_valid_add (enum machine_mode mode, HOST_WIDE_INT value)
{
  unsigned HOST_WIDE_INT v = value > 0 ? value : -value;
  int sz = GET_MODE_SIZE (mode);
  int shift = sz == 1 ? 0 : sz == 2 ? 1 : 2;
  /* The usual offsettable_memref machinery doesn't work so well for this
     port, so we deal with the problem here.  */
  if (value > 0 && sz == 8)
    v += 4;
  return (v & ~(0x7fff << shift)) == 0;
}
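
/* For example, for SImode (sz == 4, shift == 2) this accepts offsets
   that are multiples of 4 with magnitude up to 0x1fffc, which lines up
   with the [ Preg + uimm17m4 ] addressing mode listed below.  */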

static bool
bfin_valid_reg_p (unsigned int regno, int strict, enum machine_mode mode,
                  enum rtx_code outer_code)
{
  if (strict)
    return REGNO_OK_FOR_BASE_STRICT_P (regno, mode, outer_code, SCRATCH);
  else
    return REGNO_OK_FOR_BASE_NONSTRICT_P (regno, mode, outer_code, SCRATCH);
}

/* Recognize an RTL expression that is a valid memory address for an
   instruction.  The MODE argument is the machine mode for the MEM expression
   that wants to use this address.

   Blackfin addressing modes are as follows:

      [preg]
      [preg + imm16]

      B [ Preg + uimm15 ]
      W [ Preg + uimm16m2 ]
      [ Preg + uimm17m4 ]

      [preg++]
      [preg--]
      [--sp]
*/

static bool
bfin_legitimate_address_p (enum machine_mode mode, rtx x, bool strict)
{
  switch (GET_CODE (x)) {
  case REG:
    if (bfin_valid_reg_p (REGNO (x), strict, mode, MEM))
      return true;
    break;
  case PLUS:
    if (REG_P (XEXP (x, 0))
        && bfin_valid_reg_p (REGNO (XEXP (x, 0)), strict, mode, PLUS)
        && ((GET_CODE (XEXP (x, 1)) == UNSPEC && mode == SImode)
            || (GET_CODE (XEXP (x, 1)) == CONST_INT
                && bfin_valid_add (mode, INTVAL (XEXP (x, 1))))))
      return true;
    break;
  case POST_INC:
  case POST_DEC:
    if (LEGITIMATE_MODE_FOR_AUTOINC_P (mode)
        && REG_P (XEXP (x, 0))
        && bfin_valid_reg_p (REGNO (XEXP (x, 0)), strict, mode, POST_INC))
      return true;
  case PRE_DEC:
    if (LEGITIMATE_MODE_FOR_AUTOINC_P (mode)
        && XEXP (x, 0) == stack_pointer_rtx
        && REG_P (XEXP (x, 0))
        && bfin_valid_reg_p (REGNO (XEXP (x, 0)), strict, mode, PRE_DEC))
      return true;
    break;
  default:
    break;
  }
  return false;
}
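
/* Examples: [P0], [P0 + 60] and [P0++] are legitimate SImode addresses,
   while [P0 + 2] is not, since SImode offsets must be multiples of 4
   (see bfin_valid_add above).  */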

/* Decide whether we can force certain constants to memory.  If we
   decide we can't, the caller should be able to cope with it in
   another way.  */

static bool
bfin_cannot_force_const_mem (enum machine_mode mode ATTRIBUTE_UNUSED,
                             rtx x ATTRIBUTE_UNUSED)
{
  /* We have only one class of non-legitimate constants, and our movsi
     expander knows how to handle them.  Dropping these constants into the
     data section would only shift the problem - we'd still get relocs
     outside the object, in the data section rather than the text section.  */
  return true;
}

/* Ensure that for any constant of the form symbol + offset, the offset
   remains within the object.  Any other constants are ok.
   This ensures that flat binaries never have to deal with relocations
   crossing section boundaries.  */

static bool
bfin_legitimate_constant_p (enum machine_mode mode ATTRIBUTE_UNUSED, rtx x)
{
  rtx sym;
  HOST_WIDE_INT offset;

  if (GET_CODE (x) != CONST)
    return true;

  x = XEXP (x, 0);
  gcc_assert (GET_CODE (x) == PLUS);

  sym = XEXP (x, 0);
  x = XEXP (x, 1);
  if (GET_CODE (sym) != SYMBOL_REF
      || GET_CODE (x) != CONST_INT)
    return true;
  offset = INTVAL (x);

  if (SYMBOL_REF_DECL (sym) == 0)
    return true;
  if (offset < 0
      || offset >= int_size_in_bytes (TREE_TYPE (SYMBOL_REF_DECL (sym))))
    return false;

  return true;
}

static bool
bfin_rtx_costs (rtx x, int code_i, int outer_code_i, int opno, int *total,
                bool speed)
{
  enum rtx_code code = (enum rtx_code) code_i;
  enum rtx_code outer_code = (enum rtx_code) outer_code_i;
  int cost2 = COSTS_N_INSNS (1);
  rtx op0, op1;

  switch (code)
    {
    case CONST_INT:
      if (outer_code == SET || outer_code == PLUS)
        *total = satisfies_constraint_Ks7 (x) ? 0 : cost2;
      else if (outer_code == AND)
        *total = log2constp (~INTVAL (x)) ? 0 : cost2;
      else if (outer_code == LE || outer_code == LT || outer_code == EQ)
        *total = (INTVAL (x) >= -4 && INTVAL (x) <= 3) ? 0 : cost2;
      else if (outer_code == LEU || outer_code == LTU)
        *total = (INTVAL (x) >= 0 && INTVAL (x) <= 7) ? 0 : cost2;
      else if (outer_code == MULT)
        *total = (INTVAL (x) == 2 || INTVAL (x) == 4) ? 0 : cost2;
      else if (outer_code == ASHIFT && (INTVAL (x) == 1 || INTVAL (x) == 2))
        *total = 0;
      else if (outer_code == ASHIFT || outer_code == ASHIFTRT
               || outer_code == LSHIFTRT)
        *total = (INTVAL (x) >= 0 && INTVAL (x) <= 31) ? 0 : cost2;
      else if (outer_code == IOR || outer_code == XOR)
        *total = (INTVAL (x) & (INTVAL (x) - 1)) == 0 ? 0 : cost2;
      else
        *total = cost2;
      return true;

    case CONST:
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_DOUBLE:
      *total = COSTS_N_INSNS (2);
      return true;

    case PLUS:
      op0 = XEXP (x, 0);
      op1 = XEXP (x, 1);
      if (GET_MODE (x) == SImode)
        {
          if (GET_CODE (op0) == MULT
              && GET_CODE (XEXP (op0, 1)) == CONST_INT)
            {
              HOST_WIDE_INT val = INTVAL (XEXP (op0, 1));
              if (val == 2 || val == 4)
                {
                  *total = cost2;
                  *total += rtx_cost (XEXP (op0, 0), outer_code, opno, speed);
                  *total += rtx_cost (op1, outer_code, opno, speed);
                  return true;
                }
            }
          *total = cost2;
          if (GET_CODE (op0) != REG
              && (GET_CODE (op0) != SUBREG || GET_CODE (SUBREG_REG (op0)) != REG))
            *total += set_src_cost (op0, speed);
#if 0 /* We'd like to do this for accuracy, but it biases the loop optimizer
         towards creating too many induction variables.  */
          if (!reg_or_7bit_operand (op1, SImode))
            *total += set_src_cost (op1, speed);
#endif
        }
      else if (GET_MODE (x) == DImode)
        {
          *total = 6 * cost2;
          if (GET_CODE (op1) != CONST_INT
              || !satisfies_constraint_Ks7 (op1))
            *total += rtx_cost (op1, PLUS, 1, speed);
          if (GET_CODE (op0) != REG
              && (GET_CODE (op0) != SUBREG || GET_CODE (SUBREG_REG (op0)) != REG))
            *total += rtx_cost (op0, PLUS, 0, speed);
        }
      return true;

    case MINUS:
      if (GET_MODE (x) == DImode)
        *total = 6 * cost2;
      else
        *total = cost2;
      return true;

    case ASHIFT:
    case ASHIFTRT:
    case LSHIFTRT:
      if (GET_MODE (x) == DImode)
        *total = 6 * cost2;
      else
        *total = cost2;

      op0 = XEXP (x, 0);
      op1 = XEXP (x, 1);
      if (GET_CODE (op0) != REG
          && (GET_CODE (op0) != SUBREG || GET_CODE (SUBREG_REG (op0)) != REG))
        *total += rtx_cost (op0, code, 0, speed);

      return true;

    case IOR:
    case AND:
    case XOR:
      op0 = XEXP (x, 0);
      op1 = XEXP (x, 1);

      /* Handle special cases of IOR: rotates, ALIGN insns, movstricthi_high.  */
      if (code == IOR)
        {
          if ((GET_CODE (op0) == LSHIFTRT && GET_CODE (op1) == ASHIFT)
              || (GET_CODE (op0) == ASHIFT && GET_CODE (op1) == ZERO_EXTEND)
              || (GET_CODE (op0) == ASHIFT && GET_CODE (op1) == LSHIFTRT)
              || (GET_CODE (op0) == AND && GET_CODE (op1) == CONST_INT))
            {
              *total = cost2;
              return true;
            }
        }

      if (GET_CODE (op0) != REG
          && (GET_CODE (op0) != SUBREG || GET_CODE (SUBREG_REG (op0)) != REG))
        *total += rtx_cost (op0, code, 0, speed);

      if (GET_MODE (x) == DImode)
        {
          *total = 2 * cost2;
          return true;
        }
      *total = cost2;
      if (GET_MODE (x) != SImode)
        return true;

      if (code == AND)
        {
          if (! rhs_andsi3_operand (XEXP (x, 1), SImode))
            *total += rtx_cost (XEXP (x, 1), code, 1, speed);
        }
      else
        {
          if (! regorlog2_operand (XEXP (x, 1), SImode))
            *total += rtx_cost (XEXP (x, 1), code, 1, speed);
        }

      return true;

    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
      if (outer_code == SET
          && XEXP (x, 1) == const1_rtx
          && GET_CODE (XEXP (x, 2)) == CONST_INT)
        {
          *total = 2 * cost2;
          return true;
        }
      /* fall through */

    case SIGN_EXTEND:
    case ZERO_EXTEND:
      *total = cost2;
      return true;

    case MULT:
        {
          op0 = XEXP (x, 0);
          op1 = XEXP (x, 1);
          if (GET_CODE (op0) == GET_CODE (op1)
              && (GET_CODE (op0) == ZERO_EXTEND
                  || GET_CODE (op0) == SIGN_EXTEND))
            {
              *total = COSTS_N_INSNS (1);
              op0 = XEXP (op0, 0);
              op1 = XEXP (op1, 0);
            }
          else if (!speed)
            *total = COSTS_N_INSNS (1);
          else
            *total = COSTS_N_INSNS (3);

          if (GET_CODE (op0) != REG
              && (GET_CODE (op0) != SUBREG || GET_CODE (SUBREG_REG (op0)) != REG))
            *total += rtx_cost (op0, MULT, 0, speed);
          if (GET_CODE (op1) != REG
              && (GET_CODE (op1) != SUBREG || GET_CODE (SUBREG_REG (op1)) != REG))
            *total += rtx_cost (op1, MULT, 1, speed);
        }
      return true;

    case UDIV:
    case UMOD:
      *total = COSTS_N_INSNS (32);
      return true;

    case VEC_CONCAT:
    case VEC_SELECT:
      if (outer_code == SET)
        *total = cost2;
      return true;

    default:
      return false;
    }
}
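
/* For example, (plus:SI (mult:SI (reg) (const_int 4)) (reg)) is costed
   as a single insn above, on the theory that a multiply by 2 or 4 folds
   into an address computation for free.  */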

/* Used for communication between {push,pop}_multiple_operation (which
   we use not only as a predicate) and the corresponding output functions.  */
static int first_preg_to_save, first_dreg_to_save;
static int n_regs_to_save;

/* Return 1 if OP, a PARALLEL, matches the RTL pattern of a multi-register
   push; as a side effect, set first_dreg_to_save, first_preg_to_save and
   n_regs_to_save.  */

int
push_multiple_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  int lastdreg = 8, lastpreg = 6;
  int i, group;

  first_preg_to_save = lastpreg;
  first_dreg_to_save = lastdreg;
  for (i = 1, group = 0; i < XVECLEN (op, 0) - 1; i++)
    {
      rtx t = XVECEXP (op, 0, i);
      rtx src, dest;
      int regno;

      if (GET_CODE (t) != SET)
        return 0;

      src = SET_SRC (t);
      dest = SET_DEST (t);
      if (GET_CODE (dest) != MEM || ! REG_P (src))
        return 0;
      dest = XEXP (dest, 0);
      if (GET_CODE (dest) != PLUS
          || ! REG_P (XEXP (dest, 0))
          || REGNO (XEXP (dest, 0)) != REG_SP
          || GET_CODE (XEXP (dest, 1)) != CONST_INT
          || INTVAL (XEXP (dest, 1)) != -i * 4)
        return 0;

      regno = REGNO (src);
      if (group == 0)
        {
          if (D_REGNO_P (regno))
            {
              group = 1;
              first_dreg_to_save = lastdreg = regno - REG_R0;
            }
          else if (regno >= REG_P0 && regno <= REG_P7)
            {
              group = 2;
              first_preg_to_save = lastpreg = regno - REG_P0;
            }
          else
            return 0;

          continue;
        }

      if (group == 1)
        {
          if (regno >= REG_P0 && regno <= REG_P7)
            {
              group = 2;
              first_preg_to_save = lastpreg = regno - REG_P0;
            }
          else if (regno != REG_R0 + lastdreg + 1)
            return 0;
          else
            lastdreg++;
        }
      else if (group == 2)
        {
          if (regno != REG_P0 + lastpreg + 1)
            return 0;
          lastpreg++;
        }
    }
  n_regs_to_save = 8 - first_dreg_to_save + 6 - first_preg_to_save;
  return 1;
}

/* Return 1 if OP, a PARALLEL, matches the RTL pattern of a multi-register
   pop; as a side effect, set first_dreg_to_save, first_preg_to_save and
   n_regs_to_save.  */

int
pop_multiple_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  int lastdreg = 8, lastpreg = 6;
  int i, group;

  for (i = 1, group = 0; i < XVECLEN (op, 0); i++)
    {
      rtx t = XVECEXP (op, 0, i);
      rtx src, dest;
      int regno;

      if (GET_CODE (t) != SET)
        return 0;

      src = SET_SRC (t);
      dest = SET_DEST (t);
      if (GET_CODE (src) != MEM || ! REG_P (dest))
        return 0;
      src = XEXP (src, 0);

      if (i == 1)
        {
          if (! REG_P (src) || REGNO (src) != REG_SP)
            return 0;
        }
      else if (GET_CODE (src) != PLUS
               || ! REG_P (XEXP (src, 0))
               || REGNO (XEXP (src, 0)) != REG_SP
               || GET_CODE (XEXP (src, 1)) != CONST_INT
               || INTVAL (XEXP (src, 1)) != (i - 1) * 4)
        return 0;

      regno = REGNO (dest);
      if (group == 0)
        {
          if (regno == REG_R7)
            {
              group = 1;
              lastdreg = 7;
            }
          else if (regno != REG_P0 + lastpreg - 1)
            return 0;
          else
            lastpreg--;
        }
      else if (group == 1)
        {
          if (regno != REG_R0 + lastdreg - 1)
            return 0;
          else
            lastdreg--;
        }
    }
  first_dreg_to_save = lastdreg;
  first_preg_to_save = lastpreg;
  n_regs_to_save = 8 - first_dreg_to_save + 6 - first_preg_to_save;
  return 1;
}

/* Emit assembly code for one multi-register push described by INSN, with
   operands in OPERANDS.  */

void
output_push_multiple (rtx insn, rtx *operands)
{
  char buf[80];
  int ok;

  /* Validate the insn again, and compute first_[dp]reg_to_save.  */
  ok = push_multiple_operation (PATTERN (insn), VOIDmode);
  gcc_assert (ok);

  if (first_dreg_to_save == 8)
    sprintf (buf, "[--sp] = ( p5:%d );\n", first_preg_to_save);
  else if (first_preg_to_save == 6)
    sprintf (buf, "[--sp] = ( r7:%d );\n", first_dreg_to_save);
  else
    sprintf (buf, "[--sp] = ( r7:%d, p5:%d );\n",
             first_dreg_to_save, first_preg_to_save);

  output_asm_insn (buf, operands);
}
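
/* For example, with first_dreg_to_save == 4 and first_preg_to_save == 6
   (no P registers to save), this emits
       [--sp] = ( r7:4 );
   which pushes R7 down to R4 in a single instruction.  */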

/* Emit assembly code for one multi-register pop described by INSN, with
   operands in OPERANDS.  */

void
output_pop_multiple (rtx insn, rtx *operands)
{
  char buf[80];
  int ok;

  /* Validate the insn again, and compute first_[dp]reg_to_save.  */
  ok = pop_multiple_operation (PATTERN (insn), VOIDmode);
  gcc_assert (ok);

  if (first_dreg_to_save == 8)
    sprintf (buf, "( p5:%d ) = [sp++];\n", first_preg_to_save);
  else if (first_preg_to_save == 6)
    sprintf (buf, "( r7:%d ) = [sp++];\n", first_dreg_to_save);
  else
    sprintf (buf, "( r7:%d, p5:%d ) = [sp++];\n",
             first_dreg_to_save, first_preg_to_save);

  output_asm_insn (buf, operands);
}

/* Adjust DST and SRC by OFFSET bytes, and generate one move in mode MODE.  */

static void
single_move_for_movmem (rtx dst, rtx src, enum machine_mode mode, HOST_WIDE_INT offset)
{
  rtx scratch = gen_reg_rtx (mode);
  rtx srcmem, dstmem;

  srcmem = adjust_address_nv (src, mode, offset);
  dstmem = adjust_address_nv (dst, mode, offset);
  emit_move_insn (scratch, srcmem);
  emit_move_insn (dstmem, scratch);
}

/* Expand a string move operation of COUNT_EXP bytes from SRC to DST, with
   alignment ALIGN_EXP.  Return true if successful, false if we should fall
   back on a different method.  */

bool
bfin_expand_movmem (rtx dst, rtx src, rtx count_exp, rtx align_exp)
{
  rtx srcreg, destreg, countreg;
  HOST_WIDE_INT align = 0;
  unsigned HOST_WIDE_INT count = 0;

  if (GET_CODE (align_exp) == CONST_INT)
    align = INTVAL (align_exp);
  if (GET_CODE (count_exp) == CONST_INT)
    {
      count = INTVAL (count_exp);
#if 0
      if (!TARGET_INLINE_ALL_STRINGOPS && count > 64)
        return false;
#endif
    }

  /* If optimizing for size, only do single copies inline.  */
  if (optimize_size)
    {
      if (count == 2 && align < 2)
        return false;
      if (count == 4 && align < 4)
        return false;
      if (count != 1 && count != 2 && count != 4)
        return false;
    }
  if (align < 2 && count != 1)
    return false;

  destreg = copy_to_mode_reg (Pmode, XEXP (dst, 0));
  if (destreg != XEXP (dst, 0))
    dst = replace_equiv_address_nv (dst, destreg);
  srcreg = copy_to_mode_reg (Pmode, XEXP (src, 0));
  if (srcreg != XEXP (src, 0))
    src = replace_equiv_address_nv (src, srcreg);

  if (count != 0 && align >= 2)
    {
      unsigned HOST_WIDE_INT offset = 0;

      if (align >= 4)
        {
          if ((count & ~3) == 4)
            {
              single_move_for_movmem (dst, src, SImode, offset);
              offset = 4;
            }
          else if (count & ~3)
            {
              HOST_WIDE_INT new_count = ((count >> 2) & 0x3fffffff) - 1;
              countreg = copy_to_mode_reg (Pmode, GEN_INT (new_count));

              emit_insn (gen_rep_movsi (destreg, srcreg, countreg, destreg, srcreg));
              cfun->machine->has_loopreg_clobber = true;
            }
          if (count & 2)
            {
              single_move_for_movmem (dst, src, HImode, offset);
              offset += 2;
            }
        }
      else
        {
          if ((count & ~1) == 2)
            {
              single_move_for_movmem (dst, src, HImode, offset);
              offset = 2;
            }
          else if (count & ~1)
            {
              HOST_WIDE_INT new_count = ((count >> 1) & 0x7fffffff) - 1;
              countreg = copy_to_mode_reg (Pmode, GEN_INT (new_count));

              emit_insn (gen_rep_movhi (destreg, srcreg, countreg, destreg, srcreg));
              cfun->machine->has_loopreg_clobber = true;
            }
        }
      if (count & 1)
        {
          single_move_for_movmem (dst, src, QImode, offset);
        }
      return true;
    }
  return false;
}
3272
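
/* For example (an illustrative sketch, not verbatim compiler output):
   an 11-byte copy with 4-byte alignment is expanded as a rep_movsi
   hardware loop covering the word-sized part (count >> 2 word moves),
   followed by one HImode move for the "count & 2" tail and one QImode
   move for the "count & 1" tail.  */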
 
/* Compute the alignment for a local variable.
   TYPE is the data type, and ALIGN is the alignment that
   the object would ordinarily have.  The value of this macro is used
   instead of that alignment to align the object.  */

unsigned
bfin_local_alignment (tree type, unsigned align)
{
  /* Increasing the alignment of (relatively) big types allows the builtin
     memcpy to use 32-bit loads/stores.  */
  if (TYPE_SIZE (type)
      && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
      && (TREE_INT_CST_LOW (TYPE_SIZE (type)) > 8
          || TREE_INT_CST_HIGH (TYPE_SIZE (type))) && align < 32)
    return 32;
  return align;
}
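
/* E.g. (a sketch): a local "char buf[16]", whose TYPE_SIZE of 128 bits
   exceeds the 8-bit threshold above, is given 32-bit alignment even
   though its type only requires byte alignment.  */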
 
/* Implement TARGET_SCHED_ISSUE_RATE.  */

static int
bfin_issue_rate (void)
{
  return 3;
}

static int
bfin_adjust_cost (rtx insn, rtx link, rtx dep_insn, int cost)
{
  enum attr_type dep_insn_type;
  int dep_insn_code_number;

  /* Anti and output dependencies have zero cost.  */
  if (REG_NOTE_KIND (link) != 0)
    return 0;

  dep_insn_code_number = recog_memoized (dep_insn);

  /* If we can't recognize the insns, we can't really do anything.  */
  if (dep_insn_code_number < 0 || recog_memoized (insn) < 0)
    return cost;

  dep_insn_type = get_attr_type (dep_insn);

  if (dep_insn_type == TYPE_MOVE || dep_insn_type == TYPE_MCLD)
    {
      rtx pat = PATTERN (dep_insn);
      rtx dest, src;

      if (GET_CODE (pat) == PARALLEL)
        pat = XVECEXP (pat, 0, 0);
      dest = SET_DEST (pat);
      src = SET_SRC (pat);
      if (! ADDRESS_REGNO_P (REGNO (dest))
          || ! (MEM_P (src) || D_REGNO_P (REGNO (src))))
        return cost;
      return cost + (dep_insn_type == TYPE_MOVE ? 4 : 3);
    }

  return cost;
}
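
/* The cost adjustment above implements the TARGET_SCHED_ADJUST_COST hook.
   As an illustrative schedule (not compiler output):
       P0 = R1;    <- TYPE_MOVE whose destination is an address register
       R2 = [P0];  <- consumer addresses memory through P0
   the true dependence between the two insns is charged 4 extra cycles
   (3 when P0 is loaded from memory instead), modelling the delay before
   a newly set P register can be used in an address calculation.  */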
 
/* This function acts like NEXT_INSN, but is aware of three-insn bundles and
   skips all subsequent parallel instructions if INSN is the start of such
   a group.  */
static rtx
find_next_insn_start (rtx insn)
{
  if (GET_MODE (insn) == SImode)
    {
      while (GET_MODE (insn) != QImode)
        insn = NEXT_INSN (insn);
    }
  return NEXT_INSN (insn);
}

/* This function acts like PREV_INSN, but is aware of three-insn bundles:
   if the previous insn ends such a group, skip back to the bundle's
   first instruction.  */
static rtx
find_prev_insn_start (rtx insn)
{
  insn = PREV_INSN (insn);
  gcc_assert (GET_MODE (insn) != SImode);
  if (GET_MODE (insn) == QImode)
    {
      while (GET_MODE (PREV_INSN (insn)) == SImode)
        insn = PREV_INSN (insn);
    }
  return insn;
}
 
/* Increment the counter for the number of loop instructions in the
   current function.  */

void
bfin_hardware_loop (void)
{
  cfun->machine->has_hardware_loops++;
}

/* Maximum loop nesting depth.  */
#define MAX_LOOP_DEPTH 2

/* Maximum size of a loop.  */
#define MAX_LOOP_LENGTH 2042

/* Maximum distance of the LSETUP instruction from the loop start.  */
#define MAX_LSETUP_DISTANCE 30

/* Estimate the length of INSN conservatively.  */

static int
length_for_loop (rtx insn)
{
  int length = 0;
  if (JUMP_P (insn) && any_condjump_p (insn) && !optimize_size)
    {
      if (ENABLE_WA_SPECULATIVE_SYNCS)
        length = 8;
      else if (ENABLE_WA_SPECULATIVE_LOADS)
        length = 6;
    }
  else if (LABEL_P (insn))
    {
      if (ENABLE_WA_SPECULATIVE_SYNCS)
        length = 4;
    }

  if (NONDEBUG_INSN_P (insn))
    length += get_attr_length (insn);

  return length;
}

/* Optimize LOOP.  */

static bool
hwloop_optimize (hwloop_info loop)
{
  basic_block bb;
  hwloop_info inner;
  rtx insn, last_insn;
  rtx loop_init, start_label, end_label;
  rtx iter_reg, scratchreg, scratch_init, scratch_init_insn;
  rtx lc_reg, lt_reg, lb_reg;
  rtx seq, seq_end;
  int length;
  unsigned ix;
  bool clobber0, clobber1;

  if (loop->depth > MAX_LOOP_DEPTH)
    {
      if (dump_file)
        fprintf (dump_file, ";; loop %d too deep\n", loop->loop_no);
      return false;
    }

  /* Get the loop iteration register.  */
  iter_reg = loop->iter_reg;

  gcc_assert (REG_P (iter_reg));

  scratchreg = NULL_RTX;
  scratch_init = iter_reg;
  scratch_init_insn = NULL_RTX;
  if (!PREG_P (iter_reg) && loop->incoming_src)
    {
      basic_block bb_in = loop->incoming_src;
      int i;
      for (i = REG_P0; i <= REG_P5; i++)
        if ((df_regs_ever_live_p (i)
             || (funkind (TREE_TYPE (current_function_decl)) == SUBROUTINE
                 && call_used_regs[i]))
            && !REGNO_REG_SET_P (df_get_live_out (bb_in), i))
          {
            scratchreg = gen_rtx_REG (SImode, i);
            break;
          }
      for (insn = BB_END (bb_in); insn != BB_HEAD (bb_in);
           insn = PREV_INSN (insn))
        {
          rtx set;
          if (NOTE_P (insn) || BARRIER_P (insn))
            continue;
          set = single_set (insn);
          if (set && rtx_equal_p (SET_DEST (set), iter_reg))
            {
              if (CONSTANT_P (SET_SRC (set)))
                {
                  scratch_init = SET_SRC (set);
                  scratch_init_insn = insn;
                }
              break;
            }
          else if (reg_mentioned_p (iter_reg, PATTERN (insn)))
            break;
        }
    }

  if (loop->incoming_src)
    {
      /* Make sure the predecessor is before the loop start label, as required by
         the LSETUP instruction.  */
      length = 0;
      insn = BB_END (loop->incoming_src);
      /* If we have to insert the LSETUP before a jump, count that jump in the
         length.  */
      if (VEC_length (edge, loop->incoming) > 1
          || !(VEC_last (edge, loop->incoming)->flags & EDGE_FALLTHRU))
        {
          gcc_assert (JUMP_P (insn));
          insn = PREV_INSN (insn);
        }

      for (; insn && insn != loop->start_label; insn = NEXT_INSN (insn))
        length += length_for_loop (insn);

      if (!insn)
        {
          if (dump_file)
            fprintf (dump_file, ";; loop %d lsetup not before loop_start\n",
                     loop->loop_no);
          return false;
        }

      /* Account for the pop of a scratch register where necessary.  */
      if (!PREG_P (iter_reg) && scratchreg == NULL_RTX
          && ENABLE_WA_LOAD_LCREGS)
        length += 2;

      if (length > MAX_LSETUP_DISTANCE)
        {
          if (dump_file)
            fprintf (dump_file, ";; loop %d lsetup too far away\n", loop->loop_no);
          return false;
        }
    }

  /* Check if start_label appears before loop_end and calculate the
     offset between them.  We calculate the length of instructions
     conservatively.  */
  length = 0;
  for (insn = loop->start_label;
       insn && insn != loop->loop_end;
       insn = NEXT_INSN (insn))
    length += length_for_loop (insn);

  if (!insn)
    {
      if (dump_file)
        fprintf (dump_file, ";; loop %d start_label not before loop_end\n",
                 loop->loop_no);
      return false;
    }

  loop->length = length;
  if (loop->length > MAX_LOOP_LENGTH)
    {
      if (dump_file)
        fprintf (dump_file, ";; loop %d too long\n", loop->loop_no);
      return false;
    }

  /* Scan all the blocks to make sure they don't use iter_reg.  */
  if (loop->iter_reg_used || loop->iter_reg_used_outside)
    {
      if (dump_file)
        fprintf (dump_file, ";; loop %d uses iterator\n", loop->loop_no);
      return false;
    }

  clobber0 = (TEST_HARD_REG_BIT (loop->regs_set_in_loop, REG_LC0)
              || TEST_HARD_REG_BIT (loop->regs_set_in_loop, REG_LB0)
              || TEST_HARD_REG_BIT (loop->regs_set_in_loop, REG_LT0));
  clobber1 = (TEST_HARD_REG_BIT (loop->regs_set_in_loop, REG_LC1)
              || TEST_HARD_REG_BIT (loop->regs_set_in_loop, REG_LB1)
              || TEST_HARD_REG_BIT (loop->regs_set_in_loop, REG_LT1));
  if (clobber0 && clobber1)
    {
      if (dump_file)
        fprintf (dump_file, ";; loop %d no loop reg available\n",
                 loop->loop_no);
      return false;
    }

  /* There should be an instruction before the loop_end instruction
     in the same basic block. And the instruction must not be
     - JUMP
     - CONDITIONAL BRANCH
     - CALL
     - CSYNC
     - SSYNC
     - Returns (RTS, RTN, etc.)  */

  bb = loop->tail;
  last_insn = find_prev_insn_start (loop->loop_end);

  while (1)
    {
      for (; last_insn != BB_HEAD (bb);
           last_insn = find_prev_insn_start (last_insn))
        if (NONDEBUG_INSN_P (last_insn))
          break;

      if (last_insn != BB_HEAD (bb))
        break;

      if (single_pred_p (bb)
          && single_pred_edge (bb)->flags & EDGE_FALLTHRU
          && single_pred (bb) != ENTRY_BLOCK_PTR)
        {
          bb = single_pred (bb);
          last_insn = BB_END (bb);
          continue;
        }
      else
        {
          last_insn = NULL_RTX;
          break;
        }
    }

  if (!last_insn)
    {
      if (dump_file)
        fprintf (dump_file, ";; loop %d has no last instruction\n",
                 loop->loop_no);
      return false;
    }

  if (JUMP_P (last_insn) && !any_condjump_p (last_insn))
    {
      if (dump_file)
        fprintf (dump_file, ";; loop %d has bad last instruction\n",
                 loop->loop_no);
      return false;
    }
  /* In all other cases, try to replace a bad last insn with a nop.  */
  else if (JUMP_P (last_insn)
           || CALL_P (last_insn)
           || get_attr_type (last_insn) == TYPE_SYNC
           || get_attr_type (last_insn) == TYPE_CALL
           || get_attr_seq_insns (last_insn) == SEQ_INSNS_MULTI
           || recog_memoized (last_insn) == CODE_FOR_return_internal
           || GET_CODE (PATTERN (last_insn)) == ASM_INPUT
           || asm_noperands (PATTERN (last_insn)) >= 0)
    {
      if (loop->length + 2 > MAX_LOOP_LENGTH)
        {
          if (dump_file)
            fprintf (dump_file, ";; loop %d too long\n", loop->loop_no);
          return false;
        }
      if (dump_file)
        fprintf (dump_file, ";; loop %d has bad last insn; replace with nop\n",
                 loop->loop_no);

      last_insn = emit_insn_after (gen_forced_nop (), last_insn);
    }

  loop->last_insn = last_insn;

  /* The loop is good for replacement.  */
  start_label = loop->start_label;
  end_label = gen_label_rtx ();
  iter_reg = loop->iter_reg;

  if (loop->depth == 1 && !clobber1)
    {
      lc_reg = gen_rtx_REG (SImode, REG_LC1);
      lb_reg = gen_rtx_REG (SImode, REG_LB1);
      lt_reg = gen_rtx_REG (SImode, REG_LT1);
      SET_HARD_REG_BIT (loop->regs_set_in_loop, REG_LC1);
    }
  else
    {
      lc_reg = gen_rtx_REG (SImode, REG_LC0);
      lb_reg = gen_rtx_REG (SImode, REG_LB0);
      lt_reg = gen_rtx_REG (SImode, REG_LT0);
      SET_HARD_REG_BIT (loop->regs_set_in_loop, REG_LC0);
    }

  loop->end_label = end_label;

  /* Create a sequence containing the loop setup.  */
  start_sequence ();

  /* LSETUP only accepts P registers.  If we have one, we can use it,
     otherwise there are several ways of working around the problem.
     If we're not affected by anomaly 312, we can load the LC register
     from any iteration register, and use LSETUP without initialization.
     If we've found a P scratch register that's not live here, we can
     instead copy the iter_reg into that and use an initializing LSETUP.
     If all else fails, push and pop P0 and use it as a scratch.  */
  if (P_REGNO_P (REGNO (iter_reg)))
    {
      loop_init = gen_lsetup_with_autoinit (lt_reg, start_label,
                                            lb_reg, end_label,
                                            lc_reg, iter_reg);
      seq_end = emit_insn (loop_init);
    }
  else if (!ENABLE_WA_LOAD_LCREGS && DPREG_P (iter_reg))
    {
      emit_insn (gen_movsi (lc_reg, iter_reg));
      loop_init = gen_lsetup_without_autoinit (lt_reg, start_label,
                                               lb_reg, end_label,
                                               lc_reg);
      seq_end = emit_insn (loop_init);
    }
  else if (scratchreg != NULL_RTX)
    {
      emit_insn (gen_movsi (scratchreg, scratch_init));
      loop_init = gen_lsetup_with_autoinit (lt_reg, start_label,
                                            lb_reg, end_label,
                                            lc_reg, scratchreg);
      seq_end = emit_insn (loop_init);
      if (scratch_init_insn != NULL_RTX)
        delete_insn (scratch_init_insn);
    }
  else
    {
      rtx p0reg = gen_rtx_REG (SImode, REG_P0);
      rtx push = gen_frame_mem (SImode,
                                gen_rtx_PRE_DEC (SImode, stack_pointer_rtx));
      rtx pop = gen_frame_mem (SImode,
                               gen_rtx_POST_INC (SImode, stack_pointer_rtx));
      emit_insn (gen_movsi (push, p0reg));
      emit_insn (gen_movsi (p0reg, scratch_init));
      loop_init = gen_lsetup_with_autoinit (lt_reg, start_label,
                                            lb_reg, end_label,
                                            lc_reg, p0reg);
      emit_insn (loop_init);
      seq_end = emit_insn (gen_movsi (p0reg, pop));
      if (scratch_init_insn != NULL_RTX)
        delete_insn (scratch_init_insn);
    }

  if (dump_file)
    {
      fprintf (dump_file, ";; replacing loop %d initializer with\n",
               loop->loop_no);
      print_rtl_single (dump_file, loop_init);
      fprintf (dump_file, ";; replacing loop %d terminator with\n",
               loop->loop_no);
      print_rtl_single (dump_file, loop->loop_end);
    }

  /* If the loop isn't entered at the top, also create a jump to the entry
     point.  */
  if (!loop->incoming_src && loop->head != loop->incoming_dest)
    {
      rtx label = BB_HEAD (loop->incoming_dest);
      /* If we're jumping to the final basic block in the loop, and there's
         only one cheap instruction before the end (typically an increment of
         an induction variable), we can just emit a copy here instead of a
         jump.  */
      if (loop->incoming_dest == loop->tail
          && next_real_insn (label) == last_insn
          && asm_noperands (last_insn) < 0
          && GET_CODE (PATTERN (last_insn)) == SET)
        {
          seq_end = emit_insn (copy_rtx (PATTERN (last_insn)));
        }
      else
        {
          emit_jump_insn (gen_jump (label));
          seq_end = emit_barrier ();
        }
    }

  seq = get_insns ();
  end_sequence ();

  if (loop->incoming_src)
    {
      rtx prev = BB_END (loop->incoming_src);
      if (VEC_length (edge, loop->incoming) > 1
          || !(VEC_last (edge, loop->incoming)->flags & EDGE_FALLTHRU))
        {
          gcc_assert (JUMP_P (prev));
          prev = PREV_INSN (prev);
        }
      emit_insn_after (seq, prev);
    }
  else
    {
      basic_block new_bb;
      edge e;
      edge_iterator ei;

#ifdef ENABLE_CHECKING
      if (loop->head != loop->incoming_dest)
        {
          /* We aren't entering the loop at the top.  Since we've established
             that the loop is entered only at one point, this means there
             can't be fallthru edges into the head.  Any such fallthru edges
             would become invalid when we insert the new block, so verify
             that this does not in fact happen.  */
          FOR_EACH_EDGE (e, ei, loop->head->preds)
            gcc_assert (!(e->flags & EDGE_FALLTHRU));
        }
#endif

      emit_insn_before (seq, BB_HEAD (loop->head));
      seq = emit_label_before (gen_label_rtx (), seq);

      new_bb = create_basic_block (seq, seq_end, loop->head->prev_bb);
      FOR_EACH_EDGE (e, ei, loop->incoming)
        {
          if (!(e->flags & EDGE_FALLTHRU)
              || e->dest != loop->head)
            redirect_edge_and_branch_force (e, new_bb);
          else
            redirect_edge_succ (e, new_bb);
        }
      e = make_edge (new_bb, loop->head, 0);
    }

  delete_insn (loop->loop_end);
  /* Insert the loop end label before the last instruction of the loop.  */
  emit_label_before (loop->end_label, loop->last_insn);

  return true;
}
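
/* The setup generated above corresponds to something like (an
   illustrative Blackfin assembly sketch, not verbatim output):
       P1 = <iteration count>;
       LSETUP (.Lstart, .Lend) LC0 = P1;
   .Lstart:
       ...loop body...
   .Lend:
       <last insn of the loop>
   LT0/LB0 receive the loop-top and loop-bottom addresses and LC0 the
   trip count; the hardware decrements LC0 and branches back with no
   explicit compare-and-jump, and the original loop_end insn is
   deleted.  */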
 
/* A callback for the hw-doloop pass.  Called when a loop we have discovered
   turns out not to be optimizable; we have to split the doloop_end pattern
   into a subtract and a test.  */
static void
hwloop_fail (hwloop_info loop)
{
  rtx insn = loop->loop_end;

  if (DPREG_P (loop->iter_reg))
    {
      /* If loop->iter_reg is a DREG or PREG, we can split it here
         without a scratch register.  */
      rtx insn, test;

      emit_insn_before (gen_addsi3 (loop->iter_reg,
                                    loop->iter_reg,
                                    constm1_rtx),
                        loop->loop_end);

      test = gen_rtx_NE (VOIDmode, loop->iter_reg, const0_rtx);
      insn = emit_jump_insn_before (gen_cbranchsi4 (test,
                                                    loop->iter_reg, const0_rtx,
                                                    loop->start_label),
                                    loop->loop_end);

      JUMP_LABEL (insn) = loop->start_label;
      LABEL_NUSES (loop->start_label)++;
      delete_insn (loop->loop_end);
    }
  else
    {
      splitting_loops = 1;
      try_split (PATTERN (insn), insn, 1);
      splitting_loops = 0;
    }
}
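
/* For a D or P iteration register, the split above amounts to (a
   sketch):
       R0 += -1;
       CC = R0 == 0;
       IF !CC JUMP .Lstart;
   i.e. an explicit decrement and conditional branch replacing the
   doloop_end pattern.  */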
 
/* A callback for the hw-doloop pass.  This function examines INSN; if
   it is a loop_end pattern we recognize, return the reg rtx for the
   loop counter.  Otherwise, return NULL_RTX.  */

static rtx
hwloop_pattern_reg (rtx insn)
{
  rtx pat, reg;

  if (!JUMP_P (insn) || recog_memoized (insn) != CODE_FOR_loop_end)
    return NULL_RTX;

  pat = PATTERN (insn);
  reg = SET_DEST (XVECEXP (PATTERN (insn), 0, 1));
  if (!REG_P (reg))
    return NULL_RTX;
  return reg;
}

static struct hw_doloop_hooks bfin_doloop_hooks =
{
  hwloop_pattern_reg,
  hwloop_optimize,
  hwloop_fail
};

/* Run from machine_dependent_reorg, this pass looks for doloop_end insns
   and tries to rewrite the RTL of these loops so that proper Blackfin
   hardware loops are generated.  */

static void
bfin_reorg_loops (FILE *dump_file)
{
  reorg_loops (true, &bfin_doloop_hooks);
}

/* Possibly generate a SEQUENCE out of three insns found in SLOT.
   Returns true if we modified the insn chain, false otherwise.  */
static bool
gen_one_bundle (rtx slot[3])
{
  gcc_assert (slot[1] != NULL_RTX);

  /* Don't add extra NOPs if optimizing for size.  */
  if (optimize_size
      && (slot[0] == NULL_RTX || slot[2] == NULL_RTX))
    return false;

  /* Verify that we really can do the multi-issue.  */
  if (slot[0])
    {
      rtx t = NEXT_INSN (slot[0]);
      while (t != slot[1])
        {
          if (GET_CODE (t) != NOTE
              || NOTE_KIND (t) != NOTE_INSN_DELETED)
            return false;
          t = NEXT_INSN (t);
        }
    }
  if (slot[2])
    {
      rtx t = NEXT_INSN (slot[1]);
      while (t != slot[2])
        {
          if (GET_CODE (t) != NOTE
              || NOTE_KIND (t) != NOTE_INSN_DELETED)
            return false;
          t = NEXT_INSN (t);
        }
    }

  if (slot[0] == NULL_RTX)
    {
      slot[0] = emit_insn_before (gen_mnop (), slot[1]);
      df_insn_rescan (slot[0]);
    }
  if (slot[2] == NULL_RTX)
    {
      slot[2] = emit_insn_after (gen_forced_nop (), slot[1]);
      df_insn_rescan (slot[2]);
    }

  /* Avoid line number information being printed inside one bundle.  */
  if (INSN_LOCATOR (slot[1])
      && INSN_LOCATOR (slot[1]) != INSN_LOCATOR (slot[0]))
    INSN_LOCATOR (slot[1]) = INSN_LOCATOR (slot[0]);
  if (INSN_LOCATOR (slot[2])
      && INSN_LOCATOR (slot[2]) != INSN_LOCATOR (slot[0]))
    INSN_LOCATOR (slot[2]) = INSN_LOCATOR (slot[0]);

  /* Terminate them with "|| " instead of ";" in the output.  */
  PUT_MODE (slot[0], SImode);
  PUT_MODE (slot[1], SImode);
  /* Terminate the bundle, for the benefit of reorder_var_tracking_notes.  */
  PUT_MODE (slot[2], QImode);
  return true;
}
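
/* A completed bundle prints as a single parallel-issue line, e.g.
   (illustrative):
       A0 += R0.L * R1.L || R2 = [P0++] || R3 = [I1++];
   slot 0 holds the 32-bit DSP instruction and slots 1 and 2 the two
   16-bit instructions; missing slots are filled with MNOP or NOP
   above.  */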
 
/* Go through all insns, and use the information generated during scheduling
   to generate SEQUENCEs to represent bundles of instructions issued
   simultaneously.  */

static void
bfin_gen_bundles (void)
{
  basic_block bb;
  FOR_EACH_BB (bb)
    {
      rtx insn, next;
      rtx slot[3];
      int n_filled = 0;

      slot[0] = slot[1] = slot[2] = NULL_RTX;
      for (insn = BB_HEAD (bb);; insn = next)
        {
          int at_end;
          rtx delete_this = NULL_RTX;

          if (NONDEBUG_INSN_P (insn))
            {
              enum attr_type type = get_attr_type (insn);

              if (type == TYPE_STALL)
                {
                  gcc_assert (n_filled == 0);
                  delete_this = insn;
                }
              else
                {
                  if (type == TYPE_DSP32 || type == TYPE_DSP32SHIFTIMM)
                    slot[0] = insn;
                  else if (slot[1] == NULL_RTX)
                    slot[1] = insn;
                  else
                    slot[2] = insn;
                  n_filled++;
                }
            }

          next = NEXT_INSN (insn);
          while (next && insn != BB_END (bb)
                 && !(INSN_P (next)
                      && GET_CODE (PATTERN (next)) != USE
                      && GET_CODE (PATTERN (next)) != CLOBBER))
            {
              insn = next;
              next = NEXT_INSN (insn);
            }

          /* BB_END can change due to emitting extra NOPs, so check here.  */
          at_end = insn == BB_END (bb);
          if (delete_this == NULL_RTX && (at_end || GET_MODE (next) == TImode))
            {
              if ((n_filled < 2
                   || !gen_one_bundle (slot))
                  && slot[0] != NULL_RTX)
                {
                  rtx pat = PATTERN (slot[0]);
                  if (GET_CODE (pat) == SET
                      && GET_CODE (SET_SRC (pat)) == UNSPEC
                      && XINT (SET_SRC (pat), 1) == UNSPEC_32BIT)
                    {
                      SET_SRC (pat) = XVECEXP (SET_SRC (pat), 0, 0);
                      INSN_CODE (slot[0]) = -1;
                      df_insn_rescan (slot[0]);
                    }
                }
              n_filled = 0;
              slot[0] = slot[1] = slot[2] = NULL_RTX;
            }
          if (delete_this != NULL_RTX)
            delete_insn (delete_this);
          if (at_end)
            break;
        }
    }
}

/* Ensure that no var tracking notes are emitted in the middle of a
   three-instruction bundle.  */

static void
reorder_var_tracking_notes (void)
{
  basic_block bb;
  FOR_EACH_BB (bb)
    {
      rtx insn, next;
      rtx queue = NULL_RTX;
      bool in_bundle = false;

      for (insn = BB_HEAD (bb); insn != BB_END (bb); insn = next)
        {
          next = NEXT_INSN (insn);

          if (INSN_P (insn))
            {
              /* Emit queued up notes at the last instruction of a bundle.  */
              if (GET_MODE (insn) == QImode)
                {
                  while (queue)
                    {
                      rtx next_queue = PREV_INSN (queue);
                      PREV_INSN (NEXT_INSN (insn)) = queue;
                      NEXT_INSN (queue) = NEXT_INSN (insn);
                      NEXT_INSN (insn) = queue;
                      PREV_INSN (queue) = insn;
                      queue = next_queue;
                    }
                  in_bundle = false;
                }
              else if (GET_MODE (insn) == SImode)
                in_bundle = true;
            }
          else if (NOTE_P (insn) && NOTE_KIND (insn) == NOTE_INSN_VAR_LOCATION)
            {
              if (in_bundle)
                {
                  rtx prev = PREV_INSN (insn);
                  PREV_INSN (next) = prev;
                  NEXT_INSN (prev) = next;

                  PREV_INSN (insn) = queue;
                  queue = insn;
                }
            }
        }
    }
}

/* On some silicon revisions, functions shorter than a certain number of cycles
   can cause unpredictable behaviour.  Work around this by adding NOPs as
   needed.  */
static void
workaround_rts_anomaly (void)
{
  rtx insn, first_insn = NULL_RTX;
  int cycles = 4;

  if (! ENABLE_WA_RETS)
    return;

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      rtx pat;

      if (BARRIER_P (insn))
        return;

      if (NOTE_P (insn) || LABEL_P (insn))
        continue;

      if (first_insn == NULL_RTX)
        first_insn = insn;
      pat = PATTERN (insn);
      if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER
          || GET_CODE (pat) == ASM_INPUT || GET_CODE (pat) == ADDR_VEC
          || GET_CODE (pat) == ADDR_DIFF_VEC || asm_noperands (pat) >= 0)
        continue;

      if (CALL_P (insn))
        return;

      if (JUMP_P (insn))
        {
          if (recog_memoized (insn) == CODE_FOR_return_internal)
            break;

          /* Nothing to worry about for direct jumps.  */
          if (!any_condjump_p (insn))
            return;
          if (cycles <= 1)
            return;
          cycles--;
        }
      else if (INSN_P (insn))
        {
          rtx pat = PATTERN (insn);
          int this_cycles = 1;

          if (GET_CODE (pat) == PARALLEL)
            {
              if (push_multiple_operation (pat, VOIDmode)
                  || pop_multiple_operation (pat, VOIDmode))
                this_cycles = n_regs_to_save;
            }
          else
            {
              int icode = recog_memoized (insn);

              if (icode == CODE_FOR_link)
                this_cycles = 4;
              else if (icode == CODE_FOR_unlink)
                this_cycles = 3;
              else if (icode == CODE_FOR_mulsi3)
                this_cycles = 5;
            }
          if (this_cycles >= cycles)
            return;

          cycles -= this_cycles;
        }
    }
  while (cycles > 0)
    {
      emit_insn_before (gen_nop (), first_insn);
      cycles--;
    }
}
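
/* E.g. (a sketch): with this workaround active, a function whose body
   would otherwise be just "RTS;" is padded to
       NOP; NOP; NOP; NOP;
       RTS;
   so that at least four cycles are spent before the return.  */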
 
/* Return an insn type for INSN that can be used by the caller for anomaly
   workarounds.  This differs from plain get_attr_type in that it handles
   SEQUENCEs.  */

static enum attr_type
type_for_anomaly (rtx insn)
{
  rtx pat = PATTERN (insn);
  if (GET_CODE (pat) == SEQUENCE)
    {
      enum attr_type t;
      t = get_attr_type (XVECEXP (pat, 0, 1));
      if (t == TYPE_MCLD)
        return t;
      t = get_attr_type (XVECEXP (pat, 0, 2));
      if (t == TYPE_MCLD)
        return t;
      return TYPE_MCST;
    }
  else
    return get_attr_type (insn);
}

/* Return true iff the address found in MEM is based on the register
   NP_REG and optionally has a positive offset.  */
static bool
harmless_null_pointer_p (rtx mem, int np_reg)
{
  mem = XEXP (mem, 0);
  if (GET_CODE (mem) == POST_INC || GET_CODE (mem) == POST_DEC)
    mem = XEXP (mem, 0);
  if (REG_P (mem) && (int) REGNO (mem) == np_reg)
    return true;
  if (GET_CODE (mem) == PLUS
      && REG_P (XEXP (mem, 0)) && (int) REGNO (XEXP (mem, 0)) == np_reg)
    {
      mem = XEXP (mem, 1);
      if (GET_CODE (mem) == CONST_INT && INTVAL (mem) > 0)
        return true;
    }
  return false;
}

/* Return nonzero if INSN contains any loads that may trap.  */

static bool
trapping_loads_p (rtx insn, int np_reg, bool after_np_branch)
{
  rtx mem = SET_SRC (single_set (insn));

  if (!after_np_branch)
    np_reg = -1;
  return ((np_reg == -1 || !harmless_null_pointer_p (mem, np_reg))
          && may_trap_p (mem));
}

/* Return INSN if it is of TYPE_MCLD.  Alternatively, if INSN is the start of
   a three-insn bundle, see if one of them is a load and return that if so.
   Return NULL_RTX if the insn does not contain loads.  */
static rtx
find_load (rtx insn)
{
  if (!NONDEBUG_INSN_P (insn))
    return NULL_RTX;
  if (get_attr_type (insn) == TYPE_MCLD)
    return insn;
  if (GET_MODE (insn) != SImode)
    return NULL_RTX;
  do {
    insn = NEXT_INSN (insn);
    if ((GET_MODE (insn) == SImode || GET_MODE (insn) == QImode)
        && get_attr_type (insn) == TYPE_MCLD)
      return insn;
  } while (GET_MODE (insn) != QImode);
  return NULL_RTX;
}

/* Determine whether PAT is an indirect call pattern.  */
static bool
indirect_call_p (rtx pat)
{
  if (GET_CODE (pat) == PARALLEL)
    pat = XVECEXP (pat, 0, 0);
  if (GET_CODE (pat) == SET)
    pat = SET_SRC (pat);
  gcc_assert (GET_CODE (pat) == CALL);
  pat = XEXP (pat, 0);
  gcc_assert (GET_CODE (pat) == MEM);
  pat = XEXP (pat, 0);

  return REG_P (pat);
}

/* During workaround_speculation, track whether we're in the shadow of a
   conditional branch that tests a P register for NULL.  If so, we can omit
   emitting NOPs if we see a load from that P register, since a speculative
   access at address 0 isn't a problem, and the load is executed in all other
   cases anyway.
   Global for communication with note_np_check_stores through note_stores.
   */
int np_check_regno = -1;
bool np_after_branch = false;

/* Subroutine of workaround_speculation, called through note_stores.  */
static void
note_np_check_stores (rtx x, const_rtx pat ATTRIBUTE_UNUSED,
                      void *data ATTRIBUTE_UNUSED)
{
  if (REG_P (x) && (REGNO (x) == REG_CC || (int) REGNO (x) == np_check_regno))
    np_check_regno = -1;
}

static void
workaround_speculation (void)
{
  rtx insn, next;
  rtx last_condjump = NULL_RTX;
  int cycles_since_jump = INT_MAX;
  int delay_added = 0;

  if (! ENABLE_WA_SPECULATIVE_LOADS && ! ENABLE_WA_SPECULATIVE_SYNCS
      && ! ENABLE_WA_INDIRECT_CALLS)
    return;

  /* First pass: find predicted-false branches; if something after them
     needs nops, insert them or change the branch to predict true.  */
  for (insn = get_insns (); insn; insn = next)
    {
      rtx pat;
      int delay_needed = 0;

      next = find_next_insn_start (insn);

      if (NOTE_P (insn) || BARRIER_P (insn))
        continue;

      if (LABEL_P (insn))
        {
          np_check_regno = -1;
          continue;
        }

      pat = PATTERN (insn);
      if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER
          || GET_CODE (pat) == ADDR_VEC || GET_CODE (pat) == ADDR_DIFF_VEC)
        continue;

      if (GET_CODE (pat) == ASM_INPUT || asm_noperands (pat) >= 0)
        {
          np_check_regno = -1;
          continue;
        }

      if (JUMP_P (insn))
        {
          /* Is this a condjump based on a null pointer comparison we saw
             earlier?  */
          if (np_check_regno != -1
              && recog_memoized (insn) == CODE_FOR_cbranchbi4)
            {
              rtx op = XEXP (SET_SRC (PATTERN (insn)), 0);
              gcc_assert (GET_CODE (op) == EQ || GET_CODE (op) == NE);
              if (GET_CODE (op) == NE)
                np_after_branch = true;
            }
          if (any_condjump_p (insn)
              && ! cbranch_predicted_taken_p (insn))
            {
              last_condjump = insn;
              delay_added = 0;
              cycles_since_jump = 0;
            }
          else
            cycles_since_jump = INT_MAX;
        }
      else if (CALL_P (insn))
        {
          np_check_regno = -1;
          if (cycles_since_jump < INT_MAX)
            cycles_since_jump++;
          if (indirect_call_p (pat) && ENABLE_WA_INDIRECT_CALLS)
            {
              delay_needed = 3;
            }
        }
      else if (NONDEBUG_INSN_P (insn))
        {
          rtx load_insn = find_load (insn);
          enum attr_type type = type_for_anomaly (insn);

          if (cycles_since_jump < INT_MAX)
            cycles_since_jump++;

          /* Detect a comparison of a P register with zero.  If we later
             see a condjump based on it, we have found a null pointer
             check.  */
          if (recog_memoized (insn) == CODE_FOR_compare_eq)
            {
              rtx src = SET_SRC (PATTERN (insn));
              if (REG_P (XEXP (src, 0))
                  && P_REGNO_P (REGNO (XEXP (src, 0)))
                  && XEXP (src, 1) == const0_rtx)
                {
                  np_check_regno = REGNO (XEXP (src, 0));
                  np_after_branch = false;
                }
              else
                np_check_regno = -1;
            }

          if (load_insn && ENABLE_WA_SPECULATIVE_LOADS)
            {
              if (trapping_loads_p (load_insn, np_check_regno,
                                    np_after_branch))
                delay_needed = 4;
            }
          else if (type == TYPE_SYNC && ENABLE_WA_SPECULATIVE_SYNCS)
            delay_needed = 3;

          /* See if we need to forget about a null pointer comparison
             we found earlier.  */
          if (recog_memoized (insn) != CODE_FOR_compare_eq)
            {
              note_stores (PATTERN (insn), note_np_check_stores, NULL);
              if (np_check_regno != -1)
                {
                  if (find_regno_note (insn, REG_INC, np_check_regno))
                    np_check_regno = -1;
                }
            }

        }

      if (delay_needed > cycles_since_jump
          && (delay_needed - cycles_since_jump) > delay_added)
        {
          rtx pat1;
          int num_clobbers;
          rtx *op = recog_data.operand;

          delay_needed -= cycles_since_jump;

          extract_insn (last_condjump);
          if (optimize_size)
            {
              pat1 = gen_cbranch_predicted_taken (op[0], op[1], op[2],
                                                 op[3]);
              cycles_since_jump = INT_MAX;
            }
          else
            {
              /* Do not adjust cycles_since_jump in this case, so that
                 we'll increase the number of NOPs for a subsequent insn
                 if necessary.  */
              pat1 = gen_cbranch_with_nops (op[0], op[1], op[2], op[3],
                                            GEN_INT (delay_needed));
              delay_added = delay_needed;
            }
          PATTERN (last_condjump) = pat1;
          INSN_CODE (last_condjump) = recog (pat1, insn, &num_clobbers);
        }
      if (CALL_P (insn))
        {
          cycles_since_jump = INT_MAX;
          delay_added = 0;
        }
    }

  /* Second pass: for predicted-true branches, see if anything at the
     branch destination needs extra nops.  */
  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      int cycles_since_jump;
      if (JUMP_P (insn)
          && any_condjump_p (insn)
          && (INSN_CODE (insn) == CODE_FOR_cbranch_predicted_taken
              || cbranch_predicted_taken_p (insn)))
        {
          rtx target = JUMP_LABEL (insn);
          rtx label = target;
          rtx next_tgt;

          cycles_since_jump = 0;
          for (; target && cycles_since_jump < 3; target = next_tgt)
            {
              rtx pat;

              next_tgt = find_next_insn_start (target);

              if (NOTE_P (target) || BARRIER_P (target) || LABEL_P (target))
                continue;

              pat = PATTERN (target);
              if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER
                  || GET_CODE (pat) == ASM_INPUT || GET_CODE (pat) == ADDR_VEC
                  || GET_CODE (pat) == ADDR_DIFF_VEC || asm_noperands (pat) >= 0)
                continue;

              if (NONDEBUG_INSN_P (target))
                {
                  rtx load_insn = find_load (target);
                  enum attr_type type = type_for_anomaly (target);
                  int delay_needed = 0;
                  if (cycles_since_jump < INT_MAX)
                    cycles_since_jump++;

                  if (load_insn && ENABLE_WA_SPECULATIVE_LOADS)
                    {
                      if (trapping_loads_p (load_insn, -1, false))
                        delay_needed = 2;
                    }
                  else if (type == TYPE_SYNC && ENABLE_WA_SPECULATIVE_SYNCS)
                    delay_needed = 2;

                  if (delay_needed > cycles_since_jump)
                    {
                      rtx prev = prev_real_insn (label);
                      delay_needed -= cycles_since_jump;
                      if (dump_file)
                        fprintf (dump_file, "Adding %d nops after %d\n",
                                 delay_needed, INSN_UID (label));
                      if (JUMP_P (prev)
                          && INSN_CODE (prev) == CODE_FOR_cbranch_with_nops)
                        {
                          rtx x;
                          HOST_WIDE_INT v;

                          if (dump_file)
                            fprintf (dump_file,
                                     "Reducing nops on insn %d.\n",
                                     INSN_UID (prev));
                          x = PATTERN (prev);
                          x = XVECEXP (x, 0, 1);
                          v = INTVAL (XVECEXP (x, 0, 0)) - delay_needed;
                          XVECEXP (x, 0, 0) = GEN_INT (v);
                        }
                      while (delay_needed-- > 0)
                        emit_insn_after (gen_nop (), label);
                      break;
                    }
                }
            }
        }
    }
}

/* Called just before the final scheduling pass.  If we need to insert NOPs
   later on to work around speculative loads, insert special placeholder
   insns that cause loads to be delayed for as many cycles as necessary
   (and possible).  This reduces the number of NOPs we need to add.
   The dummy insns we generate are later removed by bfin_gen_bundles.  */
static void
add_sched_insns_for_speculation (void)
{
  rtx insn;

  if (! ENABLE_WA_SPECULATIVE_LOADS && ! ENABLE_WA_SPECULATIVE_SYNCS
      && ! ENABLE_WA_INDIRECT_CALLS)
    return;

  /* First pass: find predicted-false branches; if something after them
     needs nops, insert them or change the branch to predict true.  */
  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      rtx pat;

      if (NOTE_P (insn) || BARRIER_P (insn) || LABEL_P (insn))
        continue;

      pat = PATTERN (insn);
      if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER
          || GET_CODE (pat) == ASM_INPUT || GET_CODE (pat) == ADDR_VEC
          || GET_CODE (pat) == ADDR_DIFF_VEC || asm_noperands (pat) >= 0)
        continue;

      if (JUMP_P (insn))
        {
          if (any_condjump_p (insn)
              && !cbranch_predicted_taken_p (insn))
            {
              rtx n = next_real_insn (insn);
              emit_insn_before (gen_stall (GEN_INT (3)), n);
            }
        }
    }

  /* Second pass: for predicted-true branches, see if anything at the
     branch destination needs extra nops.  */
  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      if (JUMP_P (insn)
          && any_condjump_p (insn)
          && (cbranch_predicted_taken_p (insn)))
        {
          rtx target = JUMP_LABEL (insn);
          rtx next = next_real_insn (target);

          if (GET_CODE (PATTERN (next)) == UNSPEC_VOLATILE
              && get_attr_type (next) == TYPE_STALL)
            continue;
          emit_insn_before (gen_stall (GEN_INT (1)), next);
        }
    }
}

/* We use the machine specific reorg pass for emitting CSYNC instructions
   after conditional branches as needed.

   The Blackfin is unusual in that a code sequence like
     if cc jump label
     r0 = (p0)
   may speculatively perform the load even if the condition isn't true.  This
   happens for a branch that is predicted not taken, because the pipeline
   isn't flushed or stalled, so the early stages of the following instructions,
   which perform the memory reference, are allowed to execute before the
   jump condition is evaluated.
   Therefore, we must insert additional instructions in all places where this
   could lead to incorrect behavior.  The manual recommends CSYNC, while
   VDSP seems to use NOPs (even though its corresponding compiler option is
   named CSYNC).

   When optimizing for speed, we emit NOPs, which seems faster than a CSYNC.
   When optimizing for size, we turn the branch into a predicted taken one.
   This may be slower due to mispredicts, but saves code size.  */

static void
bfin_reorg (void)
{
  /* We are freeing block_for_insn in the toplev to keep compatibility
     with old MDEP_REORGS that are not CFG based.  Recompute it now.  */
  compute_bb_for_insn ();

  if (flag_schedule_insns_after_reload)
    {
      splitting_for_sched = 1;
      split_all_insns ();
      splitting_for_sched = 0;

      add_sched_insns_for_speculation ();

      timevar_push (TV_SCHED2);
      if (flag_selective_scheduling2
          && !maybe_skip_selective_scheduling ())
        run_selective_scheduling ();
      else
        schedule_insns ();
      timevar_pop (TV_SCHED2);

      /* Examine the schedule and insert nops as necessary for 64-bit parallel
         instructions.  */
      bfin_gen_bundles ();
    }

  df_analyze ();

  /* Doloop optimization */
  if (cfun->machine->has_hardware_loops)
    bfin_reorg_loops (dump_file);

  workaround_speculation ();

  if (flag_var_tracking)
    {
      timevar_push (TV_VAR_TRACKING);
      variable_tracking_main ();
      reorder_var_tracking_notes ();
      timevar_pop (TV_VAR_TRACKING);
    }

  df_finish_pass (false);

  workaround_rts_anomaly ();
}

/* Handle interrupt_handler, exception_handler and nmi_handler function
   attributes; arguments as in struct attribute_spec.handler.  */

static tree
handle_int_attribute (tree *node, tree name,
                      tree args ATTRIBUTE_UNUSED,
                      int flags ATTRIBUTE_UNUSED,
                      bool *no_add_attrs)
{
  tree x = *node;
  if (TREE_CODE (x) == FUNCTION_DECL)
    x = TREE_TYPE (x);

  if (TREE_CODE (x) != FUNCTION_TYPE)
    {
      warning (OPT_Wattributes, "%qE attribute only applies to functions",
               name);
      *no_add_attrs = true;
    }
  else if (funkind (x) != SUBROUTINE)
    error ("multiple function type attributes specified");

  return NULL_TREE;
}

/* Return 0 if the attributes for two types are incompatible, 1 if they
   are compatible, and 2 if they are nearly compatible (which causes a
   warning to be generated).  */

static int
bfin_comp_type_attributes (const_tree type1, const_tree type2)
{
  e_funkind kind1, kind2;

  if (TREE_CODE (type1) != FUNCTION_TYPE)
    return 1;

  kind1 = funkind (type1);
  kind2 = funkind (type2);

  if (kind1 != kind2)
    return 0;

  /* Check for mismatched modifiers.  */
  if (!lookup_attribute ("nesting", TYPE_ATTRIBUTES (type1))
      != !lookup_attribute ("nesting", TYPE_ATTRIBUTES (type2)))
    return 0;

  if (!lookup_attribute ("saveall", TYPE_ATTRIBUTES (type1))
      != !lookup_attribute ("saveall", TYPE_ATTRIBUTES (type2)))
    return 0;

  if (!lookup_attribute ("kspisusp", TYPE_ATTRIBUTES (type1))
      != !lookup_attribute ("kspisusp", TYPE_ATTRIBUTES (type2)))
    return 0;

  if (!lookup_attribute ("longcall", TYPE_ATTRIBUTES (type1))
      != !lookup_attribute ("longcall", TYPE_ATTRIBUTES (type2)))
    return 0;

  return 1;
}
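
/* For example (illustrative):
     void isr (void) __attribute__ ((interrupt_handler));
     void sub (void);
   have different funkinds, so the hook above returns 0 and the two
   function types are treated as incompatible.  */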
 
/* Handle a "longcall" or "shortcall" attribute; arguments as in
   struct attribute_spec.handler.  */

static tree
bfin_handle_longcall_attribute (tree *node, tree name,
                                tree args ATTRIBUTE_UNUSED,
                                int flags ATTRIBUTE_UNUSED,
                                bool *no_add_attrs)
{
  if (TREE_CODE (*node) != FUNCTION_TYPE
      && TREE_CODE (*node) != FIELD_DECL
      && TREE_CODE (*node) != TYPE_DECL)
    {
      warning (OPT_Wattributes, "%qE attribute only applies to functions",
               name);
      *no_add_attrs = true;
    }

  if ((strcmp (IDENTIFIER_POINTER (name), "longcall") == 0
       && lookup_attribute ("shortcall", TYPE_ATTRIBUTES (*node)))
      || (strcmp (IDENTIFIER_POINTER (name), "shortcall") == 0
          && lookup_attribute ("longcall", TYPE_ATTRIBUTES (*node))))
    {
      warning (OPT_Wattributes,
               "can%'t apply both longcall and shortcall attributes to the same function");
      *no_add_attrs = true;
    }

  return NULL_TREE;
}
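
/* Typical use (illustrative):
     void far_func (void) __attribute__ ((longcall));
   marks far_func as possibly out of range of a direct CALL, so a more
   expensive calling sequence is used; shortcall asserts the opposite.
   Requesting both on one function is rejected above.  */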
 
/* Handle a "l1_text" attribute; arguments as in
   struct attribute_spec.handler.  */

static tree
bfin_handle_l1_text_attribute (tree *node, tree name, tree ARG_UNUSED (args),
                               int ARG_UNUSED (flags), bool *no_add_attrs)
{
  tree decl = *node;

  if (TREE_CODE (decl) != FUNCTION_DECL)
    {
      error ("%qE attribute only applies to functions",
             name);
      *no_add_attrs = true;
    }

  /* The decl may have already been given a section attribute
     from a previous declaration.  Ensure they match.  */
  else if (DECL_SECTION_NAME (decl) != NULL_TREE
           && strcmp (TREE_STRING_POINTER (DECL_SECTION_NAME (decl)),
                      ".l1.text") != 0)
    {
      error ("section of %q+D conflicts with previous declaration",
             decl);
      *no_add_attrs = true;
    }
  else
    DECL_SECTION_NAME (decl) = build_string (9, ".l1.text");

  return NULL_TREE;
}

/* Handle a "l1_data", "l1_data_A" or "l1_data_B" attribute;
   arguments as in struct attribute_spec.handler.  */

static tree
bfin_handle_l1_data_attribute (tree *node, tree name, tree ARG_UNUSED (args),
                               int ARG_UNUSED (flags), bool *no_add_attrs)
{
  tree decl = *node;

  if (TREE_CODE (decl) != VAR_DECL)
    {
      error ("%qE attribute only applies to variables",
             name);
      *no_add_attrs = true;
    }
  else if (current_function_decl != NULL_TREE
           && !TREE_STATIC (decl))
    {
      error ("%qE attribute cannot be specified for local variables",
             name);
      *no_add_attrs = true;
    }
  else
    {
      const char *section_name;

      if (strcmp (IDENTIFIER_POINTER (name), "l1_data") == 0)
        section_name = ".l1.data";
      else if (strcmp (IDENTIFIER_POINTER (name), "l1_data_A") == 0)
        section_name = ".l1.data.A";
      else if (strcmp (IDENTIFIER_POINTER (name), "l1_data_B") == 0)
        section_name = ".l1.data.B";
      else
        gcc_unreachable ();

      /* The decl may have already been given a section attribute
         from a previous declaration.  Ensure they match.  */
      if (DECL_SECTION_NAME (decl) != NULL_TREE
          && strcmp (TREE_STRING_POINTER (DECL_SECTION_NAME (decl)),
                     section_name) != 0)
        {
          error ("section of %q+D conflicts with previous declaration",
                 decl);
          *no_add_attrs = true;
        }
      else
        DECL_SECTION_NAME (decl)
          = build_string (strlen (section_name) + 1, section_name);
    }

  return NULL_TREE;
}
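
/* Typical use (illustrative):
     int coeffs[128] __attribute__ ((l1_data_A));
   places coeffs in the ".l1.data.A" section, i.e. in bank A of L1 data
   memory.  */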
 
4800
/* Handle a "l2" attribute; arguments as in struct attribute_spec.handler.  */
4801
 
4802
static tree
4803
bfin_handle_l2_attribute (tree *node, tree ARG_UNUSED (name),
4804
                          tree ARG_UNUSED (args), int ARG_UNUSED (flags),
4805
                          bool *no_add_attrs)
4806
{
4807
  tree decl = *node;
4808
 
4809
  if (TREE_CODE (decl) == FUNCTION_DECL)
4810
    {
4811
      if (DECL_SECTION_NAME (decl) != NULL_TREE
4812
          && strcmp (TREE_STRING_POINTER (DECL_SECTION_NAME (decl)),
4813
                     ".l2.text") != 0)
4814
        {
4815
          error ("section of %q+D conflicts with previous declaration",
4816
                 decl);
4817
          *no_add_attrs = true;
4818
        }
4819
      else
4820
        DECL_SECTION_NAME (decl) = build_string (9, ".l2.text");
4821
    }
4822
  else if (TREE_CODE (decl) == VAR_DECL)
4823
    {
4824
      if (DECL_SECTION_NAME (decl) != NULL_TREE
4825
          && strcmp (TREE_STRING_POINTER (DECL_SECTION_NAME (decl)),
4826
                     ".l2.data") != 0)
4827
        {
4828
          error ("section of %q+D conflicts with previous declaration",
4829
                 decl);
4830
          *no_add_attrs = true;
4831
        }
4832
      else
4833
        DECL_SECTION_NAME (decl) = build_string (9, ".l2.data");
4834
    }
4835
 
4836
  return NULL_TREE;
4837
}

/* Table of valid machine attributes.  */
static const struct attribute_spec bfin_attribute_table[] =
{
  /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
       affects_type_identity } */
  { "interrupt_handler", 0, 0, false, true,  true, handle_int_attribute,
    false },
  { "exception_handler", 0, 0, false, true,  true, handle_int_attribute,
    false },
  { "nmi_handler", 0, 0, false, true,  true, handle_int_attribute, false },
  { "nesting", 0, 0, false, true,  true, NULL, false },
  { "kspisusp", 0, 0, false, true,  true, NULL, false },
  { "saveall", 0, 0, false, true,  true, NULL, false },
  { "longcall",  0, 0, false, true,  true,  bfin_handle_longcall_attribute,
    false },
  { "shortcall", 0, 0, false, true,  true,  bfin_handle_longcall_attribute,
    false },
  { "l1_text", 0, 0, true, false, false,  bfin_handle_l1_text_attribute,
    false },
  { "l1_data", 0, 0, true, false, false,  bfin_handle_l1_data_attribute,
    false },
  { "l1_data_A", 0, 0, true, false, false, bfin_handle_l1_data_attribute,
    false },
  { "l1_data_B", 0, 0, true, false, false,  bfin_handle_l1_data_attribute,
    false },
  { "l2", 0, 0, true, false, false,  bfin_handle_l2_attribute, false },
  { NULL, 0, 0, false, false, false, NULL, false }
};

/* Implementation of TARGET_ASM_INTEGER.  When using FD-PIC, we need to
   tell the assembler to generate pointers to function descriptors in
   some cases.  */

static bool
bfin_assemble_integer (rtx value, unsigned int size, int aligned_p)
{
  if (TARGET_FDPIC && size == UNITS_PER_WORD)
    {
      if (GET_CODE (value) == SYMBOL_REF
          && SYMBOL_REF_FUNCTION_P (value))
        {
          fputs ("\t.picptr\tfuncdesc(", asm_out_file);
          output_addr_const (asm_out_file, value);
          fputs (")\n", asm_out_file);
          return true;
        }
      if (!aligned_p)
        {
          /* We've set the unaligned SI op to NULL, so we always have to
             handle the unaligned case here.  */
          assemble_integer_with_op ("\t.4byte\t", value);
          return true;
        }
    }
  return default_assemble_integer (value, size, aligned_p);
}
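
/* Illustration (hedged example, not taken from this file): for FD-PIC
   code containing a static initializer such as

     void foo (void);
     void (*fp) (void) = foo;

   the hook above emits "\t.picptr\tfuncdesc(foo)" for the initializer
   instead of a plain .4byte address, so the linker builds a function
   descriptor rather than a raw code pointer.  */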

/* Output the assembler code for a thunk function.  THUNK_DECL is the
   declaration for the thunk function itself, FUNCTION is the decl for
   the target function.  DELTA is an immediate constant offset to be
   added to THIS.  If VCALL_OFFSET is nonzero, the word at
   *(*this + vcall_offset) should be added to THIS.  */

static void
bfin_output_mi_thunk (FILE *file ATTRIBUTE_UNUSED,
                      tree thunk ATTRIBUTE_UNUSED, HOST_WIDE_INT delta,
                      HOST_WIDE_INT vcall_offset, tree function)
{
  rtx xops[3];
  /* The this parameter is passed as the first argument.  */
  rtx this_rtx = gen_rtx_REG (Pmode, REG_R0);

  /* Adjust the this parameter by a fixed constant.  */
  if (delta)
    {
      xops[1] = this_rtx;
      if (delta >= -64 && delta <= 63)
        {
          xops[0] = GEN_INT (delta);
          output_asm_insn ("%1 += %0;", xops);
        }
      else if (delta >= -128 && delta < -64)
        {
          xops[0] = GEN_INT (delta + 64);
          output_asm_insn ("%1 += -64; %1 += %0;", xops);
        }
      else if (delta > 63 && delta <= 126)
        {
          xops[0] = GEN_INT (delta - 63);
          output_asm_insn ("%1 += 63; %1 += %0;", xops);
        }
      else
        {
          xops[0] = GEN_INT (delta);
          output_asm_insn ("r3.l = %h0; r3.h = %d0; %1 = %1 + r3;", xops);
        }
    }

  /* Adjust the this parameter by a value stored in the vtable.  */
  if (vcall_offset)
    {
      rtx p2tmp = gen_rtx_REG (Pmode, REG_P2);
      rtx tmp = gen_rtx_REG (Pmode, REG_R3);

      xops[1] = tmp;
      xops[2] = p2tmp;
      output_asm_insn ("%2 = r0; %2 = [%2];", xops);

      /* Adjust the this parameter.  */
      xops[0] = gen_rtx_MEM (Pmode, plus_constant (p2tmp, vcall_offset));
      if (!memory_operand (xops[0], Pmode))
        {
          rtx tmp2 = gen_rtx_REG (Pmode, REG_P1);
          xops[0] = GEN_INT (vcall_offset);
          xops[1] = tmp2;
          output_asm_insn ("%h1 = %h0; %d1 = %d0; %2 = %2 + %1", xops);
          xops[0] = gen_rtx_MEM (Pmode, p2tmp);
        }
      xops[2] = this_rtx;
      output_asm_insn ("%1 = %0; %2 = %2 + %1;", xops);
    }

  xops[0] = XEXP (DECL_RTL (function), 0);
  if (1 || !flag_pic || (*targetm.binds_local_p) (function))
    output_asm_insn ("jump.l\t%P0", xops);
}
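
/* Illustration (hypothetical numbers): with DELTA == 8 and
   VCALL_OFFSET == 0, the thunk emitted above behaves like

     void thunk (void *this_) { function ((char *) this_ + 8); }

   adjusting R0 in place and tail-jumping to FUNCTION.  Note that the
   final condition is hard-wired true, so the jump is always emitted
   as a PC-relative "jump.l".  */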

/* Codes for all the Blackfin builtins.  */
enum bfin_builtins
{
  BFIN_BUILTIN_CSYNC,
  BFIN_BUILTIN_SSYNC,
  BFIN_BUILTIN_ONES,
  BFIN_BUILTIN_COMPOSE_2X16,
  BFIN_BUILTIN_EXTRACTLO,
  BFIN_BUILTIN_EXTRACTHI,

  BFIN_BUILTIN_SSADD_2X16,
  BFIN_BUILTIN_SSSUB_2X16,
  BFIN_BUILTIN_SSADDSUB_2X16,
  BFIN_BUILTIN_SSSUBADD_2X16,
  BFIN_BUILTIN_MULT_2X16,
  BFIN_BUILTIN_MULTR_2X16,
  BFIN_BUILTIN_NEG_2X16,
  BFIN_BUILTIN_ABS_2X16,
  BFIN_BUILTIN_MIN_2X16,
  BFIN_BUILTIN_MAX_2X16,

  BFIN_BUILTIN_SSADD_1X16,
  BFIN_BUILTIN_SSSUB_1X16,
  BFIN_BUILTIN_MULT_1X16,
  BFIN_BUILTIN_MULTR_1X16,
  BFIN_BUILTIN_NORM_1X16,
  BFIN_BUILTIN_NEG_1X16,
  BFIN_BUILTIN_ABS_1X16,
  BFIN_BUILTIN_MIN_1X16,
  BFIN_BUILTIN_MAX_1X16,

  BFIN_BUILTIN_SUM_2X16,
  BFIN_BUILTIN_DIFFHL_2X16,
  BFIN_BUILTIN_DIFFLH_2X16,

  BFIN_BUILTIN_SSADD_1X32,
  BFIN_BUILTIN_SSSUB_1X32,
  BFIN_BUILTIN_NORM_1X32,
  BFIN_BUILTIN_ROUND_1X32,
  BFIN_BUILTIN_NEG_1X32,
  BFIN_BUILTIN_ABS_1X32,
  BFIN_BUILTIN_MIN_1X32,
  BFIN_BUILTIN_MAX_1X32,
  BFIN_BUILTIN_MULT_1X32,
  BFIN_BUILTIN_MULT_1X32X32,
  BFIN_BUILTIN_MULT_1X32X32NS,

  BFIN_BUILTIN_MULHISILL,
  BFIN_BUILTIN_MULHISILH,
  BFIN_BUILTIN_MULHISIHL,
  BFIN_BUILTIN_MULHISIHH,

  BFIN_BUILTIN_LSHIFT_1X16,
  BFIN_BUILTIN_LSHIFT_2X16,
  BFIN_BUILTIN_SSASHIFT_1X16,
  BFIN_BUILTIN_SSASHIFT_2X16,
  BFIN_BUILTIN_SSASHIFT_1X32,

  BFIN_BUILTIN_CPLX_MUL_16,
  BFIN_BUILTIN_CPLX_MAC_16,
  BFIN_BUILTIN_CPLX_MSU_16,

  BFIN_BUILTIN_CPLX_MUL_16_S40,
  BFIN_BUILTIN_CPLX_MAC_16_S40,
  BFIN_BUILTIN_CPLX_MSU_16_S40,

  BFIN_BUILTIN_CPLX_SQU,

  BFIN_BUILTIN_LOADBYTES,

  BFIN_BUILTIN_MAX
};

#define def_builtin(NAME, TYPE, CODE)                                   \
do {                                                                    \
  add_builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD,            \
                        NULL, NULL_TREE);                               \
} while (0)
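
/* For example, def_builtin ("__builtin_bfin_ones", short_ftype_int,
   BFIN_BUILTIN_ONES) registers a machine-specific (BUILT_IN_MD)
   builtin whose DECL_FUNCTION_CODE is BFIN_BUILTIN_ONES; that code is
   what bfin_expand_builtin dispatches on below.  */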

/* Set up all builtin functions for this target.  */
static void
bfin_init_builtins (void)
{
  tree V2HI_type_node = build_vector_type_for_mode (intHI_type_node, V2HImode);
  tree void_ftype_void
    = build_function_type_list (void_type_node, NULL_TREE);
  tree short_ftype_short
    = build_function_type_list (short_integer_type_node, short_integer_type_node,
                                NULL_TREE);
  tree short_ftype_int_int
    = build_function_type_list (short_integer_type_node, integer_type_node,
                                integer_type_node, NULL_TREE);
  tree int_ftype_int_int
    = build_function_type_list (integer_type_node, integer_type_node,
                                integer_type_node, NULL_TREE);
  tree int_ftype_int
    = build_function_type_list (integer_type_node, integer_type_node,
                                NULL_TREE);
  tree short_ftype_int
    = build_function_type_list (short_integer_type_node, integer_type_node,
                                NULL_TREE);
  tree int_ftype_v2hi_v2hi
    = build_function_type_list (integer_type_node, V2HI_type_node,
                                V2HI_type_node, NULL_TREE);
  tree v2hi_ftype_v2hi_v2hi
    = build_function_type_list (V2HI_type_node, V2HI_type_node,
                                V2HI_type_node, NULL_TREE);
  tree v2hi_ftype_v2hi_v2hi_v2hi
    = build_function_type_list (V2HI_type_node, V2HI_type_node,
                                V2HI_type_node, V2HI_type_node, NULL_TREE);
  tree v2hi_ftype_int_int
    = build_function_type_list (V2HI_type_node, integer_type_node,
                                integer_type_node, NULL_TREE);
  tree v2hi_ftype_v2hi_int
    = build_function_type_list (V2HI_type_node, V2HI_type_node,
                                integer_type_node, NULL_TREE);
  tree int_ftype_short_short
    = build_function_type_list (integer_type_node, short_integer_type_node,
                                short_integer_type_node, NULL_TREE);
  tree v2hi_ftype_v2hi
    = build_function_type_list (V2HI_type_node, V2HI_type_node, NULL_TREE);
  tree short_ftype_v2hi
    = build_function_type_list (short_integer_type_node, V2HI_type_node,
                                NULL_TREE);
  tree int_ftype_pint
    = build_function_type_list (integer_type_node,
                                build_pointer_type (integer_type_node),
                                NULL_TREE);

  /* Define the Blackfin builtins; some have somewhat more complicated
     types.  */
  def_builtin ("__builtin_bfin_csync", void_ftype_void, BFIN_BUILTIN_CSYNC);
  def_builtin ("__builtin_bfin_ssync", void_ftype_void, BFIN_BUILTIN_SSYNC);

  def_builtin ("__builtin_bfin_ones", short_ftype_int, BFIN_BUILTIN_ONES);

  def_builtin ("__builtin_bfin_compose_2x16", v2hi_ftype_int_int,
               BFIN_BUILTIN_COMPOSE_2X16);
  def_builtin ("__builtin_bfin_extract_hi", short_ftype_v2hi,
               BFIN_BUILTIN_EXTRACTHI);
  def_builtin ("__builtin_bfin_extract_lo", short_ftype_v2hi,
               BFIN_BUILTIN_EXTRACTLO);

  def_builtin ("__builtin_bfin_min_fr2x16", v2hi_ftype_v2hi_v2hi,
               BFIN_BUILTIN_MIN_2X16);
  def_builtin ("__builtin_bfin_max_fr2x16", v2hi_ftype_v2hi_v2hi,
               BFIN_BUILTIN_MAX_2X16);

  def_builtin ("__builtin_bfin_add_fr2x16", v2hi_ftype_v2hi_v2hi,
               BFIN_BUILTIN_SSADD_2X16);
  def_builtin ("__builtin_bfin_sub_fr2x16", v2hi_ftype_v2hi_v2hi,
               BFIN_BUILTIN_SSSUB_2X16);
  def_builtin ("__builtin_bfin_dspaddsubsat", v2hi_ftype_v2hi_v2hi,
               BFIN_BUILTIN_SSADDSUB_2X16);
  def_builtin ("__builtin_bfin_dspsubaddsat", v2hi_ftype_v2hi_v2hi,
               BFIN_BUILTIN_SSSUBADD_2X16);
  def_builtin ("__builtin_bfin_mult_fr2x16", v2hi_ftype_v2hi_v2hi,
               BFIN_BUILTIN_MULT_2X16);
  def_builtin ("__builtin_bfin_multr_fr2x16", v2hi_ftype_v2hi_v2hi,
               BFIN_BUILTIN_MULTR_2X16);
  def_builtin ("__builtin_bfin_negate_fr2x16", v2hi_ftype_v2hi,
               BFIN_BUILTIN_NEG_2X16);
  def_builtin ("__builtin_bfin_abs_fr2x16", v2hi_ftype_v2hi,
               BFIN_BUILTIN_ABS_2X16);

  def_builtin ("__builtin_bfin_min_fr1x16", short_ftype_int_int,
               BFIN_BUILTIN_MIN_1X16);
  def_builtin ("__builtin_bfin_max_fr1x16", short_ftype_int_int,
               BFIN_BUILTIN_MAX_1X16);

  def_builtin ("__builtin_bfin_add_fr1x16", short_ftype_int_int,
               BFIN_BUILTIN_SSADD_1X16);
  def_builtin ("__builtin_bfin_sub_fr1x16", short_ftype_int_int,
               BFIN_BUILTIN_SSSUB_1X16);
  def_builtin ("__builtin_bfin_mult_fr1x16", short_ftype_int_int,
               BFIN_BUILTIN_MULT_1X16);
  def_builtin ("__builtin_bfin_multr_fr1x16", short_ftype_int_int,
               BFIN_BUILTIN_MULTR_1X16);
  def_builtin ("__builtin_bfin_negate_fr1x16", short_ftype_short,
               BFIN_BUILTIN_NEG_1X16);
  def_builtin ("__builtin_bfin_abs_fr1x16", short_ftype_short,
               BFIN_BUILTIN_ABS_1X16);
  def_builtin ("__builtin_bfin_norm_fr1x16", short_ftype_int,
               BFIN_BUILTIN_NORM_1X16);

  def_builtin ("__builtin_bfin_sum_fr2x16", short_ftype_v2hi,
               BFIN_BUILTIN_SUM_2X16);
  def_builtin ("__builtin_bfin_diff_hl_fr2x16", short_ftype_v2hi,
               BFIN_BUILTIN_DIFFHL_2X16);
  def_builtin ("__builtin_bfin_diff_lh_fr2x16", short_ftype_v2hi,
               BFIN_BUILTIN_DIFFLH_2X16);

  def_builtin ("__builtin_bfin_mulhisill", int_ftype_v2hi_v2hi,
               BFIN_BUILTIN_MULHISILL);
  def_builtin ("__builtin_bfin_mulhisihl", int_ftype_v2hi_v2hi,
               BFIN_BUILTIN_MULHISIHL);
  def_builtin ("__builtin_bfin_mulhisilh", int_ftype_v2hi_v2hi,
               BFIN_BUILTIN_MULHISILH);
  def_builtin ("__builtin_bfin_mulhisihh", int_ftype_v2hi_v2hi,
               BFIN_BUILTIN_MULHISIHH);

  def_builtin ("__builtin_bfin_min_fr1x32", int_ftype_int_int,
               BFIN_BUILTIN_MIN_1X32);
  def_builtin ("__builtin_bfin_max_fr1x32", int_ftype_int_int,
               BFIN_BUILTIN_MAX_1X32);

  def_builtin ("__builtin_bfin_add_fr1x32", int_ftype_int_int,
               BFIN_BUILTIN_SSADD_1X32);
  def_builtin ("__builtin_bfin_sub_fr1x32", int_ftype_int_int,
               BFIN_BUILTIN_SSSUB_1X32);
  def_builtin ("__builtin_bfin_negate_fr1x32", int_ftype_int,
               BFIN_BUILTIN_NEG_1X32);
  def_builtin ("__builtin_bfin_abs_fr1x32", int_ftype_int,
               BFIN_BUILTIN_ABS_1X32);
  def_builtin ("__builtin_bfin_norm_fr1x32", short_ftype_int,
               BFIN_BUILTIN_NORM_1X32);
  def_builtin ("__builtin_bfin_round_fr1x32", short_ftype_int,
               BFIN_BUILTIN_ROUND_1X32);
  def_builtin ("__builtin_bfin_mult_fr1x32", int_ftype_short_short,
               BFIN_BUILTIN_MULT_1X32);
  def_builtin ("__builtin_bfin_mult_fr1x32x32", int_ftype_int_int,
               BFIN_BUILTIN_MULT_1X32X32);
  def_builtin ("__builtin_bfin_mult_fr1x32x32NS", int_ftype_int_int,
               BFIN_BUILTIN_MULT_1X32X32NS);

  /* Shifts.  */
  def_builtin ("__builtin_bfin_shl_fr1x16", short_ftype_int_int,
               BFIN_BUILTIN_SSASHIFT_1X16);
  def_builtin ("__builtin_bfin_shl_fr2x16", v2hi_ftype_v2hi_int,
               BFIN_BUILTIN_SSASHIFT_2X16);
  def_builtin ("__builtin_bfin_lshl_fr1x16", short_ftype_int_int,
               BFIN_BUILTIN_LSHIFT_1X16);
  def_builtin ("__builtin_bfin_lshl_fr2x16", v2hi_ftype_v2hi_int,
               BFIN_BUILTIN_LSHIFT_2X16);
  def_builtin ("__builtin_bfin_shl_fr1x32", int_ftype_int_int,
               BFIN_BUILTIN_SSASHIFT_1X32);

  /* Complex numbers.  */
  def_builtin ("__builtin_bfin_cmplx_add", v2hi_ftype_v2hi_v2hi,
               BFIN_BUILTIN_SSADD_2X16);
  def_builtin ("__builtin_bfin_cmplx_sub", v2hi_ftype_v2hi_v2hi,
               BFIN_BUILTIN_SSSUB_2X16);
  def_builtin ("__builtin_bfin_cmplx_mul", v2hi_ftype_v2hi_v2hi,
               BFIN_BUILTIN_CPLX_MUL_16);
  def_builtin ("__builtin_bfin_cmplx_mac", v2hi_ftype_v2hi_v2hi_v2hi,
               BFIN_BUILTIN_CPLX_MAC_16);
  def_builtin ("__builtin_bfin_cmplx_msu", v2hi_ftype_v2hi_v2hi_v2hi,
               BFIN_BUILTIN_CPLX_MSU_16);
  def_builtin ("__builtin_bfin_cmplx_mul_s40", v2hi_ftype_v2hi_v2hi,
               BFIN_BUILTIN_CPLX_MUL_16_S40);
  def_builtin ("__builtin_bfin_cmplx_mac_s40", v2hi_ftype_v2hi_v2hi_v2hi,
               BFIN_BUILTIN_CPLX_MAC_16_S40);
  def_builtin ("__builtin_bfin_cmplx_msu_s40", v2hi_ftype_v2hi_v2hi_v2hi,
               BFIN_BUILTIN_CPLX_MSU_16_S40);
  def_builtin ("__builtin_bfin_csqu_fr16", v2hi_ftype_v2hi,
               BFIN_BUILTIN_CPLX_SQU);

  /* "Unaligned" load.  */
  def_builtin ("__builtin_bfin_loadbytes", int_ftype_pint,
               BFIN_BUILTIN_LOADBYTES);
}
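
/* Illustration (assumes the usual GCC vector-type spelling, not taken
   from this file): after bfin_init_builtins runs, user code can write

     typedef short v2hi __attribute__ ((vector_size (4)));
     v2hi a, b, c;
     c = __builtin_bfin_add_fr2x16 (a, b);    // saturating 2x16 add
     short s = __builtin_bfin_sum_fr2x16 (a);

   with argument and result types matching the
   build_function_type_list signatures above.  */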

struct builtin_description
{
  const enum insn_code icode;
  const char *const name;
  const enum bfin_builtins code;
  int macflag;
};

static const struct builtin_description bdesc_2arg[] =
{
  { CODE_FOR_composev2hi, "__builtin_bfin_compose_2x16", BFIN_BUILTIN_COMPOSE_2X16, -1 },

  { CODE_FOR_ssashiftv2hi3, "__builtin_bfin_shl_fr2x16", BFIN_BUILTIN_SSASHIFT_2X16, -1 },
  { CODE_FOR_ssashifthi3, "__builtin_bfin_shl_fr1x16", BFIN_BUILTIN_SSASHIFT_1X16, -1 },
  { CODE_FOR_lshiftv2hi3, "__builtin_bfin_lshl_fr2x16", BFIN_BUILTIN_LSHIFT_2X16, -1 },
  { CODE_FOR_lshifthi3, "__builtin_bfin_lshl_fr1x16", BFIN_BUILTIN_LSHIFT_1X16, -1 },
  { CODE_FOR_ssashiftsi3, "__builtin_bfin_shl_fr1x32", BFIN_BUILTIN_SSASHIFT_1X32, -1 },

  { CODE_FOR_sminhi3, "__builtin_bfin_min_fr1x16", BFIN_BUILTIN_MIN_1X16, -1 },
  { CODE_FOR_smaxhi3, "__builtin_bfin_max_fr1x16", BFIN_BUILTIN_MAX_1X16, -1 },
  { CODE_FOR_ssaddhi3, "__builtin_bfin_add_fr1x16", BFIN_BUILTIN_SSADD_1X16, -1 },
  { CODE_FOR_sssubhi3, "__builtin_bfin_sub_fr1x16", BFIN_BUILTIN_SSSUB_1X16, -1 },

  { CODE_FOR_sminsi3, "__builtin_bfin_min_fr1x32", BFIN_BUILTIN_MIN_1X32, -1 },
  { CODE_FOR_smaxsi3, "__builtin_bfin_max_fr1x32", BFIN_BUILTIN_MAX_1X32, -1 },
  { CODE_FOR_ssaddsi3, "__builtin_bfin_add_fr1x32", BFIN_BUILTIN_SSADD_1X32, -1 },
  { CODE_FOR_sssubsi3, "__builtin_bfin_sub_fr1x32", BFIN_BUILTIN_SSSUB_1X32, -1 },

  { CODE_FOR_sminv2hi3, "__builtin_bfin_min_fr2x16", BFIN_BUILTIN_MIN_2X16, -1 },
  { CODE_FOR_smaxv2hi3, "__builtin_bfin_max_fr2x16", BFIN_BUILTIN_MAX_2X16, -1 },
  { CODE_FOR_ssaddv2hi3, "__builtin_bfin_add_fr2x16", BFIN_BUILTIN_SSADD_2X16, -1 },
  { CODE_FOR_sssubv2hi3, "__builtin_bfin_sub_fr2x16", BFIN_BUILTIN_SSSUB_2X16, -1 },
  { CODE_FOR_ssaddsubv2hi3, "__builtin_bfin_dspaddsubsat", BFIN_BUILTIN_SSADDSUB_2X16, -1 },
  { CODE_FOR_sssubaddv2hi3, "__builtin_bfin_dspsubaddsat", BFIN_BUILTIN_SSSUBADD_2X16, -1 },

  { CODE_FOR_flag_mulhisi, "__builtin_bfin_mult_fr1x32", BFIN_BUILTIN_MULT_1X32, MACFLAG_NONE },
  { CODE_FOR_flag_mulhi, "__builtin_bfin_mult_fr1x16", BFIN_BUILTIN_MULT_1X16, MACFLAG_T },
  { CODE_FOR_flag_mulhi, "__builtin_bfin_multr_fr1x16", BFIN_BUILTIN_MULTR_1X16, MACFLAG_NONE },
  { CODE_FOR_flag_mulv2hi, "__builtin_bfin_mult_fr2x16", BFIN_BUILTIN_MULT_2X16, MACFLAG_T },
  { CODE_FOR_flag_mulv2hi, "__builtin_bfin_multr_fr2x16", BFIN_BUILTIN_MULTR_2X16, MACFLAG_NONE },

  { CODE_FOR_mulhisi_ll, "__builtin_bfin_mulhisill", BFIN_BUILTIN_MULHISILL, -1 },
  { CODE_FOR_mulhisi_lh, "__builtin_bfin_mulhisilh", BFIN_BUILTIN_MULHISILH, -1 },
  { CODE_FOR_mulhisi_hl, "__builtin_bfin_mulhisihl", BFIN_BUILTIN_MULHISIHL, -1 },
  { CODE_FOR_mulhisi_hh, "__builtin_bfin_mulhisihh", BFIN_BUILTIN_MULHISIHH, -1 }
};

static const struct builtin_description bdesc_1arg[] =
{
  { CODE_FOR_loadbytes, "__builtin_bfin_loadbytes", BFIN_BUILTIN_LOADBYTES, 0 },

  { CODE_FOR_ones, "__builtin_bfin_ones", BFIN_BUILTIN_ONES, 0 },

  { CODE_FOR_clrsbhi2, "__builtin_bfin_norm_fr1x16", BFIN_BUILTIN_NORM_1X16, 0 },
  { CODE_FOR_ssneghi2, "__builtin_bfin_negate_fr1x16", BFIN_BUILTIN_NEG_1X16, 0 },
  { CODE_FOR_abshi2, "__builtin_bfin_abs_fr1x16", BFIN_BUILTIN_ABS_1X16, 0 },

  { CODE_FOR_clrsbsi2, "__builtin_bfin_norm_fr1x32", BFIN_BUILTIN_NORM_1X32, 0 },
  { CODE_FOR_ssroundsi2, "__builtin_bfin_round_fr1x32", BFIN_BUILTIN_ROUND_1X32, 0 },
  { CODE_FOR_ssnegsi2, "__builtin_bfin_negate_fr1x32", BFIN_BUILTIN_NEG_1X32, 0 },
  { CODE_FOR_ssabssi2, "__builtin_bfin_abs_fr1x32", BFIN_BUILTIN_ABS_1X32, 0 },

  { CODE_FOR_movv2hi_hi_low, "__builtin_bfin_extract_lo", BFIN_BUILTIN_EXTRACTLO, 0 },
  { CODE_FOR_movv2hi_hi_high, "__builtin_bfin_extract_hi", BFIN_BUILTIN_EXTRACTHI, 0 },
  { CODE_FOR_ssnegv2hi2, "__builtin_bfin_negate_fr2x16", BFIN_BUILTIN_NEG_2X16, 0 },
  { CODE_FOR_ssabsv2hi2, "__builtin_bfin_abs_fr2x16", BFIN_BUILTIN_ABS_2X16, 0 }
};

/* Errors in the source file can cause expand_expr to return const0_rtx
   where we expect a vector.  To avoid crashing, use one of the vector
   clear instructions.  */
static rtx
safe_vector_operand (rtx x, enum machine_mode mode)
{
  if (x != const0_rtx)
    return x;
  x = gen_reg_rtx (SImode);

  emit_insn (gen_movsi (x, CONST0_RTX (SImode)));
  return gen_lowpart (mode, x);
}

/* Subroutine of bfin_expand_builtin to take care of binop insns.  MACFLAG is -1
   if this is a normal binary op, or one of the MACFLAG_xxx constants.  */

static rtx
bfin_expand_binop_builtin (enum insn_code icode, tree exp, rtx target,
                           int macflag)
{
  rtx pat;
  tree arg0 = CALL_EXPR_ARG (exp, 0);
  tree arg1 = CALL_EXPR_ARG (exp, 1);
  rtx op0 = expand_normal (arg0);
  rtx op1 = expand_normal (arg1);
  enum machine_mode op0mode = GET_MODE (op0);
  enum machine_mode op1mode = GET_MODE (op1);
  enum machine_mode tmode = insn_data[icode].operand[0].mode;
  enum machine_mode mode0 = insn_data[icode].operand[1].mode;
  enum machine_mode mode1 = insn_data[icode].operand[2].mode;

  if (VECTOR_MODE_P (mode0))
    op0 = safe_vector_operand (op0, mode0);
  if (VECTOR_MODE_P (mode1))
    op1 = safe_vector_operand (op1, mode1);

  if (! target
      || GET_MODE (target) != tmode
      || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
    target = gen_reg_rtx (tmode);

  if ((op0mode == SImode || op0mode == VOIDmode) && mode0 == HImode)
    {
      op0mode = HImode;
      op0 = gen_lowpart (HImode, op0);
    }
  if ((op1mode == SImode || op1mode == VOIDmode) && mode1 == HImode)
    {
      op1mode = HImode;
      op1 = gen_lowpart (HImode, op1);
    }
  /* In case the insn wants input operands in modes different from
     the result, abort.  */
  gcc_assert ((op0mode == mode0 || op0mode == VOIDmode)
              && (op1mode == mode1 || op1mode == VOIDmode));

  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
    op0 = copy_to_mode_reg (mode0, op0);
  if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
    op1 = copy_to_mode_reg (mode1, op1);

  if (macflag == -1)
    pat = GEN_FCN (icode) (target, op0, op1);
  else
    pat = GEN_FCN (icode) (target, op0, op1, GEN_INT (macflag));
  if (! pat)
    return 0;

  emit_insn (pat);
  return target;
}
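
/* Illustration: bdesc_2arg above maps both __builtin_bfin_mult_fr1x16
   (MACFLAG_T) and __builtin_bfin_multr_fr1x16 (MACFLAG_NONE) onto
   CODE_FOR_flag_mulhi; the macflag simply becomes the extra operand
   passed to GEN_FCN here, presumably selecting the truncating
   vs. rounding variant of the same fractional multiply pattern (the
   "r" suffix denotes rounding).  */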

/* Subroutine of bfin_expand_builtin to take care of unop insns.  */

static rtx
bfin_expand_unop_builtin (enum insn_code icode, tree exp,
                          rtx target)
{
  rtx pat;
  tree arg0 = CALL_EXPR_ARG (exp, 0);
  rtx op0 = expand_normal (arg0);
  enum machine_mode op0mode = GET_MODE (op0);
  enum machine_mode tmode = insn_data[icode].operand[0].mode;
  enum machine_mode mode0 = insn_data[icode].operand[1].mode;

  if (! target
      || GET_MODE (target) != tmode
      || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
    target = gen_reg_rtx (tmode);

  if (VECTOR_MODE_P (mode0))
    op0 = safe_vector_operand (op0, mode0);

  if (op0mode == SImode && mode0 == HImode)
    {
      op0mode = HImode;
      op0 = gen_lowpart (HImode, op0);
    }
  gcc_assert (op0mode == mode0 || op0mode == VOIDmode);

  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
    op0 = copy_to_mode_reg (mode0, op0);

  pat = GEN_FCN (icode) (target, op0);
  if (! pat)
    return 0;
  emit_insn (pat);
  return target;
}

/* Expand an expression EXP that calls a built-in function,
   with result going to TARGET if that's convenient
   (and in mode MODE if that's convenient).
   SUBTARGET may be used as the target for computing one of EXP's operands.
   IGNORE is nonzero if the value is to be ignored.  */

static rtx
bfin_expand_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
                     rtx subtarget ATTRIBUTE_UNUSED,
                     enum machine_mode mode ATTRIBUTE_UNUSED,
                     int ignore ATTRIBUTE_UNUSED)
{
  size_t i;
  enum insn_code icode;
  const struct builtin_description *d;
  tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
  unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
  tree arg0, arg1, arg2;
  rtx op0, op1, op2, accvec, pat, tmp1, tmp2, a0reg, a1reg;
  enum machine_mode tmode, mode0;

  switch (fcode)
    {
    case BFIN_BUILTIN_CSYNC:
      emit_insn (gen_csync ());
      return 0;
    case BFIN_BUILTIN_SSYNC:
      emit_insn (gen_ssync ());
      return 0;

    case BFIN_BUILTIN_DIFFHL_2X16:
    case BFIN_BUILTIN_DIFFLH_2X16:
    case BFIN_BUILTIN_SUM_2X16:
      arg0 = CALL_EXPR_ARG (exp, 0);
      op0 = expand_normal (arg0);
      icode = (fcode == BFIN_BUILTIN_DIFFHL_2X16 ? CODE_FOR_subhilov2hi3
               : fcode == BFIN_BUILTIN_DIFFLH_2X16 ? CODE_FOR_sublohiv2hi3
               : CODE_FOR_ssaddhilov2hi3);
      tmode = insn_data[icode].operand[0].mode;
      mode0 = insn_data[icode].operand[1].mode;

      if (! target
          || GET_MODE (target) != tmode
          || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
        target = gen_reg_rtx (tmode);

      if (VECTOR_MODE_P (mode0))
        op0 = safe_vector_operand (op0, mode0);

      if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
        op0 = copy_to_mode_reg (mode0, op0);

      pat = GEN_FCN (icode) (target, op0, op0);
      if (! pat)
        return 0;
      emit_insn (pat);
      return target;

    case BFIN_BUILTIN_MULT_1X32X32:
    case BFIN_BUILTIN_MULT_1X32X32NS:
      arg0 = CALL_EXPR_ARG (exp, 0);
      arg1 = CALL_EXPR_ARG (exp, 1);
      op0 = expand_normal (arg0);
      op1 = expand_normal (arg1);
      if (! target
          || !register_operand (target, SImode))
        target = gen_reg_rtx (SImode);
      if (! register_operand (op0, SImode))
        op0 = copy_to_mode_reg (SImode, op0);
      if (! register_operand (op1, SImode))
        op1 = copy_to_mode_reg (SImode, op1);

      a1reg = gen_rtx_REG (PDImode, REG_A1);
      a0reg = gen_rtx_REG (PDImode, REG_A0);
      tmp1 = gen_lowpart (V2HImode, op0);
      tmp2 = gen_lowpart (V2HImode, op1);
      emit_insn (gen_flag_macinit1hi (a1reg,
                                      gen_lowpart (HImode, op0),
                                      gen_lowpart (HImode, op1),
                                      GEN_INT (MACFLAG_FU)));
      emit_insn (gen_lshrpdi3 (a1reg, a1reg, GEN_INT (16)));

      if (fcode == BFIN_BUILTIN_MULT_1X32X32)
        emit_insn (gen_flag_mul_macv2hi_parts_acconly (a0reg, a1reg, tmp1, tmp2,
                                                       const1_rtx, const1_rtx,
                                                       const1_rtx, const0_rtx, a1reg,
                                                       const0_rtx, GEN_INT (MACFLAG_NONE),
                                                       GEN_INT (MACFLAG_M)));
      else
        {
          /* For saturating multiplication, there's exactly one special case
             to be handled: multiplying the smallest negative value with
             itself.  Due to shift correction in fractional multiplies, this
             can overflow.  Iff this happens, OP2 will contain 1, which, when
             added in 32 bits to the smallest negative, wraps to the largest
             positive, which is the result we want.  */
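          /* Concretely (added illustration): in 1.31 fractional
             arithmetic the operands represent [-1.0, 1.0), and the
             only overflowing product is (-1.0) * (-1.0) = +1.0, since
             the largest representable value is 1 - 2^-31
             (0x7fffffff).  */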
          op2 = gen_reg_rtx (V2HImode);
          emit_insn (gen_packv2hi (op2, tmp1, tmp2, const0_rtx, const0_rtx));
          emit_insn (gen_movsibi (gen_rtx_REG (BImode, REG_CC),
                                  gen_lowpart (SImode, op2)));
          emit_insn (gen_flag_mul_macv2hi_parts_acconly_andcc0 (a0reg, a1reg, tmp1, tmp2,
                                                                const1_rtx, const1_rtx,
                                                                const1_rtx, const0_rtx, a1reg,
                                                                const0_rtx, GEN_INT (MACFLAG_NONE),
                                                                GEN_INT (MACFLAG_M)));
          op2 = gen_reg_rtx (SImode);
          emit_insn (gen_movbisi (op2, gen_rtx_REG (BImode, REG_CC)));
        }
      emit_insn (gen_flag_machi_parts_acconly (a1reg, tmp2, tmp1,
                                               const1_rtx, const0_rtx,
                                               a1reg, const0_rtx, GEN_INT (MACFLAG_M)));
      emit_insn (gen_ashrpdi3 (a1reg, a1reg, GEN_INT (15)));
      emit_insn (gen_sum_of_accumulators (target, a0reg, a0reg, a1reg));
      if (fcode == BFIN_BUILTIN_MULT_1X32X32NS)
        emit_insn (gen_addsi3 (target, target, op2));
      return target;

    case BFIN_BUILTIN_CPLX_MUL_16:
    case BFIN_BUILTIN_CPLX_MUL_16_S40:
      arg0 = CALL_EXPR_ARG (exp, 0);
      arg1 = CALL_EXPR_ARG (exp, 1);
      op0 = expand_normal (arg0);
      op1 = expand_normal (arg1);
      accvec = gen_reg_rtx (V2PDImode);
      icode = CODE_FOR_flag_macv2hi_parts;
      tmode = insn_data[icode].operand[0].mode;

      if (! target
          || GET_MODE (target) != V2HImode
          || ! (*insn_data[icode].operand[0].predicate) (target, V2HImode))
        target = gen_reg_rtx (tmode);
      if (! register_operand (op0, GET_MODE (op0)))
        op0 = copy_to_mode_reg (GET_MODE (op0), op0);
      if (! register_operand (op1, GET_MODE (op1)))
        op1 = copy_to_mode_reg (GET_MODE (op1), op1);

      if (fcode == BFIN_BUILTIN_CPLX_MUL_16)
        emit_insn (gen_flag_macinit1v2hi_parts (accvec, op0, op1, const0_rtx,
                                                const0_rtx, const0_rtx,
                                                const1_rtx, GEN_INT (MACFLAG_W32)));
      else
        emit_insn (gen_flag_macinit1v2hi_parts (accvec, op0, op1, const0_rtx,
                                                const0_rtx, const0_rtx,
                                                const1_rtx, GEN_INT (MACFLAG_NONE)));
      emit_insn (gen_flag_macv2hi_parts (target, op0, op1, const1_rtx,
                                         const1_rtx, const1_rtx,
                                         const0_rtx, accvec, const1_rtx, const0_rtx,
                                         GEN_INT (MACFLAG_NONE), accvec));

      return target;

    case BFIN_BUILTIN_CPLX_MAC_16:
    case BFIN_BUILTIN_CPLX_MSU_16:
    case BFIN_BUILTIN_CPLX_MAC_16_S40:
    case BFIN_BUILTIN_CPLX_MSU_16_S40:
      arg0 = CALL_EXPR_ARG (exp, 0);
      arg1 = CALL_EXPR_ARG (exp, 1);
      arg2 = CALL_EXPR_ARG (exp, 2);
      op0 = expand_normal (arg0);
      op1 = expand_normal (arg1);
      op2 = expand_normal (arg2);
      accvec = gen_reg_rtx (V2PDImode);
      icode = CODE_FOR_flag_macv2hi_parts;
      tmode = insn_data[icode].operand[0].mode;

      if (! target
          || GET_MODE (target) != V2HImode
          || ! (*insn_data[icode].operand[0].predicate) (target, V2HImode))
        target = gen_reg_rtx (tmode);
      if (! register_operand (op1, GET_MODE (op1)))
        op1 = copy_to_mode_reg (GET_MODE (op1), op1);
      if (! register_operand (op2, GET_MODE (op2)))
        op2 = copy_to_mode_reg (GET_MODE (op2), op2);

      tmp1 = gen_reg_rtx (SImode);
      tmp2 = gen_reg_rtx (SImode);
      emit_insn (gen_ashlsi3 (tmp1, gen_lowpart (SImode, op0), GEN_INT (16)));
      emit_move_insn (tmp2, gen_lowpart (SImode, op0));
      emit_insn (gen_movstricthi_1 (gen_lowpart (HImode, tmp2), const0_rtx));
      emit_insn (gen_load_accumulator_pair (accvec, tmp1, tmp2));
      if (fcode == BFIN_BUILTIN_CPLX_MAC_16
          || fcode == BFIN_BUILTIN_CPLX_MSU_16)
        emit_insn (gen_flag_macv2hi_parts_acconly (accvec, op1, op2, const0_rtx,
                                                   const0_rtx, const0_rtx,
                                                   const1_rtx, accvec, const0_rtx,
                                                   const0_rtx,
                                                   GEN_INT (MACFLAG_W32)));
      else
        emit_insn (gen_flag_macv2hi_parts_acconly (accvec, op1, op2, const0_rtx,
                                                   const0_rtx, const0_rtx,
                                                   const1_rtx, accvec, const0_rtx,
                                                   const0_rtx,
                                                   GEN_INT (MACFLAG_NONE)));
      if (fcode == BFIN_BUILTIN_CPLX_MAC_16
          || fcode == BFIN_BUILTIN_CPLX_MAC_16_S40)
        {
          tmp1 = const1_rtx;
          tmp2 = const0_rtx;
        }
      else
        {
          tmp1 = const0_rtx;
          tmp2 = const1_rtx;
        }
      emit_insn (gen_flag_macv2hi_parts (target, op1, op2, const1_rtx,
                                         const1_rtx, const1_rtx,
                                         const0_rtx, accvec, tmp1, tmp2,
                                         GEN_INT (MACFLAG_NONE), accvec));

      return target;

    case BFIN_BUILTIN_CPLX_SQU:
      arg0 = CALL_EXPR_ARG (exp, 0);
      op0 = expand_normal (arg0);
      accvec = gen_reg_rtx (V2PDImode);
      icode = CODE_FOR_flag_mulv2hi;
      tmp1 = gen_reg_rtx (V2HImode);
      tmp2 = gen_reg_rtx (V2HImode);

      if (! target
          || GET_MODE (target) != V2HImode
          || ! (*insn_data[icode].operand[0].predicate) (target, V2HImode))
        target = gen_reg_rtx (V2HImode);
      if (! register_operand (op0, GET_MODE (op0)))
        op0 = copy_to_mode_reg (GET_MODE (op0), op0);

      emit_insn (gen_flag_mulv2hi (tmp1, op0, op0, GEN_INT (MACFLAG_NONE)));

      emit_insn (gen_flag_mulhi_parts (gen_lowpart (HImode, tmp2), op0, op0,
                                       const0_rtx, const1_rtx,
                                       GEN_INT (MACFLAG_NONE)));

      emit_insn (gen_ssaddhi3_high_parts (target, tmp2, tmp2, tmp2, const0_rtx,
                                          const0_rtx));
      emit_insn (gen_sssubhi3_low_parts (target, target, tmp1, tmp1,
                                         const0_rtx, const1_rtx));

      return target;

    default:
      break;
    }

  for (i = 0, d = bdesc_2arg; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
    if (d->code == fcode)
      return bfin_expand_binop_builtin (d->icode, exp, target,
                                        d->macflag);

  for (i = 0, d = bdesc_1arg; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
    if (d->code == fcode)
      return bfin_expand_unop_builtin (d->icode, exp, target);

  gcc_unreachable ();
}
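
/* Illustration: a source-level call such as __builtin_bfin_csync ()
   arrives here with fcode == BFIN_BUILTIN_CSYNC and is expanded
   directly by the switch; anything not special-cased above is looked
   up in bdesc_2arg or bdesc_1arg and handled by the generic binop and
   unop expanders.  */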

static void
bfin_conditional_register_usage (void)
{
  /* Initialize the RTL for the condition code and RETS registers.  */
  bfin_cc_rtx = gen_rtx_REG (BImode, REG_CC);
  bfin_rets_rtx = gen_rtx_REG (Pmode, REG_RETS);
  if (TARGET_FDPIC)
    call_used_regs[FDPIC_REGNO] = 1;
  if (!TARGET_FDPIC && flag_pic)
    {
      fixed_regs[PIC_OFFSET_TABLE_REGNUM] = 1;
      call_used_regs[PIC_OFFSET_TABLE_REGNUM] = 1;
    }
}
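
/* Illustration (PIC_OFFSET_TABLE_REGNUM is by definition the PIC/GOT
   pointer register): compiling with -fpic but without FD-PIC marks
   that register both fixed and call-used, so the allocator never
   assigns it and it is not saved across calls; under FD-PIC only
   FDPIC_REGNO is marked call-used.  */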

#undef TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS bfin_init_builtins

#undef TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN bfin_expand_builtin

#undef TARGET_ASM_GLOBALIZE_LABEL
#define TARGET_ASM_GLOBALIZE_LABEL bfin_globalize_label

#undef TARGET_ASM_FILE_START
#define TARGET_ASM_FILE_START output_file_start

#undef TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE bfin_attribute_table

#undef TARGET_COMP_TYPE_ATTRIBUTES
#define TARGET_COMP_TYPE_ATTRIBUTES bfin_comp_type_attributes

#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS bfin_rtx_costs

#undef  TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST bfin_address_cost

#undef TARGET_REGISTER_MOVE_COST
#define TARGET_REGISTER_MOVE_COST bfin_register_move_cost

#undef TARGET_MEMORY_MOVE_COST
#define TARGET_MEMORY_MOVE_COST bfin_memory_move_cost

#undef  TARGET_ASM_INTEGER
#define TARGET_ASM_INTEGER bfin_assemble_integer

#undef TARGET_MACHINE_DEPENDENT_REORG
#define TARGET_MACHINE_DEPENDENT_REORG bfin_reorg

#undef TARGET_FUNCTION_OK_FOR_SIBCALL
#define TARGET_FUNCTION_OK_FOR_SIBCALL bfin_function_ok_for_sibcall

#undef TARGET_ASM_OUTPUT_MI_THUNK
#define TARGET_ASM_OUTPUT_MI_THUNK bfin_output_mi_thunk
#undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
#define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_const_tree_hwi_hwi_const_tree_true

#undef TARGET_SCHED_ADJUST_COST
#define TARGET_SCHED_ADJUST_COST bfin_adjust_cost

#undef TARGET_SCHED_ISSUE_RATE
#define TARGET_SCHED_ISSUE_RATE bfin_issue_rate

#undef TARGET_PROMOTE_FUNCTION_MODE
#define TARGET_PROMOTE_FUNCTION_MODE default_promote_function_mode_always_promote

#undef TARGET_ARG_PARTIAL_BYTES
#define TARGET_ARG_PARTIAL_BYTES bfin_arg_partial_bytes

#undef TARGET_FUNCTION_ARG
#define TARGET_FUNCTION_ARG bfin_function_arg

#undef TARGET_FUNCTION_ARG_ADVANCE
#define TARGET_FUNCTION_ARG_ADVANCE bfin_function_arg_advance

#undef TARGET_PASS_BY_REFERENCE
#define TARGET_PASS_BY_REFERENCE bfin_pass_by_reference

#undef TARGET_SETUP_INCOMING_VARARGS
#define TARGET_SETUP_INCOMING_VARARGS setup_incoming_varargs

#undef TARGET_STRUCT_VALUE_RTX
#define TARGET_STRUCT_VALUE_RTX bfin_struct_value_rtx

#undef TARGET_VECTOR_MODE_SUPPORTED_P
#define TARGET_VECTOR_MODE_SUPPORTED_P bfin_vector_mode_supported_p

#undef TARGET_OPTION_OVERRIDE
#define TARGET_OPTION_OVERRIDE bfin_option_override

#undef TARGET_SECONDARY_RELOAD
#define TARGET_SECONDARY_RELOAD bfin_secondary_reload

#undef TARGET_CLASS_LIKELY_SPILLED_P
#define TARGET_CLASS_LIKELY_SPILLED_P bfin_class_likely_spilled_p

#undef TARGET_DELEGITIMIZE_ADDRESS
#define TARGET_DELEGITIMIZE_ADDRESS bfin_delegitimize_address

#undef TARGET_LEGITIMATE_CONSTANT_P
#define TARGET_LEGITIMATE_CONSTANT_P bfin_legitimate_constant_p

#undef TARGET_CANNOT_FORCE_CONST_MEM
#define TARGET_CANNOT_FORCE_CONST_MEM bfin_cannot_force_const_mem

#undef TARGET_RETURN_IN_MEMORY
#define TARGET_RETURN_IN_MEMORY bfin_return_in_memory

#undef TARGET_LEGITIMATE_ADDRESS_P
#define TARGET_LEGITIMATE_ADDRESS_P     bfin_legitimate_address_p

#undef TARGET_FRAME_POINTER_REQUIRED
#define TARGET_FRAME_POINTER_REQUIRED bfin_frame_pointer_required

#undef TARGET_CAN_ELIMINATE
#define TARGET_CAN_ELIMINATE bfin_can_eliminate

#undef TARGET_CONDITIONAL_REGISTER_USAGE
#define TARGET_CONDITIONAL_REGISTER_USAGE bfin_conditional_register_usage

#undef TARGET_ASM_TRAMPOLINE_TEMPLATE
#define TARGET_ASM_TRAMPOLINE_TEMPLATE bfin_asm_trampoline_template
#undef TARGET_TRAMPOLINE_INIT
#define TARGET_TRAMPOLINE_INIT bfin_trampoline_init

#undef TARGET_EXTRA_LIVE_ON_ENTRY
#define TARGET_EXTRA_LIVE_ON_ENTRY bfin_extra_live_on_entry

/* Passes after sched2 can break the helpful TImode annotations that
   haifa-sched puts on every insn.  Just do scheduling in reorg.  */
#undef TARGET_DELAY_SCHED2
#define TARGET_DELAY_SCHED2 true

/* Variable tracking should be run after all optimizations which
   change order of insns.  It also needs a valid CFG.  */
#undef TARGET_DELAY_VARTRACK
#define TARGET_DELAY_VARTRACK true

struct gcc_target targetm = TARGET_INITIALIZER;
