/* The Blackfin code generation auxiliary output file.
   Copyright (C) 2005, 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.
   Contributed by Analog Devices.

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it
   under the terms of the GNU General Public License as published
   by the Free Software Foundation; either version 3, or (at your
   option) any later version.

   GCC is distributed in the hope that it will be useful, but WITHOUT
   ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
   or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public
   License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "real.h"
#include "insn-config.h"
#include "insn-codes.h"
#include "conditions.h"
#include "insn-flags.h"
#include "output.h"
#include "insn-attr.h"
#include "tree.h"
#include "flags.h"
#include "except.h"
#include "function.h"
#include "input.h"
#include "target.h"
#include "target-def.h"
#include "expr.h"
#include "toplev.h"
#include "recog.h"
#include "optabs.h"
#include "ggc.h"
#include "integrate.h"
#include "cgraph.h"
#include "langhooks.h"
#include "bfin-protos.h"
#include "tm-preds.h"
#include "tm-constrs.h"
#include "gt-bfin.h"
#include "basic-block.h"
#include "cfglayout.h"
#include "timevar.h"
#include "df.h"

/* A C structure for machine-specific, per-function data.
   This is added to the cfun structure.  */
struct GTY(()) machine_function
{
  /* Set if we are notified by the doloop pass that a hardware loop
     was created.  */
  int has_hardware_loops;

  /* Set if we create a memcpy pattern that uses loop registers.  */
  int has_loopreg_clobber;
};

/* RTX for condition code flag register and RETS register.  */
extern GTY(()) rtx bfin_cc_rtx;
extern GTY(()) rtx bfin_rets_rtx;
rtx bfin_cc_rtx, bfin_rets_rtx;

int max_arg_registers = 0;

/* Arrays used when emitting register names.  */
const char *short_reg_names[]  =  SHORT_REGISTER_NAMES;
const char *high_reg_names[]   =  HIGH_REGISTER_NAMES;
const char *dregs_pair_names[] =  DREGS_PAIR_NAMES;
const char *byte_reg_names[]   =  BYTE_REGISTER_NAMES;

static int arg_regs[] = FUNCTION_ARG_REGISTERS;
static int ret_regs[] = FUNCTION_RETURN_REGISTERS;

/* Nonzero if -mshared-library-id was given.  */
static int bfin_lib_id_given;

/* Nonzero if -fschedule-insns2 was given.  We override it and
   call the scheduler ourselves during reorg.  */
static int bfin_flag_schedule_insns2;

/* Determines whether we run variable tracking in machine dependent
   reorganization.  */
static int bfin_flag_var_tracking;

/* -mcpu support */
bfin_cpu_t bfin_cpu_type = BFIN_CPU_UNKNOWN;

/* -msi-revision support.  There are two special values:
   -1      -msi-revision=none.
   0xffff  -msi-revision=any.  */
int bfin_si_revision;

/* The workarounds enabled.  */
unsigned int bfin_workarounds = 0;

struct bfin_cpu
{
  const char *name;
  bfin_cpu_t type;
  int si_revision;
  unsigned int workarounds;
};
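
/* Each CPU appears once per supported silicon revision, newest revision
   first; the workarounds field records the set of anomaly workarounds
   (WA_* flags) that revision still requires.  */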
struct bfin_cpu bfin_cpus[] =
{
  {"bf512", BFIN_CPU_BF512, 0x0000,
   WA_SPECULATIVE_LOADS | WA_05000074},

  {"bf514", BFIN_CPU_BF514, 0x0000,
   WA_SPECULATIVE_LOADS | WA_05000074},

  {"bf516", BFIN_CPU_BF516, 0x0000,
   WA_SPECULATIVE_LOADS | WA_05000074},

  {"bf518", BFIN_CPU_BF518, 0x0000,
   WA_SPECULATIVE_LOADS | WA_05000074},

  {"bf522", BFIN_CPU_BF522, 0x0002,
   WA_SPECULATIVE_LOADS | WA_05000074},
  {"bf522", BFIN_CPU_BF522, 0x0001,
   WA_SPECULATIVE_LOADS | WA_RETS | WA_05000074},
  {"bf522", BFIN_CPU_BF522, 0x0000,
   WA_SPECULATIVE_LOADS | WA_RETS | WA_05000074},

  {"bf523", BFIN_CPU_BF523, 0x0002,
   WA_SPECULATIVE_LOADS | WA_05000074},
  {"bf523", BFIN_CPU_BF523, 0x0001,
   WA_SPECULATIVE_LOADS | WA_RETS | WA_05000074},
  {"bf523", BFIN_CPU_BF523, 0x0000,
   WA_SPECULATIVE_LOADS | WA_RETS | WA_05000074},

  {"bf524", BFIN_CPU_BF524, 0x0002,
   WA_SPECULATIVE_LOADS | WA_05000074},
  {"bf524", BFIN_CPU_BF524, 0x0001,
   WA_SPECULATIVE_LOADS | WA_RETS | WA_05000074},
  {"bf524", BFIN_CPU_BF524, 0x0000,
   WA_SPECULATIVE_LOADS | WA_RETS | WA_05000074},

  {"bf525", BFIN_CPU_BF525, 0x0002,
   WA_SPECULATIVE_LOADS | WA_05000074},
  {"bf525", BFIN_CPU_BF525, 0x0001,
   WA_SPECULATIVE_LOADS | WA_RETS | WA_05000074},
  {"bf525", BFIN_CPU_BF525, 0x0000,
   WA_SPECULATIVE_LOADS | WA_RETS | WA_05000074},

  {"bf526", BFIN_CPU_BF526, 0x0002,
   WA_SPECULATIVE_LOADS | WA_05000074},
  {"bf526", BFIN_CPU_BF526, 0x0001,
   WA_SPECULATIVE_LOADS | WA_RETS | WA_05000074},
  {"bf526", BFIN_CPU_BF526, 0x0000,
   WA_SPECULATIVE_LOADS | WA_RETS | WA_05000074},

  {"bf527", BFIN_CPU_BF527, 0x0002,
   WA_SPECULATIVE_LOADS | WA_05000074},
  {"bf527", BFIN_CPU_BF527, 0x0001,
   WA_SPECULATIVE_LOADS | WA_RETS | WA_05000074},
  {"bf527", BFIN_CPU_BF527, 0x0000,
   WA_SPECULATIVE_LOADS | WA_RETS | WA_05000074},

  {"bf531", BFIN_CPU_BF531, 0x0006,
   WA_SPECULATIVE_LOADS | WA_LOAD_LCREGS | WA_05000074},
  {"bf531", BFIN_CPU_BF531, 0x0005,
   WA_SPECULATIVE_LOADS | WA_RETS | WA_05000283 | WA_05000315
   | WA_LOAD_LCREGS | WA_05000074},
  {"bf531", BFIN_CPU_BF531, 0x0004,
   WA_SPECULATIVE_LOADS | WA_SPECULATIVE_SYNCS | WA_RETS
   | WA_05000283 | WA_05000257 | WA_05000315 | WA_LOAD_LCREGS
   | WA_05000074},
  {"bf531", BFIN_CPU_BF531, 0x0003,
   WA_SPECULATIVE_LOADS | WA_SPECULATIVE_SYNCS | WA_RETS
   | WA_05000283 | WA_05000257 | WA_05000315 | WA_LOAD_LCREGS
   | WA_05000074},

  {"bf532", BFIN_CPU_BF532, 0x0006,
   WA_SPECULATIVE_LOADS | WA_LOAD_LCREGS | WA_05000074},
  {"bf532", BFIN_CPU_BF532, 0x0005,
   WA_SPECULATIVE_LOADS | WA_RETS | WA_05000283 | WA_05000315
   | WA_LOAD_LCREGS | WA_05000074},
  {"bf532", BFIN_CPU_BF532, 0x0004,
   WA_SPECULATIVE_LOADS | WA_SPECULATIVE_SYNCS | WA_RETS
   | WA_05000283 | WA_05000257 | WA_05000315 | WA_LOAD_LCREGS
   | WA_05000074},
  {"bf532", BFIN_CPU_BF532, 0x0003,
   WA_SPECULATIVE_LOADS | WA_SPECULATIVE_SYNCS | WA_RETS
   | WA_05000283 | WA_05000257 | WA_05000315 | WA_LOAD_LCREGS
   | WA_05000074},

  {"bf533", BFIN_CPU_BF533, 0x0006,
   WA_SPECULATIVE_LOADS | WA_LOAD_LCREGS | WA_05000074},
  {"bf533", BFIN_CPU_BF533, 0x0005,
   WA_SPECULATIVE_LOADS | WA_RETS | WA_05000283 | WA_05000315
   | WA_LOAD_LCREGS | WA_05000074},
  {"bf533", BFIN_CPU_BF533, 0x0004,
   WA_SPECULATIVE_LOADS | WA_SPECULATIVE_SYNCS | WA_RETS
   | WA_05000283 | WA_05000257 | WA_05000315 | WA_LOAD_LCREGS
   | WA_05000074},
  {"bf533", BFIN_CPU_BF533, 0x0003,
   WA_SPECULATIVE_LOADS | WA_SPECULATIVE_SYNCS | WA_RETS
   | WA_05000283 | WA_05000257 | WA_05000315 | WA_LOAD_LCREGS
   | WA_05000074},

  {"bf534", BFIN_CPU_BF534, 0x0003,
   WA_SPECULATIVE_LOADS | WA_RETS | WA_LOAD_LCREGS | WA_05000074},
  {"bf534", BFIN_CPU_BF534, 0x0002,
   WA_SPECULATIVE_LOADS | WA_SPECULATIVE_SYNCS | WA_RETS
   | WA_05000283 | WA_05000257 | WA_05000315 | WA_LOAD_LCREGS
   | WA_05000074},
  {"bf534", BFIN_CPU_BF534, 0x0001,
   WA_SPECULATIVE_LOADS | WA_SPECULATIVE_SYNCS | WA_RETS
   | WA_05000283 | WA_05000257 | WA_05000315 | WA_LOAD_LCREGS
   | WA_05000074},

  {"bf536", BFIN_CPU_BF536, 0x0003,
   WA_SPECULATIVE_LOADS | WA_RETS | WA_LOAD_LCREGS | WA_05000074},
  {"bf536", BFIN_CPU_BF536, 0x0002,
   WA_SPECULATIVE_LOADS | WA_SPECULATIVE_SYNCS | WA_RETS
   | WA_05000283 | WA_05000257 | WA_05000315 | WA_LOAD_LCREGS
   | WA_05000074},
  {"bf536", BFIN_CPU_BF536, 0x0001,
   WA_SPECULATIVE_LOADS | WA_SPECULATIVE_SYNCS | WA_RETS
   | WA_05000283 | WA_05000257 | WA_05000315 | WA_LOAD_LCREGS
   | WA_05000074},

  {"bf537", BFIN_CPU_BF537, 0x0003,
   WA_SPECULATIVE_LOADS | WA_RETS | WA_LOAD_LCREGS | WA_05000074},
  {"bf537", BFIN_CPU_BF537, 0x0002,
   WA_SPECULATIVE_LOADS | WA_SPECULATIVE_SYNCS | WA_RETS
   | WA_05000283 | WA_05000257 | WA_05000315 | WA_LOAD_LCREGS
   | WA_05000074},
  {"bf537", BFIN_CPU_BF537, 0x0001,
   WA_SPECULATIVE_LOADS | WA_SPECULATIVE_SYNCS | WA_RETS
   | WA_05000283 | WA_05000257 | WA_05000315 | WA_LOAD_LCREGS
   | WA_05000074},

  {"bf538", BFIN_CPU_BF538, 0x0005,
   WA_SPECULATIVE_LOADS | WA_LOAD_LCREGS | WA_05000074},
  {"bf538", BFIN_CPU_BF538, 0x0004,
   WA_SPECULATIVE_LOADS | WA_RETS | WA_LOAD_LCREGS | WA_05000074},
  {"bf538", BFIN_CPU_BF538, 0x0003,
   WA_SPECULATIVE_LOADS | WA_RETS
   | WA_05000283 | WA_05000315 | WA_LOAD_LCREGS | WA_05000074},
  {"bf538", BFIN_CPU_BF538, 0x0002,
   WA_SPECULATIVE_LOADS | WA_RETS
   | WA_05000283 | WA_05000257 | WA_05000315 | WA_LOAD_LCREGS
   | WA_05000074},

  {"bf539", BFIN_CPU_BF539, 0x0005,
   WA_SPECULATIVE_LOADS | WA_LOAD_LCREGS | WA_05000074},
  {"bf539", BFIN_CPU_BF539, 0x0004,
   WA_SPECULATIVE_LOADS | WA_RETS | WA_LOAD_LCREGS | WA_05000074},
  {"bf539", BFIN_CPU_BF539, 0x0003,
   WA_SPECULATIVE_LOADS | WA_RETS
   | WA_05000283 | WA_05000315 | WA_LOAD_LCREGS | WA_05000074},
  {"bf539", BFIN_CPU_BF539, 0x0002,
   WA_SPECULATIVE_LOADS | WA_RETS
   | WA_05000283 | WA_05000257 | WA_05000315 | WA_LOAD_LCREGS
   | WA_05000074},

  {"bf542m", BFIN_CPU_BF542M, 0x0003,
   WA_SPECULATIVE_LOADS | WA_INDIRECT_CALLS | WA_05000074},

  {"bf542", BFIN_CPU_BF542, 0x0002,
   WA_SPECULATIVE_LOADS | WA_INDIRECT_CALLS | WA_05000074},
  {"bf542", BFIN_CPU_BF542, 0x0001,
   WA_SPECULATIVE_LOADS | WA_RETS | WA_INDIRECT_CALLS | WA_05000074},
  {"bf542", BFIN_CPU_BF542, 0x0000,
   WA_SPECULATIVE_LOADS | WA_RETS | WA_INDIRECT_CALLS | WA_LOAD_LCREGS
   | WA_05000074},

  {"bf544m", BFIN_CPU_BF544M, 0x0003,
   WA_SPECULATIVE_LOADS | WA_INDIRECT_CALLS | WA_05000074},

  {"bf544", BFIN_CPU_BF544, 0x0002,
   WA_SPECULATIVE_LOADS | WA_INDIRECT_CALLS | WA_05000074},
  {"bf544", BFIN_CPU_BF544, 0x0001,
   WA_SPECULATIVE_LOADS | WA_RETS | WA_INDIRECT_CALLS | WA_05000074},
  {"bf544", BFIN_CPU_BF544, 0x0000,
   WA_SPECULATIVE_LOADS | WA_RETS | WA_INDIRECT_CALLS | WA_LOAD_LCREGS
   | WA_05000074},

  {"bf547m", BFIN_CPU_BF547M, 0x0003,
   WA_SPECULATIVE_LOADS | WA_INDIRECT_CALLS | WA_05000074},

  {"bf547", BFIN_CPU_BF547, 0x0002,
   WA_SPECULATIVE_LOADS | WA_INDIRECT_CALLS | WA_05000074},
  {"bf547", BFIN_CPU_BF547, 0x0001,
   WA_SPECULATIVE_LOADS | WA_RETS | WA_INDIRECT_CALLS | WA_05000074},
  {"bf547", BFIN_CPU_BF547, 0x0000,
   WA_SPECULATIVE_LOADS | WA_RETS | WA_INDIRECT_CALLS | WA_LOAD_LCREGS
   | WA_05000074},

  {"bf548m", BFIN_CPU_BF548M, 0x0003,
   WA_SPECULATIVE_LOADS | WA_INDIRECT_CALLS | WA_05000074},

  {"bf548", BFIN_CPU_BF548, 0x0002,
   WA_SPECULATIVE_LOADS | WA_INDIRECT_CALLS | WA_05000074},
  {"bf548", BFIN_CPU_BF548, 0x0001,
   WA_SPECULATIVE_LOADS | WA_RETS | WA_INDIRECT_CALLS | WA_05000074},
  {"bf548", BFIN_CPU_BF548, 0x0000,
   WA_SPECULATIVE_LOADS | WA_RETS | WA_INDIRECT_CALLS | WA_LOAD_LCREGS
   | WA_05000074},

  {"bf549m", BFIN_CPU_BF549M, 0x0003,
   WA_SPECULATIVE_LOADS | WA_INDIRECT_CALLS | WA_05000074},

  {"bf549", BFIN_CPU_BF549, 0x0002,
   WA_SPECULATIVE_LOADS | WA_INDIRECT_CALLS | WA_05000074},
  {"bf549", BFIN_CPU_BF549, 0x0001,
   WA_SPECULATIVE_LOADS | WA_RETS | WA_INDIRECT_CALLS | WA_05000074},
  {"bf549", BFIN_CPU_BF549, 0x0000,
   WA_SPECULATIVE_LOADS | WA_RETS | WA_INDIRECT_CALLS | WA_LOAD_LCREGS
   | WA_05000074},

  {"bf561", BFIN_CPU_BF561, 0x0005, WA_RETS
   | WA_05000283 | WA_05000315 | WA_LOAD_LCREGS | WA_05000074},
  {"bf561", BFIN_CPU_BF561, 0x0003,
   WA_SPECULATIVE_LOADS | WA_SPECULATIVE_SYNCS | WA_RETS
   | WA_05000283 | WA_05000257 | WA_05000315 | WA_LOAD_LCREGS
   | WA_05000074},
  {"bf561", BFIN_CPU_BF561, 0x0002,
   WA_SPECULATIVE_LOADS | WA_SPECULATIVE_SYNCS | WA_RETS
   | WA_05000283 | WA_05000257 | WA_05000315 | WA_LOAD_LCREGS
   | WA_05000074},

  {NULL, 0, 0, 0}
};

int splitting_for_sched, splitting_loops;

static void
bfin_globalize_label (FILE *stream, const char *name)
{
  fputs (".global ", stream);
  assemble_name (stream, name);
  fputc (';', stream);
  fputc ('\n', stream);
}

static void
output_file_start (void)
{
  FILE *file = asm_out_file;
  int i;

  /* Variable tracking should be run after all optimizations which change order
     of insns.  It also needs a valid CFG.  This can't be done in
     override_options, because flag_var_tracking is finalized after
     that.  */
  bfin_flag_var_tracking = flag_var_tracking;
  flag_var_tracking = 0;

  fprintf (file, ".file \"%s\";\n", input_filename);

  for (i = 0; arg_regs[i] >= 0; i++)
    ;
  max_arg_registers = i;        /* how many arg regs are used  */
}

/* Called early in the compilation to conditionally modify
   fixed_regs/call_used_regs.  */

void
conditional_register_usage (void)
{
  /* initialize condition code flag register rtx */
  bfin_cc_rtx = gen_rtx_REG (BImode, REG_CC);
  bfin_rets_rtx = gen_rtx_REG (Pmode, REG_RETS);
}

/* Examine machine-dependent attributes of function type FUNTYPE and return its
   type.  See the definition of E_FUNKIND.  */

static e_funkind
funkind (const_tree funtype)
{
  tree attrs = TYPE_ATTRIBUTES (funtype);
  if (lookup_attribute ("interrupt_handler", attrs))
    return INTERRUPT_HANDLER;
  else if (lookup_attribute ("exception_handler", attrs))
    return EXCPT_HANDLER;
  else if (lookup_attribute ("nmi_handler", attrs))
    return NMI_HANDLER;
  else
    return SUBROUTINE;
}

/* Legitimize PIC addresses.  If the address is already position-independent,
   we return ORIG.  Newly generated position-independent addresses go into a
   reg.  This is REG if nonzero, otherwise we allocate register(s) as
   necessary.  PICREG is the register holding the pointer to the PIC offset
   table.  */

static rtx
legitimize_pic_address (rtx orig, rtx reg, rtx picreg)
{
  rtx addr = orig;
  rtx new_rtx = orig;

  if (GET_CODE (addr) == SYMBOL_REF || GET_CODE (addr) == LABEL_REF)
    {
      int unspec;
      rtx tmp;

      if (TARGET_ID_SHARED_LIBRARY)
        unspec = UNSPEC_MOVE_PIC;
      else if (GET_CODE (addr) == SYMBOL_REF
               && SYMBOL_REF_FUNCTION_P (addr))
        unspec = UNSPEC_FUNCDESC_GOT17M4;
      else
        unspec = UNSPEC_MOVE_FDPIC;

      if (reg == 0)
        {
          gcc_assert (can_create_pseudo_p ());
          reg = gen_reg_rtx (Pmode);
        }

      tmp = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), unspec);
      new_rtx = gen_const_mem (Pmode, gen_rtx_PLUS (Pmode, picreg, tmp));

      emit_move_insn (reg, new_rtx);
      if (picreg == pic_offset_table_rtx)
        crtl->uses_pic_offset_table = 1;
      return reg;
    }

  else if (GET_CODE (addr) == CONST || GET_CODE (addr) == PLUS)
    {
      rtx base;

      if (GET_CODE (addr) == CONST)
        {
          addr = XEXP (addr, 0);
          gcc_assert (GET_CODE (addr) == PLUS);
        }

      if (XEXP (addr, 0) == picreg)
        return orig;

      if (reg == 0)
        {
          gcc_assert (can_create_pseudo_p ());
          reg = gen_reg_rtx (Pmode);
        }

      base = legitimize_pic_address (XEXP (addr, 0), reg, picreg);
      addr = legitimize_pic_address (XEXP (addr, 1),
                                     base == reg ? NULL_RTX : reg,
                                     picreg);

      if (GET_CODE (addr) == CONST_INT)
        {
          gcc_assert (! reload_in_progress && ! reload_completed);
          addr = force_reg (Pmode, addr);
        }

      if (GET_CODE (addr) == PLUS && CONSTANT_P (XEXP (addr, 1)))
        {
          base = gen_rtx_PLUS (Pmode, base, XEXP (addr, 0));
          addr = XEXP (addr, 1);
        }

      return gen_rtx_PLUS (Pmode, base, addr);
    }

  return new_rtx;
}

/* Stack frame layout. */

/* For a given REGNO, determine whether it must be saved in the function
   prologue.  IS_INTHANDLER specifies whether we're generating a normal
   prologue or an interrupt/exception one.  */
static bool
must_save_p (bool is_inthandler, unsigned regno)
{
  if (D_REGNO_P (regno))
    {
      bool is_eh_return_reg = false;
      if (crtl->calls_eh_return)
        {
          unsigned j;
          for (j = 0; ; j++)
            {
              unsigned test = EH_RETURN_DATA_REGNO (j);
              if (test == INVALID_REGNUM)
                break;
              if (test == regno)
                is_eh_return_reg = true;
            }
        }

      return (is_eh_return_reg
              || (df_regs_ever_live_p (regno)
                  && !fixed_regs[regno]
                  && (is_inthandler || !call_used_regs[regno])));
    }
  else if (P_REGNO_P (regno))
    {
      return ((df_regs_ever_live_p (regno)
               && !fixed_regs[regno]
               && (is_inthandler || !call_used_regs[regno]))
              || (is_inthandler
                  && (ENABLE_WA_05000283 || ENABLE_WA_05000315)
                  && regno == REG_P5)
              || (!TARGET_FDPIC
                  && regno == PIC_OFFSET_TABLE_REGNUM
                  && (crtl->uses_pic_offset_table
                      || (TARGET_ID_SHARED_LIBRARY && !current_function_is_leaf))));
    }
  else
    return ((is_inthandler || !call_used_regs[regno])
            && (df_regs_ever_live_p (regno)
                || (!leaf_function_p () && call_used_regs[regno])));

}

/* Compute the number of DREGS to save with a push_multiple operation.
   This could include registers that aren't modified in the function,
   since push_multiple only takes a range of registers.
   If IS_INTHANDLER, then everything that is live must be saved, even
   if normally call-clobbered.
   If CONSECUTIVE, return the number of registers we can save in one
   instruction with a push/pop multiple instruction.  */

static int
n_dregs_to_save (bool is_inthandler, bool consecutive)
{
  int count = 0;
  unsigned i;
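
  /* Scan downward from R7: push_multiple ranges always end at R7, so when
     CONSECUTIVE is true the count stops at the first register that does
     not need saving.  */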
  for (i = REG_R7 + 1; i-- != REG_R0;)
    {
      if (must_save_p (is_inthandler, i))
        count++;
      else if (consecutive)
        return count;
    }
  return count;
}

/* Like n_dregs_to_save, but compute number of PREGS to save.  */

static int
n_pregs_to_save (bool is_inthandler, bool consecutive)
{
  int count = 0;
  unsigned i;

  for (i = REG_P5 + 1; i-- != REG_P0;)
    if (must_save_p (is_inthandler, i))
      count++;
    else if (consecutive)
      return count;
  return count;
}

/* Determine if we are going to save the frame pointer in the prologue.  */

static bool
must_save_fp_p (void)
{
  return df_regs_ever_live_p (REG_FP);
}

/* Determine if we are going to save the RETS register.  */
static bool
must_save_rets_p (void)
{
  return df_regs_ever_live_p (REG_RETS);
}

static bool
stack_frame_needed_p (void)
{
  /* EH return puts a new return address into the frame using an
     address relative to the frame pointer.  */
  if (crtl->calls_eh_return)
    return true;
  return frame_pointer_needed;
}

/* Emit code to save registers in the prologue.  SAVEALL is nonzero if we
   must save all registers; this is used for interrupt handlers.
   SPREG contains (reg:SI REG_SP).  IS_INTHANDLER is true if we're doing
   this for an interrupt (or exception) handler.  */

static void
expand_prologue_reg_save (rtx spreg, int saveall, bool is_inthandler)
{
  rtx predec1 = gen_rtx_PRE_DEC (SImode, spreg);
  rtx predec = gen_rtx_MEM (SImode, predec1);
  int ndregs = saveall ? 8 : n_dregs_to_save (is_inthandler, false);
  int npregs = saveall ? 6 : n_pregs_to_save (is_inthandler, false);
  int ndregs_consec = saveall ? 8 : n_dregs_to_save (is_inthandler, true);
  int npregs_consec = saveall ? 6 : n_pregs_to_save (is_inthandler, true);
  int dregno, pregno;
  int total_consec = ndregs_consec + npregs_consec;
  int i, d_to_save;

  if (saveall || is_inthandler)
    {
      rtx insn = emit_move_insn (predec, gen_rtx_REG (SImode, REG_ASTAT));

      RTX_FRAME_RELATED_P (insn) = 1;
      for (dregno = REG_LT0; dregno <= REG_LB1; dregno++)
        if (! current_function_is_leaf
            || cfun->machine->has_hardware_loops
            || cfun->machine->has_loopreg_clobber
            || (ENABLE_WA_05000257
                && (dregno == REG_LC0 || dregno == REG_LC1)))
          {
            insn = emit_move_insn (predec, gen_rtx_REG (SImode, dregno));
            RTX_FRAME_RELATED_P (insn) = 1;
          }
    }
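
  /* Save the contiguous block of registers with a single push-multiple
     insn.  Element 0 of the PARALLEL is an UNSPEC_PUSH_MULTIPLE marker
     carrying the SP adjustment, the middle elements store the individual
     registers, and the final element performs the SP decrement itself.  */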
  if (total_consec != 0)
    {
      rtx insn;
      rtx val = GEN_INT (-total_consec * 4);
      rtx pat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (total_consec + 2));

      XVECEXP (pat, 0, 0) = gen_rtx_UNSPEC (VOIDmode, gen_rtvec (1, val),
                                            UNSPEC_PUSH_MULTIPLE);
      XVECEXP (pat, 0, total_consec + 1) = gen_rtx_SET (VOIDmode, spreg,
                                                        gen_rtx_PLUS (Pmode,
                                                                      spreg,
                                                                      val));
      RTX_FRAME_RELATED_P (XVECEXP (pat, 0, total_consec + 1)) = 1;
      d_to_save = ndregs_consec;
      dregno = REG_R7 + 1 - ndregs_consec;
      pregno = REG_P5 + 1 - npregs_consec;
      for (i = 0; i < total_consec; i++)
        {
          rtx memref = gen_rtx_MEM (word_mode,
                                    gen_rtx_PLUS (Pmode, spreg,
                                                  GEN_INT (- i * 4 - 4)));
          rtx subpat;
          if (d_to_save > 0)
            {
              subpat = gen_rtx_SET (VOIDmode, memref, gen_rtx_REG (word_mode,
                                                                   dregno++));
              d_to_save--;
            }
          else
            {
              subpat = gen_rtx_SET (VOIDmode, memref, gen_rtx_REG (word_mode,
                                                                   pregno++));
            }
          XVECEXP (pat, 0, i + 1) = subpat;
          RTX_FRAME_RELATED_P (subpat) = 1;
        }
      insn = emit_insn (pat);
      RTX_FRAME_RELATED_P (insn) = 1;
    }

  for (dregno = REG_R0; ndregs != ndregs_consec; dregno++)
    {
      if (must_save_p (is_inthandler, dregno))
        {
          rtx insn = emit_move_insn (predec, gen_rtx_REG (word_mode, dregno));
          RTX_FRAME_RELATED_P (insn) = 1;
          ndregs--;
        }
    }
  for (pregno = REG_P0; npregs != npregs_consec; pregno++)
    {
      if (must_save_p (is_inthandler, pregno))
        {
          rtx insn = emit_move_insn (predec, gen_rtx_REG (word_mode, pregno));
          RTX_FRAME_RELATED_P (insn) = 1;
          npregs--;
        }
    }
  for (i = REG_P7 + 1; i < REG_CC; i++)
    if (saveall
        || (is_inthandler
            && (df_regs_ever_live_p (i)
                || (!leaf_function_p () && call_used_regs[i]))))
      {
        rtx insn;
        if (i == REG_A0 || i == REG_A1)
          insn = emit_move_insn (gen_rtx_MEM (PDImode, predec1),
                                 gen_rtx_REG (PDImode, i));
        else
          insn = emit_move_insn (predec, gen_rtx_REG (SImode, i));
        RTX_FRAME_RELATED_P (insn) = 1;
      }
}

/* Emit code to restore registers in the epilogue.  SAVEALL is nonzero if we
   must save all registers; this is used for interrupt handlers.
   SPREG contains (reg:SI REG_SP).  IS_INTHANDLER is true if we're doing
   this for an interrupt (or exception) handler.  */

static void
expand_epilogue_reg_restore (rtx spreg, bool saveall, bool is_inthandler)
{
  rtx postinc1 = gen_rtx_POST_INC (SImode, spreg);
  rtx postinc = gen_rtx_MEM (SImode, postinc1);

  int ndregs = saveall ? 8 : n_dregs_to_save (is_inthandler, false);
  int npregs = saveall ? 6 : n_pregs_to_save (is_inthandler, false);
  int ndregs_consec = saveall ? 8 : n_dregs_to_save (is_inthandler, true);
  int npregs_consec = saveall ? 6 : n_pregs_to_save (is_inthandler, true);
  int total_consec = ndregs_consec + npregs_consec;
  int i, regno;
  rtx insn;

  /* A slightly crude technique to stop flow from trying to delete "dead"
     insns.  */
  MEM_VOLATILE_P (postinc) = 1;

  for (i = REG_CC - 1; i > REG_P7; i--)
    if (saveall
        || (is_inthandler
            && (df_regs_ever_live_p (i)
                || (!leaf_function_p () && call_used_regs[i]))))
      {
        if (i == REG_A0 || i == REG_A1)
          {
            rtx mem = gen_rtx_MEM (PDImode, postinc1);
            MEM_VOLATILE_P (mem) = 1;
            emit_move_insn (gen_rtx_REG (PDImode, i), mem);
          }
        else
          emit_move_insn (gen_rtx_REG (SImode, i), postinc);
      }

  regno = REG_P5 - npregs_consec;
  for (; npregs != npregs_consec; regno--)
    {
      if (must_save_p (is_inthandler, regno))
        {
          emit_move_insn (gen_rtx_REG (word_mode, regno), postinc);
          npregs--;
        }
    }
  regno = REG_R7 - ndregs_consec;
  for (; ndregs != ndregs_consec; regno--)
    {
      if (must_save_p (is_inthandler, regno))
        {
          emit_move_insn (gen_rtx_REG (word_mode, regno), postinc);
          ndregs--;
        }
    }

  if (total_consec != 0)
    {
      rtx pat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (total_consec + 1));
      XVECEXP (pat, 0, 0)
        = gen_rtx_SET (VOIDmode, spreg,
                       gen_rtx_PLUS (Pmode, spreg,
                                     GEN_INT (total_consec * 4)));

      if (npregs_consec > 0)
        regno = REG_P5 + 1;
      else
        regno = REG_R7 + 1;

      for (i = 0; i < total_consec; i++)
        {
          rtx addr = (i > 0
                      ? gen_rtx_PLUS (Pmode, spreg, GEN_INT (i * 4))
                      : spreg);
          rtx memref = gen_rtx_MEM (word_mode, addr);

          regno--;
          XVECEXP (pat, 0, i + 1)
            = gen_rtx_SET (VOIDmode, gen_rtx_REG (word_mode, regno), memref);

          if (npregs_consec > 0)
            {
              if (--npregs_consec == 0)
                regno = REG_R7 + 1;
            }
        }

      insn = emit_insn (pat);
      RTX_FRAME_RELATED_P (insn) = 1;
    }
  if (saveall || is_inthandler)
    {
      for (regno = REG_LB1; regno >= REG_LT0; regno--)
        if (! current_function_is_leaf
            || cfun->machine->has_hardware_loops
            || cfun->machine->has_loopreg_clobber
            || (ENABLE_WA_05000257 && (regno == REG_LC0 || regno == REG_LC1)))
          emit_move_insn (gen_rtx_REG (SImode, regno), postinc);

      emit_move_insn (gen_rtx_REG (SImode, REG_ASTAT), postinc);
    }
}

/* Perform any actions needed for a function that is receiving a
   variable number of arguments.

   CUM is as above.

   MODE and TYPE are the mode and type of the current parameter.

   PRETEND_SIZE is a variable that should be set to the amount of stack
   that must be pushed by the prologue to pretend that our caller pushed
   it.

   Normally, this macro will push all remaining incoming registers on the
   stack and set PRETEND_SIZE to the length of the registers pushed.

   Blackfin specific:
   - The VDSP C compiler manual (our ABI) says that a variable args function
     should save the R0, R1 and R2 registers in the stack.
   - The caller will always leave space on the stack for the
     arguments that are passed in registers, so we don't have
     to leave any extra space.
   - Now, the va_start pointer can access all arguments from the stack.  */

static void
setup_incoming_varargs (CUMULATIVE_ARGS *cum,
                        enum machine_mode mode ATTRIBUTE_UNUSED,
                        tree type ATTRIBUTE_UNUSED, int *pretend_size,
                        int no_rtl)
{
  rtx mem;
  int i;

  if (no_rtl)
    return;

  /* The move for named arguments will be generated automatically by the
     compiler.  We need to generate the move rtx for the unnamed arguments
     if they are in the first 3 words.  We assume at least 1 named argument
     exists, so we never generate [ARGP] = R0 here.  */

  for (i = cum->words + 1; i < max_arg_registers; i++)
    {
      mem = gen_rtx_MEM (Pmode,
                         plus_constant (arg_pointer_rtx, (i * UNITS_PER_WORD)));
      emit_move_insn (mem, gen_rtx_REG (Pmode, i));
    }

  *pretend_size = 0;
}

/* Value should be nonzero if functions must have frame pointers.
   Zero means the frame pointer need not be set up (and parms may
   be accessed via the stack pointer) in functions that seem suitable.  */

static bool
bfin_frame_pointer_required (void)
{
  e_funkind fkind = funkind (TREE_TYPE (current_function_decl));

  if (fkind != SUBROUTINE)
    return true;

  /* We turn on -fomit-frame-pointer if -momit-leaf-frame-pointer is used,
     so we have to override it for non-leaf functions.  */
  if (TARGET_OMIT_LEAF_FRAME_POINTER && ! current_function_is_leaf)
    return true;

  return false;
}

/* Return the number of registers pushed during the prologue.  */

static int
n_regs_saved_by_prologue (void)
{
  e_funkind fkind = funkind (TREE_TYPE (current_function_decl));
  bool is_inthandler = fkind != SUBROUTINE;
  tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl));
  bool all = (lookup_attribute ("saveall", attrs) != NULL_TREE
              || (is_inthandler && !current_function_is_leaf));
  int ndregs = all ? 8 : n_dregs_to_save (is_inthandler, false);
  int npregs = all ? 6 : n_pregs_to_save (is_inthandler, false);
  int n = ndregs + npregs;
  int i;

  if (all || stack_frame_needed_p ())
    n += 2;
  else
    {
      if (must_save_fp_p ())
        n++;
      if (must_save_rets_p ())
        n++;
    }

  if (fkind != SUBROUTINE || all)
    {
      /* Increment once for ASTAT.  */
      n++;
      if (! current_function_is_leaf
          || cfun->machine->has_hardware_loops
          || cfun->machine->has_loopreg_clobber)
        {
          n += 6;
        }
    }

  if (fkind != SUBROUTINE)
    {
      /* RETE/X/N.  */
      if (lookup_attribute ("nesting", attrs))
        n++;
    }

  for (i = REG_P7 + 1; i < REG_CC; i++)
    if (all
        || (fkind != SUBROUTINE
            && (df_regs_ever_live_p (i)
                || (!leaf_function_p () && call_used_regs[i]))))
      n += i == REG_A0 || i == REG_A1 ? 2 : 1;

  return n;
}

/* Given FROM and TO register numbers, say whether this elimination is
   allowed.  Frame pointer elimination is automatically handled.

   All other eliminations are valid.  */

static bool
bfin_can_eliminate (const int from ATTRIBUTE_UNUSED, const int to)
{
  return (to == STACK_POINTER_REGNUM ? ! frame_pointer_needed : true);
}

/* Return the offset between two registers, one to be eliminated, and the other
   its replacement, at the start of a routine.  */

HOST_WIDE_INT
bfin_initial_elimination_offset (int from, int to)
{
  HOST_WIDE_INT offset = 0;

  if (from == ARG_POINTER_REGNUM)
    offset = n_regs_saved_by_prologue () * 4;

  if (to == STACK_POINTER_REGNUM)
    {
      if (crtl->outgoing_args_size >= FIXED_STACK_AREA)
        offset += crtl->outgoing_args_size;
      else if (crtl->outgoing_args_size)
        offset += FIXED_STACK_AREA;

      offset += get_frame_size ();
    }

  return offset;
}

/* Emit code to load a constant CONSTANT into register REG; setting
   RTX_FRAME_RELATED_P on all insns we generate if RELATED is true.
   Make sure that the insns we generate need not be split.  */

static void
frame_related_constant_load (rtx reg, HOST_WIDE_INT constant, bool related)
{
  rtx insn;
  rtx cst = GEN_INT (constant);
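
  /* Constants in [-32768, 65535] can be loaded with a single move insn;
     anything outside that range needs an explicit high/low pair.  */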
  if (constant >= -32768 && constant < 65536)
    insn = emit_move_insn (reg, cst);
  else
    {
      /* We don't call split_load_immediate here, since dwarf2out.c can get
         confused about some of the more clever sequences it can generate.  */
      insn = emit_insn (gen_movsi_high (reg, cst));
      if (related)
        RTX_FRAME_RELATED_P (insn) = 1;
      insn = emit_insn (gen_movsi_low (reg, reg, cst));
    }
  if (related)
    RTX_FRAME_RELATED_P (insn) = 1;
}

/* Generate efficient code to add a value to a P register.
   Set RTX_FRAME_RELATED_P on the generated insns if FRAME is nonzero.
   EPILOGUE_P is zero if this function is called for prologue,
   otherwise it's nonzero.  And it's less than zero if this is for
   sibcall epilogue.  */

static void
add_to_reg (rtx reg, HOST_WIDE_INT value, int frame, int epilogue_p)
{
  if (value == 0)
    return;

  /* Choose whether to use a sequence using a temporary register, or
     a sequence with multiple adds.  We can add a signed 7-bit value
     in one instruction.  */
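  /* A value within +-120 takes at most two add-immediate insns (the loop
     below caps each step at 60 so the stack stays 4-byte aligned); only
     larger values are worth a constant load into a temporary.  */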
  if (value > 120 || value < -120)
    {
      rtx tmpreg;
      rtx tmpreg2;
      rtx insn;

      tmpreg2 = NULL_RTX;

      /* For prologue or normal epilogue, P1 can be safely used
         as the temporary register.  For sibcall epilogue, we try to find
         a call used P register, which will be restored in epilogue.
         If we cannot find such a P register, we have to use one I register
         to help us.  */

      if (epilogue_p >= 0)
        tmpreg = gen_rtx_REG (SImode, REG_P1);
      else
        {
          int i;
          for (i = REG_P0; i <= REG_P5; i++)
            if ((df_regs_ever_live_p (i) && ! call_used_regs[i])
                || (!TARGET_FDPIC
                    && i == PIC_OFFSET_TABLE_REGNUM
                    && (crtl->uses_pic_offset_table
                        || (TARGET_ID_SHARED_LIBRARY
                            && ! current_function_is_leaf))))
              break;
          if (i <= REG_P5)
            tmpreg = gen_rtx_REG (SImode, i);
          else
            {
              tmpreg = gen_rtx_REG (SImode, REG_P1);
              tmpreg2 = gen_rtx_REG (SImode, REG_I0);
              emit_move_insn (tmpreg2, tmpreg);
            }
        }

      if (frame)
        frame_related_constant_load (tmpreg, value, TRUE);
      else
        insn = emit_move_insn (tmpreg, GEN_INT (value));

      insn = emit_insn (gen_addsi3 (reg, reg, tmpreg));
      if (frame)
        RTX_FRAME_RELATED_P (insn) = 1;

      if (tmpreg2 != NULL_RTX)
        emit_move_insn (tmpreg, tmpreg2);
    }
  else
    do
      {
        int size = value;
        rtx insn;

        if (size > 60)
          size = 60;
        else if (size < -60)
          /* We could use -62, but that would leave the stack unaligned, so
             it's no good.  */
          size = -60;

        insn = emit_insn (gen_addsi3 (reg, reg, GEN_INT (size)));
        if (frame)
          RTX_FRAME_RELATED_P (insn) = 1;
        value -= size;
      }
    while (value != 0);
}

/* Generate a LINK insn for a frame sized FRAME_SIZE.  If this constant
   is too large, generate a sequence of insns that has the same effect.
   SPREG contains (reg:SI REG_SP).  */

static void
emit_link_insn (rtx spreg, HOST_WIDE_INT frame_size)
{
  HOST_WIDE_INT link_size = frame_size;
  rtx insn;
  int i;
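
  /* 262140 (0x3FFFC) appears to be the largest frame size a single LINK
     insn can encode; any excess is subtracted from SP separately below.  */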
  if (link_size > 262140)
    link_size = 262140;

  /* Use a LINK insn with as big a constant as possible, then subtract
     any remaining size from the SP.  */
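  /* The extra 8 bytes account for the RETS and FP slots that LINK itself
     pushes before allocating the frame.  */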
  insn = emit_insn (gen_link (GEN_INT (-8 - link_size)));
  RTX_FRAME_RELATED_P (insn) = 1;

  for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
    {
      rtx set = XVECEXP (PATTERN (insn), 0, i);
      gcc_assert (GET_CODE (set) == SET);
      RTX_FRAME_RELATED_P (set) = 1;
    }

  frame_size -= link_size;

  if (frame_size > 0)
    {
      /* Must use a call-clobbered PREG that isn't the static chain.  */
      rtx tmpreg = gen_rtx_REG (Pmode, REG_P1);

      frame_related_constant_load (tmpreg, -frame_size, TRUE);
      insn = emit_insn (gen_addsi3 (spreg, spreg, tmpreg));
      RTX_FRAME_RELATED_P (insn) = 1;
    }
}

/* Return the number of bytes we must reserve for outgoing arguments
   in the current function's stack frame.  */

static HOST_WIDE_INT
arg_area_size (void)
{
  if (crtl->outgoing_args_size)
    {
      if (crtl->outgoing_args_size >= FIXED_STACK_AREA)
        return crtl->outgoing_args_size;
      else
        return FIXED_STACK_AREA;
    }
  return 0;
}

/* Save RETS and FP, and allocate a stack frame.  ALL is true if the
   function must save all its registers (true only for certain interrupt
   handlers).  */

static void
do_link (rtx spreg, HOST_WIDE_INT frame_size, bool all)
{
  frame_size += arg_area_size ();

  if (all
      || stack_frame_needed_p ()
      || (must_save_rets_p () && must_save_fp_p ()))
    emit_link_insn (spreg, frame_size);
  else
    {
      if (must_save_rets_p ())
        {
          rtx pat = gen_movsi (gen_rtx_MEM (Pmode,
                                            gen_rtx_PRE_DEC (Pmode, spreg)),
                               bfin_rets_rtx);
          rtx insn = emit_insn (pat);
          RTX_FRAME_RELATED_P (insn) = 1;
        }
      if (must_save_fp_p ())
        {
          rtx pat = gen_movsi (gen_rtx_MEM (Pmode,
                                            gen_rtx_PRE_DEC (Pmode, spreg)),
                               gen_rtx_REG (Pmode, REG_FP));
          rtx insn = emit_insn (pat);
          RTX_FRAME_RELATED_P (insn) = 1;
        }
      add_to_reg (spreg, -frame_size, 1, 0);
    }
}

/* Like do_link, but used for epilogues to deallocate the stack frame.
   EPILOGUE_P is zero if this function is called for prologue,
   otherwise it's nonzero.  And it's less than zero if this is for
   sibcall epilogue.  */

static void
do_unlink (rtx spreg, HOST_WIDE_INT frame_size, bool all, int epilogue_p)
{
  frame_size += arg_area_size ();

  if (stack_frame_needed_p ())
    emit_insn (gen_unlink ());
  else
    {
      rtx postinc = gen_rtx_MEM (Pmode, gen_rtx_POST_INC (Pmode, spreg));

      add_to_reg (spreg, frame_size, 0, epilogue_p);
      if (all || must_save_fp_p ())
        {
          rtx fpreg = gen_rtx_REG (Pmode, REG_FP);
          emit_move_insn (fpreg, postinc);
          emit_use (fpreg);
        }
      if (all || must_save_rets_p ())
        {
          emit_move_insn (bfin_rets_rtx, postinc);
          emit_use (bfin_rets_rtx);
        }
    }
}

/* Generate a prologue suitable for a function of kind FKIND.  This is
   called for interrupt and exception handler prologues.
   SPREG contains (reg:SI REG_SP).  */

static void
expand_interrupt_handler_prologue (rtx spreg, e_funkind fkind, bool all)
{
  HOST_WIDE_INT frame_size = get_frame_size ();
  rtx predec1 = gen_rtx_PRE_DEC (SImode, spreg);
  rtx predec = gen_rtx_MEM (SImode, predec1);
  rtx insn;
  tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl));
  tree kspisusp = lookup_attribute ("kspisusp", attrs);

  if (kspisusp)
    {
      insn = emit_move_insn (spreg, gen_rtx_REG (Pmode, REG_USP));
      RTX_FRAME_RELATED_P (insn) = 1;
    }

  /* We need space on the stack in case we need to save the argument
     registers.  */
  if (fkind == EXCPT_HANDLER)
    {
      insn = emit_insn (gen_addsi3 (spreg, spreg, GEN_INT (-12)));
      RTX_FRAME_RELATED_P (insn) = 1;
    }

  /* If we're calling other functions, they won't save their call-clobbered
     registers, so we must save everything here.  */
  if (!current_function_is_leaf)
    all = true;
  expand_prologue_reg_save (spreg, all, true);
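
  /* Workaround for anomalies 05000283 and 05000315: set CC, load the
     CHIPID MMR address (0xFFC00014) into P5, and issue a dummy load
     conditional on CC.  */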
  if (ENABLE_WA_05000283 || ENABLE_WA_05000315)
    {
      rtx chipid = GEN_INT (trunc_int_for_mode (0xFFC00014, SImode));
      rtx p5reg = gen_rtx_REG (Pmode, REG_P5);
      emit_insn (gen_movbi (bfin_cc_rtx, const1_rtx));
      emit_insn (gen_movsi_high (p5reg, chipid));
      emit_insn (gen_movsi_low (p5reg, p5reg, chipid));
      emit_insn (gen_dummy_load (p5reg, bfin_cc_rtx));
    }

  if (lookup_attribute ("nesting", attrs))
    {
      rtx srcreg = gen_rtx_REG (Pmode, ret_regs[fkind]);
      insn = emit_move_insn (predec, srcreg);
      RTX_FRAME_RELATED_P (insn) = 1;
    }

  do_link (spreg, frame_size, all);

  if (fkind == EXCPT_HANDLER)
    {
      rtx r0reg = gen_rtx_REG (SImode, REG_R0);
      rtx r1reg = gen_rtx_REG (SImode, REG_R1);
      rtx r2reg = gen_rtx_REG (SImode, REG_R2);
      rtx insn;

      insn = emit_move_insn (r0reg, gen_rtx_REG (SImode, REG_SEQSTAT));
      insn = emit_insn (gen_ashrsi3 (r0reg, r0reg, GEN_INT (26)));
      insn = emit_insn (gen_ashlsi3 (r0reg, r0reg, GEN_INT (26)));
      insn = emit_move_insn (r1reg, spreg);
      insn = emit_move_insn (r2reg, gen_rtx_REG (Pmode, REG_FP));
      insn = emit_insn (gen_addsi3 (r2reg, r2reg, GEN_INT (8)));
    }
}

/* Generate an epilogue suitable for a function of kind FKIND.  This is
   called for interrupt and exception handler epilogues.
   SPREG contains (reg:SI REG_SP).  */

static void
expand_interrupt_handler_epilogue (rtx spreg, e_funkind fkind, bool all)
{
  tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl));
  rtx postinc1 = gen_rtx_POST_INC (SImode, spreg);
  rtx postinc = gen_rtx_MEM (SImode, postinc1);

  /* A slightly crude technique to stop flow from trying to delete "dead"
     insns.  */
  MEM_VOLATILE_P (postinc) = 1;

  do_unlink (spreg, get_frame_size (), all, 1);

  if (lookup_attribute ("nesting", attrs))
    {
      rtx srcreg = gen_rtx_REG (Pmode, ret_regs[fkind]);
      emit_move_insn (srcreg, postinc);
    }

  /* If we're calling other functions, they won't save their call-clobbered
     registers, so we must save (and restore) everything here.  */
  if (!current_function_is_leaf)
    all = true;

  expand_epilogue_reg_restore (spreg, all, true);

  /* Deallocate any space we left on the stack in case we needed to save the
     argument registers.  */
  if (fkind == EXCPT_HANDLER)
    emit_insn (gen_addsi3 (spreg, spreg, GEN_INT (12)));

  emit_jump_insn (gen_return_internal (gen_rtx_REG (Pmode, ret_regs[fkind])));
}

/* Used while emitting the prologue to generate code to load the correct value
   into the PIC register, which is passed in DEST.  */

static rtx
bfin_load_pic_reg (rtx dest)
{
  struct cgraph_local_info *i = NULL;
  rtx addr, insn;

  i = cgraph_local_info (current_function_decl);

  /* Functions local to the translation unit don't need to reload the
     pic reg, since the caller always passes a usable one.  */
  if (i && i->local)
    return pic_offset_table_rtx;
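
  /* With a known -mshared-library-id, the current library's GOT pointer
     sits at a fixed offset (-4 - 4 * ID) from the incoming PIC register;
     otherwise it is located via the UNSPEC_LIBRARY_OFFSET relocation.  */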
  if (bfin_lib_id_given)
    addr = plus_constant (pic_offset_table_rtx, -4 - bfin_library_id * 4);
  else
    addr = gen_rtx_PLUS (Pmode, pic_offset_table_rtx,
                         gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx),
                                         UNSPEC_LIBRARY_OFFSET));
  insn = emit_insn (gen_movsi (dest, gen_rtx_MEM (Pmode, addr)));
  return dest;
}

/* Generate RTL for the prologue of the current function.  */

void
bfin_expand_prologue (void)
{
  HOST_WIDE_INT frame_size = get_frame_size ();
  rtx spreg = gen_rtx_REG (Pmode, REG_SP);
  e_funkind fkind = funkind (TREE_TYPE (current_function_decl));
  rtx pic_reg_loaded = NULL_RTX;
  tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl));
  bool all = lookup_attribute ("saveall", attrs) != NULL_TREE;

  if (fkind != SUBROUTINE)
    {
      expand_interrupt_handler_prologue (spreg, fkind, all);
      return;
    }
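
  /* Optional stack-limit check: the limit is computed into P2 (for
     -mstack-check-l1 it is presumably read from a fixed word in L1
     scratchpad RAM at 0xFFB00000), SP is compared against it, and a
     trap is raised if SP falls below the limit.  */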
  if (crtl->limit_stack
      || (TARGET_STACK_CHECK_L1
          && !DECL_NO_LIMIT_STACK (current_function_decl)))
    {
      HOST_WIDE_INT offset
        = bfin_initial_elimination_offset (ARG_POINTER_REGNUM,
                                           STACK_POINTER_REGNUM);
      rtx lim = crtl->limit_stack ? stack_limit_rtx : NULL_RTX;
      rtx p2reg = gen_rtx_REG (Pmode, REG_P2);

      if (!lim)
        {
          emit_move_insn (p2reg, gen_int_mode (0xFFB00000, SImode));
          emit_move_insn (p2reg, gen_rtx_MEM (Pmode, p2reg));
          lim = p2reg;
        }
      if (GET_CODE (lim) == SYMBOL_REF)
        {
          if (TARGET_ID_SHARED_LIBRARY)
            {
              rtx p1reg = gen_rtx_REG (Pmode, REG_P1);
              rtx val;
              pic_reg_loaded = bfin_load_pic_reg (p2reg);
              val = legitimize_pic_address (stack_limit_rtx, p1reg,
                                            pic_reg_loaded);
              emit_move_insn (p1reg, val);
              frame_related_constant_load (p2reg, offset, FALSE);
              emit_insn (gen_addsi3 (p2reg, p2reg, p1reg));
              lim = p2reg;
            }
          else
            {
              rtx limit = plus_constant (lim, offset);
              emit_move_insn (p2reg, limit);
              lim = p2reg;
            }
        }
      else
        {
          if (lim != p2reg)
            emit_move_insn (p2reg, lim);
          add_to_reg (p2reg, offset, 0, 0);
          lim = p2reg;
        }
      emit_insn (gen_compare_lt (bfin_cc_rtx, spreg, lim));
      emit_insn (gen_trapifcc ());
    }
  expand_prologue_reg_save (spreg, all, false);

  do_link (spreg, frame_size, all);

  if (TARGET_ID_SHARED_LIBRARY
      && !TARGET_SEP_DATA
      && (crtl->uses_pic_offset_table
          || !current_function_is_leaf))
    bfin_load_pic_reg (pic_offset_table_rtx);
}

/* Generate RTL for the epilogue of the current function.  NEED_RETURN is zero
   if this is for a sibcall.  EH_RETURN is nonzero if we're expanding an
   eh_return pattern.  SIBCALL_P is true if this is a sibcall epilogue,
   false otherwise.  */

void
bfin_expand_epilogue (int need_return, int eh_return, bool sibcall_p)
{
  rtx spreg = gen_rtx_REG (Pmode, REG_SP);
  e_funkind fkind = funkind (TREE_TYPE (current_function_decl));
  int e = sibcall_p ? -1 : 1;
  tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl));
  bool all = lookup_attribute ("saveall", attrs) != NULL_TREE;

  if (fkind != SUBROUTINE)
    {
      expand_interrupt_handler_epilogue (spreg, fkind, all);
      return;
    }

  do_unlink (spreg, get_frame_size (), all, e);

  expand_epilogue_reg_restore (spreg, all, false);

  /* Omit the return insn if this is for a sibcall.  */
  if (! need_return)
    return;

  if (eh_return)
    emit_insn (gen_addsi3 (spreg, spreg, gen_rtx_REG (Pmode, REG_P2)));

  emit_jump_insn (gen_return_internal (gen_rtx_REG (Pmode, REG_RETS)));
}

/* Return nonzero if register OLD_REG can be renamed to register NEW_REG.  */

int
bfin_hard_regno_rename_ok (unsigned int old_reg ATTRIBUTE_UNUSED,
                           unsigned int new_reg)
{
  /* Interrupt functions can only use registers that have already been
     saved by the prologue, even if they would normally be
     call-clobbered.  */

  if (funkind (TREE_TYPE (current_function_decl)) != SUBROUTINE
      && !df_regs_ever_live_p (new_reg))
    return 0;

  return 1;
}

/* Return the value of the return address for the frame COUNT steps up
   from the current frame, after the prologue.
   We punt for everything but the current frame by returning const0_rtx.  */

rtx
bfin_return_addr_rtx (int count)
{
  if (count != 0)
    return const0_rtx;

  return get_hard_reg_initial_val (Pmode, REG_RETS);
}

static rtx
bfin_delegitimize_address (rtx orig_x)
{
  rtx x = orig_x;

  if (GET_CODE (x) != MEM)
    return orig_x;

  x = XEXP (x, 0);
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 1)) == UNSPEC
      && XINT (XEXP (x, 1), 1) == UNSPEC_MOVE_PIC
      && GET_CODE (XEXP (x, 0)) == REG
      && REGNO (XEXP (x, 0)) == PIC_OFFSET_TABLE_REGNUM)
    return XVECEXP (XEXP (x, 1), 0, 0);

  return orig_x;
}

/* This predicate is used to compute the length of a load/store insn.
   OP is a MEM rtx, we return nonzero if its addressing mode requires a
   32-bit instruction.  */

int
effective_address_32bit_p (rtx op, enum machine_mode mode)
{
  HOST_WIDE_INT offset;

  mode = GET_MODE (op);
  op = XEXP (op, 0);

  if (GET_CODE (op) != PLUS)
    {
      gcc_assert (REG_P (op) || GET_CODE (op) == POST_INC
                  || GET_CODE (op) == PRE_DEC || GET_CODE (op) == POST_DEC);
      return 0;
    }

  if (GET_CODE (XEXP (op, 1)) == UNSPEC)
    return 1;

  offset = INTVAL (XEXP (op, 1));

  /* All byte loads use a 16-bit offset.  */
  if (GET_MODE_SIZE (mode) == 1)
    return 1;

  if (GET_MODE_SIZE (mode) == 4)
    {
      /* Frame pointer relative loads can use a negative offset, all others
         are restricted to a small positive one.  */
      if (XEXP (op, 0) == frame_pointer_rtx)
        return offset < -128 || offset > 60;
      return offset < 0 || offset > 60;
    }

  /* Must be HImode now.  */
  return offset < 0 || offset > 30;
}

/* Returns true if X is a memory reference using an I register.  */
bool
bfin_dsp_memref_p (rtx x)
{
  if (! MEM_P (x))
    return false;
  x = XEXP (x, 0);
  if (GET_CODE (x) == POST_INC || GET_CODE (x) == PRE_INC
      || GET_CODE (x) == POST_DEC || GET_CODE (x) == PRE_DEC)
    x = XEXP (x, 0);
  return IREG_P (x);
}

/* Return cost of the memory address ADDR.
   All addressing modes are equally cheap on the Blackfin.  */

static int
bfin_address_cost (rtx addr ATTRIBUTE_UNUSED, bool speed ATTRIBUTE_UNUSED)
{
  return 1;
}

/* Subroutine of print_operand; used to print a memory reference X to FILE.  */

void
print_address_operand (FILE *file, rtx x)
{
  switch (GET_CODE (x))
    {
    case PLUS:
      output_address (XEXP (x, 0));
      fprintf (file, "+");
      output_address (XEXP (x, 1));
      break;

    case PRE_DEC:
      fprintf (file, "--");
      output_address (XEXP (x, 0));
      break;
    case POST_INC:
      output_address (XEXP (x, 0));
      fprintf (file, "++");
      break;
    case POST_DEC:
      output_address (XEXP (x, 0));
      fprintf (file, "--");
      break;

    default:
      gcc_assert (GET_CODE (x) != MEM);
      print_operand (file, x, 0);
      break;
    }
}

/* Adding intp DImode support by Tony
 * -- Q: (low  word)
 * -- R: (high word)
 */

void
print_operand (FILE *file, rtx x, char code)
{
  enum machine_mode mode;
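
  /* '%!' terminates an insn: when the current insn's mode is SImode here
     (apparently a slot in a multi-issue bundle) emit the " ||" separator,
     otherwise the usual ";".  */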
  if (code == '!')
    {
      if (GET_MODE (current_output_insn) == SImode)
        fprintf (file, " ||");
      else
        fprintf (file, ";");
      return;
    }

  mode = GET_MODE (x);

  switch (code)
    {
    case 'j':
      switch (GET_CODE (x))
        {
        case EQ:
          fprintf (file, "e");
          break;
        case NE:
          fprintf (file, "ne");
          break;
        case GT:
          fprintf (file, "g");
          break;
        case LT:
          fprintf (file, "l");
          break;
        case GE:
          fprintf (file, "ge");
          break;
        case LE:
          fprintf (file, "le");
          break;
        case GTU:
          fprintf (file, "g");
          break;
        case LTU:
          fprintf (file, "l");
          break;
        case GEU:
          fprintf (file, "ge");
          break;
        case LEU:
          fprintf (file, "le");
          break;
        default:
          output_operand_lossage ("invalid %%j value");
        }
      break;

    case 'J':                                    /* reverse logic */
      switch (GET_CODE(x))
        {
        case EQ:
          fprintf (file, "ne");
          break;
        case NE:
          fprintf (file, "e");
          break;
        case GT:
          fprintf (file, "le");
          break;
        case LT:
          fprintf (file, "ge");
          break;
        case GE:
          fprintf (file, "l");
          break;
        case LE:
          fprintf (file, "g");
          break;
        case GTU:
          fprintf (file, "le");
          break;
        case LTU:
          fprintf (file, "ge");
          break;
        case GEU:
          fprintf (file, "l");
          break;
        case LEU:
          fprintf (file, "g");
          break;
        default:
          output_operand_lossage ("invalid %%J value");
        }
      break;

    default:
      switch (GET_CODE (x))
        {
        case REG:
          if (code == 'h')
            {
              if (REGNO (x) < 32)
                fprintf (file, "%s", short_reg_names[REGNO (x)]);
              else
                output_operand_lossage ("invalid operand for code '%c'", code);
            }
          else if (code == 'd')
            {
              if (REGNO (x) < 32)
                fprintf (file, "%s", high_reg_names[REGNO (x)]);
              else
                output_operand_lossage ("invalid operand for code '%c'", code);
            }
          else if (code == 'w')
            {
              if (REGNO (x) == REG_A0 || REGNO (x) == REG_A1)
                fprintf (file, "%s.w", reg_names[REGNO (x)]);
              else
                output_operand_lossage ("invalid operand for code '%c'", code);
            }
          else if (code == 'x')
            {
              if (REGNO (x) == REG_A0 || REGNO (x) == REG_A1)
                fprintf (file, "%s.x", reg_names[REGNO (x)]);
              else
                output_operand_lossage ("invalid operand for code '%c'", code);
            }
          else if (code == 'v')
            {
              if (REGNO (x) == REG_A0)
1722
                fprintf (file, "AV0");
1723
              else if (REGNO (x) == REG_A1)
1724
                fprintf (file, "AV1");
1725
              else
1726
                output_operand_lossage ("invalid operand for code '%c'", code);
1727
            }
1728
          else if (code == 'D')
1729
            {
1730
              if (D_REGNO_P (REGNO (x)))
1731
                fprintf (file, "%s", dregs_pair_names[REGNO (x)]);
1732
              else
1733
                output_operand_lossage ("invalid operand for code '%c'", code);
1734
            }
1735
          else if (code == 'H')
1736
            {
1737
              if ((mode == DImode || mode == DFmode) && REG_P (x))
1738
                fprintf (file, "%s", reg_names[REGNO (x) + 1]);
1739
              else
1740
                output_operand_lossage ("invalid operand for code '%c'", code);
1741
            }
1742
          else if (code == 'T')
1743
            {
1744
              if (D_REGNO_P (REGNO (x)))
1745
                fprintf (file, "%s", byte_reg_names[REGNO (x)]);
1746
              else
1747
                output_operand_lossage ("invalid operand for code '%c'", code);
1748
            }
1749
          else
1750
            fprintf (file, "%s", reg_names[REGNO (x)]);
1751
          break;
1752
 
1753
        case MEM:
1754
          fputc ('[', file);
1755
          x = XEXP (x,0);
1756
          print_address_operand (file, x);
1757
          fputc (']', file);
1758
          break;
1759
 
1760
        case CONST_INT:
1761
          if (code == 'M')
1762
            {
1763
              switch (INTVAL (x))
1764
                {
1765
                case MACFLAG_NONE:
1766
                  break;
1767
                case MACFLAG_FU:
1768
                  fputs ("(FU)", file);
1769
                  break;
1770
                case MACFLAG_T:
1771
                  fputs ("(T)", file);
1772
                  break;
1773
                case MACFLAG_TFU:
1774
                  fputs ("(TFU)", file);
1775
                  break;
1776
                case MACFLAG_W32:
1777
                  fputs ("(W32)", file);
1778
                  break;
1779
                case MACFLAG_IS:
1780
                  fputs ("(IS)", file);
1781
                  break;
1782
                case MACFLAG_IU:
1783
                  fputs ("(IU)", file);
1784
                  break;
1785
                case MACFLAG_IH:
1786
                  fputs ("(IH)", file);
1787
                  break;
1788
                case MACFLAG_M:
1789
                  fputs ("(M)", file);
1790
                  break;
1791
                case MACFLAG_IS_M:
1792
                  fputs ("(IS,M)", file);
1793
                  break;
1794
                case MACFLAG_ISS2:
1795
                  fputs ("(ISS2)", file);
1796
                  break;
1797
                case MACFLAG_S2RND:
1798
                  fputs ("(S2RND)", file);
1799
                  break;
1800
                default:
1801
                  gcc_unreachable ();
1802
                }
1803
              break;
1804
            }
1805
          else if (code == 'b')
1806
            {
1807
              if (INTVAL (x) == 0)
1808
                fputs ("+=", file);
1809
              else if (INTVAL (x) == 1)
1810
                fputs ("-=", file);
1811
              else
1812
                gcc_unreachable ();
1813
              break;
1814
            }
1815
          /* Moves to half registers with d or h modifiers always use unsigned
1816
             constants.  */
1817
          else if (code == 'd')
1818
            x = GEN_INT ((INTVAL (x) >> 16) & 0xffff);
1819
          else if (code == 'h')
1820
            x = GEN_INT (INTVAL (x) & 0xffff);
1821
          else if (code == 'N')
1822
            x = GEN_INT (-INTVAL (x));
1823
          else if (code == 'X')
1824
            x = GEN_INT (exact_log2 (0xffffffff & INTVAL (x)));
1825
          else if (code == 'Y')
1826
            x = GEN_INT (exact_log2 (0xffffffff & ~INTVAL (x)));
1827
          else if (code == 'Z')
1828
            /* Used for LINK insns.  */
1829
            x = GEN_INT (-8 - INTVAL (x));
1830
 
1831
          /* fall through */
1832
 
1833
        case SYMBOL_REF:
1834
          output_addr_const (file, x);
1835
          break;
1836
 
1837
        case CONST_DOUBLE:
1838
          output_operand_lossage ("invalid const_double operand");
1839
          break;
1840
 
1841
        case UNSPEC:
1842
          switch (XINT (x, 1))
1843
            {
1844
            case UNSPEC_MOVE_PIC:
1845
              output_addr_const (file, XVECEXP (x, 0, 0));
1846
              fprintf (file, "@GOT");
1847
              break;
1848
 
1849
            case UNSPEC_MOVE_FDPIC:
1850
              output_addr_const (file, XVECEXP (x, 0, 0));
1851
              fprintf (file, "@GOT17M4");
1852
              break;
1853
 
1854
            case UNSPEC_FUNCDESC_GOT17M4:
1855
              output_addr_const (file, XVECEXP (x, 0, 0));
1856
              fprintf (file, "@FUNCDESC_GOT17M4");
1857
              break;
1858
 
1859
            case UNSPEC_LIBRARY_OFFSET:
1860
              fprintf (file, "_current_shared_library_p5_offset_");
1861
              break;
1862
 
1863
            default:
1864
              gcc_unreachable ();
1865
            }
1866
          break;
1867
 
1868
        default:
1869
          output_addr_const (file, x);
1870
        }
1871
    }
1872
}
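
/* Editorial example (not in the original source): with a DImode value in
   R0, code 'H' prints the register holding the second word, "R1"; for a
   CONST_INT 0x12345678, 'd' selects the high halfword 0x1234 and 'h' the
   low halfword 0x5678, both printed as unsigned constants.  */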

/* Argument support functions.  */

/* Initialize a variable CUM of type CUMULATIVE_ARGS
   for a call to a function whose data type is FNTYPE.
   For a library call, FNTYPE is 0.
   Per the VDSP C Compiler manual, our ABI passes the first
   three words of arguments in R0, R1 and R2.  */

void
init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype,
                      rtx libname ATTRIBUTE_UNUSED)
{
  static CUMULATIVE_ARGS zero_cum;

  *cum = zero_cum;

  /* Set up the number of registers to use for passing arguments.  */

  cum->nregs = max_arg_registers;
  cum->arg_regs = arg_regs;

  cum->call_cookie = CALL_NORMAL;
  /* Check for a longcall attribute.  */
  if (fntype && lookup_attribute ("shortcall", TYPE_ATTRIBUTES (fntype)))
    cum->call_cookie |= CALL_SHORT;
  else if (fntype && lookup_attribute ("longcall", TYPE_ATTRIBUTES (fntype)))
    cum->call_cookie |= CALL_LONG;

  return;
}
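
/* Editorial example (not in the original source): for a call such as
   f (int a, long long b), A is passed in R0 and the two words of B in
   R1 and R2; any further argument words go on the stack.  */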

/* Update the data in CUM to advance over an argument
   of mode MODE and data type TYPE.
   (TYPE is null for libcalls where that information may not be available.)  */

void
function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type,
                      int named ATTRIBUTE_UNUSED)
{
  int count, bytes, words;

  bytes = (mode == BLKmode) ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
  words = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;

  cum->words += words;
  cum->nregs -= words;

  if (cum->nregs <= 0)
    {
      cum->nregs = 0;
      cum->arg_regs = NULL;
    }
  else
    {
      for (count = 1; count <= words; count++)
        cum->arg_regs++;
    }

  return;
}
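
/* Editorial example (not in the original source): starting from
   cum->nregs == 3, advancing over an 8-byte argument gives words == 2,
   leaving nregs == 1 with arg_regs pointing at the register for the
   next argument.  */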

/* Define where to put the arguments to a function.
   Value is zero to push the argument on the stack,
   or a hard register in which to store the argument.

   MODE is the argument's machine mode.
   TYPE is the data type of the argument (as a tree).
    This is null for libcalls where that information may
    not be available.
   CUM is a variable of type CUMULATIVE_ARGS which gives info about
    the preceding args and about the function being called.
   NAMED is nonzero if this argument is a named parameter
    (otherwise it is an extra parameter matching an ellipsis).  */

struct rtx_def *
function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type,
              int named ATTRIBUTE_UNUSED)
{
  int bytes
    = (mode == BLKmode) ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);

  if (mode == VOIDmode)
    /* Compute operand 2 of the call insn.  */
    return GEN_INT (cum->call_cookie);

  if (bytes == -1)
    return NULL_RTX;

  if (cum->nregs)
    return gen_rtx_REG (mode, *(cum->arg_regs));

  return NULL_RTX;
}

/* For an arg passed partly in registers and partly in memory,
   this is the number of bytes passed in registers.
   For args passed entirely in registers or entirely in memory, zero.

   See the VDSP C Compiler manual for our ABI: the first three words
   are passed in registers, so an argument larger than the registers
   still available spans both registers and the stack.  */

static int
bfin_arg_partial_bytes (CUMULATIVE_ARGS *cum, enum machine_mode mode,
                        tree type ATTRIBUTE_UNUSED,
                        bool named ATTRIBUTE_UNUSED)
{
  int bytes
    = (mode == BLKmode) ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
  int bytes_left = cum->nregs * UNITS_PER_WORD;

  if (bytes == -1)
    return 0;

  if (bytes_left == 0)
    return 0;
  if (bytes > bytes_left)
    return bytes_left;
  return 0;
}
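
/* Editorial worked example (not in the original source): with one
   register word left (bytes_left == 4), an 8-byte argument returns 4:
   four bytes travel in the last register and four on the stack.  */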

/* Variable sized types are passed by reference.  */

static bool
bfin_pass_by_reference (CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED,
                        enum machine_mode mode ATTRIBUTE_UNUSED,
                        const_tree type, bool named ATTRIBUTE_UNUSED)
{
  return type && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST;
}

/* Decide whether a type should be returned in memory (true)
   or in a register (false).  This is called by the macro
   TARGET_RETURN_IN_MEMORY.  */

static bool
bfin_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
{
  int size = int_size_in_bytes (type);
  return size > 2 * UNITS_PER_WORD || size == -1;
}

/* Return the register in which the address of a structure value
   is passed to a function.  */
static rtx
bfin_struct_value_rtx (tree fntype ATTRIBUTE_UNUSED,
                      int incoming ATTRIBUTE_UNUSED)
{
  return gen_rtx_REG (Pmode, REG_P0);
}

/* Return true when register may be used to pass function parameters.  */

bool
function_arg_regno_p (int n)
{
  int i;
  for (i = 0; arg_regs[i] != -1; i++)
    if (n == arg_regs[i])
      return true;
  return false;
}

/* Returns 1 if OP contains a symbol reference.  */

int
symbolic_reference_mentioned_p (rtx op)
{
  register const char *fmt;
  register int i;

  if (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF)
    return 1;

  fmt = GET_RTX_FORMAT (GET_CODE (op));
  for (i = GET_RTX_LENGTH (GET_CODE (op)) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'E')
        {
          register int j;

          for (j = XVECLEN (op, i) - 1; j >= 0; j--)
            if (symbolic_reference_mentioned_p (XVECEXP (op, i, j)))
              return 1;
        }

      else if (fmt[i] == 'e' && symbolic_reference_mentioned_p (XEXP (op, i)))
        return 1;
    }

  return 0;
}

/* Decide whether we can make a sibling call to a function.  DECL is the
   declaration of the function being targeted by the call and EXP is the
   CALL_EXPR representing the call.  */

static bool
bfin_function_ok_for_sibcall (tree decl ATTRIBUTE_UNUSED,
                              tree exp ATTRIBUTE_UNUSED)
{
  struct cgraph_local_info *this_func, *called_func;
  e_funkind fkind = funkind (TREE_TYPE (current_function_decl));
  if (fkind != SUBROUTINE)
    return false;
  if (!TARGET_ID_SHARED_LIBRARY || TARGET_SEP_DATA)
    return true;

  /* When compiling for ID shared libraries, we can't sibcall a local function
     from a non-local function, because the local function thinks it does
     not need to reload P5 in the prologue, but the sibcall will pop P5 in the
     sibcall epilogue, and we end up with the wrong value in P5.  */

  if (!decl)
    /* Not enough information.  */
    return false;

  this_func = cgraph_local_info (current_function_decl);
  called_func = cgraph_local_info (decl);
  return !called_func->local || this_func->local;
}

/* Write a template for a trampoline to F.  */

static void
bfin_asm_trampoline_template (FILE *f)
{
  if (TARGET_FDPIC)
    {
      fprintf (f, "\t.dd\t0x00000000\n");       /* 0 */
      fprintf (f, "\t.dd\t0x00000000\n");       /* 0 */
      fprintf (f, "\t.dd\t0x0000e109\n");       /* p1.l = fn low */
      fprintf (f, "\t.dd\t0x0000e149\n");       /* p1.h = fn high */
      fprintf (f, "\t.dd\t0x0000e10a\n");       /* p2.l = sc low */
      fprintf (f, "\t.dd\t0x0000e14a\n");       /* p2.h = sc high */
      fprintf (f, "\t.dw\t0xac4b\n");           /* p3 = [p1 + 4] */
      fprintf (f, "\t.dw\t0x9149\n");           /* p1 = [p1] */
      fprintf (f, "\t.dw\t0x0051\n");           /* jump (p1)*/
    }
  else
    {
      fprintf (f, "\t.dd\t0x0000e109\n");       /* p1.l = fn low */
      fprintf (f, "\t.dd\t0x0000e149\n");       /* p1.h = fn high */
      fprintf (f, "\t.dd\t0x0000e10a\n");       /* p2.l = sc low */
      fprintf (f, "\t.dd\t0x0000e14a\n");       /* p2.h = sc high */
      fprintf (f, "\t.dw\t0x0051\n");           /* jump (p1)*/
    }
}

/* Emit RTL insns to initialize the variable parts of a trampoline at
   M_TRAMP.  FNDECL is the target function.  CHAIN_VALUE is an RTX for
   the static chain value for the function.  */

static void
bfin_trampoline_init (rtx m_tramp, tree fndecl, rtx chain_value)
{
  rtx t1 = copy_to_reg (XEXP (DECL_RTL (fndecl), 0));
  rtx t2 = copy_to_reg (chain_value);
  rtx mem;
  int i = 0;

  emit_block_move (m_tramp, assemble_trampoline_template (),
                   GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);

  if (TARGET_FDPIC)
    {
      rtx a = force_reg (Pmode, plus_constant (XEXP (m_tramp, 0), 8));
      mem = adjust_address (m_tramp, Pmode, 0);
      emit_move_insn (mem, a);
      i = 8;
    }

  mem = adjust_address (m_tramp, HImode, i + 2);
  emit_move_insn (mem, gen_lowpart (HImode, t1));
  emit_insn (gen_ashrsi3 (t1, t1, GEN_INT (16)));
  mem = adjust_address (m_tramp, HImode, i + 6);
  emit_move_insn (mem, gen_lowpart (HImode, t1));

  mem = adjust_address (m_tramp, HImode, i + 10);
  emit_move_insn (mem, gen_lowpart (HImode, t2));
  emit_insn (gen_ashrsi3 (t2, t2, GEN_INT (16)));
  mem = adjust_address (m_tramp, HImode, i + 14);
  emit_move_insn (mem, gen_lowpart (HImode, t2));
}
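
/* Editorial note (not in the original source): the halfword stores above
   patch the immediate fields of the trampoline template; offsets i+2 and
   i+6 receive the low and high halves of the function address ("p1.l =",
   "p1.h ="), and offsets i+10 and i+14 those of the static chain value
   ("p2.l =", "p2.h =").  */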

/* Emit insns to move operands[1] into operands[0].  */

void
emit_pic_move (rtx *operands, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  rtx temp = reload_in_progress ? operands[0] : gen_reg_rtx (Pmode);

  gcc_assert (!TARGET_FDPIC || !(reload_in_progress || reload_completed));
  if (GET_CODE (operands[0]) == MEM && SYMBOLIC_CONST (operands[1]))
    operands[1] = force_reg (SImode, operands[1]);
  else
    operands[1] = legitimize_pic_address (operands[1], temp,
                                          TARGET_FDPIC ? OUR_FDPIC_REG
                                          : pic_offset_table_rtx);
}

/* Expand a move operation in mode MODE.  The operands are in OPERANDS.
   Returns true if no further code must be generated, false if the caller
   should generate an insn to move OPERANDS[1] to OPERANDS[0].  */

bool
expand_move (rtx *operands, enum machine_mode mode)
{
  rtx op = operands[1];
  if ((TARGET_ID_SHARED_LIBRARY || TARGET_FDPIC)
      && SYMBOLIC_CONST (op))
    emit_pic_move (operands, mode);
  else if (mode == SImode && GET_CODE (op) == CONST
           && GET_CODE (XEXP (op, 0)) == PLUS
           && GET_CODE (XEXP (XEXP (op, 0), 0)) == SYMBOL_REF
           && !bfin_legitimate_constant_p (op))
    {
      rtx dest = operands[0];
      rtx op0, op1;
      gcc_assert (!reload_in_progress && !reload_completed);
      op = XEXP (op, 0);
      op0 = force_reg (mode, XEXP (op, 0));
      op1 = XEXP (op, 1);
      if (!insn_data[CODE_FOR_addsi3].operand[2].predicate (op1, mode))
        op1 = force_reg (mode, op1);
      if (GET_CODE (dest) == MEM)
        dest = gen_reg_rtx (mode);
      emit_insn (gen_addsi3 (dest, op0, op1));
      if (dest == operands[0])
        return true;
      operands[1] = dest;
    }
  /* Don't generate memory->memory or constant->memory moves; go through a
     register.  */
  else if ((reload_in_progress | reload_completed) == 0
           && GET_CODE (operands[0]) == MEM
           && GET_CODE (operands[1]) != REG)
    operands[1] = force_reg (mode, operands[1]);
  return false;
}

/* Split one or more DImode RTL references into pairs of SImode
   references.  The RTL can be REG, offsettable MEM, integer constant, or
   CONST_DOUBLE.  "operands" is a pointer to an array of DImode RTL to
   split and "num" is its length.  lo_half and hi_half are output arrays
   that parallel "operands".  */

void
split_di (rtx operands[], int num, rtx lo_half[], rtx hi_half[])
{
  while (num--)
    {
      rtx op = operands[num];

      /* simplify_subreg refuses to split volatile memory addresses,
         but we still have to handle it.  */
      if (GET_CODE (op) == MEM)
        {
          lo_half[num] = adjust_address (op, SImode, 0);
          hi_half[num] = adjust_address (op, SImode, 4);
        }
      else
        {
          lo_half[num] = simplify_gen_subreg (SImode, op,
                                              GET_MODE (op) == VOIDmode
                                              ? DImode : GET_MODE (op), 0);
          hi_half[num] = simplify_gen_subreg (SImode, op,
                                              GET_MODE (op) == VOIDmode
                                              ? DImode : GET_MODE (op), 4);
        }
    }
}

bool
bfin_longcall_p (rtx op, int call_cookie)
{
  gcc_assert (GET_CODE (op) == SYMBOL_REF);
  if (SYMBOL_REF_WEAK (op))
    return 1;
  if (call_cookie & CALL_SHORT)
    return 0;
  if (call_cookie & CALL_LONG)
    return 1;
  if (TARGET_LONG_CALLS)
    return 1;
  return 0;
}

/* Expand a call instruction.  FNADDR is the call target, RETVAL the return value.
   COOKIE is a CONST_INT holding the call_cookie prepared by init_cumulative_args.
   SIBCALL is nonzero if this is a sibling call.  */

void
bfin_expand_call (rtx retval, rtx fnaddr, rtx callarg1, rtx cookie, int sibcall)
{
  rtx use = NULL, call;
  rtx callee = XEXP (fnaddr, 0);
  int nelts = 3;
  rtx pat;
  rtx picreg = get_hard_reg_initial_val (SImode, FDPIC_REGNO);
  rtx retsreg = gen_rtx_REG (Pmode, REG_RETS);
  int n;

  /* In an untyped call, we can get NULL for operand 2.  */
  if (cookie == NULL_RTX)
    cookie = const0_rtx;

  /* Static functions and indirect calls don't need the pic register.  */
  if (!TARGET_FDPIC && flag_pic
      && GET_CODE (callee) == SYMBOL_REF
      && !SYMBOL_REF_LOCAL_P (callee))
    use_reg (&use, pic_offset_table_rtx);

  if (TARGET_FDPIC)
    {
      int caller_in_sram, callee_in_sram;

      /* 0 is not in sram, 1 is in L1 sram, 2 is in L2 sram.  */
      caller_in_sram = callee_in_sram = 0;

      if (lookup_attribute ("l1_text",
                            DECL_ATTRIBUTES (cfun->decl)) != NULL_TREE)
        caller_in_sram = 1;
      else if (lookup_attribute ("l2",
                                 DECL_ATTRIBUTES (cfun->decl)) != NULL_TREE)
        caller_in_sram = 2;

      if (GET_CODE (callee) == SYMBOL_REF
          && SYMBOL_REF_DECL (callee) && DECL_P (SYMBOL_REF_DECL (callee)))
        {
          if (lookup_attribute
              ("l1_text",
               DECL_ATTRIBUTES (SYMBOL_REF_DECL (callee))) != NULL_TREE)
            callee_in_sram = 1;
          else if (lookup_attribute
                   ("l2",
                    DECL_ATTRIBUTES (SYMBOL_REF_DECL (callee))) != NULL_TREE)
            callee_in_sram = 2;
        }

      if (GET_CODE (callee) != SYMBOL_REF
          || bfin_longcall_p (callee, INTVAL (cookie))
          || (GET_CODE (callee) == SYMBOL_REF
              && !SYMBOL_REF_LOCAL_P (callee)
              && TARGET_INLINE_PLT)
          || caller_in_sram != callee_in_sram
          || (caller_in_sram && callee_in_sram
              && (GET_CODE (callee) != SYMBOL_REF
                  || !SYMBOL_REF_LOCAL_P (callee))))
        {
          rtx addr = callee;
          if (! address_operand (addr, Pmode))
            addr = force_reg (Pmode, addr);

          fnaddr = gen_reg_rtx (SImode);
          emit_insn (gen_load_funcdescsi (fnaddr, addr));
          fnaddr = gen_rtx_MEM (Pmode, fnaddr);

          picreg = gen_reg_rtx (SImode);
          emit_insn (gen_load_funcdescsi (picreg,
                                          plus_constant (addr, 4)));
        }

      nelts++;
    }
  else if ((!register_no_elim_operand (callee, Pmode)
            && GET_CODE (callee) != SYMBOL_REF)
           || (GET_CODE (callee) == SYMBOL_REF
               && ((TARGET_ID_SHARED_LIBRARY && !TARGET_LEAF_ID_SHARED_LIBRARY)
                   || bfin_longcall_p (callee, INTVAL (cookie)))))
    {
      callee = copy_to_mode_reg (Pmode, callee);
      fnaddr = gen_rtx_MEM (Pmode, callee);
    }
  call = gen_rtx_CALL (VOIDmode, fnaddr, callarg1);

  if (retval)
    call = gen_rtx_SET (VOIDmode, retval, call);

  pat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nelts));
  n = 0;
  XVECEXP (pat, 0, n++) = call;
  if (TARGET_FDPIC)
    XVECEXP (pat, 0, n++) = gen_rtx_USE (VOIDmode, picreg);
  XVECEXP (pat, 0, n++) = gen_rtx_USE (VOIDmode, cookie);
  if (sibcall)
    XVECEXP (pat, 0, n++) = gen_rtx_RETURN (VOIDmode);
  else
    XVECEXP (pat, 0, n++) = gen_rtx_CLOBBER (VOIDmode, retsreg);
  call = emit_call_insn (pat);
  if (use)
    CALL_INSN_FUNCTION_USAGE (call) = use;
}
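
/* Editorial sketch (not in the original source) of the emitted pattern:
       (parallel [(set retval (call ...))   ;; or the bare call
                  (use picreg)              ;; FDPIC only
                  (use cookie)
                  (return)])                ;; sibcalls; else (clobber RETS)
   which the call insn patterns in the machine description are expected
   to match.  */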

/* Return 1 if hard register REGNO can hold a value of machine-mode MODE.  */

int
hard_regno_mode_ok (int regno, enum machine_mode mode)
{
  /* Allow only dregs to store values of mode HI or QI.  */
  enum reg_class rclass = REGNO_REG_CLASS (regno);

  if (mode == CCmode)
    return 0;

  if (mode == V2HImode)
    return D_REGNO_P (regno);
  if (rclass == CCREGS)
    return mode == BImode;
  if (mode == PDImode || mode == V2PDImode)
    return regno == REG_A0 || regno == REG_A1;

  /* Allow all normal 32-bit regs, except REG_M3, in case regclass ever comes
     up with a bad register class (such as ALL_REGS) for DImode.  */
  if (mode == DImode)
    return regno < REG_M3;

  if (mode == SImode
      && TEST_HARD_REG_BIT (reg_class_contents[PROLOGUE_REGS], regno))
    return 1;

  return TEST_HARD_REG_BIT (reg_class_contents[MOST_REGS], regno);
}

/* Implements target hook vector_mode_supported_p.  */

static bool
bfin_vector_mode_supported_p (enum machine_mode mode)
{
  return mode == V2HImode;
}

/* Return the cost of moving data from a register in class CLASS1 to
   one in class CLASS2.  A cost of 2 is the default.  */

int
bfin_register_move_cost (enum machine_mode mode,
                         enum reg_class class1, enum reg_class class2)
{
  /* These need secondary reloads, so they're more expensive.  */
  if ((class1 == CCREGS && !reg_class_subset_p (class2, DREGS))
      || (class2 == CCREGS && !reg_class_subset_p (class1, DREGS)))
    return 4;

  /* If optimizing for size, always prefer reg-reg over reg-memory moves.  */
  if (optimize_size)
    return 2;

  if (GET_MODE_CLASS (mode) == MODE_INT)
    {
      /* Discourage trying to use the accumulators.  */
      if (TEST_HARD_REG_BIT (reg_class_contents[class1], REG_A0)
          || TEST_HARD_REG_BIT (reg_class_contents[class1], REG_A1)
          || TEST_HARD_REG_BIT (reg_class_contents[class2], REG_A0)
          || TEST_HARD_REG_BIT (reg_class_contents[class2], REG_A1))
        return 20;
    }
  return 2;
}

/* Return the cost of moving data of mode M between a
   register and memory.  A value of 2 is the default; this cost is
   relative to those in `REGISTER_MOVE_COST'.

   ??? In theory L1 memory has single-cycle latency.  We should add a switch
   that tells the compiler whether we expect to use only L1 memory for the
   program; it'll make the costs more accurate.  */

int
bfin_memory_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED,
                       enum reg_class rclass,
                       int in ATTRIBUTE_UNUSED)
{
  /* Make memory accesses slightly more expensive than any register-register
     move.  Also, penalize non-DP registers, since they need secondary
     reloads to load and store.  */
  if (! reg_class_subset_p (rclass, DPREGS))
    return 10;

  return 8;
}

/* Inform reload about cases where moving X with a mode MODE to a register in
   RCLASS requires an extra scratch register.  Return the class needed for the
   scratch register.  */

static enum reg_class
bfin_secondary_reload (bool in_p, rtx x, enum reg_class rclass,
                       enum machine_mode mode, secondary_reload_info *sri)
{
  /* If we have HImode or QImode, we can only use DREGS as secondary registers;
     in most other cases we can also use PREGS.  */
  enum reg_class default_class = GET_MODE_SIZE (mode) >= 4 ? DPREGS : DREGS;
  enum reg_class x_class = NO_REGS;
  enum rtx_code code = GET_CODE (x);

  if (code == SUBREG)
    x = SUBREG_REG (x), code = GET_CODE (x);
  if (REG_P (x))
    {
      int regno = REGNO (x);
      if (regno >= FIRST_PSEUDO_REGISTER)
        regno = reg_renumber[regno];

      if (regno == -1)
        code = MEM;
      else
        x_class = REGNO_REG_CLASS (regno);
    }

  /* We can be asked to reload (plus (FP) (large_constant)) into a DREG.
     This happens as a side effect of register elimination, and we need
     a scratch register to do it.  */
  if (fp_plus_const_operand (x, mode))
    {
      rtx op2 = XEXP (x, 1);
      int large_constant_p = ! satisfies_constraint_Ks7 (op2);

      if (rclass == PREGS || rclass == PREGS_CLOBBERED)
        return NO_REGS;
      /* If destination is a DREG, we can do this without a scratch register
         if the constant is valid for an add instruction.  */
      if ((rclass == DREGS || rclass == DPREGS)
          && ! large_constant_p)
        return NO_REGS;
      /* Reloading to anything other than a DREG?  Use a PREG scratch
         register.  */
      sri->icode = CODE_FOR_reload_insi;
      return NO_REGS;
    }

  /* Data can usually be moved freely between registers of most classes.
     AREGS are an exception; they can only move to or from another register
     in AREGS or one in DREGS.  They can also be assigned the constant 0.  */
  if (x_class == AREGS || x_class == EVEN_AREGS || x_class == ODD_AREGS)
    return (rclass == DREGS || rclass == AREGS || rclass == EVEN_AREGS
            || rclass == ODD_AREGS
            ? NO_REGS : DREGS);

  if (rclass == AREGS || rclass == EVEN_AREGS || rclass == ODD_AREGS)
    {
      if (code == MEM)
        {
          sri->icode = in_p ? CODE_FOR_reload_inpdi : CODE_FOR_reload_outpdi;
          return NO_REGS;
        }

      if (x != const0_rtx && x_class != DREGS)
        {
          return DREGS;
        }
      else
        return NO_REGS;
    }

  /* CCREGS can only be moved from/to DREGS.  */
  if (rclass == CCREGS && x_class != DREGS)
    return DREGS;
  if (x_class == CCREGS && rclass != DREGS)
    return DREGS;

  /* All registers other than AREGS can load arbitrary constants.  The only
     case that remains is MEM.  */
  if (code == MEM)
    if (! reg_class_subset_p (rclass, default_class))
      return default_class;

  return NO_REGS;
}

/* Implement TARGET_HANDLE_OPTION.  */

static bool
bfin_handle_option (size_t code, const char *arg, int value)
{
  switch (code)
    {
    case OPT_mshared_library_id_:
      if (value > MAX_LIBRARY_ID)
        error ("-mshared-library-id=%s is not between 0 and %d",
               arg, MAX_LIBRARY_ID);
      bfin_lib_id_given = 1;
      return true;

    case OPT_mcpu_:
      {
        const char *p, *q;
        int i;

        i = 0;
        while ((p = bfin_cpus[i].name) != NULL)
          {
            if (strncmp (arg, p, strlen (p)) == 0)
              break;
            i++;
          }

        if (p == NULL)
          {
            error ("-mcpu=%s is not valid", arg);
            return false;
          }

        bfin_cpu_type = bfin_cpus[i].type;

        q = arg + strlen (p);

        if (*q == '\0')
          {
            bfin_si_revision = bfin_cpus[i].si_revision;
            bfin_workarounds |= bfin_cpus[i].workarounds;
          }
        else if (strcmp (q, "-none") == 0)
          bfin_si_revision = -1;
        else if (strcmp (q, "-any") == 0)
          {
            bfin_si_revision = 0xffff;
            while (bfin_cpus[i].type == bfin_cpu_type)
              {
                bfin_workarounds |= bfin_cpus[i].workarounds;
                i++;
              }
          }
        else
          {
            unsigned int si_major, si_minor;
            int rev_len, n;

            rev_len = strlen (q);

            if (sscanf (q, "-%u.%u%n", &si_major, &si_minor, &n) != 2
                || n != rev_len
                || si_major > 0xff || si_minor > 0xff)
              {
              invalid_silicon_revision:
                error ("-mcpu=%s has invalid silicon revision", arg);
                return false;
              }

            bfin_si_revision = (si_major << 8) | si_minor;

            while (bfin_cpus[i].type == bfin_cpu_type
                   && bfin_cpus[i].si_revision != bfin_si_revision)
              i++;

            if (bfin_cpus[i].type != bfin_cpu_type)
              goto invalid_silicon_revision;

            bfin_workarounds |= bfin_cpus[i].workarounds;
          }

        return true;
      }

    default:
      return true;
    }
}
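
/* Editorial example (not in the original source): "-mcpu=bf532-0.3"
   selects CPU type bf532 with silicon revision 0x0003; "-mcpu=bf532-any"
   ORs in the workarounds of every bf532 revision; a bare "-mcpu=bf532"
   uses the si_revision recorded in the bfin_cpus table.  */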

static struct machine_function *
bfin_init_machine_status (void)
{
  struct machine_function *f;

  f = GGC_CNEW (struct machine_function);

  return f;
}

/* Implement the macro OVERRIDE_OPTIONS.  */

void
override_options (void)
{
  /* If processor type is not specified, enable all workarounds.  */
  if (bfin_cpu_type == BFIN_CPU_UNKNOWN)
    {
      int i;

      for (i = 0; bfin_cpus[i].name != NULL; i++)
        bfin_workarounds |= bfin_cpus[i].workarounds;

      bfin_si_revision = 0xffff;
    }

  if (bfin_csync_anomaly == 1)
    bfin_workarounds |= WA_SPECULATIVE_SYNCS;
  else if (bfin_csync_anomaly == 0)
    bfin_workarounds &= ~WA_SPECULATIVE_SYNCS;

  if (bfin_specld_anomaly == 1)
    bfin_workarounds |= WA_SPECULATIVE_LOADS;
  else if (bfin_specld_anomaly == 0)
    bfin_workarounds &= ~WA_SPECULATIVE_LOADS;

  if (TARGET_OMIT_LEAF_FRAME_POINTER)
    flag_omit_frame_pointer = 1;

  /* Library identification */
  if (bfin_lib_id_given && ! TARGET_ID_SHARED_LIBRARY)
    error ("-mshared-library-id= specified without -mid-shared-library");

  if (stack_limit_rtx && TARGET_STACK_CHECK_L1)
    error ("Can't use multiple stack checking methods together.");

  if (TARGET_ID_SHARED_LIBRARY && TARGET_FDPIC)
    error ("ID shared libraries and FD-PIC mode can't be used together.");

  /* Don't allow the user to specify -mid-shared-library and -msep-data
     together, as it makes little sense from a user's point of view...  */
  if (TARGET_SEP_DATA && TARGET_ID_SHARED_LIBRARY)
    error ("cannot specify both -msep-data and -mid-shared-library");
  /* ... internally, however, it's nearly the same.  */
  if (TARGET_SEP_DATA)
    target_flags |= MASK_ID_SHARED_LIBRARY | MASK_LEAF_ID_SHARED_LIBRARY;

  if (TARGET_ID_SHARED_LIBRARY && flag_pic == 0)
    flag_pic = 1;

  /* There is no single unaligned SI op for PIC code.  Sometimes we
     need to use ".4byte" and sometimes we need to use ".picptr".
     See bfin_assemble_integer for details.  */
  if (TARGET_FDPIC)
    targetm.asm_out.unaligned_op.si = 0;

  /* Silently turn off flag_pic if not doing FDPIC or ID shared libraries,
     since we don't support it and it'll just break.  */
  if (flag_pic && !TARGET_FDPIC && !TARGET_ID_SHARED_LIBRARY)
    flag_pic = 0;

  if (TARGET_MULTICORE && bfin_cpu_type != BFIN_CPU_BF561)
    error ("-mmulticore can only be used with BF561");

  if (TARGET_COREA && !TARGET_MULTICORE)
    error ("-mcorea should be used with -mmulticore");

  if (TARGET_COREB && !TARGET_MULTICORE)
    error ("-mcoreb should be used with -mmulticore");

  if (TARGET_COREA && TARGET_COREB)
    error ("-mcorea and -mcoreb can't be used together");

  flag_schedule_insns = 0;

  /* Passes after sched2 can break the helpful TImode annotations that
     haifa-sched puts on every insn.  Just do scheduling in reorg.  */
  bfin_flag_schedule_insns2 = flag_schedule_insns_after_reload;
  flag_schedule_insns_after_reload = 0;

  init_machine_status = bfin_init_machine_status;
}

/* Return the destination address of BRANCH.
   We need to use this instead of get_attr_length, because the
   cbranch_with_nops pattern conservatively sets its length to 6, and
   we still prefer to use shorter sequences.  */

static int
branch_dest (rtx branch)
{
  rtx dest;
  int dest_uid;
  rtx pat = PATTERN (branch);
  if (GET_CODE (pat) == PARALLEL)
    pat = XVECEXP (pat, 0, 0);
  dest = SET_SRC (pat);
  if (GET_CODE (dest) == IF_THEN_ELSE)
    dest = XEXP (dest, 1);
  dest = XEXP (dest, 0);
  dest_uid = INSN_UID (dest);
  return INSN_ADDRESSES (dest_uid);
}

/* Return nonzero if INSN is annotated with a REG_BR_PROB note that indicates
   it's a branch that's predicted taken.  */

static int
cbranch_predicted_taken_p (rtx insn)
{
  rtx x = find_reg_note (insn, REG_BR_PROB, 0);

  if (x)
    {
      int pred_val = INTVAL (XEXP (x, 0));

      return pred_val >= REG_BR_PROB_BASE / 2;
    }

  return 0;
}

/* Templates for use by asm_conditional_branch.  */

static const char *ccbranch_templates[][3] = {
  { "if !cc jump %3;",  "if cc jump 4 (bp); jump.s %3;",  "if cc jump 6 (bp); jump.l %3;" },
  { "if cc jump %3;",   "if !cc jump 4 (bp); jump.s %3;", "if !cc jump 6 (bp); jump.l %3;" },
  { "if !cc jump %3 (bp);",  "if cc jump 4; jump.s %3;",  "if cc jump 6; jump.l %3;" },
  { "if cc jump %3 (bp);",  "if !cc jump 4; jump.s %3;",  "if !cc jump 6; jump.l %3;" },
};

/* Output INSN, which is a conditional branch instruction with operands
   OPERANDS.

   We deal with the various forms of conditional branches that can be generated
   by bfin_reorg to prevent the hardware from doing speculative loads, by
   - emitting a sufficient number of nops, if N_NOPS is nonzero, or
   - always emitting the branch as predicted taken, if PREDICT_TAKEN is true.
   Either of these is only necessary if the branch is short, otherwise the
   template we use ends in an unconditional jump which flushes the pipeline
   anyway.  */

void
asm_conditional_branch (rtx insn, rtx *operands, int n_nops, int predict_taken)
{
  int offset = branch_dest (insn) - INSN_ADDRESSES (INSN_UID (insn));
  /* Note: for sequences like "if cc jump; jump.[sl] offset", the offset
     is measured from the start of the "if cc" insn rather than from the
     jump, so the range for jump.s becomes (-4094, 4096) instead of
     (-4096, 4094).  */
  int len = (offset >= -1024 && offset <= 1022 ? 0
             : offset >= -4094 && offset <= 4096 ? 1
             : 2);
  int bp = predict_taken && len == 0 ? 1 : cbranch_predicted_taken_p (insn);
  int idx = (bp << 1) | (GET_CODE (operands[0]) == EQ ? BRF : BRT);
  output_asm_insn (ccbranch_templates[idx][len], operands);
  gcc_assert (n_nops == 0 || !bp);
  if (len == 0)
    while (n_nops-- > 0)
      output_asm_insn ("nop;", NULL);
}
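
/* Editorial example (not in the original source): an offset of 2000 falls
   in the jump.s range, so len == 1 and a two-insn template such as
   "if cc jump 4 (bp); jump.s %3;" is used (which one depends on the
   predicted direction and condition); within [-1024, 1022] the single
   short branch is emitted instead.  */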

/* Emit rtl for a comparison operation CMP in mode MODE.  Operands have been
   stored in bfin_compare_op0 and bfin_compare_op1 already.  */

rtx
bfin_gen_compare (rtx cmp, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  enum rtx_code code1, code2;
  rtx op0 = XEXP (cmp, 0), op1 = XEXP (cmp, 1);
  rtx tem = bfin_cc_rtx;
  enum rtx_code code = GET_CODE (cmp);

  /* If we have a BImode input, then we already have a compare result, and
     do not need to emit another comparison.  */
  if (GET_MODE (op0) == BImode)
    {
      gcc_assert ((code == NE || code == EQ) && op1 == const0_rtx);
      tem = op0, code2 = code;
    }
  else
    {
      switch (code) {
        /* bfin has these conditions */
      case EQ:
      case LT:
      case LE:
      case LEU:
      case LTU:
        code1 = code;
        code2 = NE;
        break;
      default:
        code1 = reverse_condition (code);
        code2 = EQ;
        break;
      }
      emit_insn (gen_rtx_SET (VOIDmode, tem,
                              gen_rtx_fmt_ee (code1, BImode, op0, op1)));
    }

  return gen_rtx_fmt_ee (code2, BImode, tem, CONST0_RTX (BImode));
}
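
/* Editorial example (not in the original source): for a GT comparison,
   which the hardware lacks, code1 becomes LE and code2 EQ: we emit
   CC = (op0 <= op1) and return (CC == 0), which holds exactly when
   op0 > op1.  */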

/* Return nonzero iff C has exactly one bit set if it is interpreted
   as a 32-bit constant.  */

int
log2constp (unsigned HOST_WIDE_INT c)
{
  c &= 0xFFFFFFFF;
  return c != 0 && (c & (c-1)) == 0;
}

/* Returns the number of consecutive least significant zeros in the binary
   representation of *V.
   We modify *V to contain the original value arithmetically shifted right by
   the number of zeroes.  */

static int
shiftr_zero (HOST_WIDE_INT *v)
{
  unsigned HOST_WIDE_INT tmp = *v;
  unsigned HOST_WIDE_INT sgn;
  int n = 0;

  if (tmp == 0)
    return 0;

  sgn = tmp & ((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1));
  while ((tmp & 0x1) == 0 && n <= 32)
    {
      tmp = (tmp >> 1) | sgn;
      n++;
    }
  *v = tmp;
  return n;
}
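
/* Editorial worked example (not in the original source): for
   *v == 0xA00000 (== 5 << 21), the loop shifts out 21 zero bits,
   leaving *v == 5 and returning 21.  */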

/* After reload, split the load of an immediate constant.  OPERANDS are the
   operands of the movsi_insn pattern which we are splitting.  We return
   nonzero if we emitted a sequence to load the constant, zero if we emitted
   nothing because we want to use the splitter's default sequence.  */

int
split_load_immediate (rtx operands[])
{
  HOST_WIDE_INT val = INTVAL (operands[1]);
  HOST_WIDE_INT tmp;
  HOST_WIDE_INT shifted = val;
  HOST_WIDE_INT shifted_compl = ~val;
  int num_zero = shiftr_zero (&shifted);
  int num_compl_zero = shiftr_zero (&shifted_compl);
  unsigned int regno = REGNO (operands[0]);

  /* This case takes care of single-bit set/clear constants, which we could
     also implement with BITSET/BITCLR.  */
  if (num_zero
      && shifted >= -32768 && shifted < 65536
      && (D_REGNO_P (regno)
          || (regno >= REG_P0 && regno <= REG_P7 && num_zero <= 2)))
    {
      emit_insn (gen_movsi (operands[0], GEN_INT (shifted)));
      emit_insn (gen_ashlsi3 (operands[0], operands[0], GEN_INT (num_zero)));
      return 1;
    }

  tmp = val & 0xFFFF;
  tmp |= -(tmp & 0x8000);

  /* If high word has one bit set or clear, try to use a bit operation.  */
  if (D_REGNO_P (regno))
    {
      if (log2constp (val & 0xFFFF0000))
        {
          emit_insn (gen_movsi (operands[0], GEN_INT (val & 0xFFFF)));
          emit_insn (gen_iorsi3 (operands[0], operands[0], GEN_INT (val & 0xFFFF0000)));
          return 1;
        }
      else if (log2constp (val | 0xFFFF) && (val & 0x8000) != 0)
        {
          emit_insn (gen_movsi (operands[0], GEN_INT (tmp)));
          emit_insn (gen_andsi3 (operands[0], operands[0], GEN_INT (val | 0xFFFF)));
          return 1;
        }
    }

  if (D_REGNO_P (regno))
    {
      if (tmp >= -64 && tmp <= 63)
        {
          emit_insn (gen_movsi (operands[0], GEN_INT (tmp)));
          emit_insn (gen_movstricthi_high (operands[0], GEN_INT (val & -65536)));
          return 1;
        }

      if ((val & 0xFFFF0000) == 0)
        {
          emit_insn (gen_movsi (operands[0], const0_rtx));
          emit_insn (gen_movsi_low (operands[0], operands[0], operands[1]));
          return 1;
        }

      if ((val & 0xFFFF0000) == 0xFFFF0000)
        {
          emit_insn (gen_movsi (operands[0], constm1_rtx));
          emit_insn (gen_movsi_low (operands[0], operands[0], operands[1]));
          return 1;
        }
    }

  /* Need DREGs for the remaining case.  */
  if (regno > REG_R7)
    return 0;

  if (optimize_size
      && num_compl_zero && shifted_compl >= -64 && shifted_compl <= 63)
    {
      /* If optimizing for size, generate a sequence that has more instructions
         but is shorter.  */
      emit_insn (gen_movsi (operands[0], GEN_INT (shifted_compl)));
      emit_insn (gen_ashlsi3 (operands[0], operands[0],
                              GEN_INT (num_compl_zero)));
      emit_insn (gen_one_cmplsi2 (operands[0], operands[0]));
      return 1;
    }
  return 0;
}
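
/* Editorial worked example (not in the original source): for
   val == 0x3C000 (== 15 << 14) and a D-register destination, the
   shifted-constant case above loads 15 and shifts left by 14, roughly
   "R0 = 15 (X); R0 <<= 14;", instead of a full 32-bit load.  */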

/* Return true if VALUE is a legitimate constant offset for a memory
   operand of mode MODE, false if not.  */

static bool
bfin_valid_add (enum machine_mode mode, HOST_WIDE_INT value)
{
  unsigned HOST_WIDE_INT v = value > 0 ? value : -value;
  int sz = GET_MODE_SIZE (mode);
  int shift = sz == 1 ? 0 : sz == 2 ? 1 : 2;
  /* The usual offsettable_memref machinery doesn't work so well for this
     port, so we deal with the problem here.  */
  if (value > 0 && sz == 8)
    v += 4;
  return (v & ~(0x7fff << shift)) == 0;
}
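
/* Editorial example (not in the original source): for SImode, shift == 2,
   so an offset is valid iff it is a multiple of 4 whose magnitude is at
   most 0x7fff << 2; [P0 + 0x1FFFC] passes while [P0 + 2] and
   [P0 + 0x20000] fail.  */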

static bool
bfin_valid_reg_p (unsigned int regno, int strict, enum machine_mode mode,
                  enum rtx_code outer_code)
{
  if (strict)
    return REGNO_OK_FOR_BASE_STRICT_P (regno, mode, outer_code, SCRATCH);
  else
    return REGNO_OK_FOR_BASE_NONSTRICT_P (regno, mode, outer_code, SCRATCH);
}

/* Recognize an RTL expression that is a valid memory address for an
   instruction.  The MODE argument is the machine mode for the MEM expression
   that wants to use this address.

   Blackfin addressing modes are as follows:

      [preg]
      [preg + imm16]

      B [ Preg + uimm15 ]
      W [ Preg + uimm16m2 ]
      [ Preg + uimm17m4 ]

      [preg++]
      [preg--]
      [--sp]
*/

static bool
bfin_legitimate_address_p (enum machine_mode mode, rtx x, bool strict)
{
  switch (GET_CODE (x)) {
  case REG:
    if (bfin_valid_reg_p (REGNO (x), strict, mode, MEM))
      return true;
    break;
  case PLUS:
    if (REG_P (XEXP (x, 0))
        && bfin_valid_reg_p (REGNO (XEXP (x, 0)), strict, mode, PLUS)
        && ((GET_CODE (XEXP (x, 1)) == UNSPEC && mode == SImode)
            || (GET_CODE (XEXP (x, 1)) == CONST_INT
                && bfin_valid_add (mode, INTVAL (XEXP (x, 1))))))
      return true;
    break;
  case POST_INC:
  case POST_DEC:
    if (LEGITIMATE_MODE_FOR_AUTOINC_P (mode)
        && REG_P (XEXP (x, 0))
        && bfin_valid_reg_p (REGNO (XEXP (x, 0)), strict, mode, POST_INC))
      return true;
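    /* Fall through.  */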
  case PRE_DEC:
    if (LEGITIMATE_MODE_FOR_AUTOINC_P (mode)
        && XEXP (x, 0) == stack_pointer_rtx
        && REG_P (XEXP (x, 0))
        && bfin_valid_reg_p (REGNO (XEXP (x, 0)), strict, mode, PRE_DEC))
      return true;
    break;
  default:
    break;
  }
  return false;
}
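
/* Editorial example (not in the original source): [P2], [P2 + 8] (SImode),
   [P2++], [P2--] and [--SP] are accepted here; register+register forms
   like [P2 + P3] are not among the cases and are rejected.  */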

/* Decide whether we can force certain constants to memory.  If we
   decide we can't, the caller should be able to cope with it in
   another way.  */

static bool
bfin_cannot_force_const_mem (rtx x ATTRIBUTE_UNUSED)
{
  /* We have only one class of non-legitimate constants, and our movsi
     expander knows how to handle them.  Dropping these constants into the
     data section would only shift the problem - we'd still get relocs
     outside the object, in the data section rather than the text section.  */
  return true;
}

/* Ensure that for any constant of the form symbol + offset, the offset
   remains within the object.  Any other constants are ok.
   This ensures that flat binaries never have to deal with relocations
   crossing section boundaries.  */

bool
bfin_legitimate_constant_p (rtx x)
{
  rtx sym;
  HOST_WIDE_INT offset;

  if (GET_CODE (x) != CONST)
    return true;

  x = XEXP (x, 0);
  gcc_assert (GET_CODE (x) == PLUS);

  sym = XEXP (x, 0);
  x = XEXP (x, 1);
  if (GET_CODE (sym) != SYMBOL_REF
      || GET_CODE (x) != CONST_INT)
    return true;
  offset = INTVAL (x);

  if (SYMBOL_REF_DECL (sym) == 0)
    return true;
  if (offset < 0
      || offset >= int_size_in_bytes (TREE_TYPE (SYMBOL_REF_DECL (sym))))
    return false;

  return true;
}

static bool
bfin_rtx_costs (rtx x, int code, int outer_code, int *total, bool speed)
{
  int cost2 = COSTS_N_INSNS (1);
  rtx op0, op1;

  switch (code)
    {
    case CONST_INT:
      if (outer_code == SET || outer_code == PLUS)
        *total = satisfies_constraint_Ks7 (x) ? 0 : cost2;
      else if (outer_code == AND)
        *total = log2constp (~INTVAL (x)) ? 0 : cost2;
      else if (outer_code == LE || outer_code == LT || outer_code == EQ)
        *total = (INTVAL (x) >= -4 && INTVAL (x) <= 3) ? 0 : cost2;
      else if (outer_code == LEU || outer_code == LTU)
        *total = (INTVAL (x) >= 0 && INTVAL (x) <= 7) ? 0 : cost2;
      else if (outer_code == MULT)
        *total = (INTVAL (x) == 2 || INTVAL (x) == 4) ? 0 : cost2;
      else if (outer_code == ASHIFT && (INTVAL (x) == 1 || INTVAL (x) == 2))
        *total = 0;
      else if (outer_code == ASHIFT || outer_code == ASHIFTRT
               || outer_code == LSHIFTRT)
        *total = (INTVAL (x) >= 0 && INTVAL (x) <= 31) ? 0 : cost2;
      else if (outer_code == IOR || outer_code == XOR)
        *total = (INTVAL (x) & (INTVAL (x) - 1)) == 0 ? 0 : cost2;
      else
        *total = cost2;
      return true;

    case CONST:
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_DOUBLE:
      *total = COSTS_N_INSNS (2);
      return true;

    case PLUS:
      op0 = XEXP (x, 0);
      op1 = XEXP (x, 1);
      if (GET_MODE (x) == SImode)
        {
          if (GET_CODE (op0) == MULT
              && GET_CODE (XEXP (op0, 1)) == CONST_INT)
            {
              HOST_WIDE_INT val = INTVAL (XEXP (op0, 1));
              if (val == 2 || val == 4)
                {
                  *total = cost2;
                  *total += rtx_cost (XEXP (op0, 0), outer_code, speed);
                  *total += rtx_cost (op1, outer_code, speed);
                  return true;
                }
            }
          *total = cost2;
          if (GET_CODE (op0) != REG
              && (GET_CODE (op0) != SUBREG || GET_CODE (SUBREG_REG (op0)) != REG))
            *total += rtx_cost (op0, SET, speed);
#if 0 /* We'd like to do this for accuracy, but it biases the loop optimizer
         towards creating too many induction variables.  */
          if (!reg_or_7bit_operand (op1, SImode))
            *total += rtx_cost (op1, SET, speed);
#endif
        }
      else if (GET_MODE (x) == DImode)
        {
          *total = 6 * cost2;
          if (GET_CODE (op1) != CONST_INT
              || !satisfies_constraint_Ks7 (op1))
            *total += rtx_cost (op1, PLUS, speed);
          if (GET_CODE (op0) != REG
              && (GET_CODE (op0) != SUBREG || GET_CODE (SUBREG_REG (op0)) != REG))
            *total += rtx_cost (op0, PLUS, speed);
        }
      return true;

    case MINUS:
      if (GET_MODE (x) == DImode)
        *total = 6 * cost2;
      else
        *total = cost2;
      return true;

    case ASHIFT:
    case ASHIFTRT:
    case LSHIFTRT:
      if (GET_MODE (x) == DImode)
        *total = 6 * cost2;
      else
        *total = cost2;

      op0 = XEXP (x, 0);
      op1 = XEXP (x, 1);
      if (GET_CODE (op0) != REG
          && (GET_CODE (op0) != SUBREG || GET_CODE (SUBREG_REG (op0)) != REG))
        *total += rtx_cost (op0, code, speed);

      return true;

    case IOR:
    case AND:
    case XOR:
      op0 = XEXP (x, 0);
      op1 = XEXP (x, 1);

      /* Handle special cases of IOR: rotates, ALIGN insns, movstricthi_high.  */
      if (code == IOR)
        {
          if ((GET_CODE (op0) == LSHIFTRT && GET_CODE (op1) == ASHIFT)
              || (GET_CODE (op0) == ASHIFT && GET_CODE (op1) == ZERO_EXTEND)
              || (GET_CODE (op0) == ASHIFT && GET_CODE (op1) == LSHIFTRT)
              || (GET_CODE (op0) == AND && GET_CODE (op1) == CONST_INT))
            {
              *total = cost2;
              return true;
            }
        }

      if (GET_CODE (op0) != REG
          && (GET_CODE (op0) != SUBREG || GET_CODE (SUBREG_REG (op0)) != REG))
        *total += rtx_cost (op0, code, speed);

      if (GET_MODE (x) == DImode)
        {
          *total = 2 * cost2;
          return true;
        }
      *total = cost2;
      if (GET_MODE (x) != SImode)
        return true;

      if (code == AND)
        {
          if (! rhs_andsi3_operand (XEXP (x, 1), SImode))
            *total += rtx_cost (XEXP (x, 1), code, speed);
        }
      else
        {
          if (! regorlog2_operand (XEXP (x, 1), SImode))
            *total += rtx_cost (XEXP (x, 1), code, speed);
        }

      return true;

    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
      if (outer_code == SET
          && XEXP (x, 1) == const1_rtx
          && GET_CODE (XEXP (x, 2)) == CONST_INT)
        {
          *total = 2 * cost2;
          return true;
        }
      /* fall through */

    case SIGN_EXTEND:
    case ZERO_EXTEND:
      *total = cost2;
      return true;

    case MULT:
        {
          op0 = XEXP (x, 0);
          op1 = XEXP (x, 1);
          if (GET_CODE (op0) == GET_CODE (op1)
              && (GET_CODE (op0) == ZERO_EXTEND
                  || GET_CODE (op0) == SIGN_EXTEND))
            {
              *total = COSTS_N_INSNS (1);
              op0 = XEXP (op0, 0);
              op1 = XEXP (op1, 0);
            }
          else if (!speed)
            *total = COSTS_N_INSNS (1);
          else
            *total = COSTS_N_INSNS (3);

          if (GET_CODE (op0) != REG
              && (GET_CODE (op0) != SUBREG || GET_CODE (SUBREG_REG (op0)) != REG))
            *total += rtx_cost (op0, MULT, speed);
          if (GET_CODE (op1) != REG
              && (GET_CODE (op1) != SUBREG || GET_CODE (SUBREG_REG (op1)) != REG))
            *total += rtx_cost (op1, MULT, speed);
        }
      return true;

    case UDIV:
    case UMOD:
      *total = COSTS_N_INSNS (32);
      return true;

    case VEC_CONCAT:
    case VEC_SELECT:
      if (outer_code == SET)
        *total = cost2;
      return true;

    default:
      return false;
    }
}
3300
 
3301
/* Used for communication between {push,pop}_multiple_operation (which
   we use not only as a predicate) and the corresponding output functions.  */
static int first_preg_to_save, first_dreg_to_save;
static int n_regs_to_save;

int
push_multiple_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  int lastdreg = 8, lastpreg = 6;
  int i, group;

  first_preg_to_save = lastpreg;
  first_dreg_to_save = lastdreg;
  for (i = 1, group = 0; i < XVECLEN (op, 0) - 1; i++)
    {
      rtx t = XVECEXP (op, 0, i);
      rtx src, dest;
      int regno;

      if (GET_CODE (t) != SET)
        return 0;

      src = SET_SRC (t);
      dest = SET_DEST (t);
      if (GET_CODE (dest) != MEM || ! REG_P (src))
        return 0;
      dest = XEXP (dest, 0);
      if (GET_CODE (dest) != PLUS
          || ! REG_P (XEXP (dest, 0))
          || REGNO (XEXP (dest, 0)) != REG_SP
          || GET_CODE (XEXP (dest, 1)) != CONST_INT
          || INTVAL (XEXP (dest, 1)) != -i * 4)
        return 0;

      regno = REGNO (src);
      if (group == 0)
        {
          if (D_REGNO_P (regno))
            {
              group = 1;
              first_dreg_to_save = lastdreg = regno - REG_R0;
            }
          else if (regno >= REG_P0 && regno <= REG_P7)
            {
              group = 2;
              first_preg_to_save = lastpreg = regno - REG_P0;
            }
          else
            return 0;

          continue;
        }

      if (group == 1)
        {
          if (regno >= REG_P0 && regno <= REG_P7)
            {
              group = 2;
              first_preg_to_save = lastpreg = regno - REG_P0;
            }
          else if (regno != REG_R0 + lastdreg + 1)
            return 0;
          else
            lastdreg++;
        }
      else if (group == 2)
        {
          if (regno != REG_P0 + lastpreg + 1)
            return 0;
          lastpreg++;
        }
    }
  n_regs_to_save = 8 - first_dreg_to_save + 6 - first_preg_to_save;
  return 1;
}

int
pop_multiple_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  int lastdreg = 8, lastpreg = 6;
  int i, group;

  for (i = 1, group = 0; i < XVECLEN (op, 0); i++)
    {
      rtx t = XVECEXP (op, 0, i);
      rtx src, dest;
      int regno;

      if (GET_CODE (t) != SET)
        return 0;

      src = SET_SRC (t);
      dest = SET_DEST (t);
      if (GET_CODE (src) != MEM || ! REG_P (dest))
        return 0;
      src = XEXP (src, 0);

      if (i == 1)
        {
          if (! REG_P (src) || REGNO (src) != REG_SP)
            return 0;
        }
      else if (GET_CODE (src) != PLUS
               || ! REG_P (XEXP (src, 0))
               || REGNO (XEXP (src, 0)) != REG_SP
               || GET_CODE (XEXP (src, 1)) != CONST_INT
               || INTVAL (XEXP (src, 1)) != (i - 1) * 4)
        return 0;

      regno = REGNO (dest);
      if (group == 0)
        {
          if (regno == REG_R7)
            {
              group = 1;
              lastdreg = 7;
            }
          else if (regno != REG_P0 + lastpreg - 1)
            return 0;
          else
            lastpreg--;
        }
      else if (group == 1)
        {
          if (regno != REG_R0 + lastdreg - 1)
            return 0;
          else
            lastdreg--;
        }
    }
  first_dreg_to_save = lastdreg;
  first_preg_to_save = lastpreg;
  n_regs_to_save = 8 - first_dreg_to_save + 6 - first_preg_to_save;
  return 1;
}

/* Emit assembly code for one multi-register push described by INSN, with
   operands in OPERANDS.  */

void
output_push_multiple (rtx insn, rtx *operands)
{
  char buf[80];
  int ok;

  /* Validate the insn again, and compute first_[dp]reg_to_save. */
  ok = push_multiple_operation (PATTERN (insn), VOIDmode);
  gcc_assert (ok);

  if (first_dreg_to_save == 8)
    sprintf (buf, "[--sp] = ( p5:%d );\n", first_preg_to_save);
  else if (first_preg_to_save == 6)
    sprintf (buf, "[--sp] = ( r7:%d );\n", first_dreg_to_save);
  else
    sprintf (buf, "[--sp] = ( r7:%d, p5:%d );\n",
             first_dreg_to_save, first_preg_to_save);

  output_asm_insn (buf, operands);
}
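
/* For example (illustrative): with first_dreg_to_save == 5 and
   first_preg_to_save == 3, the insn above prints as
       [--sp] = ( r7:5, p5:3 );
   pushing R7..R5 and P5..P3 in one multi-register store; the pop
   below emits the mirror-image ( r7:5, p5:3 ) = [sp++] form.  */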
 
/* Emit assembly code for one multi-register pop described by INSN, with
   operands in OPERANDS.  */

void
output_pop_multiple (rtx insn, rtx *operands)
{
  char buf[80];
  int ok;

  /* Validate the insn again, and compute first_[dp]reg_to_save. */
  ok = pop_multiple_operation (PATTERN (insn), VOIDmode);
  gcc_assert (ok);

  if (first_dreg_to_save == 8)
    sprintf (buf, "( p5:%d ) = [sp++];\n", first_preg_to_save);
  else if (first_preg_to_save == 6)
    sprintf (buf, "( r7:%d ) = [sp++];\n", first_dreg_to_save);
  else
    sprintf (buf, "( r7:%d, p5:%d ) = [sp++];\n",
             first_dreg_to_save, first_preg_to_save);

  output_asm_insn (buf, operands);
}

/* Adjust DST and SRC by OFFSET bytes, and generate one move in mode MODE.  */

static void
single_move_for_movmem (rtx dst, rtx src, enum machine_mode mode, HOST_WIDE_INT offset)
{
  rtx scratch = gen_reg_rtx (mode);
  rtx srcmem, dstmem;

  srcmem = adjust_address_nv (src, mode, offset);
  dstmem = adjust_address_nv (dst, mode, offset);
  emit_move_insn (scratch, srcmem);
  emit_move_insn (dstmem, scratch);
}

/* Expand a string move operation of COUNT_EXP bytes from SRC to DST, with
   alignment ALIGN_EXP.  Return true if successful, false if we should fall
   back on a different method.  */
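
/* A sketch of a typical expansion (illustrative): for count == 10 and
   align == 4, a rep_movsi hardware loop copies the first 8 bytes
   through the auto-incremented pointer registers, and a single HImode
   move then copies the remaining 2 bytes.  */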
 
bool
bfin_expand_movmem (rtx dst, rtx src, rtx count_exp, rtx align_exp)
{
  rtx srcreg, destreg, countreg;
  HOST_WIDE_INT align = 0;
  unsigned HOST_WIDE_INT count = 0;

  if (GET_CODE (align_exp) == CONST_INT)
    align = INTVAL (align_exp);
  if (GET_CODE (count_exp) == CONST_INT)
    {
      count = INTVAL (count_exp);
#if 0
      if (!TARGET_INLINE_ALL_STRINGOPS && count > 64)
        return false;
#endif
    }

  /* If optimizing for size, only do single copies inline.  */
  if (optimize_size)
    {
      if (count == 2 && align < 2)
        return false;
      if (count == 4 && align < 4)
        return false;
      if (count != 1 && count != 2 && count != 4)
        return false;
    }
  if (align < 2 && count != 1)
    return false;

  destreg = copy_to_mode_reg (Pmode, XEXP (dst, 0));
  if (destreg != XEXP (dst, 0))
    dst = replace_equiv_address_nv (dst, destreg);
  srcreg = copy_to_mode_reg (Pmode, XEXP (src, 0));
  if (srcreg != XEXP (src, 0))
    src = replace_equiv_address_nv (src, srcreg);

  if (count != 0 && align >= 2)
    {
      unsigned HOST_WIDE_INT offset = 0;

      if (align >= 4)
        {
          if ((count & ~3) == 4)
            {
              single_move_for_movmem (dst, src, SImode, offset);
              offset = 4;
            }
          else if (count & ~3)
            {
              HOST_WIDE_INT new_count = ((count >> 2) & 0x3fffffff) - 1;
              countreg = copy_to_mode_reg (Pmode, GEN_INT (new_count));

              emit_insn (gen_rep_movsi (destreg, srcreg, countreg, destreg, srcreg));
              cfun->machine->has_loopreg_clobber = true;
            }
          if (count & 2)
            {
              single_move_for_movmem (dst, src, HImode, offset);
              offset += 2;
            }
        }
      else
        {
          if ((count & ~1) == 2)
            {
              single_move_for_movmem (dst, src, HImode, offset);
              offset = 2;
            }
          else if (count & ~1)
            {
              HOST_WIDE_INT new_count = ((count >> 1) & 0x7fffffff) - 1;
              countreg = copy_to_mode_reg (Pmode, GEN_INT (new_count));

              emit_insn (gen_rep_movhi (destreg, srcreg, countreg, destreg, srcreg));
              cfun->machine->has_loopreg_clobber = true;
            }
        }
      if (count & 1)
        {
          single_move_for_movmem (dst, src, QImode, offset);
        }
      return true;
    }
  return false;
}

/* Compute the alignment for a local variable.
   TYPE is the data type, and ALIGN is the alignment that
   the object would ordinarily have.  The value of this macro is used
   instead of that alignment to align the object.  */
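
/* For instance (illustrative), a local "char buf[12]" occupies 96 bits,
   which exceeds the 8-bit threshold below, so it is given 32-bit
   alignment.  */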
 
int
bfin_local_alignment (tree type, int align)
{
  /* Increasing alignment for (relatively) big types allows the builtin
     memcpy to use 32-bit loads and stores.  */
  if (TYPE_SIZE (type)
      && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
      && (TREE_INT_CST_LOW (TYPE_SIZE (type)) > 8
          || TREE_INT_CST_HIGH (TYPE_SIZE (type))) && align < 32)
    return 32;
  return align;
}

/* Implement TARGET_SCHED_ISSUE_RATE.  */

static int
bfin_issue_rate (void)
{
  return 3;
}

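/* Adjust the cost of a scheduling dependence between INSN and DEP_INSN.
   Illustrative example of the penalty below: in "P0 = R1; R2 = [P0];"
   the load depends on a move into an address register, so its cost is
   increased by 4 (by 3 if the P register was itself loaded from
   memory).  */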
static int
bfin_adjust_cost (rtx insn, rtx link, rtx dep_insn, int cost)
{
  enum attr_type insn_type, dep_insn_type;
  int dep_insn_code_number;

  /* Anti and output dependencies have zero cost.  */
  if (REG_NOTE_KIND (link) != 0)
    return 0;

  dep_insn_code_number = recog_memoized (dep_insn);

  /* If we can't recognize the insns, we can't really do anything.  */
  if (dep_insn_code_number < 0 || recog_memoized (insn) < 0)
    return cost;

  insn_type = get_attr_type (insn);
  dep_insn_type = get_attr_type (dep_insn);

  if (dep_insn_type == TYPE_MOVE || dep_insn_type == TYPE_MCLD)
    {
      rtx pat = PATTERN (dep_insn);
      rtx dest, src;

      if (GET_CODE (pat) == PARALLEL)
        pat = XVECEXP (pat, 0, 0);
      dest = SET_DEST (pat);
      src = SET_SRC (pat);
      if (! ADDRESS_REGNO_P (REGNO (dest))
          || ! (MEM_P (src) || D_REGNO_P (REGNO (src))))
        return cost;
      return cost + (dep_insn_type == TYPE_MOVE ? 4 : 3);
    }

  return cost;
}
 
/* This function acts like NEXT_INSN, but is aware of three-insn bundles and
   skips all subsequent parallel instructions if INSN is the start of such
   a group.  */
static rtx
find_next_insn_start (rtx insn)
{
  if (GET_MODE (insn) == SImode)
    {
      while (GET_MODE (insn) != QImode)
        insn = NEXT_INSN (insn);
    }
  return NEXT_INSN (insn);
}

/* This function acts like PREV_INSN, but is aware of three-insn bundles:
   if the previous instruction ends such a group, it skips back over all
   of the group's members and returns its first instruction.  */
static rtx
find_prev_insn_start (rtx insn)
{
  insn = PREV_INSN (insn);
  gcc_assert (GET_MODE (insn) != SImode);
  if (GET_MODE (insn) == QImode)
    {
      while (GET_MODE (PREV_INSN (insn)) == SImode)
        insn = PREV_INSN (insn);
    }
  return insn;
}

/* Increment the counter for the number of loop instructions in the
   current function.  */

void
bfin_hardware_loop (void)
{
  cfun->machine->has_hardware_loops++;
}

/* Maximum loop nesting depth.  */
#define MAX_LOOP_DEPTH 2

/* Maximum size of a loop.  */
#define MAX_LOOP_LENGTH 2042

/* Maximum distance of the LSETUP instruction from the loop start.  */
#define MAX_LSETUP_DISTANCE 30
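
/* A hardware loop is created with an LSETUP instruction, of the rough
   form (illustrative)
       LSETUP (start_label, end_label) LC0 = P1;
   which loads the loop top, bottom, and count registers in one insn;
   the limits above come from its encoding and placement constraints.  */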
 
/* We need to keep a vector of loops.  */
typedef struct loop_info *loop_info;
DEF_VEC_P (loop_info);
DEF_VEC_ALLOC_P (loop_info,heap);

/* Information about a loop we have found (or are in the process of
   finding).  */
struct GTY (()) loop_info
{
  /* loop number, for dumps */
  int loop_no;

  /* All edges that jump into and out of the loop.  */
  VEC(edge,gc) *incoming;

  /* We can handle two cases: all incoming edges have the same destination
     block, or all incoming edges have the same source block.  These two
     members are set to the common source or destination we found, or NULL
     if different blocks were found.  If both are NULL the loop can't be
     optimized.  */
  basic_block incoming_src;
  basic_block incoming_dest;

  /* First block in the loop.  This is the one branched to by the loop_end
     insn.  */
  basic_block head;

  /* Last block in the loop (the one with the loop_end insn).  */
  basic_block tail;

  /* The successor block of the loop.  This is the one the loop_end insn
     falls into.  */
  basic_block successor;

  /* The last instruction in the tail.  */
  rtx last_insn;

  /* The loop_end insn.  */
  rtx loop_end;

  /* The iteration register.  */
  rtx iter_reg;

  /* The new label placed at the beginning of the loop. */
  rtx start_label;

  /* The new label placed at the end of the loop. */
  rtx end_label;

  /* The length of the loop.  */
  int length;

  /* The nesting depth of the loop.  */
  int depth;

  /* Nonzero if we can't optimize this loop.  */
  int bad;

  /* True if we have visited this loop.  */
  int visited;

  /* True if this loop body clobbers any of LC0, LT0, or LB0.  */
  int clobber_loop0;

  /* True if this loop body clobbers any of LC1, LT1, or LB1.  */
  int clobber_loop1;

  /* Next loop in the graph. */
  struct loop_info *next;

  /* Immediate outer loop of this loop.  */
  struct loop_info *outer;

  /* Vector of blocks only within the loop, including those within
     inner loops.  */
  VEC (basic_block,heap) *blocks;

  /* Same information in a bitmap.  */
  bitmap block_bitmap;

  /* Vector of inner loops within this loop.  */
  VEC (loop_info,heap) *loops;
};

static void
bfin_dump_loops (loop_info loops)
{
  loop_info loop;

  for (loop = loops; loop; loop = loop->next)
    {
      loop_info i;
      basic_block b;
      unsigned ix;

      fprintf (dump_file, ";; loop %d: ", loop->loop_no);
      if (loop->bad)
        fprintf (dump_file, "(bad) ");
      fprintf (dump_file, "{head:%d, depth:%d}", loop->head->index, loop->depth);

      fprintf (dump_file, " blocks: [ ");
      for (ix = 0; VEC_iterate (basic_block, loop->blocks, ix, b); ix++)
        fprintf (dump_file, "%d ", b->index);
      fprintf (dump_file, "] ");

      fprintf (dump_file, " inner loops: [ ");
      for (ix = 0; VEC_iterate (loop_info, loop->loops, ix, i); ix++)
        fprintf (dump_file, "%d ", i->loop_no);
      fprintf (dump_file, "]\n");
    }
  fprintf (dump_file, "\n");
}

/* Scan the blocks of LOOP (and its inferiors) looking for basic block
   BB.  Return true if we find it.  */

static bool
bfin_bb_in_loop (loop_info loop, basic_block bb)
{
  return bitmap_bit_p (loop->block_bitmap, bb->index);
}

/* Scan the blocks of LOOP (and its inferiors) looking for uses of
   REG.  Return true if we find any.  Don't count the loop's loop_end
   insn if it matches LOOP_END.  */

static bool
bfin_scan_loop (loop_info loop, rtx reg, rtx loop_end)
{
  unsigned ix;
  basic_block bb;

  for (ix = 0; VEC_iterate (basic_block, loop->blocks, ix, bb); ix++)
    {
      rtx insn;

      for (insn = BB_HEAD (bb);
           insn != NEXT_INSN (BB_END (bb));
           insn = NEXT_INSN (insn))
        {
          if (!INSN_P (insn))
            continue;
          if (insn == loop_end)
            continue;
          if (reg_mentioned_p (reg, PATTERN (insn)))
            return true;
        }
    }
  return false;
}

/* Estimate the length of INSN conservatively.  */

static int
length_for_loop (rtx insn)
{
  int length = 0;
  if (JUMP_P (insn) && any_condjump_p (insn) && !optimize_size)
    {
      if (ENABLE_WA_SPECULATIVE_SYNCS)
        length = 8;
      else if (ENABLE_WA_SPECULATIVE_LOADS)
        length = 6;
    }
  else if (LABEL_P (insn))
    {
      if (ENABLE_WA_SPECULATIVE_SYNCS)
        length = 4;
    }

  if (NONDEBUG_INSN_P (insn))
    length += get_attr_length (insn);

  return length;
}

/* Optimize LOOP.  */

static void
bfin_optimize_loop (loop_info loop)
{
  basic_block bb;
  loop_info inner;
  rtx insn, last_insn;
  rtx loop_init, start_label, end_label;
  rtx reg_lc0, reg_lc1, reg_lt0, reg_lt1, reg_lb0, reg_lb1;
  rtx iter_reg, scratchreg, scratch_init, scratch_init_insn;
  rtx lc_reg, lt_reg, lb_reg;
  rtx seq, seq_end;
  int length;
  unsigned ix;
  int inner_depth = 0;

  if (loop->visited)
    return;

  loop->visited = 1;

  if (loop->bad)
    {
      if (dump_file)
        fprintf (dump_file, ";; loop %d bad when found\n", loop->loop_no);
      goto bad_loop;
    }

  /* Every loop contains in its list of inner loops every loop nested inside
     it, even if there are intermediate loops.  This works because we're doing
     a depth-first search here and never visit a loop more than once.  */
  for (ix = 0; VEC_iterate (loop_info, loop->loops, ix, inner); ix++)
    {
      bfin_optimize_loop (inner);

      if (!inner->bad && inner_depth < inner->depth)
        {
          inner_depth = inner->depth;

          loop->clobber_loop0 |= inner->clobber_loop0;
          loop->clobber_loop1 |= inner->clobber_loop1;
        }
    }

  loop->depth = inner_depth + 1;
  if (loop->depth > MAX_LOOP_DEPTH)
    {
      if (dump_file)
        fprintf (dump_file, ";; loop %d too deep\n", loop->loop_no);
      goto bad_loop;
    }

  /* Get the loop iteration register.  */
  iter_reg = loop->iter_reg;

  if (!REG_P (iter_reg))
    {
      if (dump_file)
        fprintf (dump_file, ";; loop %d iteration count not in a register\n",
                 loop->loop_no);
      goto bad_loop;
    }
  scratchreg = NULL_RTX;
  scratch_init = iter_reg;
  scratch_init_insn = NULL_RTX;
  if (!PREG_P (iter_reg) && loop->incoming_src)
    {
      basic_block bb_in = loop->incoming_src;
      int i;
      for (i = REG_P0; i <= REG_P5; i++)
        if ((df_regs_ever_live_p (i)
             || (funkind (TREE_TYPE (current_function_decl)) == SUBROUTINE
                 && call_used_regs[i]))
            && !REGNO_REG_SET_P (df_get_live_out (bb_in), i))
          {
            scratchreg = gen_rtx_REG (SImode, i);
            break;
          }
      for (insn = BB_END (bb_in); insn != BB_HEAD (bb_in);
           insn = PREV_INSN (insn))
        {
          rtx set;
          if (NOTE_P (insn) || BARRIER_P (insn))
            continue;
          set = single_set (insn);
          if (set && rtx_equal_p (SET_DEST (set), iter_reg))
            {
              if (CONSTANT_P (SET_SRC (set)))
                {
                  scratch_init = SET_SRC (set);
                  scratch_init_insn = insn;
                }
              break;
            }
          else if (reg_mentioned_p (iter_reg, PATTERN (insn)))
            break;
        }
    }

  if (loop->incoming_src)
    {
      /* Make sure the predecessor is before the loop start label, as required by
         the LSETUP instruction.  */
      length = 0;
      insn = BB_END (loop->incoming_src);
      /* If we have to insert the LSETUP before a jump, count that jump in the
         length.  */
      if (VEC_length (edge, loop->incoming) > 1
          || !(VEC_last (edge, loop->incoming)->flags & EDGE_FALLTHRU))
        {
          gcc_assert (JUMP_P (insn));
          insn = PREV_INSN (insn);
        }

      for (; insn && insn != loop->start_label; insn = NEXT_INSN (insn))
        length += length_for_loop (insn);

      if (!insn)
        {
          if (dump_file)
            fprintf (dump_file, ";; loop %d lsetup not before loop_start\n",
                     loop->loop_no);
          goto bad_loop;
        }

      /* Account for the pop of a scratch register where necessary.  */
      if (!PREG_P (iter_reg) && scratchreg == NULL_RTX
          && ENABLE_WA_LOAD_LCREGS)
        length += 2;

      if (length > MAX_LSETUP_DISTANCE)
        {
          if (dump_file)
            fprintf (dump_file, ";; loop %d lsetup too far away\n", loop->loop_no);
          goto bad_loop;
        }
    }

  /* Check if start_label appears before loop_end and calculate the
     offset between them.  We calculate the length of instructions
     conservatively.  */
  length = 0;
  for (insn = loop->start_label;
       insn && insn != loop->loop_end;
       insn = NEXT_INSN (insn))
    length += length_for_loop (insn);

  if (!insn)
    {
      if (dump_file)
        fprintf (dump_file, ";; loop %d start_label not before loop_end\n",
                 loop->loop_no);
      goto bad_loop;
    }

  loop->length = length;
  if (loop->length > MAX_LOOP_LENGTH)
    {
      if (dump_file)
        fprintf (dump_file, ";; loop %d too long\n", loop->loop_no);
      goto bad_loop;
    }

  /* Scan all the blocks to make sure they don't use iter_reg.  */
  if (bfin_scan_loop (loop, iter_reg, loop->loop_end))
    {
      if (dump_file)
        fprintf (dump_file, ";; loop %d uses iterator\n", loop->loop_no);
      goto bad_loop;
    }

  /* Scan all the insns to see if the loop body clobbers
     any hardware loop registers. */

  reg_lc0 = gen_rtx_REG (SImode, REG_LC0);
  reg_lc1 = gen_rtx_REG (SImode, REG_LC1);
  reg_lt0 = gen_rtx_REG (SImode, REG_LT0);
  reg_lt1 = gen_rtx_REG (SImode, REG_LT1);
  reg_lb0 = gen_rtx_REG (SImode, REG_LB0);
  reg_lb1 = gen_rtx_REG (SImode, REG_LB1);

  for (ix = 0; VEC_iterate (basic_block, loop->blocks, ix, bb); ix++)
    {
      rtx insn;

      for (insn = BB_HEAD (bb);
           insn != NEXT_INSN (BB_END (bb));
           insn = NEXT_INSN (insn))
        {
          if (!INSN_P (insn))
            continue;

          if (reg_set_p (reg_lc0, insn)
              || reg_set_p (reg_lt0, insn)
              || reg_set_p (reg_lb0, insn))
            loop->clobber_loop0 = 1;

          if (reg_set_p (reg_lc1, insn)
              || reg_set_p (reg_lt1, insn)
              || reg_set_p (reg_lb1, insn))
            loop->clobber_loop1 |= 1;
        }
    }

  if ((loop->clobber_loop0 && loop->clobber_loop1)
      || (loop->depth == MAX_LOOP_DEPTH && loop->clobber_loop0))
    {
      loop->depth = MAX_LOOP_DEPTH + 1;
      if (dump_file)
        fprintf (dump_file, ";; loop %d no loop reg available\n",
                 loop->loop_no);
      goto bad_loop;
    }

  /* There should be an instruction before the loop_end instruction
     in the same basic block.  And the instruction must not be
     - JUMP
     - CONDITIONAL BRANCH
     - CALL
     - CSYNC
     - SSYNC
     - Returns (RTS, RTN, etc.)  */

  bb = loop->tail;
  last_insn = find_prev_insn_start (loop->loop_end);

  while (1)
    {
      for (; last_insn != BB_HEAD (bb);
           last_insn = find_prev_insn_start (last_insn))
        if (NONDEBUG_INSN_P (last_insn))
          break;

      if (last_insn != BB_HEAD (bb))
        break;

      if (single_pred_p (bb)
          && single_pred_edge (bb)->flags & EDGE_FALLTHRU
          && single_pred (bb) != ENTRY_BLOCK_PTR)
        {
          bb = single_pred (bb);
          last_insn = BB_END (bb);
          continue;
        }
      else
        {
          last_insn = NULL_RTX;
          break;
        }
    }

  if (!last_insn)
    {
      if (dump_file)
        fprintf (dump_file, ";; loop %d has no last instruction\n",
                 loop->loop_no);
      goto bad_loop;
    }

  if (JUMP_P (last_insn) && !any_condjump_p (last_insn))
    {
      if (dump_file)
        fprintf (dump_file, ";; loop %d has bad last instruction\n",
                 loop->loop_no);
      goto bad_loop;
    }
  /* In all other cases, try to replace a bad last insn with a nop.  */
  else if (JUMP_P (last_insn)
           || CALL_P (last_insn)
           || get_attr_type (last_insn) == TYPE_SYNC
           || get_attr_type (last_insn) == TYPE_CALL
           || get_attr_seq_insns (last_insn) == SEQ_INSNS_MULTI
           || recog_memoized (last_insn) == CODE_FOR_return_internal
           || GET_CODE (PATTERN (last_insn)) == ASM_INPUT
           || asm_noperands (PATTERN (last_insn)) >= 0)
    {
      if (loop->length + 2 > MAX_LOOP_LENGTH)
        {
          if (dump_file)
            fprintf (dump_file, ";; loop %d too long\n", loop->loop_no);
          goto bad_loop;
        }
      if (dump_file)
        fprintf (dump_file, ";; loop %d has bad last insn; replace with nop\n",
                 loop->loop_no);

      last_insn = emit_insn_after (gen_forced_nop (), last_insn);
    }

  loop->last_insn = last_insn;

  /* The loop is good for replacement.  */
  start_label = loop->start_label;
  end_label = gen_label_rtx ();
  iter_reg = loop->iter_reg;

  if (loop->depth == 1 && !loop->clobber_loop1)
    {
      lc_reg = reg_lc1;
      lt_reg = reg_lt1;
      lb_reg = reg_lb1;
      loop->clobber_loop1 = 1;
    }
  else
    {
      lc_reg = reg_lc0;
      lt_reg = reg_lt0;
      lb_reg = reg_lb0;
      loop->clobber_loop0 = 1;
    }

  loop->end_label = end_label;

  /* Create a sequence containing the loop setup.  */
  start_sequence ();

  /* LSETUP only accepts P registers.  If we have one, we can use it,
     otherwise there are several ways of working around the problem.
     If we're not affected by anomaly 312, we can load the LC register
     from any iteration register, and use LSETUP without initialization.
     If we've found a P scratch register that's not live here, we can
     instead copy the iter_reg into that and use an initializing LSETUP.
     If all else fails, push and pop P0 and use it as a scratch.  */
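  /* Sketch of the final fallback (illustrative):
         [--SP] = P0;  P0 = <iteration count>;
         LSETUP (lt, lb) LC0 = P0;  P0 = [SP++];  */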
  if (P_REGNO_P (REGNO (iter_reg)))
    {
      loop_init = gen_lsetup_with_autoinit (lt_reg, start_label,
                                            lb_reg, end_label,
                                            lc_reg, iter_reg);
      seq_end = emit_insn (loop_init);
    }
  else if (!ENABLE_WA_LOAD_LCREGS && DPREG_P (iter_reg))
    {
      emit_insn (gen_movsi (lc_reg, iter_reg));
      loop_init = gen_lsetup_without_autoinit (lt_reg, start_label,
                                               lb_reg, end_label,
                                               lc_reg);
      seq_end = emit_insn (loop_init);
    }
  else if (scratchreg != NULL_RTX)
    {
      emit_insn (gen_movsi (scratchreg, scratch_init));
      loop_init = gen_lsetup_with_autoinit (lt_reg, start_label,
                                            lb_reg, end_label,
                                            lc_reg, scratchreg);
      seq_end = emit_insn (loop_init);
      if (scratch_init_insn != NULL_RTX)
        delete_insn (scratch_init_insn);
    }
  else
    {
      rtx p0reg = gen_rtx_REG (SImode, REG_P0);
      rtx push = gen_frame_mem (SImode,
                                gen_rtx_PRE_DEC (SImode, stack_pointer_rtx));
      rtx pop = gen_frame_mem (SImode,
                               gen_rtx_POST_INC (SImode, stack_pointer_rtx));
      emit_insn (gen_movsi (push, p0reg));
      emit_insn (gen_movsi (p0reg, scratch_init));
      loop_init = gen_lsetup_with_autoinit (lt_reg, start_label,
                                            lb_reg, end_label,
                                            lc_reg, p0reg);
      emit_insn (loop_init);
      seq_end = emit_insn (gen_movsi (p0reg, pop));
      if (scratch_init_insn != NULL_RTX)
        delete_insn (scratch_init_insn);
    }

  if (dump_file)
    {
      fprintf (dump_file, ";; replacing loop %d initializer with\n",
               loop->loop_no);
      print_rtl_single (dump_file, loop_init);
      fprintf (dump_file, ";; replacing loop %d terminator with\n",
               loop->loop_no);
      print_rtl_single (dump_file, loop->loop_end);
    }

  /* If the loop isn't entered at the top, also create a jump to the entry
     point.  */
  if (!loop->incoming_src && loop->head != loop->incoming_dest)
    {
      rtx label = BB_HEAD (loop->incoming_dest);
      /* If we're jumping to the final basic block in the loop, and there's
         only one cheap instruction before the end (typically an increment of
         an induction variable), we can just emit a copy here instead of a
         jump.  */
      if (loop->incoming_dest == loop->tail
          && next_real_insn (label) == last_insn
          && asm_noperands (last_insn) < 0
          && GET_CODE (PATTERN (last_insn)) == SET)
        {
          seq_end = emit_insn (copy_rtx (PATTERN (last_insn)));
        }
      else
        {
          emit_jump_insn (gen_jump (label));
          seq_end = emit_barrier ();
        }
    }

  seq = get_insns ();
  end_sequence ();

  if (loop->incoming_src)
    {
      rtx prev = BB_END (loop->incoming_src);
      if (VEC_length (edge, loop->incoming) > 1
          || !(VEC_last (edge, loop->incoming)->flags & EDGE_FALLTHRU))
        {
          gcc_assert (JUMP_P (prev));
          prev = PREV_INSN (prev);
        }
      emit_insn_after (seq, prev);
    }
  else
    {
      basic_block new_bb;
      edge e;
      edge_iterator ei;

#ifdef ENABLE_CHECKING
      if (loop->head != loop->incoming_dest)
        {
          /* We aren't entering the loop at the top.  Since we've established
             that the loop is entered only at one point, this means there
             can't be fallthru edges into the head.  Any such fallthru edges
             would become invalid when we insert the new block, so verify
             that this does not in fact happen.  */
          FOR_EACH_EDGE (e, ei, loop->head->preds)
            gcc_assert (!(e->flags & EDGE_FALLTHRU));
        }
#endif

      emit_insn_before (seq, BB_HEAD (loop->head));
      seq = emit_label_before (gen_label_rtx (), seq);

      new_bb = create_basic_block (seq, seq_end, loop->head->prev_bb);
      FOR_EACH_EDGE (e, ei, loop->incoming)
        {
          if (!(e->flags & EDGE_FALLTHRU)
              || e->dest != loop->head)
            redirect_edge_and_branch_force (e, new_bb);
          else
            redirect_edge_succ (e, new_bb);
        }
      e = make_edge (new_bb, loop->head, 0);
    }

  delete_insn (loop->loop_end);
  /* Insert the loop end label before the last instruction of the loop.  */
  emit_label_before (loop->end_label, loop->last_insn);

  return;

 bad_loop:

  if (dump_file)
    fprintf (dump_file, ";; loop %d is bad\n", loop->loop_no);

  loop->bad = 1;

  if (DPREG_P (loop->iter_reg))
    {
      /* If loop->iter_reg is a DREG or PREG, we can split it here
         without a scratch register.  */
      rtx insn, test;

      emit_insn_before (gen_addsi3 (loop->iter_reg,
                                    loop->iter_reg,
                                    constm1_rtx),
                        loop->loop_end);

      test = gen_rtx_NE (VOIDmode, loop->iter_reg, const0_rtx);
      insn = emit_jump_insn_before (gen_cbranchsi4 (test,
                                                    loop->iter_reg, const0_rtx,
                                                    loop->start_label),
                                    loop->loop_end);

      JUMP_LABEL (insn) = loop->start_label;
      LABEL_NUSES (loop->start_label)++;
      delete_insn (loop->loop_end);
    }
}

/* Called from bfin_reorg_loops when a potential loop end is found.  LOOP is
   a newly set up structure describing the loop; it is this function's
   responsibility to fill most of it.  TAIL_BB and TAIL_INSN point to the
   loop_end insn and its enclosing basic block.  */

static void
bfin_discover_loop (loop_info loop, basic_block tail_bb, rtx tail_insn)
{
  unsigned dwork = 0;
  basic_block bb;
  VEC (basic_block,heap) *works = VEC_alloc (basic_block,heap,20);

  loop->tail = tail_bb;
  loop->head = BRANCH_EDGE (tail_bb)->dest;
  loop->successor = FALLTHRU_EDGE (tail_bb)->dest;
  loop->loop_end = tail_insn;
  loop->last_insn = NULL_RTX;
  loop->iter_reg = SET_DEST (XVECEXP (PATTERN (tail_insn), 0, 1));
  loop->depth = loop->length = 0;
  loop->visited = 0;
  loop->clobber_loop0 = loop->clobber_loop1 = 0;
  loop->outer = NULL;
  loop->loops = NULL;
  loop->incoming = VEC_alloc (edge, gc, 2);
  loop->start_label = XEXP (XEXP (SET_SRC (XVECEXP (PATTERN (tail_insn), 0, 0)), 1), 0);
  loop->end_label = NULL_RTX;
  loop->bad = 0;

  VEC_safe_push (basic_block, heap, works, loop->head);

  while (VEC_iterate (basic_block, works, dwork++, bb))
    {
      edge e;
      edge_iterator ei;
      if (bb == EXIT_BLOCK_PTR)
        {
          /* We've reached the exit block.  The loop must be bad. */
          if (dump_file)
            fprintf (dump_file,
                     ";; Loop is bad - reached exit block while scanning\n");
          loop->bad = 1;
          break;
        }

      if (bitmap_bit_p (loop->block_bitmap, bb->index))
        continue;

      /* We've not seen this block before.  Add it to the loop's
         list and then add each successor to the work list.  */

      VEC_safe_push (basic_block, heap, loop->blocks, bb);
      bitmap_set_bit (loop->block_bitmap, bb->index);

      if (bb != tail_bb)
        {
          FOR_EACH_EDGE (e, ei, bb->succs)
            {
              basic_block succ = EDGE_SUCC (bb, ei.index)->dest;
              if (!REGNO_REG_SET_P (df_get_live_in (succ),
                                    REGNO (loop->iter_reg)))
                continue;
              if (!VEC_space (basic_block, works, 1))
                {
                  if (dwork)
                    {
                      VEC_block_remove (basic_block, works, 0, dwork);
                      dwork = 0;
                    }
                  else
                    VEC_reserve (basic_block, heap, works, 1);
                }
              VEC_quick_push (basic_block, works, succ);
            }
        }
    }

  /* Find the predecessor, and make sure nothing else jumps into this loop.  */
  if (!loop->bad)
    {
      int pass, retry;
      for (dwork = 0; VEC_iterate (basic_block, loop->blocks, dwork, bb); dwork++)
        {
          edge e;
          edge_iterator ei;
          FOR_EACH_EDGE (e, ei, bb->preds)
            {
              basic_block pred = e->src;

              if (!bfin_bb_in_loop (loop, pred))
                {
                  if (dump_file)
                    fprintf (dump_file, ";; Loop %d: incoming edge %d -> %d\n",
                             loop->loop_no, pred->index,
                             e->dest->index);
                  VEC_safe_push (edge, gc, loop->incoming, e);
                }
            }
        }

      for (pass = 0, retry = 1; retry && pass < 2; pass++)
        {
          edge e;
          edge_iterator ei;
          bool first = true;
          retry = 0;

          FOR_EACH_EDGE (e, ei, loop->incoming)
            {
              if (first)
                {
                  loop->incoming_src = e->src;
                  loop->incoming_dest = e->dest;
                  first = false;
                }
              else
                {
                  if (e->dest != loop->incoming_dest)
                    loop->incoming_dest = NULL;
                  if (e->src != loop->incoming_src)
                    loop->incoming_src = NULL;
                }
              if (loop->incoming_src == NULL && loop->incoming_dest == NULL)
                {
                  if (pass == 0)
                    {
                      if (dump_file)
                        fprintf (dump_file,
                                 ";; retrying loop %d with forwarder blocks\n",
                                 loop->loop_no);
                      retry = 1;
                      break;
                    }
                  loop->bad = 1;
                  if (dump_file)
                    fprintf (dump_file,
                             ";; can't find suitable entry for loop %d\n",
                             loop->loop_no);
                  goto out;
                }
            }
          if (retry)
            {
              retry = 0;
              FOR_EACH_EDGE (e, ei, loop->incoming)
                {
                  if (forwarder_block_p (e->src))
                    {
                      edge e2;
                      edge_iterator ei2;

                      if (dump_file)
                        fprintf (dump_file,
                                 ";; Adding forwarder block %d to loop %d and retrying\n",
                                 e->src->index, loop->loop_no);
                      VEC_safe_push (basic_block, heap, loop->blocks, e->src);
                      bitmap_set_bit (loop->block_bitmap, e->src->index);
                      FOR_EACH_EDGE (e2, ei2, e->src->preds)
                        VEC_safe_push (edge, gc, loop->incoming, e2);
                      VEC_unordered_remove (edge, loop->incoming, ei.index);
                      retry = 1;
                      break;
                    }
                }
              if (!retry)
                {
                  if (dump_file)
                    fprintf (dump_file, ";; No forwarder blocks found\n");
                  loop->bad = 1;
                }
            }
        }
    }

 out:
  VEC_free (basic_block, heap, works);
}
 
/* Analyze the structure of the loops in the current function.  Use STACK
   for bitmap allocations.  Returns all the valid candidates for hardware
   loops found in this function.  */
static loop_info
bfin_discover_loops (bitmap_obstack *stack, FILE *dump_file)
{
  loop_info loops = NULL;
  loop_info loop;
  basic_block bb;
  bitmap tmp_bitmap;
  int nloops = 0;

  /* Find all the possible loop tails.  This means searching for every
     loop_end instruction.  For each one found, create a loop_info
     structure and add the head block to the work list. */
  FOR_EACH_BB (bb)
    {
      rtx tail = BB_END (bb);

      while (GET_CODE (tail) == NOTE)
        tail = PREV_INSN (tail);

      bb->aux = NULL;

      if (INSN_P (tail) && recog_memoized (tail) == CODE_FOR_loop_end)
        {
          rtx insn;
          /* A possible loop end */

          /* There's a degenerate case we can handle - an empty loop consisting
             of only a back branch.  Handle that by deleting the branch.  */
          insn = BB_HEAD (BRANCH_EDGE (bb)->dest);
          if (next_real_insn (insn) == tail)
            {
              if (dump_file)
                {
                  fprintf (dump_file, ";; degenerate loop ending at\n");
                  print_rtl_single (dump_file, tail);
                }
              delete_insn_and_edges (tail);
              continue;
            }

          loop = XNEW (struct loop_info);
          loop->next = loops;
          loops = loop;
          loop->loop_no = nloops++;
          loop->blocks = VEC_alloc (basic_block, heap, 20);
          loop->block_bitmap = BITMAP_ALLOC (stack);
          bb->aux = loop;

          if (dump_file)
            {
              fprintf (dump_file, ";; potential loop %d ending at\n",
                       loop->loop_no);
              print_rtl_single (dump_file, tail);
            }

          bfin_discover_loop (loop, bb, tail);
        }
    }

  tmp_bitmap = BITMAP_ALLOC (stack);
  /* Compute loop nestings.  */
  for (loop = loops; loop; loop = loop->next)
    {
      loop_info other;
      if (loop->bad)
        continue;

      for (other = loop->next; other; other = other->next)
        {
          if (other->bad)
            continue;

          bitmap_and (tmp_bitmap, other->block_bitmap, loop->block_bitmap);
          if (bitmap_empty_p (tmp_bitmap))
            continue;
          if (bitmap_equal_p (tmp_bitmap, other->block_bitmap))
            {
              other->outer = loop;
              VEC_safe_push (loop_info, heap, loop->loops, other);
            }
          else if (bitmap_equal_p (tmp_bitmap, loop->block_bitmap))
            {
              loop->outer = other;
              VEC_safe_push (loop_info, heap, other->loops, loop);
            }
          else
            {
              if (dump_file)
                fprintf (dump_file,
                         ";; can't find suitable nesting for loops %d and %d\n",
                         loop->loop_no, other->loop_no);
              loop->bad = other->bad = 1;
            }
        }
    }
  BITMAP_FREE (tmp_bitmap);

  return loops;
}

/* Free up the loop structures in LOOPS.  */
static void
free_loops (loop_info loops)
{
  while (loops)
    {
      loop_info loop = loops;
      loops = loop->next;
      VEC_free (loop_info, heap, loop->loops);
      VEC_free (basic_block, heap, loop->blocks);
      BITMAP_FREE (loop->block_bitmap);
      XDELETE (loop);
    }
}

#define BB_AUX_INDEX(BB) ((unsigned)(BB)->aux)

/* The taken-branch edge from the loop end can actually go forward.  Since the
   Blackfin's LSETUP instruction requires that the loop end be after the loop
   start, try to reorder a loop's basic blocks when we find such a case.  */
static void
bfin_reorder_loops (loop_info loops, FILE *dump_file)
{
  basic_block bb;
  loop_info loop;

  FOR_EACH_BB (bb)
    bb->aux = NULL;
  cfg_layout_initialize (0);

  for (loop = loops; loop; loop = loop->next)
    {
      unsigned index;
      basic_block bb;
      edge e;
      edge_iterator ei;

      if (loop->bad)
        continue;

      /* Recreate an index for basic blocks that represents their order.  */
      for (bb = ENTRY_BLOCK_PTR->next_bb, index = 0;
           bb != EXIT_BLOCK_PTR;
           bb = bb->next_bb, index++)
        bb->aux = (PTR) index;

      if (BB_AUX_INDEX (loop->head) < BB_AUX_INDEX (loop->tail))
        continue;

      FOR_EACH_EDGE (e, ei, loop->head->succs)
        {
          if (bitmap_bit_p (loop->block_bitmap, e->dest->index)
              && BB_AUX_INDEX (e->dest) < BB_AUX_INDEX (loop->tail))
            {
              basic_block start_bb = e->dest;
              basic_block start_prev_bb = start_bb->prev_bb;

              if (dump_file)
                fprintf (dump_file, ";; Moving block %d before block %d\n",
                         loop->head->index, start_bb->index);
              loop->head->prev_bb->next_bb = loop->head->next_bb;
              loop->head->next_bb->prev_bb = loop->head->prev_bb;

              loop->head->prev_bb = start_prev_bb;
              loop->head->next_bb = start_bb;
              start_prev_bb->next_bb = start_bb->prev_bb = loop->head;
              break;
            }
        }
      loops = loops->next;
    }

  FOR_EACH_BB (bb)
    {
      if (bb->next_bb != EXIT_BLOCK_PTR)
        bb->aux = bb->next_bb;
      else
        bb->aux = NULL;
    }
  cfg_layout_finalize ();
  df_analyze ();
}

/* Run from machine_dependent_reorg, this pass looks for doloop_end insns
   and tries to rewrite the RTL of these loops so that proper Blackfin
   hardware loops are generated.  */

static void
bfin_reorg_loops (FILE *dump_file)
{
  loop_info loops = NULL;
  loop_info loop;
  basic_block bb;
  bitmap_obstack stack;

  bitmap_obstack_initialize (&stack);

  if (dump_file)
    fprintf (dump_file, ";; Find loops, first pass\n\n");

  loops = bfin_discover_loops (&stack, dump_file);

  if (dump_file)
    bfin_dump_loops (loops);

  bfin_reorder_loops (loops, dump_file);
  free_loops (loops);

  if (dump_file)
    fprintf (dump_file, ";; Find loops, second pass\n\n");

  loops = bfin_discover_loops (&stack, dump_file);
  if (dump_file)
    {
      fprintf (dump_file, ";; All loops found:\n\n");
      bfin_dump_loops (loops);
    }

  /* Now apply the optimizations.  */
  for (loop = loops; loop; loop = loop->next)
    bfin_optimize_loop (loop);

  if (dump_file)
    {
      fprintf (dump_file, ";; After hardware loops optimization:\n\n");
      bfin_dump_loops (loops);
    }

  free_loops (loops);

  if (dump_file)
    print_rtl (dump_file, get_insns ());

  FOR_EACH_BB (bb)
    bb->aux = NULL;

  splitting_loops = 1;
  FOR_EACH_BB (bb)
    {
      rtx insn = BB_END (bb);
      if (!JUMP_P (insn))
        continue;

      try_split (PATTERN (insn), insn, 1);
    }
  splitting_loops = 0;
}

/* Possibly generate a SEQUENCE out of three insns found in SLOT.
   Returns true if we modified the insn chain, false otherwise.  */
static bool
gen_one_bundle (rtx slot[3])
{
  gcc_assert (slot[1] != NULL_RTX);

  /* Don't add extra NOPs if optimizing for size.  */
  if (optimize_size
      && (slot[0] == NULL_RTX || slot[2] == NULL_RTX))
    return false;

  /* Verify that we really can do the multi-issue.  */
  if (slot[0])
    {
      rtx t = NEXT_INSN (slot[0]);
      while (t != slot[1])
        {
          if (GET_CODE (t) != NOTE
              || NOTE_KIND (t) != NOTE_INSN_DELETED)
            return false;
          t = NEXT_INSN (t);
        }
    }
  if (slot[2])
    {
      rtx t = NEXT_INSN (slot[1]);
      while (t != slot[2])
        {
          if (GET_CODE (t) != NOTE
              || NOTE_KIND (t) != NOTE_INSN_DELETED)
            return false;
          t = NEXT_INSN (t);
        }
    }

  if (slot[0] == NULL_RTX)
    {
      slot[0] = emit_insn_before (gen_mnop (), slot[1]);
      df_insn_rescan (slot[0]);
    }
  if (slot[2] == NULL_RTX)
    {
      slot[2] = emit_insn_after (gen_forced_nop (), slot[1]);
      df_insn_rescan (slot[2]);
    }

  /* Avoid line number information being printed inside one bundle.  */
  if (INSN_LOCATOR (slot[1])
      && INSN_LOCATOR (slot[1]) != INSN_LOCATOR (slot[0]))
    INSN_LOCATOR (slot[1]) = INSN_LOCATOR (slot[0]);
  if (INSN_LOCATOR (slot[2])
      && INSN_LOCATOR (slot[2]) != INSN_LOCATOR (slot[0]))
    INSN_LOCATOR (slot[2]) = INSN_LOCATOR (slot[0]);

  /* Terminate them with "|| " instead of ";" in the output.  */
  PUT_MODE (slot[0], SImode);
  PUT_MODE (slot[1], SImode);
  /* Terminate the bundle, for the benefit of reorder_var_tracking_notes.  */
  PUT_MODE (slot[2], QImode);
  return true;
}
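
/* A completed bundle then prints as, e.g. (illustrative)
       mnop || r1 = [p0++] || r2 = [i0++];
   the SImode insns get "||" separators and the final QImode insn ends
   the line with ";".  */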
 
/* Go through all insns, and use the information generated during scheduling
   to generate SEQUENCEs to represent bundles of instructions issued
   simultaneously.  */

static void
bfin_gen_bundles (void)
{
  basic_block bb;
  FOR_EACH_BB (bb)
    {
      rtx insn, next;
      rtx slot[3];
      int n_filled = 0;

      slot[0] = slot[1] = slot[2] = NULL_RTX;
      for (insn = BB_HEAD (bb);; insn = next)
        {
          int at_end;
          rtx delete_this = NULL_RTX;

          if (NONDEBUG_INSN_P (insn))
            {
              enum attr_type type = get_attr_type (insn);

              if (type == TYPE_STALL)
                {
                  gcc_assert (n_filled == 0);
                  delete_this = insn;
                }
              else
                {
                  if (type == TYPE_DSP32 || type == TYPE_DSP32SHIFTIMM)
                    slot[0] = insn;
                  else if (slot[1] == NULL_RTX)
                    slot[1] = insn;
                  else
                    slot[2] = insn;
                  n_filled++;
                }
            }

          next = NEXT_INSN (insn);
          while (next && insn != BB_END (bb)
                 && !(INSN_P (next)
                      && GET_CODE (PATTERN (next)) != USE
                      && GET_CODE (PATTERN (next)) != CLOBBER))
            {
              insn = next;
              next = NEXT_INSN (insn);
            }

          /* BB_END can change due to emitting extra NOPs, so check here.  */
          at_end = insn == BB_END (bb);
          if (delete_this == NULL_RTX && (at_end || GET_MODE (next) == TImode))
            {
              if ((n_filled < 2
                   || !gen_one_bundle (slot))
                  && slot[0] != NULL_RTX)
                {
                  rtx pat = PATTERN (slot[0]);
                  if (GET_CODE (pat) == SET
                      && GET_CODE (SET_SRC (pat)) == UNSPEC
                      && XINT (SET_SRC (pat), 1) == UNSPEC_32BIT)
                    {
                      SET_SRC (pat) = XVECEXP (SET_SRC (pat), 0, 0);
                      INSN_CODE (slot[0]) = -1;
                      df_insn_rescan (slot[0]);
                    }
                }
              n_filled = 0;
              slot[0] = slot[1] = slot[2] = NULL_RTX;
            }
          if (delete_this != NULL_RTX)
            delete_insn (delete_this);
          if (at_end)
            break;
        }
    }
}

/* Ensure that no var tracking notes are emitted in the middle of a
   three-instruction bundle.  */

static void
reorder_var_tracking_notes (void)
{
  basic_block bb;
  FOR_EACH_BB (bb)
    {
      rtx insn, next;
      rtx queue = NULL_RTX;
      bool in_bundle = false;

      for (insn = BB_HEAD (bb); insn != BB_END (bb); insn = next)
        {
          next = NEXT_INSN (insn);

          if (INSN_P (insn))
            {
              /* Emit queued up notes at the last instruction of a bundle.  */
              if (GET_MODE (insn) == QImode)
                {
                  while (queue)
                    {
                      rtx next_queue = PREV_INSN (queue);
                      PREV_INSN (NEXT_INSN (insn)) = queue;
                      NEXT_INSN (queue) = NEXT_INSN (insn);
                      NEXT_INSN (insn) = queue;
                      PREV_INSN (queue) = insn;
                      queue = next_queue;
                    }
                  in_bundle = false;
                }
              else if (GET_MODE (insn) == SImode)
                in_bundle = true;
            }
          else if (NOTE_P (insn) && NOTE_KIND (insn) == NOTE_INSN_VAR_LOCATION)
            {
              if (in_bundle)
                {
                  rtx prev = PREV_INSN (insn);
4972
                  PREV_INSN (next) = prev;
4973
                  NEXT_INSN (prev) = next;
4974
 
4975
                  PREV_INSN (insn) = queue;
4976
                  queue = insn;
4977
                }
4978
            }
4979
        }
4980
    }
4981
}
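
/* Schematic illustration (assumed, not from this file): a var-location
   note that ends up between the slots of a bundle,

       slot0 (SImode)   NOTE_INSN_VAR_LOCATION   slot2 (QImode)

   is unlinked and queued when seen, then relinked after the QImode slot,
   so the assembly printer sees the bundle as a contiguous run of
   insns.  */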

/* On some silicon revisions, functions shorter than a certain number of cycles
   can cause unpredictable behaviour.  Work around this by adding NOPs as
   needed.  */
static void
workaround_rts_anomaly (void)
{
  rtx insn, first_insn = NULL_RTX;
  int cycles = 4;

  if (! ENABLE_WA_RETS)
    return;

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      rtx pat;

      if (BARRIER_P (insn))
        return;

      if (NOTE_P (insn) || LABEL_P (insn))
        continue;

      if (first_insn == NULL_RTX)
        first_insn = insn;
      pat = PATTERN (insn);
      if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER
          || GET_CODE (pat) == ASM_INPUT || GET_CODE (pat) == ADDR_VEC
          || GET_CODE (pat) == ADDR_DIFF_VEC || asm_noperands (pat) >= 0)
        continue;

      if (CALL_P (insn))
        return;

      if (JUMP_P (insn))
        {
          if (recog_memoized (insn) == CODE_FOR_return_internal)
            break;

          /* Nothing to worry about for direct jumps.  */
          if (!any_condjump_p (insn))
            return;
          if (cycles <= 1)
            return;
          cycles--;
        }
      else if (INSN_P (insn))
        {
          rtx pat = PATTERN (insn);
          int this_cycles = 1;

          if (GET_CODE (pat) == PARALLEL)
            {
              if (push_multiple_operation (pat, VOIDmode)
                  || pop_multiple_operation (pat, VOIDmode))
                this_cycles = n_regs_to_save;
            }
          else
            {
              enum insn_code icode = recog_memoized (insn);
              if (icode == CODE_FOR_link)
                this_cycles = 4;
              else if (icode == CODE_FOR_unlink)
                this_cycles = 3;
              else if (icode == CODE_FOR_mulsi3)
                this_cycles = 5;
            }
          if (this_cycles >= cycles)
            return;

          cycles -= this_cycles;
        }
    }
  while (cycles > 0)
    {
      emit_insn_before (gen_nop (), first_insn);
      cycles--;
    }
}
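
/* Worked example (assumed assembly, for illustration): with the initial
   budget of four cycles, a function whose body is a single one-cycle
   move followed by the return,

       R0 = 0;
       RTS;

   still has three cycles outstanding when the return is reached, so
   three NOPs are emitted before the first insn.  */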

/* Return an insn type for INSN that can be used by the caller for anomaly
   workarounds.  This differs from plain get_attr_type in that it handles
   SEQUENCEs.  */

static enum attr_type
type_for_anomaly (rtx insn)
{
  rtx pat = PATTERN (insn);
  if (GET_CODE (pat) == SEQUENCE)
    {
      enum attr_type t;
      t = get_attr_type (XVECEXP (pat, 0, 1));
      if (t == TYPE_MCLD)
        return t;
      t = get_attr_type (XVECEXP (pat, 0, 2));
      if (t == TYPE_MCLD)
        return t;
      return TYPE_MCST;
    }
  else
    return get_attr_type (insn);
}

/* Return true iff the address found in MEM is based on the register
   NP_REG and optionally has a positive offset.  */
static bool
harmless_null_pointer_p (rtx mem, int np_reg)
{
  mem = XEXP (mem, 0);
  if (GET_CODE (mem) == POST_INC || GET_CODE (mem) == POST_DEC)
    mem = XEXP (mem, 0);
  if (REG_P (mem) && REGNO (mem) == np_reg)
    return true;
  if (GET_CODE (mem) == PLUS
      && REG_P (XEXP (mem, 0)) && REGNO (XEXP (mem, 0)) == np_reg)
    {
      mem = XEXP (mem, 1);
      if (GET_CODE (mem) == CONST_INT && INTVAL (mem) > 0)
        return true;
    }
  return false;
}
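
/* For illustration (assumed examples): with NP_REG being P2, the address
   forms accepted as harmless are [P2], [P2++], [P2--] and [P2 + 4]
   (any positive constant offset).  A negative or non-constant offset
   would not qualify.  */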

/* Return nonzero if INSN contains any loads that may trap.  */

static bool
trapping_loads_p (rtx insn, int np_reg, bool after_np_branch)
{
  rtx pat = PATTERN (insn);
  rtx mem = SET_SRC (single_set (insn));

  if (!after_np_branch)
    np_reg = -1;
  return ((np_reg == -1 || !harmless_null_pointer_p (mem, np_reg))
          && may_trap_p (mem));
}

/* Return INSN if it is of TYPE_MCLD.  Alternatively, if INSN is the start of
   a three-insn bundle, see if one of them is a load and return that if so.
   Return NULL_RTX if the insn does not contain loads.  */
static rtx
find_load (rtx insn)
{
  if (!NONDEBUG_INSN_P (insn))
    return NULL_RTX;
  if (get_attr_type (insn) == TYPE_MCLD)
    return insn;
  if (GET_MODE (insn) != SImode)
    return NULL_RTX;
  do {
    insn = NEXT_INSN (insn);
    if ((GET_MODE (insn) == SImode || GET_MODE (insn) == QImode)
        && get_attr_type (insn) == TYPE_MCLD)
      return insn;
  } while (GET_MODE (insn) != QImode);
  return NULL_RTX;
}

/* Determine whether PAT is an indirect call pattern.  */
static bool
indirect_call_p (rtx pat)
{
  if (GET_CODE (pat) == PARALLEL)
    pat = XVECEXP (pat, 0, 0);
  if (GET_CODE (pat) == SET)
    pat = SET_SRC (pat);
  gcc_assert (GET_CODE (pat) == CALL);
  pat = XEXP (pat, 0);
  gcc_assert (GET_CODE (pat) == MEM);
  pat = XEXP (pat, 0);

  return REG_P (pat);
}

/* During workaround_speculation, track whether we're in the shadow of a
   conditional branch that tests a P register for NULL.  If so, we can omit
   emitting NOPs if we see a load from that P register, since a speculative
   access at address 0 isn't a problem, and the load is executed in all other
   cases anyway.
   Global for communication with note_np_check_stores through note_stores.
   */
int np_check_regno = -1;
bool np_after_branch = false;

/* Subroutine of workaround_speculation, called through note_stores.  */
static void
note_np_check_stores (rtx x, const_rtx pat, void *data ATTRIBUTE_UNUSED)
{
  if (REG_P (x) && (REGNO (x) == REG_CC || REGNO (x) == np_check_regno))
    np_check_regno = -1;
}
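
/* Example of the shadow being tracked (assumed assembly, illustration
   only):

       CC = P2 == 0;
       if CC jump .Lnull;
       R0 = [P2 + 8];

   After a branch that is taken exactly when P2 is zero, the load needs
   no NOP padding: if the branch falls through, the load was going to
   execute anyway, and if it is taken, the speculative access reads from
   the small positive offset 8, which the comment above argues is
   harmless.  */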

static void
workaround_speculation (void)
{
  rtx insn, next;
  rtx last_condjump = NULL_RTX;
  int cycles_since_jump = INT_MAX;
  int delay_added = 0;

  if (! ENABLE_WA_SPECULATIVE_LOADS && ! ENABLE_WA_SPECULATIVE_SYNCS
      && ! ENABLE_WA_INDIRECT_CALLS)
    return;

  /* First pass: find predicted-false branches; if something after them
     needs nops, insert them or change the branch to predict true.  */
  for (insn = get_insns (); insn; insn = next)
    {
      rtx pat;
      int delay_needed = 0;

      next = find_next_insn_start (insn);

      if (NOTE_P (insn) || BARRIER_P (insn))
        continue;

      if (LABEL_P (insn))
        {
          np_check_regno = -1;
          continue;
        }

      pat = PATTERN (insn);
      if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER
          || GET_CODE (pat) == ADDR_VEC || GET_CODE (pat) == ADDR_DIFF_VEC)
        continue;

      if (GET_CODE (pat) == ASM_INPUT || asm_noperands (pat) >= 0)
        {
          np_check_regno = -1;
          continue;
        }

      if (JUMP_P (insn))
        {
          /* Is this a condjump based on a null pointer comparison we saw
             earlier?  */
          if (np_check_regno != -1
              && recog_memoized (insn) == CODE_FOR_cbranchbi4)
            {
              rtx op = XEXP (SET_SRC (PATTERN (insn)), 0);
              gcc_assert (GET_CODE (op) == EQ || GET_CODE (op) == NE);
              if (GET_CODE (op) == NE)
                np_after_branch = true;
            }
          if (any_condjump_p (insn)
              && ! cbranch_predicted_taken_p (insn))
            {
              last_condjump = insn;
              delay_added = 0;
              cycles_since_jump = 0;
            }
          else
            cycles_since_jump = INT_MAX;
        }
      else if (CALL_P (insn))
        {
          np_check_regno = -1;
          if (cycles_since_jump < INT_MAX)
            cycles_since_jump++;
          if (indirect_call_p (pat) && ENABLE_WA_INDIRECT_CALLS)
            {
              delay_needed = 3;
            }
        }
      else if (NONDEBUG_INSN_P (insn))
        {
          rtx load_insn = find_load (insn);
          enum attr_type type = type_for_anomaly (insn);

          if (cycles_since_jump < INT_MAX)
            cycles_since_jump++;

          /* Detect a comparison of a P register with zero.  If we later
             see a condjump based on it, we have found a null pointer
             check.  */
          if (recog_memoized (insn) == CODE_FOR_compare_eq)
            {
              rtx src = SET_SRC (PATTERN (insn));
              if (REG_P (XEXP (src, 0))
                  && P_REGNO_P (REGNO (XEXP (src, 0)))
                  && XEXP (src, 1) == const0_rtx)
                {
                  np_check_regno = REGNO (XEXP (src, 0));
                  np_after_branch = false;
                }
              else
                np_check_regno = -1;
            }

          if (load_insn && ENABLE_WA_SPECULATIVE_LOADS)
            {
              if (trapping_loads_p (load_insn, np_check_regno,
                                    np_after_branch))
                delay_needed = 4;
            }
          else if (type == TYPE_SYNC && ENABLE_WA_SPECULATIVE_SYNCS)
            delay_needed = 3;

          /* See if we need to forget about a null pointer comparison
             we found earlier.  */
          if (recog_memoized (insn) != CODE_FOR_compare_eq)
            {
              note_stores (PATTERN (insn), note_np_check_stores, NULL);
              if (np_check_regno != -1)
                {
                  if (find_regno_note (insn, REG_INC, np_check_regno))
                    np_check_regno = -1;
                }
            }

        }

      if (delay_needed > cycles_since_jump
          && (delay_needed - cycles_since_jump) > delay_added)
        {
          rtx pat1;
          int num_clobbers;
          rtx *op = recog_data.operand;

          delay_needed -= cycles_since_jump;

          extract_insn (last_condjump);
          if (optimize_size)
            {
              pat1 = gen_cbranch_predicted_taken (op[0], op[1], op[2],
                                                 op[3]);
              cycles_since_jump = INT_MAX;
            }
          else
            {
              /* Do not adjust cycles_since_jump in this case, so that
                 we'll increase the number of NOPs for a subsequent insn
                 if necessary.  */
              pat1 = gen_cbranch_with_nops (op[0], op[1], op[2], op[3],
                                            GEN_INT (delay_needed));
              delay_added = delay_needed;
            }
          PATTERN (last_condjump) = pat1;
          INSN_CODE (last_condjump) = recog (pat1, insn, &num_clobbers);
        }
      if (CALL_P (insn))
        {
          cycles_since_jump = INT_MAX;
          delay_added = 0;
        }
    }

  /* Second pass: for predicted-true branches, see if anything at the
     branch destination needs extra nops.  */
  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      int cycles_since_jump;
      if (JUMP_P (insn)
          && any_condjump_p (insn)
          && (INSN_CODE (insn) == CODE_FOR_cbranch_predicted_taken
              || cbranch_predicted_taken_p (insn)))
        {
          rtx target = JUMP_LABEL (insn);
          rtx label = target;
          rtx next_tgt;

          cycles_since_jump = 0;
          for (; target && cycles_since_jump < 3; target = next_tgt)
            {
              rtx pat;

              next_tgt = find_next_insn_start (target);

              if (NOTE_P (target) || BARRIER_P (target) || LABEL_P (target))
                continue;

              pat = PATTERN (target);
              if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER
                  || GET_CODE (pat) == ASM_INPUT || GET_CODE (pat) == ADDR_VEC
                  || GET_CODE (pat) == ADDR_DIFF_VEC || asm_noperands (pat) >= 0)
                continue;

              if (NONDEBUG_INSN_P (target))
                {
                  rtx load_insn = find_load (target);
                  enum attr_type type = type_for_anomaly (target);
                  int delay_needed = 0;
                  if (cycles_since_jump < INT_MAX)
                    cycles_since_jump++;

                  if (load_insn && ENABLE_WA_SPECULATIVE_LOADS)
                    {
                      if (trapping_loads_p (load_insn, -1, false))
                        delay_needed = 2;
                    }
                  else if (type == TYPE_SYNC && ENABLE_WA_SPECULATIVE_SYNCS)
                    delay_needed = 2;

                  if (delay_needed > cycles_since_jump)
                    {
                      rtx prev = prev_real_insn (label);
                      delay_needed -= cycles_since_jump;
                      if (dump_file)
                        fprintf (dump_file, "Adding %d nops after %d\n",
                                 delay_needed, INSN_UID (label));
                      if (JUMP_P (prev)
                          && INSN_CODE (prev) == CODE_FOR_cbranch_with_nops)
                        {
                          rtx x;
                          HOST_WIDE_INT v;

                          if (dump_file)
                            fprintf (dump_file,
                                     "Reducing nops on insn %d.\n",
                                     INSN_UID (prev));
                          x = PATTERN (prev);
                          x = XVECEXP (x, 0, 1);
                          v = INTVAL (XVECEXP (x, 0, 0)) - delay_needed;
                          XVECEXP (x, 0, 0) = GEN_INT (v);
                        }
                      while (delay_needed-- > 0)
                        emit_insn_after (gen_nop (), label);
                      break;
                    }
                }
            }
        }
    }
}

/* Called just before the final scheduling pass.  If we need to insert NOPs
   later on to work around speculative loads, insert special placeholder
   insns that cause loads to be delayed for as many cycles as necessary
   (and possible).  This reduces the number of NOPs we need to add.
   The dummy insns we generate are later removed by bfin_gen_bundles.  */
static void
add_sched_insns_for_speculation (void)
{
  rtx insn;

  if (! ENABLE_WA_SPECULATIVE_LOADS && ! ENABLE_WA_SPECULATIVE_SYNCS
      && ! ENABLE_WA_INDIRECT_CALLS)
    return;

  /* First pass: find predicted-false branches and insert a stall
     placeholder after each of them, so the scheduler keeps loads away
     from the branch.  */
  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      rtx pat;

      if (NOTE_P (insn) || BARRIER_P (insn) || LABEL_P (insn))
        continue;

      pat = PATTERN (insn);
      if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER
          || GET_CODE (pat) == ASM_INPUT || GET_CODE (pat) == ADDR_VEC
          || GET_CODE (pat) == ADDR_DIFF_VEC || asm_noperands (pat) >= 0)
        continue;

      if (JUMP_P (insn))
        {
          if (any_condjump_p (insn)
              && !cbranch_predicted_taken_p (insn))
            {
              rtx n = next_real_insn (insn);
              emit_insn_before (gen_stall (GEN_INT (3)), n);
            }
        }
    }

  /* Second pass: for predicted-true branches, place a one-cycle stall
     at the branch destination unless one is already there.  */
  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      if (JUMP_P (insn)
          && any_condjump_p (insn)
          && (cbranch_predicted_taken_p (insn)))
        {
          rtx target = JUMP_LABEL (insn);
          rtx next = next_real_insn (target);

          if (GET_CODE (PATTERN (next)) == UNSPEC_VOLATILE
              && get_attr_type (next) == TYPE_STALL)
            continue;
          emit_insn_before (gen_stall (GEN_INT (1)), next);
        }
    }
}
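
/* A note on these placeholders (an inference from the code above): the
   insns created by gen_stall carry TYPE_STALL, occupy scheduler cycles
   without emitting any code, and are deleted again in bfin_gen_bundles.
   Whatever distance the scheduler fails to create is then padded with
   real NOPs by workaround_speculation.  */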

/* We use the machine specific reorg pass for emitting CSYNC instructions
   after conditional branches as needed.

   The Blackfin is unusual in that a code sequence like
     if cc jump label
     r0 = (p0)
   may speculatively perform the load even if the condition isn't true.  This
   happens for a branch that is predicted not taken, because the pipeline
   isn't flushed or stalled, so the early stages of the following instructions,
   which perform the memory reference, are allowed to execute before the
   jump condition is evaluated.
   Therefore, we must insert additional instructions in all places where this
   could lead to incorrect behavior.  The manual recommends CSYNC, while
   VDSP seems to use NOPs (even though its corresponding compiler option is
   named CSYNC).

   When optimizing for speed, we emit NOPs, which seems faster than a CSYNC.
   When optimizing for size, we turn the branch into a predicted taken one.
   This may be slower due to mispredicts, but saves code size.  */
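
/* Concretely (an assumed illustration of the two strategies): when
   optimizing for speed the sequence above becomes

       if cc jump label;
       NOP; NOP; NOP; NOP;
       r0 = [p0];

   matching the delay of four cycles used in workaround_speculation,
   while with -Os the branch is instead rewritten into its
   predicted-taken form, "if cc jump label (bp);".  */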

static void
bfin_reorg (void)
{
  /* We are freeing block_for_insn in the toplev to keep compatibility
     with old MDEP_REORGS that are not CFG based.  Recompute it now.  */
  compute_bb_for_insn ();

  if (bfin_flag_schedule_insns2)
    {
      splitting_for_sched = 1;
      split_all_insns ();
      splitting_for_sched = 0;

      add_sched_insns_for_speculation ();

      timevar_push (TV_SCHED2);
      if (flag_selective_scheduling2
          && !maybe_skip_selective_scheduling ())
        run_selective_scheduling ();
      else
        schedule_insns ();
      timevar_pop (TV_SCHED2);

      /* Examine the schedule and insert nops as necessary for 64-bit parallel
         instructions.  */
      bfin_gen_bundles ();
    }

  df_analyze ();

  /* Doloop optimization.  */
  if (cfun->machine->has_hardware_loops)
    bfin_reorg_loops (dump_file);

  workaround_speculation ();

  if (bfin_flag_var_tracking)
    {
      timevar_push (TV_VAR_TRACKING);
      variable_tracking_main ();
      reorder_var_tracking_notes ();
      timevar_pop (TV_VAR_TRACKING);
    }

  df_finish_pass (false);

  workaround_rts_anomaly ();
}

/* Handle interrupt_handler, exception_handler and nmi_handler function
   attributes; arguments as in struct attribute_spec.handler.  */

static tree
handle_int_attribute (tree *node, tree name,
                      tree args ATTRIBUTE_UNUSED,
                      int flags ATTRIBUTE_UNUSED,
                      bool *no_add_attrs)
{
  tree x = *node;
  if (TREE_CODE (x) == FUNCTION_DECL)
    x = TREE_TYPE (x);

  if (TREE_CODE (x) != FUNCTION_TYPE)
    {
      warning (OPT_Wattributes, "%qE attribute only applies to functions",
               name);
      *no_add_attrs = true;
    }
  else if (funkind (x) != SUBROUTINE)
    error ("multiple function type attributes specified");

  return NULL_TREE;
}

/* Return 0 if the attributes for two types are incompatible, 1 if they
   are compatible, and 2 if they are nearly compatible (which causes a
   warning to be generated).  */

static int
bfin_comp_type_attributes (const_tree type1, const_tree type2)
{
  e_funkind kind1, kind2;

  if (TREE_CODE (type1) != FUNCTION_TYPE)
    return 1;

  kind1 = funkind (type1);
  kind2 = funkind (type2);

  if (kind1 != kind2)
    return 0;

  /* Check for mismatched modifiers.  */
  if (!lookup_attribute ("nesting", TYPE_ATTRIBUTES (type1))
      != !lookup_attribute ("nesting", TYPE_ATTRIBUTES (type2)))
    return 0;

  if (!lookup_attribute ("saveall", TYPE_ATTRIBUTES (type1))
      != !lookup_attribute ("saveall", TYPE_ATTRIBUTES (type2)))
    return 0;

  if (!lookup_attribute ("kspisusp", TYPE_ATTRIBUTES (type1))
      != !lookup_attribute ("kspisusp", TYPE_ATTRIBUTES (type2)))
    return 0;

  if (!lookup_attribute ("longcall", TYPE_ATTRIBUTES (type1))
      != !lookup_attribute ("longcall", TYPE_ATTRIBUTES (type2)))
    return 0;

  return 1;
}

/* Handle a "longcall" or "shortcall" attribute; arguments as in
   struct attribute_spec.handler.  */

static tree
bfin_handle_longcall_attribute (tree *node, tree name,
                                tree args ATTRIBUTE_UNUSED,
                                int flags ATTRIBUTE_UNUSED,
                                bool *no_add_attrs)
{
  if (TREE_CODE (*node) != FUNCTION_TYPE
      && TREE_CODE (*node) != FIELD_DECL
      && TREE_CODE (*node) != TYPE_DECL)
    {
      warning (OPT_Wattributes, "%qE attribute only applies to functions",
               name);
      *no_add_attrs = true;
    }

  if ((strcmp (IDENTIFIER_POINTER (name), "longcall") == 0
       && lookup_attribute ("shortcall", TYPE_ATTRIBUTES (*node)))
      || (strcmp (IDENTIFIER_POINTER (name), "shortcall") == 0
          && lookup_attribute ("longcall", TYPE_ATTRIBUTES (*node))))
    {
      warning (OPT_Wattributes,
               "can't apply both longcall and shortcall attributes to the same function");
      *no_add_attrs = true;
    }

  return NULL_TREE;
}
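
/* Example uses (illustration only):

       void far_handler (void) __attribute__ ((longcall));
       void near_handler (void) __attribute__ ((shortcall));

   longcall makes the compiler call the function through a register, so
   the callee need not be within reach of a pc-relative CALL; shortcall
   asserts that it is.  Combining both on one function triggers the
   warning above.  */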

/* Handle a "l1_text" attribute; arguments as in
   struct attribute_spec.handler.  */

static tree
bfin_handle_l1_text_attribute (tree *node, tree name, tree ARG_UNUSED (args),
                               int ARG_UNUSED (flags), bool *no_add_attrs)
{
  tree decl = *node;

  if (TREE_CODE (decl) != FUNCTION_DECL)
    {
      error ("%qE attribute only applies to functions",
             name);
      *no_add_attrs = true;
    }

  /* The decl may have already been given a section attribute
     from a previous declaration.  Ensure they match.  */
  else if (DECL_SECTION_NAME (decl) != NULL_TREE
           && strcmp (TREE_STRING_POINTER (DECL_SECTION_NAME (decl)),
                      ".l1.text") != 0)
    {
      error ("section of %q+D conflicts with previous declaration",
             decl);
      *no_add_attrs = true;
    }
  else
    DECL_SECTION_NAME (decl) = build_string (9, ".l1.text");

  return NULL_TREE;
}

/* Handle a "l1_data", "l1_data_A" or "l1_data_B" attribute;
   arguments as in struct attribute_spec.handler.  */

static tree
bfin_handle_l1_data_attribute (tree *node, tree name, tree ARG_UNUSED (args),
                               int ARG_UNUSED (flags), bool *no_add_attrs)
{
  tree decl = *node;

  if (TREE_CODE (decl) != VAR_DECL)
    {
      error ("%qE attribute only applies to variables",
             name);
      *no_add_attrs = true;
    }
  else if (current_function_decl != NULL_TREE
           && !TREE_STATIC (decl))
    {
      error ("%qE attribute cannot be specified for local variables",
             name);
      *no_add_attrs = true;
    }
  else
    {
      const char *section_name;

      if (strcmp (IDENTIFIER_POINTER (name), "l1_data") == 0)
        section_name = ".l1.data";
      else if (strcmp (IDENTIFIER_POINTER (name), "l1_data_A") == 0)
        section_name = ".l1.data.A";
      else if (strcmp (IDENTIFIER_POINTER (name), "l1_data_B") == 0)
        section_name = ".l1.data.B";
      else
        gcc_unreachable ();

      /* The decl may have already been given a section attribute
         from a previous declaration.  Ensure they match.  */
      if (DECL_SECTION_NAME (decl) != NULL_TREE
          && strcmp (TREE_STRING_POINTER (DECL_SECTION_NAME (decl)),
                     section_name) != 0)
        {
          error ("section of %q+D conflicts with previous declaration",
                 decl);
          *no_add_attrs = true;
        }
      else
        DECL_SECTION_NAME (decl)
          = build_string (strlen (section_name) + 1, section_name);
    }

  return NULL_TREE;
}

/* Handle a "l2" attribute; arguments as in struct attribute_spec.handler.  */

static tree
bfin_handle_l2_attribute (tree *node, tree ARG_UNUSED (name),
                          tree ARG_UNUSED (args), int ARG_UNUSED (flags),
                          bool *no_add_attrs)
{
  tree decl = *node;

  if (TREE_CODE (decl) == FUNCTION_DECL)
    {
      if (DECL_SECTION_NAME (decl) != NULL_TREE
          && strcmp (TREE_STRING_POINTER (DECL_SECTION_NAME (decl)),
                     ".l2.text") != 0)
        {
          error ("section of %q+D conflicts with previous declaration",
                 decl);
          *no_add_attrs = true;
        }
      else
        DECL_SECTION_NAME (decl) = build_string (9, ".l2.text");
    }
  else if (TREE_CODE (decl) == VAR_DECL)
    {
      if (DECL_SECTION_NAME (decl) != NULL_TREE
          && strcmp (TREE_STRING_POINTER (DECL_SECTION_NAME (decl)),
                     ".l2.data") != 0)
        {
          error ("section of %q+D conflicts with previous declaration",
                 decl);
          *no_add_attrs = true;
        }
      else
        DECL_SECTION_NAME (decl) = build_string (9, ".l2.data");
    }

  return NULL_TREE;
}

/* Table of valid machine attributes.  */
static const struct attribute_spec bfin_attribute_table[] =
{
  /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
  { "interrupt_handler", 0, 0, false, true,  true, handle_int_attribute },
  { "exception_handler", 0, 0, false, true,  true, handle_int_attribute },
  { "nmi_handler", 0, 0, false, true,  true, handle_int_attribute },
  { "nesting", 0, 0, false, true,  true, NULL },
  { "kspisusp", 0, 0, false, true,  true, NULL },
  { "saveall", 0, 0, false, true,  true, NULL },
  { "longcall",  0, 0, false, true,  true,  bfin_handle_longcall_attribute },
  { "shortcall", 0, 0, false, true,  true,  bfin_handle_longcall_attribute },
  { "l1_text", 0, 0, true, false, false,  bfin_handle_l1_text_attribute },
  { "l1_data", 0, 0, true, false, false,  bfin_handle_l1_data_attribute },
  { "l1_data_A", 0, 0, true, false, false, bfin_handle_l1_data_attribute },
  { "l1_data_B", 0, 0, true, false, false,  bfin_handle_l1_data_attribute },
  { "l2", 0, 0, true, false, false,  bfin_handle_l2_attribute },
  { NULL, 0, 0, false, false, false, NULL }
};
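
/* Example uses of the attributes in this table (illustration only):

       void isr (void) __attribute__ ((interrupt_handler, nesting));
       void fast_path (void) __attribute__ ((l1_text));
       static int coeffs[64] __attribute__ ((l1_data_A));

   The handlers above place fast_path in section .l1.text and coeffs in
   .l1.data.A, i.e. in on-chip L1 memory.  */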

/* Implementation of TARGET_ASM_INTEGER.  When using FD-PIC, we need to
   tell the assembler to generate pointers to function descriptors in
   some cases.  */

static bool
bfin_assemble_integer (rtx value, unsigned int size, int aligned_p)
{
  if (TARGET_FDPIC && size == UNITS_PER_WORD)
    {
      if (GET_CODE (value) == SYMBOL_REF
          && SYMBOL_REF_FUNCTION_P (value))
        {
          fputs ("\t.picptr\tfuncdesc(", asm_out_file);
          output_addr_const (asm_out_file, value);
          fputs (")\n", asm_out_file);
          return true;
        }
      if (!aligned_p)
        {
          /* We've set the unaligned SI op to NULL, so we always have to
             handle the unaligned case here.  */
          assemble_integer_with_op ("\t.4byte\t", value);
          return true;
        }
    }
  return default_assemble_integer (value, size, aligned_p);
}
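
/* Example output (an assumed illustration): under -mfdpic, a statically
   initialized function pointer such as

       void foo (void);
       void (*fp) (void) = foo;

   is emitted as ".picptr funcdesc(_foo)" rather than ".4byte _foo", so
   that the linker builds a function descriptor instead of a raw code
   address.  */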

/* Output the assembler code for a thunk function.  THUNK_DECL is the
   declaration for the thunk function itself, FUNCTION is the decl for
   the target function.  DELTA is an immediate constant offset to be
   added to THIS.  If VCALL_OFFSET is nonzero, the word at
   *(*this + vcall_offset) should be added to THIS.  */

static void
bfin_output_mi_thunk (FILE *file ATTRIBUTE_UNUSED,
                      tree thunk ATTRIBUTE_UNUSED, HOST_WIDE_INT delta,
                      HOST_WIDE_INT vcall_offset, tree function)
{
  rtx xops[3];
  /* The this parameter is passed as the first argument.  */
  rtx this_rtx = gen_rtx_REG (Pmode, REG_R0);

  /* Adjust the this parameter by a fixed constant.  */
  if (delta)
    {
      xops[1] = this_rtx;
      if (delta >= -64 && delta <= 63)
        {
          xops[0] = GEN_INT (delta);
          output_asm_insn ("%1 += %0;", xops);
        }
      else if (delta >= -128 && delta < -64)
        {
          xops[0] = GEN_INT (delta + 64);
          output_asm_insn ("%1 += -64; %1 += %0;", xops);
        }
      else if (delta > 63 && delta <= 126)
        {
          xops[0] = GEN_INT (delta - 63);
          output_asm_insn ("%1 += 63; %1 += %0;", xops);
        }
      else
        {
          xops[0] = GEN_INT (delta);
          output_asm_insn ("r3.l = %h0; r3.h = %d0; %1 = %1 + r3;", xops);
        }
    }

  /* Adjust the this parameter by a value stored in the vtable.  */
  if (vcall_offset)
    {
      rtx p2tmp = gen_rtx_REG (Pmode, REG_P2);
      rtx tmp = gen_rtx_REG (Pmode, REG_R3);

      xops[1] = tmp;
      xops[2] = p2tmp;
      output_asm_insn ("%2 = r0; %2 = [%2];", xops);

      /* Adjust the this parameter.  */
      xops[0] = gen_rtx_MEM (Pmode, plus_constant (p2tmp, vcall_offset));
      if (!memory_operand (xops[0], Pmode))
        {
          rtx tmp2 = gen_rtx_REG (Pmode, REG_P1);
          xops[0] = GEN_INT (vcall_offset);
          xops[1] = tmp2;
          output_asm_insn ("%h1 = %h0; %d1 = %d0; %2 = %2 + %1", xops);
          xops[0] = gen_rtx_MEM (Pmode, p2tmp);
        }
      xops[2] = this_rtx;
      output_asm_insn ("%1 = %0; %2 = %2 + %1;", xops);
    }

  xops[0] = XEXP (DECL_RTL (function), 0);
  if (1 || !flag_pic || (*targetm.binds_local_p) (function))
    output_asm_insn ("jump.l\t%P0", xops);
}
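
/* Worked example of the DELTA handling above (illustration only): the
   immediate of the "+=" form used here is limited to the signed 7-bit
   range -64..63, so a delta of 100 falls into the 64..126 case and is
   emitted as the pair

       R0 += 63; R0 += 37;

   while anything outside -128..126 is built up in R3 and added with a
   single register-register operation.  */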

/* Codes for all the Blackfin builtins.  */
enum bfin_builtins
{
  BFIN_BUILTIN_CSYNC,
  BFIN_BUILTIN_SSYNC,
  BFIN_BUILTIN_ONES,
  BFIN_BUILTIN_COMPOSE_2X16,
  BFIN_BUILTIN_EXTRACTLO,
  BFIN_BUILTIN_EXTRACTHI,

  BFIN_BUILTIN_SSADD_2X16,
  BFIN_BUILTIN_SSSUB_2X16,
  BFIN_BUILTIN_SSADDSUB_2X16,
  BFIN_BUILTIN_SSSUBADD_2X16,
  BFIN_BUILTIN_MULT_2X16,
  BFIN_BUILTIN_MULTR_2X16,
  BFIN_BUILTIN_NEG_2X16,
  BFIN_BUILTIN_ABS_2X16,
  BFIN_BUILTIN_MIN_2X16,
  BFIN_BUILTIN_MAX_2X16,

  BFIN_BUILTIN_SSADD_1X16,
  BFIN_BUILTIN_SSSUB_1X16,
  BFIN_BUILTIN_MULT_1X16,
  BFIN_BUILTIN_MULTR_1X16,
  BFIN_BUILTIN_NORM_1X16,
  BFIN_BUILTIN_NEG_1X16,
  BFIN_BUILTIN_ABS_1X16,
  BFIN_BUILTIN_MIN_1X16,
  BFIN_BUILTIN_MAX_1X16,

  BFIN_BUILTIN_SUM_2X16,
  BFIN_BUILTIN_DIFFHL_2X16,
  BFIN_BUILTIN_DIFFLH_2X16,

  BFIN_BUILTIN_SSADD_1X32,
  BFIN_BUILTIN_SSSUB_1X32,
  BFIN_BUILTIN_NORM_1X32,
  BFIN_BUILTIN_ROUND_1X32,
  BFIN_BUILTIN_NEG_1X32,
  BFIN_BUILTIN_ABS_1X32,
  BFIN_BUILTIN_MIN_1X32,
  BFIN_BUILTIN_MAX_1X32,
  BFIN_BUILTIN_MULT_1X32,
  BFIN_BUILTIN_MULT_1X32X32,
  BFIN_BUILTIN_MULT_1X32X32NS,

  BFIN_BUILTIN_MULHISILL,
  BFIN_BUILTIN_MULHISILH,
  BFIN_BUILTIN_MULHISIHL,
  BFIN_BUILTIN_MULHISIHH,

  BFIN_BUILTIN_LSHIFT_1X16,
  BFIN_BUILTIN_LSHIFT_2X16,
  BFIN_BUILTIN_SSASHIFT_1X16,
  BFIN_BUILTIN_SSASHIFT_2X16,
  BFIN_BUILTIN_SSASHIFT_1X32,

  BFIN_BUILTIN_CPLX_MUL_16,
  BFIN_BUILTIN_CPLX_MAC_16,
  BFIN_BUILTIN_CPLX_MSU_16,

  BFIN_BUILTIN_CPLX_MUL_16_S40,
  BFIN_BUILTIN_CPLX_MAC_16_S40,
  BFIN_BUILTIN_CPLX_MSU_16_S40,

  BFIN_BUILTIN_CPLX_SQU,

  BFIN_BUILTIN_LOADBYTES,

  BFIN_BUILTIN_MAX
};

#define def_builtin(NAME, TYPE, CODE)                                   \
do {                                                                    \
  add_builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD,            \
                       NULL, NULL_TREE);                                \
} while (0)

/* Set up all builtin functions for this target.  */
static void
bfin_init_builtins (void)
{
  tree V2HI_type_node = build_vector_type_for_mode (intHI_type_node, V2HImode);
  tree void_ftype_void
    = build_function_type (void_type_node, void_list_node);
  tree short_ftype_short
    = build_function_type_list (short_integer_type_node, short_integer_type_node,
                                NULL_TREE);
  tree short_ftype_int_int
    = build_function_type_list (short_integer_type_node, integer_type_node,
                                integer_type_node, NULL_TREE);
  tree int_ftype_int_int
    = build_function_type_list (integer_type_node, integer_type_node,
                                integer_type_node, NULL_TREE);
  tree int_ftype_int
    = build_function_type_list (integer_type_node, integer_type_node,
                                NULL_TREE);
  tree short_ftype_int
    = build_function_type_list (short_integer_type_node, integer_type_node,
                                NULL_TREE);
  tree int_ftype_v2hi_v2hi
    = build_function_type_list (integer_type_node, V2HI_type_node,
                                V2HI_type_node, NULL_TREE);
  tree v2hi_ftype_v2hi_v2hi
    = build_function_type_list (V2HI_type_node, V2HI_type_node,
                                V2HI_type_node, NULL_TREE);
  tree v2hi_ftype_v2hi_v2hi_v2hi
    = build_function_type_list (V2HI_type_node, V2HI_type_node,
                                V2HI_type_node, V2HI_type_node, NULL_TREE);
  tree v2hi_ftype_int_int
    = build_function_type_list (V2HI_type_node, integer_type_node,
                                integer_type_node, NULL_TREE);
  tree v2hi_ftype_v2hi_int
    = build_function_type_list (V2HI_type_node, V2HI_type_node,
                                integer_type_node, NULL_TREE);
  tree int_ftype_short_short
    = build_function_type_list (integer_type_node, short_integer_type_node,
                                short_integer_type_node, NULL_TREE);
  tree v2hi_ftype_v2hi
    = build_function_type_list (V2HI_type_node, V2HI_type_node, NULL_TREE);
  tree short_ftype_v2hi
    = build_function_type_list (short_integer_type_node, V2HI_type_node,
                                NULL_TREE);
  tree int_ftype_pint
    = build_function_type_list (integer_type_node,
                                build_pointer_type (integer_type_node),
                                NULL_TREE);

  /* Define the Blackfin-specific builtin functions.  */
  def_builtin ("__builtin_bfin_csync", void_ftype_void, BFIN_BUILTIN_CSYNC);
  def_builtin ("__builtin_bfin_ssync", void_ftype_void, BFIN_BUILTIN_SSYNC);

  def_builtin ("__builtin_bfin_ones", short_ftype_int, BFIN_BUILTIN_ONES);

  def_builtin ("__builtin_bfin_compose_2x16", v2hi_ftype_int_int,
               BFIN_BUILTIN_COMPOSE_2X16);
  def_builtin ("__builtin_bfin_extract_hi", short_ftype_v2hi,
               BFIN_BUILTIN_EXTRACTHI);
  def_builtin ("__builtin_bfin_extract_lo", short_ftype_v2hi,
               BFIN_BUILTIN_EXTRACTLO);

  def_builtin ("__builtin_bfin_min_fr2x16", v2hi_ftype_v2hi_v2hi,
               BFIN_BUILTIN_MIN_2X16);
  def_builtin ("__builtin_bfin_max_fr2x16", v2hi_ftype_v2hi_v2hi,
               BFIN_BUILTIN_MAX_2X16);

  def_builtin ("__builtin_bfin_add_fr2x16", v2hi_ftype_v2hi_v2hi,
               BFIN_BUILTIN_SSADD_2X16);
  def_builtin ("__builtin_bfin_sub_fr2x16", v2hi_ftype_v2hi_v2hi,
               BFIN_BUILTIN_SSSUB_2X16);
  def_builtin ("__builtin_bfin_dspaddsubsat", v2hi_ftype_v2hi_v2hi,
               BFIN_BUILTIN_SSADDSUB_2X16);
  def_builtin ("__builtin_bfin_dspsubaddsat", v2hi_ftype_v2hi_v2hi,
               BFIN_BUILTIN_SSSUBADD_2X16);
  def_builtin ("__builtin_bfin_mult_fr2x16", v2hi_ftype_v2hi_v2hi,
               BFIN_BUILTIN_MULT_2X16);
  def_builtin ("__builtin_bfin_multr_fr2x16", v2hi_ftype_v2hi_v2hi,
               BFIN_BUILTIN_MULTR_2X16);
  def_builtin ("__builtin_bfin_negate_fr2x16", v2hi_ftype_v2hi,
               BFIN_BUILTIN_NEG_2X16);
  def_builtin ("__builtin_bfin_abs_fr2x16", v2hi_ftype_v2hi,
               BFIN_BUILTIN_ABS_2X16);

  def_builtin ("__builtin_bfin_min_fr1x16", short_ftype_int_int,
               BFIN_BUILTIN_MIN_1X16);
  def_builtin ("__builtin_bfin_max_fr1x16", short_ftype_int_int,
               BFIN_BUILTIN_MAX_1X16);

  def_builtin ("__builtin_bfin_add_fr1x16", short_ftype_int_int,
               BFIN_BUILTIN_SSADD_1X16);
  def_builtin ("__builtin_bfin_sub_fr1x16", short_ftype_int_int,
               BFIN_BUILTIN_SSSUB_1X16);
  def_builtin ("__builtin_bfin_mult_fr1x16", short_ftype_int_int,
               BFIN_BUILTIN_MULT_1X16);
  def_builtin ("__builtin_bfin_multr_fr1x16", short_ftype_int_int,
               BFIN_BUILTIN_MULTR_1X16);
  def_builtin ("__builtin_bfin_negate_fr1x16", short_ftype_short,
               BFIN_BUILTIN_NEG_1X16);
  def_builtin ("__builtin_bfin_abs_fr1x16", short_ftype_short,
               BFIN_BUILTIN_ABS_1X16);
  def_builtin ("__builtin_bfin_norm_fr1x16", short_ftype_int,
               BFIN_BUILTIN_NORM_1X16);

  def_builtin ("__builtin_bfin_sum_fr2x16", short_ftype_v2hi,
               BFIN_BUILTIN_SUM_2X16);
  def_builtin ("__builtin_bfin_diff_hl_fr2x16", short_ftype_v2hi,
               BFIN_BUILTIN_DIFFHL_2X16);
  def_builtin ("__builtin_bfin_diff_lh_fr2x16", short_ftype_v2hi,
               BFIN_BUILTIN_DIFFLH_2X16);

  def_builtin ("__builtin_bfin_mulhisill", int_ftype_v2hi_v2hi,
               BFIN_BUILTIN_MULHISILL);
  def_builtin ("__builtin_bfin_mulhisihl", int_ftype_v2hi_v2hi,
               BFIN_BUILTIN_MULHISIHL);
  def_builtin ("__builtin_bfin_mulhisilh", int_ftype_v2hi_v2hi,
               BFIN_BUILTIN_MULHISILH);
  def_builtin ("__builtin_bfin_mulhisihh", int_ftype_v2hi_v2hi,
               BFIN_BUILTIN_MULHISIHH);

  def_builtin ("__builtin_bfin_min_fr1x32", int_ftype_int_int,
               BFIN_BUILTIN_MIN_1X32);
  def_builtin ("__builtin_bfin_max_fr1x32", int_ftype_int_int,
               BFIN_BUILTIN_MAX_1X32);

  def_builtin ("__builtin_bfin_add_fr1x32", int_ftype_int_int,
               BFIN_BUILTIN_SSADD_1X32);
  def_builtin ("__builtin_bfin_sub_fr1x32", int_ftype_int_int,
               BFIN_BUILTIN_SSSUB_1X32);
  def_builtin ("__builtin_bfin_negate_fr1x32", int_ftype_int,
               BFIN_BUILTIN_NEG_1X32);
  def_builtin ("__builtin_bfin_abs_fr1x32", int_ftype_int,
               BFIN_BUILTIN_ABS_1X32);
  def_builtin ("__builtin_bfin_norm_fr1x32", short_ftype_int,
               BFIN_BUILTIN_NORM_1X32);
  def_builtin ("__builtin_bfin_round_fr1x32", short_ftype_int,
               BFIN_BUILTIN_ROUND_1X32);
  def_builtin ("__builtin_bfin_mult_fr1x32", int_ftype_short_short,
               BFIN_BUILTIN_MULT_1X32);
  def_builtin ("__builtin_bfin_mult_fr1x32x32", int_ftype_int_int,
               BFIN_BUILTIN_MULT_1X32X32);
  def_builtin ("__builtin_bfin_mult_fr1x32x32NS", int_ftype_int_int,
               BFIN_BUILTIN_MULT_1X32X32NS);

  /* Shifts.  */
  def_builtin ("__builtin_bfin_shl_fr1x16", short_ftype_int_int,
               BFIN_BUILTIN_SSASHIFT_1X16);
  def_builtin ("__builtin_bfin_shl_fr2x16", v2hi_ftype_v2hi_int,
               BFIN_BUILTIN_SSASHIFT_2X16);
  def_builtin ("__builtin_bfin_lshl_fr1x16", short_ftype_int_int,
               BFIN_BUILTIN_LSHIFT_1X16);
  def_builtin ("__builtin_bfin_lshl_fr2x16", v2hi_ftype_v2hi_int,
               BFIN_BUILTIN_LSHIFT_2X16);
  def_builtin ("__builtin_bfin_shl_fr1x32", int_ftype_int_int,
               BFIN_BUILTIN_SSASHIFT_1X32);

  /* Complex numbers.  */
  def_builtin ("__builtin_bfin_cmplx_add", v2hi_ftype_v2hi_v2hi,
               BFIN_BUILTIN_SSADD_2X16);
  def_builtin ("__builtin_bfin_cmplx_sub", v2hi_ftype_v2hi_v2hi,
               BFIN_BUILTIN_SSSUB_2X16);
  def_builtin ("__builtin_bfin_cmplx_mul", v2hi_ftype_v2hi_v2hi,
               BFIN_BUILTIN_CPLX_MUL_16);
  def_builtin ("__builtin_bfin_cmplx_mac", v2hi_ftype_v2hi_v2hi_v2hi,
               BFIN_BUILTIN_CPLX_MAC_16);
  def_builtin ("__builtin_bfin_cmplx_msu", v2hi_ftype_v2hi_v2hi_v2hi,
               BFIN_BUILTIN_CPLX_MSU_16);
  def_builtin ("__builtin_bfin_cmplx_mul_s40", v2hi_ftype_v2hi_v2hi,
               BFIN_BUILTIN_CPLX_MUL_16_S40);
  def_builtin ("__builtin_bfin_cmplx_mac_s40", v2hi_ftype_v2hi_v2hi_v2hi,
               BFIN_BUILTIN_CPLX_MAC_16_S40);
  def_builtin ("__builtin_bfin_cmplx_msu_s40", v2hi_ftype_v2hi_v2hi_v2hi,
               BFIN_BUILTIN_CPLX_MSU_16_S40);
  def_builtin ("__builtin_bfin_csqu_fr16", v2hi_ftype_v2hi,
               BFIN_BUILTIN_CPLX_SQU);

  /* "Unaligned" load.  */
  def_builtin ("__builtin_bfin_loadbytes", int_ftype_pint,
               BFIN_BUILTIN_LOADBYTES);
}
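
/* Example use of the intrinsics registered above (illustration only;
   the vector type mirrors V2HI_type_node):

       typedef short v2hi __attribute__ ((vector_size (4)));

       v2hi add_sat (v2hi a, v2hi b)
       {
         return __builtin_bfin_add_fr2x16 (a, b);
       }

       short high_half (v2hi x)
       {
         return __builtin_bfin_extract_hi (x);
       }
*/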


struct builtin_description
{
  const enum insn_code icode;
  const char *const name;
  const enum bfin_builtins code;
  int macflag;
};

static const struct builtin_description bdesc_2arg[] =
{
  { CODE_FOR_composev2hi, "__builtin_bfin_compose_2x16", BFIN_BUILTIN_COMPOSE_2X16, -1 },

  { CODE_FOR_ssashiftv2hi3, "__builtin_bfin_shl_fr2x16", BFIN_BUILTIN_SSASHIFT_2X16, -1 },
  { CODE_FOR_ssashifthi3, "__builtin_bfin_shl_fr1x16", BFIN_BUILTIN_SSASHIFT_1X16, -1 },
  { CODE_FOR_lshiftv2hi3, "__builtin_bfin_lshl_fr2x16", BFIN_BUILTIN_LSHIFT_2X16, -1 },
  { CODE_FOR_lshifthi3, "__builtin_bfin_lshl_fr1x16", BFIN_BUILTIN_LSHIFT_1X16, -1 },
  { CODE_FOR_ssashiftsi3, "__builtin_bfin_shl_fr1x32", BFIN_BUILTIN_SSASHIFT_1X32, -1 },

  { CODE_FOR_sminhi3, "__builtin_bfin_min_fr1x16", BFIN_BUILTIN_MIN_1X16, -1 },
  { CODE_FOR_smaxhi3, "__builtin_bfin_max_fr1x16", BFIN_BUILTIN_MAX_1X16, -1 },
  { CODE_FOR_ssaddhi3, "__builtin_bfin_add_fr1x16", BFIN_BUILTIN_SSADD_1X16, -1 },
  { CODE_FOR_sssubhi3, "__builtin_bfin_sub_fr1x16", BFIN_BUILTIN_SSSUB_1X16, -1 },

  { CODE_FOR_sminsi3, "__builtin_bfin_min_fr1x32", BFIN_BUILTIN_MIN_1X32, -1 },
  { CODE_FOR_smaxsi3, "__builtin_bfin_max_fr1x32", BFIN_BUILTIN_MAX_1X32, -1 },
  { CODE_FOR_ssaddsi3, "__builtin_bfin_add_fr1x32", BFIN_BUILTIN_SSADD_1X32, -1 },
  { CODE_FOR_sssubsi3, "__builtin_bfin_sub_fr1x32", BFIN_BUILTIN_SSSUB_1X32, -1 },

  { CODE_FOR_sminv2hi3, "__builtin_bfin_min_fr2x16", BFIN_BUILTIN_MIN_2X16, -1 },
  { CODE_FOR_smaxv2hi3, "__builtin_bfin_max_fr2x16", BFIN_BUILTIN_MAX_2X16, -1 },
  { CODE_FOR_ssaddv2hi3, "__builtin_bfin_add_fr2x16", BFIN_BUILTIN_SSADD_2X16, -1 },
  { CODE_FOR_sssubv2hi3, "__builtin_bfin_sub_fr2x16", BFIN_BUILTIN_SSSUB_2X16, -1 },
  { CODE_FOR_ssaddsubv2hi3, "__builtin_bfin_dspaddsubsat", BFIN_BUILTIN_SSADDSUB_2X16, -1 },
  { CODE_FOR_sssubaddv2hi3, "__builtin_bfin_dspsubaddsat", BFIN_BUILTIN_SSSUBADD_2X16, -1 },

  { CODE_FOR_flag_mulhisi, "__builtin_bfin_mult_fr1x32", BFIN_BUILTIN_MULT_1X32, MACFLAG_NONE },
  { CODE_FOR_flag_mulhi, "__builtin_bfin_mult_fr1x16", BFIN_BUILTIN_MULT_1X16, MACFLAG_T },
  { CODE_FOR_flag_mulhi, "__builtin_bfin_multr_fr1x16", BFIN_BUILTIN_MULTR_1X16, MACFLAG_NONE },
  { CODE_FOR_flag_mulv2hi, "__builtin_bfin_mult_fr2x16", BFIN_BUILTIN_MULT_2X16, MACFLAG_T },
  { CODE_FOR_flag_mulv2hi, "__builtin_bfin_multr_fr2x16", BFIN_BUILTIN_MULTR_2X16, MACFLAG_NONE },

  { CODE_FOR_mulhisi_ll, "__builtin_bfin_mulhisill", BFIN_BUILTIN_MULHISILL, -1 },
  { CODE_FOR_mulhisi_lh, "__builtin_bfin_mulhisilh", BFIN_BUILTIN_MULHISILH, -1 },
  { CODE_FOR_mulhisi_hl, "__builtin_bfin_mulhisihl", BFIN_BUILTIN_MULHISIHL, -1 },
  { CODE_FOR_mulhisi_hh, "__builtin_bfin_mulhisihh", BFIN_BUILTIN_MULHISIHH, -1 }

};

static const struct builtin_description bdesc_1arg[] =
{
  { CODE_FOR_loadbytes, "__builtin_bfin_loadbytes", BFIN_BUILTIN_LOADBYTES, 0 },

  { CODE_FOR_ones, "__builtin_bfin_ones", BFIN_BUILTIN_ONES, 0 },

  { CODE_FOR_signbitshi2, "__builtin_bfin_norm_fr1x16", BFIN_BUILTIN_NORM_1X16, 0 },
  { CODE_FOR_ssneghi2, "__builtin_bfin_negate_fr1x16", BFIN_BUILTIN_NEG_1X16, 0 },
  { CODE_FOR_abshi2, "__builtin_bfin_abs_fr1x16", BFIN_BUILTIN_ABS_1X16, 0 },

  { CODE_FOR_signbitssi2, "__builtin_bfin_norm_fr1x32", BFIN_BUILTIN_NORM_1X32, 0 },
  { CODE_FOR_ssroundsi2, "__builtin_bfin_round_fr1x32", BFIN_BUILTIN_ROUND_1X32, 0 },
  { CODE_FOR_ssnegsi2, "__builtin_bfin_negate_fr1x32", BFIN_BUILTIN_NEG_1X32, 0 },
  { CODE_FOR_ssabssi2, "__builtin_bfin_abs_fr1x32", BFIN_BUILTIN_ABS_1X32, 0 },

  { CODE_FOR_movv2hi_hi_low, "__builtin_bfin_extract_lo", BFIN_BUILTIN_EXTRACTLO, 0 },
  { CODE_FOR_movv2hi_hi_high, "__builtin_bfin_extract_hi", BFIN_BUILTIN_EXTRACTHI, 0 },
  { CODE_FOR_ssnegv2hi2, "__builtin_bfin_negate_fr2x16", BFIN_BUILTIN_NEG_2X16, 0 },
  { CODE_FOR_ssabsv2hi2, "__builtin_bfin_abs_fr2x16", BFIN_BUILTIN_ABS_2X16, 0 }
};

/* Errors in the source file can cause expand_expr to return const0_rtx
   where we expect a vector.  To avoid crashing, use one of the vector
   clear instructions.  */
static rtx
safe_vector_operand (rtx x, enum machine_mode mode)
{
  if (x != const0_rtx)
    return x;
  x = gen_reg_rtx (SImode);

  emit_insn (gen_movsi (x, CONST0_RTX (SImode)));
  return gen_lowpart (mode, x);
}

/* Subroutine of bfin_expand_builtin to take care of binop insns.  MACFLAG is -1
   if this is a normal binary op, or one of the MACFLAG_xxx constants.  */

static rtx
bfin_expand_binop_builtin (enum insn_code icode, tree exp, rtx target,
                           int macflag)
{
  rtx pat;
  tree arg0 = CALL_EXPR_ARG (exp, 0);
  tree arg1 = CALL_EXPR_ARG (exp, 1);
  rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
  enum machine_mode op0mode = GET_MODE (op0);
  enum machine_mode op1mode = GET_MODE (op1);
  enum machine_mode tmode = insn_data[icode].operand[0].mode;
  enum machine_mode mode0 = insn_data[icode].operand[1].mode;
  enum machine_mode mode1 = insn_data[icode].operand[2].mode;

  if (VECTOR_MODE_P (mode0))
    op0 = safe_vector_operand (op0, mode0);
  if (VECTOR_MODE_P (mode1))
    op1 = safe_vector_operand (op1, mode1);

  if (! target
      || GET_MODE (target) != tmode
      || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
    target = gen_reg_rtx (tmode);

  if ((op0mode == SImode || op0mode == VOIDmode) && mode0 == HImode)
    {
      op0mode = HImode;
      op0 = gen_lowpart (HImode, op0);
    }
  if ((op1mode == SImode || op1mode == VOIDmode) && mode1 == HImode)
    {
      op1mode = HImode;
      op1 = gen_lowpart (HImode, op1);
    }
  /* In case the insn wants input operands in modes different from
     the result, abort.  */
  gcc_assert ((op0mode == mode0 || op0mode == VOIDmode)
              && (op1mode == mode1 || op1mode == VOIDmode));

  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
    op0 = copy_to_mode_reg (mode0, op0);
  if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
    op1 = copy_to_mode_reg (mode1, op1);

  if (macflag == -1)
    pat = GEN_FCN (icode) (target, op0, op1);
  else
    pat = GEN_FCN (icode) (target, op0, op1, GEN_INT (macflag));
  if (! pat)
    return 0;

  emit_insn (pat);
  return target;
}
6276
 
6277
/* Subroutine of bfin_expand_builtin to take care of unop insns.  */
6278
 
6279
static rtx
6280
bfin_expand_unop_builtin (enum insn_code icode, tree exp,
6281
                          rtx target)
6282
{
6283
  rtx pat;
6284
  tree arg0 = CALL_EXPR_ARG (exp, 0);
6285
  rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6286
  enum machine_mode op0mode = GET_MODE (op0);
6287
  enum machine_mode tmode = insn_data[icode].operand[0].mode;
6288
  enum machine_mode mode0 = insn_data[icode].operand[1].mode;
6289
 
6290
  if (! target
6291
      || GET_MODE (target) != tmode
6292
      || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
6293
    target = gen_reg_rtx (tmode);
6294
 
6295
  if (VECTOR_MODE_P (mode0))
6296
    op0 = safe_vector_operand (op0, mode0);
6297
 
6298
  if (op0mode == SImode && mode0 == HImode)
6299
    {
6300
      op0mode = HImode;
6301
      op0 = gen_lowpart (HImode, op0);
6302
    }
6303
  gcc_assert (op0mode == mode0 || op0mode == VOIDmode);
6304
 
6305
  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
6306
    op0 = copy_to_mode_reg (mode0, op0);
6307
 
6308
  pat = GEN_FCN (icode) (target, op0);
6309
  if (! pat)
6310
    return 0;
6311
  emit_insn (pat);
6312
  return target;
6313
}
6314
 
6315
/* Expand an expression EXP that calls a built-in function,
   with result going to TARGET if that's convenient
   (and in mode MODE if that's convenient).
   SUBTARGET may be used as the target for computing one of EXP's operands.
   IGNORE is nonzero if the value is to be ignored.  */

static rtx
bfin_expand_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
                     rtx subtarget ATTRIBUTE_UNUSED,
                     enum machine_mode mode ATTRIBUTE_UNUSED,
                     int ignore ATTRIBUTE_UNUSED)
{
  size_t i;
  enum insn_code icode;
  const struct builtin_description *d;
  tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
  unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
  tree arg0, arg1, arg2;
  rtx op0, op1, op2, accvec, pat, tmp1, tmp2, a0reg, a1reg;
  enum machine_mode tmode, mode0;

  switch (fcode)
    {
    case BFIN_BUILTIN_CSYNC:
      emit_insn (gen_csync ());
      return 0;
    case BFIN_BUILTIN_SSYNC:
      emit_insn (gen_ssync ());
      return 0;
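    /* Added commentary: CSYNC and SSYNC take no operands and yield no
       value, so they bypass the operand plumbing entirely.  User code
       just calls __builtin_bfin_csync () or __builtin_bfin_ssync () to
       get a core or system synchronization barrier; each call expands
       to the single corresponding machine instruction.  */
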
    case BFIN_BUILTIN_DIFFHL_2X16:
    case BFIN_BUILTIN_DIFFLH_2X16:
    case BFIN_BUILTIN_SUM_2X16:
      arg0 = CALL_EXPR_ARG (exp, 0);
      op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
      icode = (fcode == BFIN_BUILTIN_DIFFHL_2X16 ? CODE_FOR_subhilov2hi3
               : fcode == BFIN_BUILTIN_DIFFLH_2X16 ? CODE_FOR_sublohiv2hi3
               : CODE_FOR_ssaddhilov2hi3);
      tmode = insn_data[icode].operand[0].mode;
      mode0 = insn_data[icode].operand[1].mode;

      if (! target
          || GET_MODE (target) != tmode
          || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
        target = gen_reg_rtx (tmode);

      if (VECTOR_MODE_P (mode0))
        op0 = safe_vector_operand (op0, mode0);

      if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
        op0 = copy_to_mode_reg (mode0, op0);

      pat = GEN_FCN (icode) (target, op0, op0);
      if (! pat)
        return 0;
      emit_insn (pat);
      return target;
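    /* Added commentary: these three builtins take a single V2HI argument
       but are wired to two-input add/sub patterns, which is why GEN_FCN
       receives op0 twice: subhilov2hi3 forms the high half minus the low
       half of the same vector (DIFFHL), sublohiv2hi3 the reverse
       (DIFFLH), and ssaddhilov2hi3 their saturating sum.  */
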
    case BFIN_BUILTIN_MULT_1X32X32:
    case BFIN_BUILTIN_MULT_1X32X32NS:
      arg0 = CALL_EXPR_ARG (exp, 0);
      arg1 = CALL_EXPR_ARG (exp, 1);
      op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
      op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
      if (! target
          || !register_operand (target, SImode))
        target = gen_reg_rtx (SImode);
      if (! register_operand (op0, SImode))
        op0 = copy_to_mode_reg (SImode, op0);
      if (! register_operand (op1, SImode))
        op1 = copy_to_mode_reg (SImode, op1);

      a1reg = gen_rtx_REG (PDImode, REG_A1);
      a0reg = gen_rtx_REG (PDImode, REG_A0);
      tmp1 = gen_lowpart (V2HImode, op0);
      tmp2 = gen_lowpart (V2HImode, op1);
      emit_insn (gen_flag_macinit1hi (a1reg,
                                      gen_lowpart (HImode, op0),
                                      gen_lowpart (HImode, op1),
                                      GEN_INT (MACFLAG_FU)));
      emit_insn (gen_lshrpdi3 (a1reg, a1reg, GEN_INT (16)));

      if (fcode == BFIN_BUILTIN_MULT_1X32X32)
        emit_insn (gen_flag_mul_macv2hi_parts_acconly (a0reg, a1reg, tmp1, tmp2,
                                                       const1_rtx, const1_rtx,
                                                       const1_rtx, const0_rtx, a1reg,
                                                       const0_rtx, GEN_INT (MACFLAG_NONE),
                                                       GEN_INT (MACFLAG_M)));
      else
        {
          /* For saturating multiplication, there's exactly one special case
             to be handled: multiplying the smallest negative value by
             itself.  Due to shift correction in fractional multiplies, this
             can overflow.  Iff this happens, OP2 will contain 1, which, when
             added in 32 bits to the smallest negative, wraps to the largest
             positive, which is the result we want.  */
          op2 = gen_reg_rtx (V2HImode);
          emit_insn (gen_packv2hi (op2, tmp1, tmp2, const0_rtx, const0_rtx));
          emit_insn (gen_movsibi (gen_rtx_REG (BImode, REG_CC),
                                  gen_lowpart (SImode, op2)));
          emit_insn (gen_flag_mul_macv2hi_parts_acconly_andcc0 (a0reg, a1reg, tmp1, tmp2,
                                                                const1_rtx, const1_rtx,
                                                                const1_rtx, const0_rtx, a1reg,
                                                                const0_rtx, GEN_INT (MACFLAG_NONE),
                                                                GEN_INT (MACFLAG_M)));
          op2 = gen_reg_rtx (SImode);
          emit_insn (gen_movbisi (op2, gen_rtx_REG (BImode, REG_CC)));
        }
      emit_insn (gen_flag_machi_parts_acconly (a1reg, tmp2, tmp1,
                                               const1_rtx, const0_rtx,
                                               a1reg, const0_rtx, GEN_INT (MACFLAG_M)));
      emit_insn (gen_ashrpdi3 (a1reg, a1reg, GEN_INT (15)));
      emit_insn (gen_sum_of_accumulators (target, a0reg, a0reg, a1reg));
      if (fcode == BFIN_BUILTIN_MULT_1X32X32NS)
        emit_insn (gen_addsi3 (target, target, op2));
      return target;
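    /* Added commentary on the sequence above, under the usual 1.31
       fractional reading: the 32x32 product is assembled from 16-bit
       partial products, with the unsigned low*low term computed first
       (MACFLAG_FU) and shifted right by 16, the mixed terms accumulated
       in A0/A1, and the ashrpdi3/sum_of_accumulators pair recombining
       everything.  Concretely, 0x80000000 encodes -1.0, and
       -1.0 * -1.0 = +1.0 is the one unrepresentable product the comment
       above describes; in the BFIN_BUILTIN_MULT_1X32X32NS path op2 reads
       back as 1 exactly in that case, so the closing gen_addsi3 applies
       the wrap-around correction that yields the largest positive
       value.  */
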
    case BFIN_BUILTIN_CPLX_MUL_16:
    case BFIN_BUILTIN_CPLX_MUL_16_S40:
      arg0 = CALL_EXPR_ARG (exp, 0);
      arg1 = CALL_EXPR_ARG (exp, 1);
      op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
      op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
      accvec = gen_reg_rtx (V2PDImode);
      icode = CODE_FOR_flag_macv2hi_parts;

      if (! target
          || GET_MODE (target) != V2HImode
          || ! (*insn_data[icode].operand[0].predicate) (target, V2HImode))
        target = gen_reg_rtx (V2HImode);
      if (! register_operand (op0, GET_MODE (op0)))
        op0 = copy_to_mode_reg (GET_MODE (op0), op0);
      if (! register_operand (op1, GET_MODE (op1)))
        op1 = copy_to_mode_reg (GET_MODE (op1), op1);

      if (fcode == BFIN_BUILTIN_CPLX_MUL_16)
        emit_insn (gen_flag_macinit1v2hi_parts (accvec, op0, op1, const0_rtx,
                                                const0_rtx, const0_rtx,
                                                const1_rtx, GEN_INT (MACFLAG_W32)));
      else
        emit_insn (gen_flag_macinit1v2hi_parts (accvec, op0, op1, const0_rtx,
                                                const0_rtx, const0_rtx,
                                                const1_rtx, GEN_INT (MACFLAG_NONE)));
      emit_insn (gen_flag_macv2hi_parts (target, op0, op1, const1_rtx,
                                         const1_rtx, const1_rtx,
                                         const0_rtx, accvec, const1_rtx, const0_rtx,
                                         GEN_INT (MACFLAG_NONE), accvec));

      return target;
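    /* Added commentary: this is the standard complex product
           re = a.re * b.re - a.im * b.im,  im = a.re * b.im + a.im * b.re
       split across the two accumulators: flag_macinit1v2hi_parts seeds
       them with one pair of partial products and flag_macv2hi_parts folds
       in the other pair while writing the rounded V2HI result.  The only
       difference between the two builtins is the MAC flag on the first
       step: MACFLAG_W32 for the 32-bit saturating default versus
       MACFLAG_NONE for the _S40 form, which keeps the full 40-bit
       accumulator width.  */
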
    case BFIN_BUILTIN_CPLX_MAC_16:
    case BFIN_BUILTIN_CPLX_MSU_16:
    case BFIN_BUILTIN_CPLX_MAC_16_S40:
    case BFIN_BUILTIN_CPLX_MSU_16_S40:
      arg0 = CALL_EXPR_ARG (exp, 0);
      arg1 = CALL_EXPR_ARG (exp, 1);
      arg2 = CALL_EXPR_ARG (exp, 2);
      op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
      op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
      op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
      accvec = gen_reg_rtx (V2PDImode);
      icode = CODE_FOR_flag_macv2hi_parts;

      if (! target
          || GET_MODE (target) != V2HImode
          || ! (*insn_data[icode].operand[0].predicate) (target, V2HImode))
        target = gen_reg_rtx (V2HImode);
      if (! register_operand (op1, GET_MODE (op1)))
        op1 = copy_to_mode_reg (GET_MODE (op1), op1);
      if (! register_operand (op2, GET_MODE (op2)))
        op2 = copy_to_mode_reg (GET_MODE (op2), op2);

      tmp1 = gen_reg_rtx (SImode);
      tmp2 = gen_reg_rtx (SImode);
      emit_insn (gen_ashlsi3 (tmp1, gen_lowpart (SImode, op0), GEN_INT (16)));
      emit_move_insn (tmp2, gen_lowpart (SImode, op0));
      emit_insn (gen_movstricthi_1 (gen_lowpart (HImode, tmp2), const0_rtx));
      emit_insn (gen_load_accumulator_pair (accvec, tmp1, tmp2));
      if (fcode == BFIN_BUILTIN_CPLX_MAC_16
          || fcode == BFIN_BUILTIN_CPLX_MSU_16)
        emit_insn (gen_flag_macv2hi_parts_acconly (accvec, op1, op2, const0_rtx,
                                                   const0_rtx, const0_rtx,
                                                   const1_rtx, accvec, const0_rtx,
                                                   const0_rtx,
                                                   GEN_INT (MACFLAG_W32)));
      else
        emit_insn (gen_flag_macv2hi_parts_acconly (accvec, op1, op2, const0_rtx,
                                                   const0_rtx, const0_rtx,
                                                   const1_rtx, accvec, const0_rtx,
                                                   const0_rtx,
                                                   GEN_INT (MACFLAG_NONE)));
      if (fcode == BFIN_BUILTIN_CPLX_MAC_16
          || fcode == BFIN_BUILTIN_CPLX_MAC_16_S40)
        {
          tmp1 = const1_rtx;
          tmp2 = const0_rtx;
        }
      else
        {
          tmp1 = const0_rtx;
          tmp2 = const1_rtx;
        }
      emit_insn (gen_flag_macv2hi_parts (target, op1, op2, const1_rtx,
                                         const1_rtx, const1_rtx,
                                         const0_rtx, accvec, tmp1, tmp2,
                                         GEN_INT (MACFLAG_NONE), accvec));

      return target;
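    /* Added commentary: op0 carries the running complex accumulator.  The
       ashlsi3/movstricthi_1 pair above splits its two 16-bit halves into
       separate SImode values so that load_accumulator_pair can preload
       A0/A1, the flag_macv2hi_parts_acconly step folds one set of partial
       products of op1 and op2 into those accumulators, and the reuse of
       tmp1/tmp2 as constant selectors afterwards is what distinguishes
       the accumulating MAC variants from the subtracting MSU variants in
       the final flag_macv2hi_parts.  */
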
    case BFIN_BUILTIN_CPLX_SQU:
      arg0 = CALL_EXPR_ARG (exp, 0);
      op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
      accvec = gen_reg_rtx (V2PDImode);
      icode = CODE_FOR_flag_mulv2hi;
      tmp1 = gen_reg_rtx (V2HImode);
      tmp2 = gen_reg_rtx (V2HImode);

      if (! target
          || GET_MODE (target) != V2HImode
          || ! (*insn_data[icode].operand[0].predicate) (target, V2HImode))
        target = gen_reg_rtx (V2HImode);
      if (! register_operand (op0, GET_MODE (op0)))
        op0 = copy_to_mode_reg (GET_MODE (op0), op0);

      emit_insn (gen_flag_mulv2hi (tmp1, op0, op0, GEN_INT (MACFLAG_NONE)));

      emit_insn (gen_flag_mulhi_parts (gen_lowpart (HImode, tmp2), op0, op0,
                                       const0_rtx, const1_rtx,
                                       GEN_INT (MACFLAG_NONE)));

      emit_insn (gen_ssaddhi3_high_parts (target, tmp2, tmp2, tmp2, const0_rtx,
                                          const0_rtx));
      emit_insn (gen_sssubhi3_low_parts (target, target, tmp1, tmp1,
                                         const0_rtx, const1_rtx));

      return target;

    default:
      break;
    }
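  /* Added commentary: the CPLX_SQU case above open-codes the complex
     square
         (a + b*i)^2 = (a*a - b*b) + (2*a*b)*i.
     flag_mulv2hi leaves the elementwise products {a*a, b*b} in tmp1,
     flag_mulhi_parts puts the cross product a*b in tmp2's low half, the
     saturating high-part add doubles that into the imaginary half of the
     result, and the low-part subtract forms a*a - b*b for the real
     half.  */
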
  for (i = 0, d = bdesc_2arg; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
    if (d->code == fcode)
      return bfin_expand_binop_builtin (d->icode, exp, target,
                                        d->macflag);

  for (i = 0, d = bdesc_1arg; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
    if (d->code == fcode)
      return bfin_expand_unop_builtin (d->icode, exp, target);

  gcc_unreachable ();
}
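/* Added commentary: everything not special-cased in the switch above is
   table-driven.  bdesc_2arg and bdesc_1arg (defined earlier in this
   file) map each remaining function code to the insn code its expander
   needs, plus a MAC flag for the two-operand forms, so reaching
   gcc_unreachable means a builtin was registered in bfin_init_builtins
   without a matching table entry or switch case.  */
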
#undef TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS bfin_init_builtins

#undef TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN bfin_expand_builtin

#undef TARGET_ASM_GLOBALIZE_LABEL
#define TARGET_ASM_GLOBALIZE_LABEL bfin_globalize_label

#undef TARGET_ASM_FILE_START
#define TARGET_ASM_FILE_START output_file_start

#undef TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE bfin_attribute_table

#undef TARGET_COMP_TYPE_ATTRIBUTES
#define TARGET_COMP_TYPE_ATTRIBUTES bfin_comp_type_attributes

#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS bfin_rtx_costs

#undef  TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST bfin_address_cost

#undef  TARGET_ASM_INTEGER
#define TARGET_ASM_INTEGER bfin_assemble_integer

#undef TARGET_MACHINE_DEPENDENT_REORG
#define TARGET_MACHINE_DEPENDENT_REORG bfin_reorg

#undef TARGET_FUNCTION_OK_FOR_SIBCALL
#define TARGET_FUNCTION_OK_FOR_SIBCALL bfin_function_ok_for_sibcall

#undef TARGET_ASM_OUTPUT_MI_THUNK
#define TARGET_ASM_OUTPUT_MI_THUNK bfin_output_mi_thunk
#undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
#define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_const_tree_hwi_hwi_const_tree_true

#undef TARGET_SCHED_ADJUST_COST
#define TARGET_SCHED_ADJUST_COST bfin_adjust_cost

#undef TARGET_SCHED_ISSUE_RATE
#define TARGET_SCHED_ISSUE_RATE bfin_issue_rate

#undef TARGET_PROMOTE_FUNCTION_MODE
#define TARGET_PROMOTE_FUNCTION_MODE default_promote_function_mode_always_promote

#undef TARGET_ARG_PARTIAL_BYTES
#define TARGET_ARG_PARTIAL_BYTES bfin_arg_partial_bytes

#undef TARGET_PASS_BY_REFERENCE
#define TARGET_PASS_BY_REFERENCE bfin_pass_by_reference

#undef TARGET_SETUP_INCOMING_VARARGS
#define TARGET_SETUP_INCOMING_VARARGS setup_incoming_varargs

#undef TARGET_STRUCT_VALUE_RTX
#define TARGET_STRUCT_VALUE_RTX bfin_struct_value_rtx

#undef TARGET_VECTOR_MODE_SUPPORTED_P
#define TARGET_VECTOR_MODE_SUPPORTED_P bfin_vector_mode_supported_p

#undef TARGET_HANDLE_OPTION
#define TARGET_HANDLE_OPTION bfin_handle_option

#undef TARGET_DEFAULT_TARGET_FLAGS
#define TARGET_DEFAULT_TARGET_FLAGS TARGET_DEFAULT

#undef TARGET_SECONDARY_RELOAD
#define TARGET_SECONDARY_RELOAD bfin_secondary_reload

#undef TARGET_DELEGITIMIZE_ADDRESS
#define TARGET_DELEGITIMIZE_ADDRESS bfin_delegitimize_address

#undef TARGET_CANNOT_FORCE_CONST_MEM
#define TARGET_CANNOT_FORCE_CONST_MEM bfin_cannot_force_const_mem

#undef TARGET_RETURN_IN_MEMORY
#define TARGET_RETURN_IN_MEMORY bfin_return_in_memory

#undef TARGET_LEGITIMATE_ADDRESS_P
#define TARGET_LEGITIMATE_ADDRESS_P     bfin_legitimate_address_p

#undef TARGET_FRAME_POINTER_REQUIRED
#define TARGET_FRAME_POINTER_REQUIRED bfin_frame_pointer_required

#undef TARGET_CAN_ELIMINATE
#define TARGET_CAN_ELIMINATE bfin_can_eliminate

#undef TARGET_ASM_TRAMPOLINE_TEMPLATE
#define TARGET_ASM_TRAMPOLINE_TEMPLATE bfin_asm_trampoline_template
#undef TARGET_TRAMPOLINE_INIT
#define TARGET_TRAMPOLINE_INIT bfin_trampoline_init

struct gcc_target targetm = TARGET_INITIALIZER;
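/* Added commentary: each #undef/#define pair above overrides one hook in
   the target vector; TARGET_INITIALIZER (from target-def.h) expands to an
   aggregate initializer that picks up these overrides and falls back to
   the documented default hooks for everything left undefined, producing
   the targetm object the rest of the compiler calls through.  */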
