OpenCores Subversion repository openrisc_me — https://opencores.org/ocsvn/openrisc_me/openrisc_me/trunk
openrisc/trunk/gnu-src/gcc-4.2.2/gcc/config/xtensa/xtensa.c (blame for rev 455; file committed in rev 38 by julius)
/* Subroutines for insn-output.c for Tensilica's Xtensa architecture.
   Copyright 2001, 2002, 2003, 2004, 2005, 2006, 2007
   Free Software Foundation, Inc.
   Contributed by Bob Wilson (bwilson@tensilica.com) at Tensilica.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "real.h"
#include "insn-config.h"
#include "conditions.h"
#include "insn-flags.h"
#include "insn-attr.h"
#include "insn-codes.h"
#include "recog.h"
#include "output.h"
#include "tree.h"
#include "expr.h"
#include "flags.h"
#include "reload.h"
#include "tm_p.h"
#include "function.h"
#include "toplev.h"
#include "optabs.h"
#include "libfuncs.h"
#include "ggc.h"
#include "target.h"
#include "target-def.h"
#include "langhooks.h"
#include "tree-gimple.h"

/* Enumeration for all of the relational tests, so that we can build
55
   arrays indexed by the test type, and not worry about the order
56
   of EQ, NE, etc.  */
57
 
58
enum internal_test
59
{
60
  ITEST_EQ,
61
  ITEST_NE,
62
  ITEST_GT,
63
  ITEST_GE,
64
  ITEST_LT,
65
  ITEST_LE,
66
  ITEST_GTU,
67
  ITEST_GEU,
68
  ITEST_LTU,
69
  ITEST_LEU,
70
  ITEST_MAX
71
};
72
 
73
/* Cached operands, and operator to compare for use in set/branch on
74
   condition codes.  */
75
rtx branch_cmp[2];
76
 
77
/* What type of branch to use.  */
78
enum cmp_type branch_type;
79
 
80
/* Array giving truth value on whether or not a given hard register
81
   can support a given mode.  */
82
char xtensa_hard_regno_mode_ok[(int) MAX_MACHINE_MODE][FIRST_PSEUDO_REGISTER];
83
 
84
/* Current frame size calculated by compute_frame_size.  */
85
unsigned xtensa_current_frame_size;
86
 
87
/* Largest block move to handle in-line.  */
88
#define LARGEST_MOVE_RATIO 15
89
 
90
/* Define the structure for the machine field in struct function.  */
91
struct machine_function GTY(())
92
{
93
  int accesses_prev_frame;
94
  bool need_a7_copy;
95
  bool vararg_a7;
96
  rtx vararg_a7_copy;
97
  rtx set_frame_ptr_insn;
98
};
99
 
100
/* Vector, indexed by hard register number, which contains 1 for a
101
   register that is allowable in a candidate for leaf function
102
   treatment.  */
103
 
104
const char xtensa_leaf_regs[FIRST_PSEUDO_REGISTER] =
105
{
106
  1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
107
  1, 1, 1,
108
  1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
109
  1
110
};
111
 
112
/* Map hard register number to register class */
113
const enum reg_class xtensa_regno_to_class[FIRST_PSEUDO_REGISTER] =
114
{
115
  RL_REGS,      SP_REG,         RL_REGS,        RL_REGS,
116
  RL_REGS,      RL_REGS,        RL_REGS,        GR_REGS,
117
  RL_REGS,      RL_REGS,        RL_REGS,        RL_REGS,
118
  RL_REGS,      RL_REGS,        RL_REGS,        RL_REGS,
119
  AR_REGS,      AR_REGS,        BR_REGS,
120
  FP_REGS,      FP_REGS,        FP_REGS,        FP_REGS,
121
  FP_REGS,      FP_REGS,        FP_REGS,        FP_REGS,
122
  FP_REGS,      FP_REGS,        FP_REGS,        FP_REGS,
123
  FP_REGS,      FP_REGS,        FP_REGS,        FP_REGS,
124
  ACC_REG,
125
};
126
 
127
/* Map register constraint character to register class.  */
128
enum reg_class xtensa_char_to_class[256] =
129
{
130
  NO_REGS,      NO_REGS,        NO_REGS,        NO_REGS,
131
  NO_REGS,      NO_REGS,        NO_REGS,        NO_REGS,
132
  NO_REGS,      NO_REGS,        NO_REGS,        NO_REGS,
133
  NO_REGS,      NO_REGS,        NO_REGS,        NO_REGS,
134
  NO_REGS,      NO_REGS,        NO_REGS,        NO_REGS,
135
  NO_REGS,      NO_REGS,        NO_REGS,        NO_REGS,
136
  NO_REGS,      NO_REGS,        NO_REGS,        NO_REGS,
137
  NO_REGS,      NO_REGS,        NO_REGS,        NO_REGS,
138
  NO_REGS,      NO_REGS,        NO_REGS,        NO_REGS,
139
  NO_REGS,      NO_REGS,        NO_REGS,        NO_REGS,
140
  NO_REGS,      NO_REGS,        NO_REGS,        NO_REGS,
141
  NO_REGS,      NO_REGS,        NO_REGS,        NO_REGS,
142
  NO_REGS,      NO_REGS,        NO_REGS,        NO_REGS,
143
  NO_REGS,      NO_REGS,        NO_REGS,        NO_REGS,
144
  NO_REGS,      NO_REGS,        NO_REGS,        NO_REGS,
145
  NO_REGS,      NO_REGS,        NO_REGS,        NO_REGS,
146
  NO_REGS,      NO_REGS,        NO_REGS,        NO_REGS,
147
  NO_REGS,      NO_REGS,        NO_REGS,        NO_REGS,
148
  NO_REGS,      NO_REGS,        NO_REGS,        NO_REGS,
149
  NO_REGS,      NO_REGS,        NO_REGS,        NO_REGS,
150
  NO_REGS,      NO_REGS,        NO_REGS,        NO_REGS,
151
  NO_REGS,      NO_REGS,        NO_REGS,        NO_REGS,
152
  NO_REGS,      NO_REGS,        NO_REGS,        NO_REGS,
153
  NO_REGS,      NO_REGS,        NO_REGS,        NO_REGS,
154
  NO_REGS,      NO_REGS,        NO_REGS,        NO_REGS,
155
  NO_REGS,      NO_REGS,        NO_REGS,        NO_REGS,
156
  NO_REGS,      NO_REGS,        NO_REGS,        NO_REGS,
157
  NO_REGS,      NO_REGS,        NO_REGS,        NO_REGS,
158
  NO_REGS,      NO_REGS,        NO_REGS,        NO_REGS,
159
  NO_REGS,      NO_REGS,        NO_REGS,        NO_REGS,
160
  NO_REGS,      NO_REGS,        NO_REGS,        NO_REGS,
161
  NO_REGS,      NO_REGS,        NO_REGS,        NO_REGS,
162
  NO_REGS,      NO_REGS,        NO_REGS,        NO_REGS,
163
  NO_REGS,      NO_REGS,        NO_REGS,        NO_REGS,
164
  NO_REGS,      NO_REGS,        NO_REGS,        NO_REGS,
165
  NO_REGS,      NO_REGS,        NO_REGS,        NO_REGS,
166
  NO_REGS,      NO_REGS,        NO_REGS,        NO_REGS,
167
  NO_REGS,      NO_REGS,        NO_REGS,        NO_REGS,
168
  NO_REGS,      NO_REGS,        NO_REGS,        NO_REGS,
169
  NO_REGS,      NO_REGS,        NO_REGS,        NO_REGS,
170
  NO_REGS,      NO_REGS,        NO_REGS,        NO_REGS,
171
  NO_REGS,      NO_REGS,        NO_REGS,        NO_REGS,
172
  NO_REGS,      NO_REGS,        NO_REGS,        NO_REGS,
173
  NO_REGS,      NO_REGS,        NO_REGS,        NO_REGS,
174
  NO_REGS,      NO_REGS,        NO_REGS,        NO_REGS,
175
  NO_REGS,      NO_REGS,        NO_REGS,        NO_REGS,
176
  NO_REGS,      NO_REGS,        NO_REGS,        NO_REGS,
177
  NO_REGS,      NO_REGS,        NO_REGS,        NO_REGS,
178
  NO_REGS,      NO_REGS,        NO_REGS,        NO_REGS,
179
  NO_REGS,      NO_REGS,        NO_REGS,        NO_REGS,
180
  NO_REGS,      NO_REGS,        NO_REGS,        NO_REGS,
181
  NO_REGS,      NO_REGS,        NO_REGS,        NO_REGS,
182
  NO_REGS,      NO_REGS,        NO_REGS,        NO_REGS,
183
  NO_REGS,      NO_REGS,        NO_REGS,        NO_REGS,
184
  NO_REGS,      NO_REGS,        NO_REGS,        NO_REGS,
185
  NO_REGS,      NO_REGS,        NO_REGS,        NO_REGS,
186
  NO_REGS,      NO_REGS,        NO_REGS,        NO_REGS,
187
  NO_REGS,      NO_REGS,        NO_REGS,        NO_REGS,
188
  NO_REGS,      NO_REGS,        NO_REGS,        NO_REGS,
189
  NO_REGS,      NO_REGS,        NO_REGS,        NO_REGS,
190
  NO_REGS,      NO_REGS,        NO_REGS,        NO_REGS,
191
  NO_REGS,      NO_REGS,        NO_REGS,        NO_REGS,
192
  NO_REGS,      NO_REGS,        NO_REGS,        NO_REGS,
193
  NO_REGS,      NO_REGS,        NO_REGS,        NO_REGS,
194
};
195
 
196
static enum internal_test map_test_to_internal_test (enum rtx_code);
197
static rtx gen_int_relational (enum rtx_code, rtx, rtx, int *);
198
static rtx gen_float_relational (enum rtx_code, rtx, rtx);
199
static rtx gen_conditional_move (rtx);
200
static rtx fixup_subreg_mem (rtx);
201
static struct machine_function * xtensa_init_machine_status (void);
202
static bool xtensa_return_in_msb (tree);
203
static void printx (FILE *, signed int);
204
static void xtensa_function_epilogue (FILE *, HOST_WIDE_INT);
205
static rtx xtensa_builtin_saveregs (void);
206
static unsigned int xtensa_multibss_section_type_flags (tree, const char *,
207
                                                        int) ATTRIBUTE_UNUSED;
208
static section *xtensa_select_rtx_section (enum machine_mode, rtx,
209
                                           unsigned HOST_WIDE_INT);
210
static bool xtensa_rtx_costs (rtx, int, int, int *);
211
static tree xtensa_build_builtin_va_list (void);
212
static bool xtensa_return_in_memory (tree, tree);
213
static tree xtensa_gimplify_va_arg_expr (tree, tree, tree *, tree *);
214
 
215
static const int reg_nonleaf_alloc_order[FIRST_PSEUDO_REGISTER] =
216
  REG_ALLOC_ORDER;
217
 
218
 
219
/* This macro generates the assembly code for function exit,
220
   on machines that need it.  If FUNCTION_EPILOGUE is not defined
221
   then individual return instructions are generated for each
222
   return statement.  Args are same as for FUNCTION_PROLOGUE.  */
223
 
224
#undef TARGET_ASM_FUNCTION_EPILOGUE
225
#define TARGET_ASM_FUNCTION_EPILOGUE xtensa_function_epilogue
226
 
227
/* These hooks specify assembly directives for creating certain kinds
228
   of integer object.  */
229
 
230
#undef TARGET_ASM_ALIGNED_SI_OP
231
#define TARGET_ASM_ALIGNED_SI_OP "\t.word\t"
232
 
233
#undef TARGET_ASM_SELECT_RTX_SECTION
234
#define TARGET_ASM_SELECT_RTX_SECTION  xtensa_select_rtx_section
235
 
236
#undef TARGET_DEFAULT_TARGET_FLAGS
237
#define TARGET_DEFAULT_TARGET_FLAGS (TARGET_DEFAULT | MASK_FUSED_MADD)
238
 
239
#undef TARGET_RTX_COSTS
240
#define TARGET_RTX_COSTS xtensa_rtx_costs
241
#undef TARGET_ADDRESS_COST
242
#define TARGET_ADDRESS_COST hook_int_rtx_0
243
 
244
#undef TARGET_BUILD_BUILTIN_VA_LIST
245
#define TARGET_BUILD_BUILTIN_VA_LIST xtensa_build_builtin_va_list
246
 
247
#undef TARGET_PROMOTE_FUNCTION_ARGS
248
#define TARGET_PROMOTE_FUNCTION_ARGS hook_bool_tree_true
249
#undef TARGET_PROMOTE_FUNCTION_RETURN
250
#define TARGET_PROMOTE_FUNCTION_RETURN hook_bool_tree_true
251
#undef TARGET_PROMOTE_PROTOTYPES
252
#define TARGET_PROMOTE_PROTOTYPES hook_bool_tree_true
253
 
254
#undef TARGET_RETURN_IN_MEMORY
255
#define TARGET_RETURN_IN_MEMORY xtensa_return_in_memory
256
#undef TARGET_SPLIT_COMPLEX_ARG
257
#define TARGET_SPLIT_COMPLEX_ARG hook_bool_tree_true
258
#undef TARGET_MUST_PASS_IN_STACK
259
#define TARGET_MUST_PASS_IN_STACK must_pass_in_stack_var_size
260
 
261
#undef TARGET_EXPAND_BUILTIN_SAVEREGS
262
#define TARGET_EXPAND_BUILTIN_SAVEREGS xtensa_builtin_saveregs
263
#undef TARGET_GIMPLIFY_VA_ARG_EXPR
264
#define TARGET_GIMPLIFY_VA_ARG_EXPR xtensa_gimplify_va_arg_expr
265
 
266
#undef TARGET_RETURN_IN_MSB
267
#define TARGET_RETURN_IN_MSB xtensa_return_in_msb
268
 
269
struct gcc_target targetm = TARGET_INITIALIZER;
270
 
271
 
272
/*
273
 * Functions to test Xtensa immediate operand validity.
274
 */
275
 
276
bool
277
xtensa_simm8 (HOST_WIDE_INT v)
278
{
279
  return v >= -128 && v <= 127;
280
}
281
 
282
 
283
bool
284
xtensa_simm8x256 (HOST_WIDE_INT v)
285
{
286
  return (v & 255) == 0 && (v >= -32768 && v <= 32512);
287
}
288
 
289
 
290
bool
291
xtensa_simm12b (HOST_WIDE_INT v)
292
{
293
  return v >= -2048 && v <= 2047;
294
}
295
 
296
 
297
static bool
298
xtensa_uimm8 (HOST_WIDE_INT v)
299
{
300
  return v >= 0 && v <= 255;
301
}
302
 
303
 
304
static bool
305
xtensa_uimm8x2 (HOST_WIDE_INT v)
306
{
307
  return (v & 1) == 0 && (v >= 0 && v <= 510);
308
}
309
 
310
 
311
static bool
312
xtensa_uimm8x4 (HOST_WIDE_INT v)
313
{
314
  return (v & 3) == 0 && (v >= 0 && v <= 1020);
315
}
316
 
317
 
318
static bool
319
xtensa_b4const (HOST_WIDE_INT v)
320
{
321
  switch (v)
322
    {
323
    case -1:
324
    case 1:
325
    case 2:
326
    case 3:
327
    case 4:
328
    case 5:
329
    case 6:
330
    case 7:
331
    case 8:
332
    case 10:
333
    case 12:
334
    case 16:
335
    case 32:
336
    case 64:
337
    case 128:
338
    case 256:
339
      return true;
340
    }
341
  return false;
342
}
343
 
344
 
345
bool
346
xtensa_b4const_or_zero (HOST_WIDE_INT v)
347
{
348
  if (v == 0)
349
    return true;
350
  return xtensa_b4const (v);
351
}
352
 
353
 
354
bool
355
xtensa_b4constu (HOST_WIDE_INT v)
356
{
357
  switch (v)
358
    {
359
    case 32768:
360
    case 65536:
361
    case 2:
362
    case 3:
363
    case 4:
364
    case 5:
365
    case 6:
366
    case 7:
367
    case 8:
368
    case 10:
369
    case 12:
370
    case 16:
371
    case 32:
372
    case 64:
373
    case 128:
374
    case 256:
375
      return true;
376
    }
377
  return false;
378
}
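/* For reference: the b4const set accepted above is
   { -1, 1, 2, 3, 4, 5, 6, 7, 8, 10, 12, 16, 32, 64, 128, 256 }, and
   b4constu is the same set with -1 and 1 replaced by 32768 and 65536.
   A comparison against any other constant (9, say) cannot use a
   branch-immediate encoding, so gen_int_relational below forces such
   a constant into a register first.  */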
379
 
380
 
381
bool
382
xtensa_mask_immediate (HOST_WIDE_INT v)
383
{
384
#define MAX_MASK_SIZE 16
385
  int mask_size;
386
 
387
  for (mask_size = 1; mask_size <= MAX_MASK_SIZE; mask_size++)
388
    {
389
      if ((v & 1) == 0)
390
        return false;
391
      v = v >> 1;
392
      if (v == 0)
393
        return true;
394
    }
395
 
396
  return false;
397
}
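/* For example, 0x1 (1 bit), 0xff (8 bits) and 0xffff (16 bits) are
   accepted as masks; 0x1ffff (17 bits), 0xfe (low bit clear) and 0
   are rejected.  In other words, the valid masks are exactly the
   values 2^n - 1 for 1 <= n <= 16.  */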
398
 
399
 
400
bool
401
xtensa_const_ok_for_letter_p (HOST_WIDE_INT v, int c)
402
{
403
  switch (c)
404
    {
405
    case 'I': return xtensa_simm12b (v);
406
    case 'J': return xtensa_simm8 (v);
407
    case 'K': return (v == 0 || xtensa_b4const (v));
408
    case 'L': return xtensa_b4constu (v);
409
    case 'M': return (v >= -32 && v <= 95);
410
    case 'N': return xtensa_simm8x256 (v);
411
    case 'O': return (v == -1 || (v >= 1 && v <= 15));
412
    case 'P': return xtensa_mask_immediate (v);
413
    default: break;
414
    }
415
  return false;
416
}
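/* These ranges presumably track the machine's immediate fields:
   'I' the 12-bit signed MOVI range, 'J' the 8-bit signed ADDI range,
   'M' the MOVI.N range, 'N' the ADDMI range (a signed 8-bit value
   shifted left by 8), 'O' the ADDI.N range, 'K'/'L' the signed and
   unsigned branch-immediate tables, and 'P' the masks EXTUI can
   extract.  The instruction names are inferred, not stated here.  */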
417
 
418
 
419
/* This is just like the standard true_regnum() function except that it
420
   works even when reg_renumber is not initialized.  */
421
 
422
int
423
xt_true_regnum (rtx x)
424
{
425
  if (GET_CODE (x) == REG)
426
    {
427
      if (reg_renumber
428
          && REGNO (x) >= FIRST_PSEUDO_REGISTER
429
          && reg_renumber[REGNO (x)] >= 0)
430
        return reg_renumber[REGNO (x)];
431
      return REGNO (x);
432
    }
433
  if (GET_CODE (x) == SUBREG)
434
    {
435
      int base = xt_true_regnum (SUBREG_REG (x));
436
      if (base >= 0 && base < FIRST_PSEUDO_REGISTER)
437
        return base + subreg_regno_offset (REGNO (SUBREG_REG (x)),
438
                                           GET_MODE (SUBREG_REG (x)),
439
                                           SUBREG_BYTE (x), GET_MODE (x));
440
    }
441
  return -1;
442
}
443
 
444
 
445
int
446
xtensa_valid_move (enum machine_mode mode, rtx *operands)
447
{
448
  /* Either the destination or source must be a register, and the
449
     MAC16 accumulator doesn't count.  */
450
 
451
  if (register_operand (operands[0], mode))
452
    {
453
      int dst_regnum = xt_true_regnum (operands[0]);
454
 
455
      /* The stack pointer can only be assigned with a MOVSP opcode.  */
456
      if (dst_regnum == STACK_POINTER_REGNUM)
457
        return (mode == SImode
458
                && register_operand (operands[1], mode)
459
                && !ACC_REG_P (xt_true_regnum (operands[1])));
460
 
461
      if (!ACC_REG_P (dst_regnum))
462
        return true;
463
    }
464
  if (register_operand (operands[1], mode))
465
    {
466
      int src_regnum = xt_true_regnum (operands[1]);
467
      if (!ACC_REG_P (src_regnum))
468
        return true;
469
    }
470
  return FALSE;
471
}
472
 
473
 
474
int
475
smalloffset_mem_p (rtx op)
476
{
477
  if (GET_CODE (op) == MEM)
478
    {
479
      rtx addr = XEXP (op, 0);
480
      if (GET_CODE (addr) == REG)
481
        return REG_OK_FOR_BASE_P (addr);
482
      if (GET_CODE (addr) == PLUS)
483
        {
484
          rtx offset = XEXP (addr, 0);
485
          HOST_WIDE_INT val;
486
          if (GET_CODE (offset) != CONST_INT)
487
            offset = XEXP (addr, 1);
488
          if (GET_CODE (offset) != CONST_INT)
489
            return FALSE;
490
 
491
          val = INTVAL (offset);
492
          return (val & 3) == 0 && (val >= 0 && val <= 60);
493
        }
494
    }
495
  return FALSE;
496
}
497
 
498
 
499
int
500
constantpool_address_p (rtx addr)
501
{
502
  rtx sym = addr;
503
 
504
  if (GET_CODE (addr) == CONST)
505
    {
506
      rtx offset;
507
 
508
      /* Only handle (PLUS (SYM, OFFSET)) form.  */
509
      addr = XEXP (addr, 0);
510
      if (GET_CODE (addr) != PLUS)
511
        return FALSE;
512
 
513
      /* Make sure the address is word aligned.  */
514
      offset = XEXP (addr, 1);
515
      if ((GET_CODE (offset) != CONST_INT)
516
          || ((INTVAL (offset) & 3) != 0))
517
        return FALSE;
518
 
519
      sym = XEXP (addr, 0);
520
    }
521
 
522
  if ((GET_CODE (sym) == SYMBOL_REF)
523
      && CONSTANT_POOL_ADDRESS_P (sym))
524
    return TRUE;
525
  return FALSE;
526
}
527
 
528
 
529
int
530
constantpool_mem_p (rtx op)
531
{
532
  if (GET_CODE (op) == SUBREG)
533
    op = SUBREG_REG (op);
534
  if (GET_CODE (op) == MEM)
535
    return constantpool_address_p (XEXP (op, 0));
536
  return FALSE;
537
}
538
 
539
 
540
void
541
xtensa_extend_reg (rtx dst, rtx src)
542
{
543
  rtx temp = gen_reg_rtx (SImode);
544
  rtx shift = GEN_INT (BITS_PER_WORD - GET_MODE_BITSIZE (GET_MODE (src)));
545
 
546
  /* Generate paradoxical subregs as needed so that the modes match.  */
547
  src = simplify_gen_subreg (SImode, src, GET_MODE (src), 0);
548
  dst = simplify_gen_subreg (SImode, dst, GET_MODE (dst), 0);
549
 
550
  emit_insn (gen_ashlsi3 (temp, src, shift));
551
  emit_insn (gen_ashrsi3 (dst, temp, shift));
552
}
553
 
554
 
555
bool
556
xtensa_mem_offset (unsigned v, enum machine_mode mode)
557
{
558
  switch (mode)
559
    {
560
    case BLKmode:
561
      /* Handle the worst case for block moves.  See xtensa_expand_block_move
562
         where we emit an optimized block move operation if the block can be
563
         moved in < "move_ratio" pieces.  The worst case is when the block is
564
         aligned but has a size of (3 mod 4) (does this happen?) so that the
565
         last piece requires a byte load/store.  */
566
      return (xtensa_uimm8 (v)
567
              && xtensa_uimm8 (v + MOVE_MAX * LARGEST_MOVE_RATIO));
568
 
569
    case QImode:
570
      return xtensa_uimm8 (v);
571
 
572
    case HImode:
573
      return xtensa_uimm8x2 (v);
574
 
575
    case DFmode:
576
      return (xtensa_uimm8x4 (v) && xtensa_uimm8x4 (v + 4));
577
 
578
    default:
579
      break;
580
    }
581
 
582
  return xtensa_uimm8x4 (v);
583
}
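/* Worked example for the BLKmode case above: with MOVE_MAX of 4 (the
   usual value for this port) and LARGEST_MOVE_RATIO of 15, the check
   requires both V and V + 60 to be valid unsigned 8-bit offsets,
   i.e. V <= 195, so every piece of an in-line block move emitted by
   xtensa_expand_block_move still has a reachable offset.  */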
584
 
585
 
586
bool
587
xtensa_extra_constraint (rtx op, int c)
588
{
589
  /* Allow pseudo registers during reload.  */
590
  if (GET_CODE (op) != MEM)
591
    return (c >= 'R' && c <= 'U'
592
            && reload_in_progress && GET_CODE (op) == REG
593
            && REGNO (op) >= FIRST_PSEUDO_REGISTER);
594
 
595
  switch (c)
596
    {
597
    case 'R': return smalloffset_mem_p (op);
598
    case 'T': return !TARGET_CONST16 && constantpool_mem_p (op);
599
    case 'U': return !constantpool_mem_p (op);
600
    default: break;
601
    }
602
  return false;
603
}
604
 
605
 
606
/* Make normal rtx_code into something we can index from an array.  */
607
 
608
static enum internal_test
609
map_test_to_internal_test (enum rtx_code test_code)
610
{
611
  enum internal_test test = ITEST_MAX;
612
 
613
  switch (test_code)
614
    {
615
    default:                    break;
616
    case EQ:  test = ITEST_EQ;  break;
617
    case NE:  test = ITEST_NE;  break;
618
    case GT:  test = ITEST_GT;  break;
619
    case GE:  test = ITEST_GE;  break;
620
    case LT:  test = ITEST_LT;  break;
621
    case LE:  test = ITEST_LE;  break;
622
    case GTU: test = ITEST_GTU; break;
623
    case GEU: test = ITEST_GEU; break;
624
    case LTU: test = ITEST_LTU; break;
625
    case LEU: test = ITEST_LEU; break;
626
    }
627
 
628
  return test;
629
}
630
 
631
 
632
/* Generate the code to compare two integer values.  The return value is
633
   the comparison expression.  */
634
 
635
static rtx
636
gen_int_relational (enum rtx_code test_code, /* relational test (EQ, etc) */
637
                    rtx cmp0, /* first operand to compare */
638
                    rtx cmp1, /* second operand to compare */
639
                    int *p_invert /* whether branch needs to reverse test */)
640
{
641
  struct cmp_info
642
  {
643
    enum rtx_code test_code;    /* test code to use in insn */
644
    bool (*const_range_p) (HOST_WIDE_INT); /* range check function */
645
    int const_add;              /* constant to add (convert LE -> LT) */
646
    int reverse_regs;           /* reverse registers in test */
647
    int invert_const;           /* != 0 if invert value if cmp1 is constant */
648
    int invert_reg;             /* != 0 if invert value if cmp1 is register */
649
    int unsignedp;              /* != 0 for unsigned comparisons.  */
650
  };
651
 
652
  static struct cmp_info info[ (int)ITEST_MAX ] = {
653
 
654
    { EQ,       xtensa_b4const_or_zero, 0, 0, 0, 0, 0 },     /* EQ  */
655
    { NE,       xtensa_b4const_or_zero, 0, 0, 0, 0, 0 },     /* NE  */
656
 
657
    { LT,       xtensa_b4const_or_zero, 1, 1, 1, 0, 0 },  /* GT  */
658
    { GE,       xtensa_b4const_or_zero, 0, 0, 0, 0, 0 },     /* GE  */
659
    { LT,       xtensa_b4const_or_zero, 0, 0, 0, 0, 0 },     /* LT  */
660
    { GE,       xtensa_b4const_or_zero, 1, 1, 1, 0, 0 },  /* LE  */
661
 
662
    { LTU,      xtensa_b4constu,        1, 1, 1, 0, 1 }, /* GTU */
663
    { GEU,      xtensa_b4constu,        0, 0, 0, 0, 1 },    /* GEU */
664
    { LTU,      xtensa_b4constu,        0, 0, 0, 0, 1 },    /* LTU */
665
    { GEU,      xtensa_b4constu,        1, 1, 1, 0, 1 }, /* LEU */
666
  };
667
 
668
  enum internal_test test;
669
  enum machine_mode mode;
670
  struct cmp_info *p_info;
671
 
672
  test = map_test_to_internal_test (test_code);
673
  gcc_assert (test != ITEST_MAX);
674
 
675
  p_info = &info[ (int)test ];
676
 
677
  mode = GET_MODE (cmp0);
678
  if (mode == VOIDmode)
679
    mode = GET_MODE (cmp1);
680
 
681
  /* Make sure we can handle any constants given to us.  */
682
  if (GET_CODE (cmp1) == CONST_INT)
683
    {
684
      HOST_WIDE_INT value = INTVAL (cmp1);
685
      unsigned HOST_WIDE_INT uvalue = (unsigned HOST_WIDE_INT)value;
686
 
687
      /* If the immediate overflows or does not fit in the immediate field,
         spill it to a register.  */
689
 
690
      if ((p_info->unsignedp ?
691
           (uvalue + p_info->const_add > uvalue) :
692
           (value + p_info->const_add > value)) != (p_info->const_add > 0))
693
        {
694
          cmp1 = force_reg (mode, cmp1);
695
        }
696
      else if (!(p_info->const_range_p) (value + p_info->const_add))
697
        {
698
          cmp1 = force_reg (mode, cmp1);
699
        }
700
    }
701
  else if ((GET_CODE (cmp1) != REG) && (GET_CODE (cmp1) != SUBREG))
702
    {
703
      cmp1 = force_reg (mode, cmp1);
704
    }
705
 
706
  /* See if we need to invert the result.  */
707
  *p_invert = ((GET_CODE (cmp1) == CONST_INT)
708
               ? p_info->invert_const
709
               : p_info->invert_reg);
710
 
711
  /* Comparison to constants, may involve adding 1 to change a LT into LE.
712
     Comparison between two registers, may involve switching operands.  */
713
  if (GET_CODE (cmp1) == CONST_INT)
714
    {
715
      if (p_info->const_add != 0)
716
        cmp1 = GEN_INT (INTVAL (cmp1) + p_info->const_add);
717
 
718
    }
719
  else if (p_info->reverse_regs)
720
    {
721
      rtx temp = cmp0;
722
      cmp0 = cmp1;
723
      cmp1 = temp;
724
    }
725
 
726
  return gen_rtx_fmt_ee (p_info->test_code, VOIDmode, cmp0, cmp1);
727
}
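/* Example of the table above: "x > 7" uses the GT row, which selects
   LT with const_add 1 and invert_const, so the caller branches on the
   inverse of (LT x 8); "x > y" with a register operand instead sets
   reverse_regs, producing (LT y x) directly.  Only EQ, NE, GE, LT and
   their unsigned counterparts ever reach the branch patterns.  */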
728
 
729
 
730
/* Generate the code to compare two float values.  The return value is
731
   the comparison expression.  */
732
 
733
static rtx
734
gen_float_relational (enum rtx_code test_code, /* relational test (EQ, etc) */
735
                      rtx cmp0, /* first operand to compare */
736
                      rtx cmp1 /* second operand to compare */)
737
{
738
  rtx (*gen_fn) (rtx, rtx, rtx);
739
  rtx brtmp;
740
  int reverse_regs, invert;
741
 
742
  switch (test_code)
743
    {
744
    case EQ: reverse_regs = 0; invert = 0; gen_fn = gen_seq_sf; break;
745
    case NE: reverse_regs = 0; invert = 1; gen_fn = gen_seq_sf; break;
746
    case LE: reverse_regs = 0; invert = 0; gen_fn = gen_sle_sf; break;
747
    case GT: reverse_regs = 1; invert = 0; gen_fn = gen_slt_sf; break;
748
    case LT: reverse_regs = 0; invert = 0; gen_fn = gen_slt_sf; break;
749
    case GE: reverse_regs = 1; invert = 0; gen_fn = gen_sle_sf; break;
750
    default:
751
      fatal_insn ("bad test", gen_rtx_fmt_ee (test_code, VOIDmode, cmp0, cmp1));
752
      reverse_regs = 0; invert = 0; gen_fn = 0; /* avoid compiler warnings */
753
    }
754
 
755
  if (reverse_regs)
756
    {
757
      rtx temp = cmp0;
758
      cmp0 = cmp1;
759
      cmp1 = temp;
760
    }
761
 
762
  brtmp = gen_rtx_REG (CCmode, FPCC_REGNUM);
763
  emit_insn (gen_fn (brtmp, cmp0, cmp1));
764
 
765
  return gen_rtx_fmt_ee (invert ? EQ : NE, VOIDmode, brtmp, const0_rtx);
766
}
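/* Only EQ, LT and LE have their own patterns here (gen_seq_sf,
   gen_slt_sf, gen_sle_sf), each setting the boolean register
   FPCC_REGNUM; NE is synthesized by emitting the EQ compare and
   branching on the boolean being zero, while GT and GE simply swap
   the operands of LT and LE.  */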
767
 
768
 
769
void
770
xtensa_expand_conditional_branch (rtx *operands, enum rtx_code test_code)
771
{
772
  enum cmp_type type = branch_type;
773
  rtx cmp0 = branch_cmp[0];
774
  rtx cmp1 = branch_cmp[1];
775
  rtx cmp;
776
  int invert;
777
  rtx label1, label2;
778
 
779
  switch (type)
780
    {
781
    case CMP_DF:
782
    default:
783
      fatal_insn ("bad test", gen_rtx_fmt_ee (test_code, VOIDmode, cmp0, cmp1));
784
 
785
    case CMP_SI:
786
      invert = FALSE;
787
      cmp = gen_int_relational (test_code, cmp0, cmp1, &invert);
788
      break;
789
 
790
    case CMP_SF:
791
      if (!TARGET_HARD_FLOAT)
792
        fatal_insn ("bad test", gen_rtx_fmt_ee (test_code, VOIDmode, cmp0, cmp1));
793
      invert = FALSE;
794
      cmp = gen_float_relational (test_code, cmp0, cmp1);
795
      break;
796
    }
797
 
798
  /* Generate the branch.  */
799
 
800
  label1 = gen_rtx_LABEL_REF (VOIDmode, operands[0]);
801
  label2 = pc_rtx;
802
 
803
  if (invert)
804
    {
805
      label2 = label1;
806
      label1 = pc_rtx;
807
    }
808
 
809
  emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx,
810
                               gen_rtx_IF_THEN_ELSE (VOIDmode, cmp,
811
                                                     label1,
812
                                                     label2)));
813
}
814
 
815
 
816
static rtx
817
gen_conditional_move (rtx cmp)
818
{
819
  enum rtx_code code = GET_CODE (cmp);
820
  rtx op0 = branch_cmp[0];
821
  rtx op1 = branch_cmp[1];
822
 
823
  if (branch_type == CMP_SI)
824
    {
825
      /* Jump optimization calls get_condition() which canonicalizes
826
         comparisons like (GE x <const>) to (GT x <const-1>).
827
         Transform those comparisons back to GE, since that is the
828
         comparison supported in Xtensa.  We shouldn't have to
829
         transform <LE x const> comparisons, because neither
830
         xtensa_expand_conditional_branch() nor get_condition() will
831
         produce them.  */
832
 
833
      if ((code == GT) && (op1 == constm1_rtx))
834
        {
835
          code = GE;
836
          op1 = const0_rtx;
837
        }
838
      cmp = gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
839
 
840
      if (boolean_operator (cmp, VOIDmode))
841
        {
842
          /* Swap the operands to make const0 second.  */
843
          if (op0 == const0_rtx)
844
            {
845
              op0 = op1;
846
              op1 = const0_rtx;
847
            }
848
 
849
          /* If not comparing against zero, emit a comparison (subtract).  */
850
          if (op1 != const0_rtx)
851
            {
852
              op0 = expand_binop (SImode, sub_optab, op0, op1,
853
                                  0, 0, OPTAB_LIB_WIDEN);
854
              op1 = const0_rtx;
855
            }
856
        }
857
      else if (branch_operator (cmp, VOIDmode))
858
        {
859
          /* Swap the operands to make const0 second.  */
860
          if (op0 == const0_rtx)
861
            {
862
              op0 = op1;
863
              op1 = const0_rtx;
864
 
865
              switch (code)
866
                {
867
                case LT: code = GE; break;
868
                case GE: code = LT; break;
869
                default: gcc_unreachable ();
870
                }
871
            }
872
 
873
          if (op1 != const0_rtx)
874
            return 0;
875
        }
876
      else
877
        return 0;
878
 
879
      return gen_rtx_fmt_ee (code, VOIDmode, op0, op1);
880
    }
881
 
882
  if (TARGET_HARD_FLOAT && (branch_type == CMP_SF))
883
    return gen_float_relational (code, op0, op1);
884
 
885
  return 0;
886
}
887
 
888
 
889
int
890
xtensa_expand_conditional_move (rtx *operands, int isflt)
891
{
892
  rtx cmp;
893
  rtx (*gen_fn) (rtx, rtx, rtx, rtx, rtx);
894
 
895
  if (!(cmp = gen_conditional_move (operands[1])))
896
    return 0;
897
 
898
  if (isflt)
899
    gen_fn = (branch_type == CMP_SI
900
              ? gen_movsfcc_internal0
901
              : gen_movsfcc_internal1);
902
  else
903
    gen_fn = (branch_type == CMP_SI
904
              ? gen_movsicc_internal0
905
              : gen_movsicc_internal1);
906
 
907
  emit_insn (gen_fn (operands[0], XEXP (cmp, 0),
908
                     operands[2], operands[3], cmp));
909
  return 1;
910
}
911
 
912
 
913
int
914
xtensa_expand_scc (rtx *operands)
915
{
916
  rtx dest = operands[0];
917
  rtx cmp = operands[1];
918
  rtx one_tmp, zero_tmp;
919
  rtx (*gen_fn) (rtx, rtx, rtx, rtx, rtx);
920
 
921
  if (!(cmp = gen_conditional_move (cmp)))
922
    return 0;
923
 
924
  one_tmp = gen_reg_rtx (SImode);
925
  zero_tmp = gen_reg_rtx (SImode);
926
  emit_insn (gen_movsi (one_tmp, const_true_rtx));
927
  emit_insn (gen_movsi (zero_tmp, const0_rtx));
928
 
929
  gen_fn = (branch_type == CMP_SI
930
            ? gen_movsicc_internal0
931
            : gen_movsicc_internal1);
932
  emit_insn (gen_fn (dest, XEXP (cmp, 0), one_tmp, zero_tmp, cmp));
933
  return 1;
934
}
935
 
936
 
937
/* Split OP[1] into OP[2,3] and likewise for OP[0] into OP[0,1].  MODE is
938
   for the output, i.e., the input operands are twice as big as MODE.  */
939
 
940
void
941
xtensa_split_operand_pair (rtx operands[4], enum machine_mode mode)
942
{
943
  switch (GET_CODE (operands[1]))
944
    {
945
    case REG:
946
      operands[3] = gen_rtx_REG (mode, REGNO (operands[1]) + 1);
947
      operands[2] = gen_rtx_REG (mode, REGNO (operands[1]));
948
      break;
949
 
950
    case MEM:
951
      operands[3] = adjust_address (operands[1], mode, GET_MODE_SIZE (mode));
952
      operands[2] = adjust_address (operands[1], mode, 0);
953
      break;
954
 
955
    case CONST_INT:
956
    case CONST_DOUBLE:
957
      split_double (operands[1], &operands[2], &operands[3]);
958
      break;
959
 
960
    default:
961
      gcc_unreachable ();
962
    }
963
 
964
  switch (GET_CODE (operands[0]))
965
    {
966
    case REG:
967
      operands[1] = gen_rtx_REG (mode, REGNO (operands[0]) + 1);
968
      operands[0] = gen_rtx_REG (mode, REGNO (operands[0]));
969
      break;
970
 
971
    case MEM:
972
      operands[1] = adjust_address (operands[0], mode, GET_MODE_SIZE (mode));
973
      operands[0] = adjust_address (operands[0], mode, 0);
974
      break;
975
 
976
    default:
977
      gcc_unreachable ();
978
    }
979
}
980
 
981
 
982
/* Emit insns to move operands[1] into operands[0].
983
   Return 1 if we have written out everything that needs to be done to
984
   do the move.  Otherwise, return 0 and the caller will emit the move
985
   normally.  */
986
 
987
int
988
xtensa_emit_move_sequence (rtx *operands, enum machine_mode mode)
989
{
990
  if (CONSTANT_P (operands[1])
991
      && (GET_CODE (operands[1]) != CONST_INT
992
          || !xtensa_simm12b (INTVAL (operands[1]))))
993
    {
994
      if (!TARGET_CONST16)
995
        operands[1] = force_const_mem (SImode, operands[1]);
996
 
997
      /* PC-relative loads are always SImode, and CONST16 is only
998
         supported in the movsi pattern, so add a SUBREG for any other
999
         (smaller) mode.  */
1000
 
1001
      if (mode != SImode)
1002
        {
1003
          if (register_operand (operands[0], mode))
1004
            {
1005
              operands[0] = simplify_gen_subreg (SImode, operands[0], mode, 0);
1006
              emit_move_insn (operands[0], operands[1]);
1007
              return 1;
1008
            }
1009
          else
1010
            {
1011
              operands[1] = force_reg (SImode, operands[1]);
1012
              operands[1] = gen_lowpart_SUBREG (mode, operands[1]);
1013
            }
1014
        }
1015
    }
1016
 
1017
  if (!(reload_in_progress | reload_completed)
1018
      && !xtensa_valid_move (mode, operands))
1019
    operands[1] = force_reg (mode, operands[1]);
1020
 
1021
  operands[1] = xtensa_copy_incoming_a7 (operands[1]);
1022
 
1023
  /* During reload we don't want to emit (subreg:X (mem:Y)) since that
1024
     instruction won't be recognized after reload, so we remove the
1025
     subreg and adjust mem accordingly.  */
1026
  if (reload_in_progress)
1027
    {
1028
      operands[0] = fixup_subreg_mem (operands[0]);
1029
      operands[1] = fixup_subreg_mem (operands[1]);
1030
    }
1031
  return 0;
1032
}
1033
 
1034
 
1035
static rtx
1036
fixup_subreg_mem (rtx x)
1037
{
1038
  if (GET_CODE (x) == SUBREG
1039
      && GET_CODE (SUBREG_REG (x)) == REG
1040
      && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER)
1041
    {
1042
      rtx temp =
1043
        gen_rtx_SUBREG (GET_MODE (x),
1044
                        reg_equiv_mem [REGNO (SUBREG_REG (x))],
1045
                        SUBREG_BYTE (x));
1046
      x = alter_subreg (&temp);
1047
    }
1048
  return x;
1049
}
1050
 
1051
 
1052
/* Check if an incoming argument in a7 is expected to be used soon and
1053
   if OPND is a register or register pair that includes a7.  If so,
1054
   create a new pseudo and copy a7 into that pseudo at the very
1055
   beginning of the function, followed by the special "set_frame_ptr"
1056
   unspec_volatile insn.  The return value is either the original
1057
   operand, if it is not a7, or the new pseudo containing a copy of
1058
   the incoming argument.  This is necessary because the register
1059
   allocator will ignore conflicts with a7 and may either assign some
1060
   other pseudo to a7 or use a7 as the hard_frame_pointer, clobbering
1061
   the incoming argument in a7.  By copying the argument out of a7 as
1062
   the very first thing, and then immediately following that with an
1063
   unspec_volatile to keep the scheduler away, we should avoid any
1064
   problems.  Putting the set_frame_ptr insn at the beginning, with
1065
   only the a7 copy before it, also makes it easier for the prologue
1066
   expander to initialize the frame pointer after the a7 copy and to
1067
   fix up the a7 copy to use the stack pointer instead of the frame
1068
   pointer.  */
1069
 
1070
rtx
1071
xtensa_copy_incoming_a7 (rtx opnd)
1072
{
1073
  rtx entry_insns = 0;
1074
  rtx reg, tmp;
1075
  enum machine_mode mode;
1076
 
1077
  if (!cfun->machine->need_a7_copy)
1078
    return opnd;
1079
 
1080
  /* This function should never be called again once a7 has been copied.  */
1081
  gcc_assert (!cfun->machine->set_frame_ptr_insn);
1082
 
1083
  mode = GET_MODE (opnd);
1084
 
1085
  /* The operand using a7 may come in a later instruction, so just return
1086
     the original operand if it doesn't use a7.  */
1087
  reg = opnd;
1088
  if (GET_CODE (reg) == SUBREG)
1089
    {
1090
      gcc_assert (SUBREG_BYTE (reg) == 0);
1091
      reg = SUBREG_REG (reg);
1092
    }
1093
  if (GET_CODE (reg) != REG
1094
      || REGNO (reg) > A7_REG
1095
      || REGNO (reg) + HARD_REGNO_NREGS (A7_REG, mode) <= A7_REG)
1096
    return opnd;
1097
 
1098
  /* 1-word args will always be in a7; 2-word args in a6/a7.  */
1099
  gcc_assert (REGNO (reg) + HARD_REGNO_NREGS (A7_REG, mode) - 1 == A7_REG);
1100
 
1101
  cfun->machine->need_a7_copy = false;
1102
 
1103
  /* Copy a7 to a new pseudo at the function entry.  Use gen_raw_REG to
1104
     create the REG for a7 so that hard_frame_pointer_rtx is not used.  */
1105
 
1106
  start_sequence ();
1107
  tmp = gen_reg_rtx (mode);
1108
 
1109
  switch (mode)
1110
    {
1111
    case DFmode:
1112
    case DImode:
1113
      emit_insn (gen_movsi_internal (gen_rtx_SUBREG (SImode, tmp, 0),
1114
                                     gen_rtx_REG (SImode, A7_REG - 1)));
1115
      emit_insn (gen_movsi_internal (gen_rtx_SUBREG (SImode, tmp, 4),
1116
                                     gen_raw_REG (SImode, A7_REG)));
1117
      break;
1118
    case SFmode:
1119
      emit_insn (gen_movsf_internal (tmp, gen_raw_REG (mode, A7_REG)));
1120
      break;
1121
    case SImode:
1122
      emit_insn (gen_movsi_internal (tmp, gen_raw_REG (mode, A7_REG)));
1123
      break;
1124
    case HImode:
1125
      emit_insn (gen_movhi_internal (tmp, gen_raw_REG (mode, A7_REG)));
1126
      break;
1127
    case QImode:
1128
      emit_insn (gen_movqi_internal (tmp, gen_raw_REG (mode, A7_REG)));
1129
      break;
1130
    default:
1131
      gcc_unreachable ();
1132
    }
1133
 
1134
  cfun->machine->set_frame_ptr_insn = emit_insn (gen_set_frame_ptr ());
1135
  entry_insns = get_insns ();
1136
  end_sequence ();
1137
 
1138
  if (cfun->machine->vararg_a7)
1139
    {
1140
      /* This is called from within builtin_saveregs, which will insert the
1141
         saveregs code at the function entry, ahead of anything placed at
1142
         the function entry now.  Instead, save the sequence to be inserted
1143
         at the beginning of the saveregs code.  */
1144
      cfun->machine->vararg_a7_copy = entry_insns;
1145
    }
1146
  else
1147
    {
1148
      /* Put entry_insns after the NOTE that starts the function.  If
1149
         this is inside a start_sequence, make the outer-level insn
1150
         chain current, so the code is placed at the start of the
1151
         function.  */
1152
      push_topmost_sequence ();
1153
      /* Do not use entry_of_function() here.  This is called from within
1154
         expand_function_start, when the CFG still holds GIMPLE.  */
1155
      emit_insn_after (entry_insns, get_insns ());
1156
      pop_topmost_sequence ();
1157
    }
1158
 
1159
  return tmp;
1160
}
1161
 
1162
 
1163
/* Try to expand a block move operation to a sequence of RTL move
1164
   instructions.  If not optimizing, or if the block size is not a
1165
   constant, or if the block is too large, the expansion fails and GCC
1166
   falls back to calling memcpy().
1167
 
1168
   operands[0] is the destination
1169
   operands[1] is the source
1170
   operands[2] is the length
1171
   operands[3] is the alignment */
1172
 
1173
int
1174
xtensa_expand_block_move (rtx *operands)
1175
{
1176
  static const enum machine_mode mode_from_align[] =
1177
  {
1178
    VOIDmode, QImode, HImode, VOIDmode, SImode,
1179
  };
1180
 
1181
  rtx dst_mem = operands[0];
1182
  rtx src_mem = operands[1];
1183
  HOST_WIDE_INT bytes, align;
1184
  int num_pieces, move_ratio;
1185
  rtx temp[2];
1186
  enum machine_mode mode[2];
1187
  int amount[2];
1188
  bool active[2];
1189
  int phase = 0;
1190
  int next;
1191
  int offset_ld = 0;
1192
  int offset_st = 0;
1193
  rtx x;
1194
 
1195
  /* If this is not a fixed size move, just call memcpy.  */
1196
  if (!optimize || (GET_CODE (operands[2]) != CONST_INT))
1197
    return 0;
1198
 
1199
  bytes = INTVAL (operands[2]);
1200
  align = INTVAL (operands[3]);
1201
 
1202
  /* Anything to move?  */
1203
  if (bytes <= 0)
1204
    return 0;
1205
 
1206
  if (align > MOVE_MAX)
1207
    align = MOVE_MAX;
1208
 
1209
  /* Decide whether to expand inline based on the optimization level.  */
1210
  move_ratio = 4;
1211
  if (optimize > 2)
1212
    move_ratio = LARGEST_MOVE_RATIO;
1213
  num_pieces = (bytes / align) + (bytes % align); /* Close enough anyway.  */
1214
  if (num_pieces > move_ratio)
1215
    return 0;
1216
 
1217
  x = XEXP (dst_mem, 0);
1218
  if (!REG_P (x))
1219
    {
1220
      x = force_reg (Pmode, x);
1221
      dst_mem = replace_equiv_address (dst_mem, x);
1222
    }
1223
 
1224
  x = XEXP (src_mem, 0);
1225
  if (!REG_P (x))
1226
    {
1227
      x = force_reg (Pmode, x);
1228
      src_mem = replace_equiv_address (src_mem, x);
1229
    }
1230
 
1231
  active[0] = active[1] = false;
1232
 
1233
  do
1234
    {
1235
      next = phase;
1236
      phase ^= 1;
1237
 
1238
      if (bytes > 0)
1239
        {
1240
          int next_amount;
1241
 
1242
          next_amount = (bytes >= 4 ? 4 : (bytes >= 2 ? 2 : 1));
1243
          next_amount = MIN (next_amount, align);
1244
 
1245
          amount[next] = next_amount;
1246
          mode[next] = mode_from_align[next_amount];
1247
          temp[next] = gen_reg_rtx (mode[next]);
1248
 
1249
          x = adjust_address (src_mem, mode[next], offset_ld);
1250
          emit_insn (gen_rtx_SET (VOIDmode, temp[next], x));
1251
 
1252
          offset_ld += next_amount;
1253
          bytes -= next_amount;
1254
          active[next] = true;
1255
        }
1256
 
1257
      if (active[phase])
1258
        {
1259
          active[phase] = false;
1260
 
1261
          x = adjust_address (dst_mem, mode[phase], offset_st);
1262
          emit_insn (gen_rtx_SET (VOIDmode, x, temp[phase]));
1263
 
1264
          offset_st += amount[phase];
1265
        }
1266
    }
1267
  while (active[next]);
1268
 
1269
  return 1;
1270
}
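/* Worked example: copying 7 bytes with 4-byte alignment gives
   num_pieces = 7/4 + 7%4 = 4, which is within the default move_ratio
   of 4, so the copy is expanded in line as an SImode, a HImode and a
   QImode load/store pair, with loads and stores interleaved by the
   two-phase loop above.  At -O3 the threshold rises to
   LARGEST_MOVE_RATIO (15).  */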
1271
 
1272
 
1273
void
1274
xtensa_expand_nonlocal_goto (rtx *operands)
1275
{
1276
  rtx goto_handler = operands[1];
1277
  rtx containing_fp = operands[3];
1278
 
1279
  /* Generate a call to "__xtensa_nonlocal_goto" (in libgcc); the code
1280
     is too big to generate in-line.  */
1281
 
1282
  if (GET_CODE (containing_fp) != REG)
1283
    containing_fp = force_reg (Pmode, containing_fp);
1284
 
1285
  goto_handler = replace_rtx (copy_rtx (goto_handler),
1286
                              virtual_stack_vars_rtx,
1287
                              containing_fp);
1288
 
1289
  emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__xtensa_nonlocal_goto"),
1290
                     0, VOIDmode, 2,
1291
                     containing_fp, Pmode,
1292
                     goto_handler, Pmode);
1293
}
1294
 
1295
 
1296
static struct machine_function *
1297
xtensa_init_machine_status (void)
1298
{
1299
  return ggc_alloc_cleared (sizeof (struct machine_function));
1300
}
1301
 
1302
 
1303
void
1304
xtensa_setup_frame_addresses (void)
1305
{
1306
  /* Set flag to cause FRAME_POINTER_REQUIRED to be set.  */
1307
  cfun->machine->accesses_prev_frame = 1;
1308
 
1309
  emit_library_call
1310
    (gen_rtx_SYMBOL_REF (Pmode, "__xtensa_libgcc_window_spill"),
1311
     0, VOIDmode, 0);
1312
}
1313
 
1314
 
1315
/* Emit the assembly for the end of a zero-cost loop.  Normally we just emit
1316
   a comment showing where the end of the loop is.  However, if there is a
1317
   label or a branch at the end of the loop then we need to place a nop
1318
   there.  If the loop ends with a label we need the nop so that branches
1319
   targeting that label will target the nop (and thus remain in the loop),
1320
   instead of targeting the instruction after the loop (and thus exiting
1321
   the loop).  If the loop ends with a branch, we need the nop in case the
1322
   branch is targeting a location inside the loop.  When the branch
1323
   executes it will cause the loop count to be decremented even if it is
1324
   taken (because it is the last instruction in the loop), so we need to
1325
   nop after the branch to prevent the loop count from being decremented
1326
   when the branch is taken.  */
1327
 
1328
void
1329
xtensa_emit_loop_end (rtx insn, rtx *operands)
1330
{
1331
  char done = 0;
1332
 
1333
  for (insn = PREV_INSN (insn); insn && !done; insn = PREV_INSN (insn))
1334
    {
1335
      switch (GET_CODE (insn))
1336
        {
1337
        case NOTE:
1338
        case BARRIER:
1339
          break;
1340
 
1341
        case CODE_LABEL:
1342
          output_asm_insn (TARGET_DENSITY ? "nop.n" : "nop", operands);
1343
          done = 1;
1344
          break;
1345
 
1346
        default:
1347
          {
1348
            rtx body = PATTERN (insn);
1349
 
1350
            if (GET_CODE (body) == JUMP_INSN)
1351
              {
1352
                output_asm_insn (TARGET_DENSITY ? "nop.n" : "nop", operands);
1353
                done = 1;
1354
              }
1355
            else if ((GET_CODE (body) != USE)
1356
                     && (GET_CODE (body) != CLOBBER))
1357
              done = 1;
1358
          }
1359
          break;
1360
        }
1361
    }
1362
 
1363
  output_asm_insn ("# loop end for %0", operands);
1364
}
1365
 
1366
 
1367
char *
1368
xtensa_emit_call (int callop, rtx *operands)
1369
{
1370
  static char result[64];
1371
  rtx tgt = operands[callop];
1372
 
1373
  if (GET_CODE (tgt) == CONST_INT)
1374
    sprintf (result, "call8\t0x%lx", INTVAL (tgt));
1375
  else if (register_operand (tgt, VOIDmode))
1376
    sprintf (result, "callx8\t%%%d", callop);
1377
  else
1378
    sprintf (result, "call8\t%%%d", callop);
1379
 
1380
  return result;
1381
}
1382
 
1383
 
1384
/* Return the debugger register number to use for 'regno'.  */
1385
 
1386
int
1387
xtensa_dbx_register_number (int regno)
1388
{
1389
  int first = -1;
1390
 
1391
  if (GP_REG_P (regno))
1392
    {
1393
      regno -= GP_REG_FIRST;
1394
      first = 0;
1395
    }
1396
  else if (BR_REG_P (regno))
1397
    {
1398
      regno -= BR_REG_FIRST;
1399
      first = 16;
1400
    }
1401
  else if (FP_REG_P (regno))
1402
    {
1403
      regno -= FP_REG_FIRST;
1404
      first = 48;
1405
    }
1406
  else if (ACC_REG_P (regno))
1407
    {
1408
      first = 0x200;    /* Start of Xtensa special registers.  */
1409
      regno = 16;       /* ACCLO is special register 16.  */
1410
    }
1411
 
1412
  /* When optimizing, we sometimes get asked about pseudo-registers
1413
     that don't represent hard registers.  Return 0 for these.  */
1414
  if (first == -1)
1415
    return 0;
1416
 
1417
  return first + regno;
1418
}
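/* Examples: address register a3 maps to debug register 3, boolean
   register b2 to 18, floating-point register f5 to 53, and the MAC16
   accumulator maps to 0x210 (special register 16, ACCLO).  */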
1419
 
1420
 
1421
/* Argument support functions.  */
1422
 
1423
/* Initialize CUMULATIVE_ARGS for a function.  */
1424
 
1425
void
1426
init_cumulative_args (CUMULATIVE_ARGS *cum, int incoming)
1427
{
1428
  cum->arg_words = 0;
1429
  cum->incoming = incoming;
1430
}
1431
 
1432
 
1433
/* Advance the argument to the next argument position.  */
1434
 
1435
void
1436
function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type)
1437
{
1438
  int words, max;
1439
  int *arg_words;
1440
 
1441
  arg_words = &cum->arg_words;
1442
  max = MAX_ARGS_IN_REGISTERS;
1443
 
1444
  words = (((mode != BLKmode)
1445
            ? (int) GET_MODE_SIZE (mode)
1446
            : int_size_in_bytes (type)) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
1447
 
1448
  if (*arg_words < max
1449
      && (targetm.calls.must_pass_in_stack (mode, type)
1450
          || *arg_words + words > max))
1451
    *arg_words = max;
1452
 
1453
  *arg_words += words;
1454
}
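/* Example: with MAX_ARGS_IN_REGISTERS presumably 6 for this port, a
   call that has already used 5 argument words and now passes a
   two-word value skips to the stack: *arg_words jumps to 6 and then
   to 8, so an argument is never split between registers and
   memory.  */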
1455
 
1456
 
1457
/* Return an RTL expression containing the register for the given mode,
1458
   or 0 if the argument is to be passed on the stack.  INCOMING_P is nonzero
1459
   if this is an incoming argument to the current function.  */
1460
 
1461
rtx
1462
function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type,
1463
              int incoming_p)
1464
{
1465
  int regbase, words, max;
1466
  int *arg_words;
1467
  int regno;
1468
 
1469
  arg_words = &cum->arg_words;
1470
  regbase = (incoming_p ? GP_ARG_FIRST : GP_OUTGOING_ARG_FIRST);
1471
  max = MAX_ARGS_IN_REGISTERS;
1472
 
1473
  words = (((mode != BLKmode)
1474
            ? (int) GET_MODE_SIZE (mode)
1475
            : int_size_in_bytes (type)) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
1476
 
1477
  if (type && (TYPE_ALIGN (type) > BITS_PER_WORD))
1478
    {
1479
      int align = MIN (TYPE_ALIGN (type), STACK_BOUNDARY) / BITS_PER_WORD;
1480
      *arg_words = (*arg_words + align - 1) & -align;
1481
    }
1482
 
1483
  if (*arg_words + words > max)
1484
    return (rtx)0;
1485
 
1486
  regno = regbase + *arg_words;
1487
 
1488
  if (cum->incoming && regno <= A7_REG && regno + words > A7_REG)
1489
    cfun->machine->need_a7_copy = true;
1490
 
1491
  return gen_rtx_REG (mode, regno);
1492
}
1493
 
1494
 
1495
int
1496
function_arg_boundary (enum machine_mode mode, tree type)
1497
{
1498
  unsigned int alignment;
1499
 
1500
  alignment = type ? TYPE_ALIGN (type) : GET_MODE_ALIGNMENT (mode);
1501
  if (alignment < PARM_BOUNDARY)
1502
    alignment = PARM_BOUNDARY;
1503
  if (alignment > STACK_BOUNDARY)
1504
    alignment = STACK_BOUNDARY;
1505
  return alignment;
1506
}
1507
 
1508
 
1509
static bool
1510
xtensa_return_in_msb (tree valtype)
1511
{
1512
  return (TARGET_BIG_ENDIAN
1513
          && AGGREGATE_TYPE_P (valtype)
1514
          && int_size_in_bytes (valtype) >= UNITS_PER_WORD);
1515
}
1516
 
1517
 
1518
void
1519
override_options (void)
1520
{
1521
  int regno;
1522
  enum machine_mode mode;
1523
 
1524
  if (!TARGET_BOOLEANS && TARGET_HARD_FLOAT)
1525
    error ("boolean registers required for the floating-point option");
1526
 
1527
  xtensa_char_to_class['q'] = SP_REG;
1528
  xtensa_char_to_class['a'] = GR_REGS;
1529
  xtensa_char_to_class['b'] = ((TARGET_BOOLEANS) ? BR_REGS : NO_REGS);
1530
  xtensa_char_to_class['f'] = ((TARGET_HARD_FLOAT) ? FP_REGS : NO_REGS);
1531
  xtensa_char_to_class['A'] = ((TARGET_MAC16) ? ACC_REG : NO_REGS);
1532
  xtensa_char_to_class['B'] = ((TARGET_SEXT) ? GR_REGS : NO_REGS);
1533
  xtensa_char_to_class['C'] = ((TARGET_MUL16) ? GR_REGS: NO_REGS);
1534
  xtensa_char_to_class['D'] = ((TARGET_DENSITY) ? GR_REGS: NO_REGS);
1535
  xtensa_char_to_class['d'] = ((TARGET_DENSITY) ? AR_REGS: NO_REGS);
1536
  xtensa_char_to_class['W'] = ((TARGET_CONST16) ? GR_REGS: NO_REGS);
1537
 
1538
  /* Set up array giving whether a given register can hold a given mode.  */
1539
  for (mode = VOIDmode;
1540
       mode != MAX_MACHINE_MODE;
1541
       mode = (enum machine_mode) ((int) mode + 1))
1542
    {
1543
      int size = GET_MODE_SIZE (mode);
1544
      enum mode_class class = GET_MODE_CLASS (mode);
1545
 
1546
      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1547
        {
1548
          int temp;
1549
 
1550
          if (ACC_REG_P (regno))
1551
            temp = (TARGET_MAC16
1552
                    && (class == MODE_INT) && (size <= UNITS_PER_WORD));
1553
          else if (GP_REG_P (regno))
1554
            temp = ((regno & 1) == 0 || (size <= UNITS_PER_WORD));
1555
          else if (FP_REG_P (regno))
1556
            temp = (TARGET_HARD_FLOAT && (mode == SFmode));
1557
          else if (BR_REG_P (regno))
1558
            temp = (TARGET_BOOLEANS && (mode == CCmode));
1559
          else
1560
            temp = FALSE;
1561
 
1562
          xtensa_hard_regno_mode_ok[(int) mode][regno] = temp;
1563
        }
1564
    }
1565
 
1566
  init_machine_status = xtensa_init_machine_status;
1567
 
1568
  /* Check PIC settings.  PIC is only supported when using L32R
1569
     instructions, and some targets need to always use PIC.  */
1570
  if (flag_pic && TARGET_CONST16)
1571
    error ("-f%s is not supported with CONST16 instructions",
1572
           (flag_pic > 1 ? "PIC" : "pic"));
1573
  else if (XTENSA_ALWAYS_PIC)
1574
    {
1575
      if (TARGET_CONST16)
1576
        error ("PIC is required but not supported with CONST16 instructions");
1577
      flag_pic = 1;
1578
    }
1579
  /* There's no need for -fPIC (as opposed to -fpic) on Xtensa.  */
1580
  if (flag_pic > 1)
1581
    flag_pic = 1;
1582
 
1583
  /* Hot/cold partitioning does not work on this architecture, because of
1584
     constant pools (the load instruction cannot necessarily reach that far).
1585
     Therefore disable it on this architecture.  */
1586
  if (flag_reorder_blocks_and_partition)
1587
    {
1588
      flag_reorder_blocks_and_partition = 0;
1589
      flag_reorder_blocks = 1;
1590
    }
1591
}
1592
 
1593
 
1594
/* A C compound statement to output to stdio stream STREAM the
   assembler syntax for an instruction operand X.  X is an RTL
   expression.

   CODE is a value that can be used to specify one of several ways
   of printing the operand.  It is used when identical operands
   must be printed differently depending on the context.  CODE
   comes from the '%' specification that was used to request
   printing of the operand.  If the specification was just '%DIGIT'
   then CODE is 0; if the specification was '%LTR DIGIT' then CODE
   is the ASCII code for LTR.

   If X is a register, this macro should print the register's name.
   The names can be found in an array 'reg_names' whose type is
   'char *[]'.  'reg_names' is initialized from 'REGISTER_NAMES'.

   When the machine description has a specification '%PUNCT' (a '%'
   followed by a punctuation character), this macro is called with
   a null pointer for X and the punctuation character for CODE.

   'a', 'c', 'l', and 'n' are reserved.

   The Xtensa specific codes are:

   'd'  CONST_INT, print as signed decimal
   'x'  CONST_INT, print as signed hexadecimal
   'K'  CONST_INT, print number of bits in mask for EXTUI
   'R'  CONST_INT, print (X & 0x1f)
   'L'  CONST_INT, print ((32 - X) & 0x1f)
   'D'  REG, print second register of double-word register operand
   'N'  MEM, print address of next word following a memory operand
   'v'  MEM, if memory reference is volatile, output a MEMW before it
   't'  any constant, add "@h" suffix for top 16 bits
   'b'  any constant, add "@l" suffix for bottom 16 bits
*/
1629
 
1630
static void
1631
printx (FILE *file, signed int val)
1632
{
1633
  /* Print a hexadecimal value in a nice way.  */
1634
  if ((val > -0xa) && (val < 0xa))
1635
    fprintf (file, "%d", val);
1636
  else if (val < 0)
1637
    fprintf (file, "-0x%x", -val);
1638
  else
1639
    fprintf (file, "0x%x", val);
1640
}


void
print_operand (FILE *file, rtx x, int letter)
{
  if (!x)
    error ("PRINT_OPERAND null pointer");

  switch (letter)
    {
    case 'D':
      if (GET_CODE (x) == REG || GET_CODE (x) == SUBREG)
        fprintf (file, "%s", reg_names[xt_true_regnum (x) + 1]);
      else
        output_operand_lossage ("invalid %%D value");
      break;

    case 'v':
      if (GET_CODE (x) == MEM)
        {
          /* For a volatile memory reference, emit a MEMW before the
             load or store.  */
          if (MEM_VOLATILE_P (x))
            fprintf (file, "memw\n\t");
        }
      else
        output_operand_lossage ("invalid %%v value");
      break;

    case 'N':
      if (GET_CODE (x) == MEM
          && (GET_MODE (x) == DFmode || GET_MODE (x) == DImode))
        {
          x = adjust_address (x, GET_MODE (x) == DFmode ? SFmode : SImode, 4);
          output_address (XEXP (x, 0));
        }
      else
        output_operand_lossage ("invalid %%N value");
      break;

    case 'K':
      if (GET_CODE (x) == CONST_INT)
        {
          int num_bits = 0;
          unsigned val = INTVAL (x);
          while (val & 1)
            {
              num_bits += 1;
              val = val >> 1;
            }
          if ((val != 0) || (num_bits == 0) || (num_bits > 16))
            fatal_insn ("invalid mask", x);

          fprintf (file, "%d", num_bits);
        }
      else
        output_operand_lossage ("invalid %%K value");
      break;

    case 'L':
      if (GET_CODE (x) == CONST_INT)
        fprintf (file, "%ld", (32 - INTVAL (x)) & 0x1f);
      else
        output_operand_lossage ("invalid %%L value");
      break;

    case 'R':
      if (GET_CODE (x) == CONST_INT)
        fprintf (file, "%ld", INTVAL (x) & 0x1f);
      else
        output_operand_lossage ("invalid %%R value");
      break;

    case 'x':
      if (GET_CODE (x) == CONST_INT)
        printx (file, INTVAL (x));
      else
        output_operand_lossage ("invalid %%x value");
      break;

    case 'd':
      if (GET_CODE (x) == CONST_INT)
        fprintf (file, "%ld", INTVAL (x));
      else
        output_operand_lossage ("invalid %%d value");
      break;

    case 't':
    case 'b':
      if (GET_CODE (x) == CONST_INT)
        {
          printx (file, INTVAL (x));
          fputs (letter == 't' ? "@h" : "@l", file);
        }
      else if (GET_CODE (x) == CONST_DOUBLE)
        {
          REAL_VALUE_TYPE r;
          REAL_VALUE_FROM_CONST_DOUBLE (r, x);
          if (GET_MODE (x) == SFmode)
            {
              long l;
              REAL_VALUE_TO_TARGET_SINGLE (r, l);
              fprintf (file, "0x%08lx@%c", l, letter == 't' ? 'h' : 'l');
            }
          else
            output_operand_lossage ("invalid %%t/%%b value");
        }
      else if (GET_CODE (x) == CONST)
        {
          /* X must be a symbolic constant on ELF.  Write an expression
             suitable for 'const16' that sets the high or low 16 bits.  */
          if (GET_CODE (XEXP (x, 0)) != PLUS
              || (GET_CODE (XEXP (XEXP (x, 0), 0)) != SYMBOL_REF
                  && GET_CODE (XEXP (XEXP (x, 0), 0)) != LABEL_REF)
              || GET_CODE (XEXP (XEXP (x, 0), 1)) != CONST_INT)
            output_operand_lossage ("invalid %%t/%%b value");
          print_operand (file, XEXP (XEXP (x, 0), 0), 0);
          fputs (letter == 't' ? "@h" : "@l", file);
          /* There must be a non-alphanumeric character between 'h' or 'l'
             and the number.  The '-' is added by print_operand() already.  */
          if (INTVAL (XEXP (XEXP (x, 0), 1)) >= 0)
            fputs ("+", file);
          print_operand (file, XEXP (XEXP (x, 0), 1), 0);
        }
      else
        {
          output_addr_const (file, x);
          fputs (letter == 't' ? "@h" : "@l", file);
        }
      break;

    default:
      if (GET_CODE (x) == REG || GET_CODE (x) == SUBREG)
        fprintf (file, "%s", reg_names[xt_true_regnum (x)]);
      else if (GET_CODE (x) == MEM)
        output_address (XEXP (x, 0));
      else if (GET_CODE (x) == CONST_INT)
        fprintf (file, "%ld", INTVAL (x));
      else
        output_addr_const (file, x);
    }
}


/* A C compound statement to output to stdio stream STREAM the
   assembler syntax for an instruction operand that is a memory
   reference whose address is ADDR.  ADDR is an RTL expression.  */

void
print_operand_address (FILE *file, rtx addr)
{
  if (!addr)
    error ("PRINT_OPERAND_ADDRESS, null pointer");

  switch (GET_CODE (addr))
    {
    default:
      fatal_insn ("invalid address", addr);
      break;

    case REG:
      fprintf (file, "%s, 0", reg_names [REGNO (addr)]);
      break;

    case PLUS:
      {
        rtx reg = (rtx)0;
        rtx offset = (rtx)0;
        rtx arg0 = XEXP (addr, 0);
        rtx arg1 = XEXP (addr, 1);

        if (GET_CODE (arg0) == REG)
          {
            reg = arg0;
            offset = arg1;
          }
        else if (GET_CODE (arg1) == REG)
          {
            reg = arg1;
            offset = arg0;
          }
        else
          fatal_insn ("no register in address", addr);

        if (CONSTANT_P (offset))
          {
            fprintf (file, "%s, ", reg_names [REGNO (reg)]);
            output_addr_const (file, offset);
          }
        else
          fatal_insn ("address offset not a constant", addr);
      }
      break;

    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_INT:
    case CONST:
      output_addr_const (file, addr);
      break;
    }
}
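
/* Illustrative examples (assuming the usual "a0".."a15" entries in
   reg_names): a plain register address (reg:SI a3) prints as "a3, 0",
   and (plus:SI (reg:SI a3) (const_int 8)) prints as "a3, 8", matching
   the "base, offset" operand syntax of Xtensa loads and stores.  */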


void
xtensa_output_literal (FILE *file, rtx x, enum machine_mode mode, int labelno)
{
  long value_long[2];
  REAL_VALUE_TYPE r;
  int size;
  rtx first, second;

  fprintf (file, "\t.literal .LC%u, ", (unsigned) labelno);

  switch (GET_MODE_CLASS (mode))
    {
    case MODE_FLOAT:
      gcc_assert (GET_CODE (x) == CONST_DOUBLE);

      REAL_VALUE_FROM_CONST_DOUBLE (r, x);
      switch (mode)
        {
        case SFmode:
          REAL_VALUE_TO_TARGET_SINGLE (r, value_long[0]);
          if (HOST_BITS_PER_LONG > 32)
            value_long[0] &= 0xffffffff;
          fprintf (file, "0x%08lx\n", value_long[0]);
          break;

        case DFmode:
          REAL_VALUE_TO_TARGET_DOUBLE (r, value_long);
          if (HOST_BITS_PER_LONG > 32)
            {
              value_long[0] &= 0xffffffff;
              value_long[1] &= 0xffffffff;
            }
          fprintf (file, "0x%08lx, 0x%08lx\n",
                   value_long[0], value_long[1]);
          break;

        default:
          gcc_unreachable ();
        }

      break;

    case MODE_INT:
    case MODE_PARTIAL_INT:
      size = GET_MODE_SIZE (mode);
      switch (size)
        {
        case 4:
          output_addr_const (file, x);
          fputs ("\n", file);
          break;

        case 8:
          split_double (x, &first, &second);
          output_addr_const (file, first);
          fputs (", ", file);
          output_addr_const (file, second);
          fputs ("\n", file);
          break;

        default:
          gcc_unreachable ();
        }
      break;

    default:
      gcc_unreachable ();
    }
}
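
/* Example of the emitted assembly (label numbers are arbitrary): an
   SFmode constant 1.0 comes out as ".literal .LC0, 0x3f800000", while
   an 8-byte (DImode or DFmode) constant is emitted as two
   comma-separated 32-bit words on one .literal line.  */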


/* Return the bytes needed to compute the frame pointer from the current
   stack pointer.  */

#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
#define XTENSA_STACK_ALIGN(LOC) (((LOC) + STACK_BYTES-1) & ~(STACK_BYTES-1))

long
compute_frame_size (int size)
{
  /* Add space for the incoming static chain value.  */
  if (cfun->static_chain_decl != NULL)
    size += (1 * UNITS_PER_WORD);

  xtensa_current_frame_size =
    XTENSA_STACK_ALIGN (size
                        + current_function_outgoing_args_size
                        + (WINDOW_SIZE * UNITS_PER_WORD));
  return xtensa_current_frame_size;
}
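
/* Worked example (numbers chosen only for illustration): with 16-byte
   stack alignment, UNITS_PER_WORD == 4 and WINDOW_SIZE == 8, a
   function with 40 bytes of locals and 16 bytes of outgoing argument
   space gets 40 + 16 + 32 = 88 bytes, which XTENSA_STACK_ALIGN rounds
   up to 96.  */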


int
xtensa_frame_pointer_required (void)
{
  /* The code to expand builtin_frame_addr and builtin_return_addr
     currently uses the hard_frame_pointer instead of frame_pointer.
     This seems wrong but maybe it's necessary for other architectures.
     This function is derived from the i386 code.  */

  if (cfun->machine->accesses_prev_frame)
    return 1;

  return 0;
}


void
xtensa_expand_prologue (void)
{
  HOST_WIDE_INT total_size;
  rtx size_rtx;

  total_size = compute_frame_size (get_frame_size ());
  size_rtx = GEN_INT (total_size);

  if (total_size < (1 << (12+3)))
    emit_insn (gen_entry (size_rtx, size_rtx));
  else
    {
      /* Use a8 as a temporary since a0-a7 may be live.  */
      rtx tmp_reg = gen_rtx_REG (Pmode, A8_REG);
      emit_insn (gen_entry (size_rtx, GEN_INT (MIN_FRAME_SIZE)));
      emit_move_insn (tmp_reg, GEN_INT (total_size - MIN_FRAME_SIZE));
      emit_insn (gen_subsi3 (tmp_reg, stack_pointer_rtx, tmp_reg));
      emit_move_insn (stack_pointer_rtx, tmp_reg);
    }

  if (frame_pointer_needed)
    {
      if (cfun->machine->set_frame_ptr_insn)
        {
          rtx first, insn;

          push_topmost_sequence ();
          first = get_insns ();
          pop_topmost_sequence ();

          /* For all instructions prior to set_frame_ptr_insn, replace
             hard_frame_pointer references with stack_pointer.  */
          for (insn = first;
               insn != cfun->machine->set_frame_ptr_insn;
               insn = NEXT_INSN (insn))
            {
              if (INSN_P (insn))
                PATTERN (insn) = replace_rtx (copy_rtx (PATTERN (insn)),
                                              hard_frame_pointer_rtx,
                                              stack_pointer_rtx);
            }
        }
      else
        emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx);
    }
}
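
/* Roughly, the two branches above produce prologues of the following
   shape (a sketch, not the literal output templates):

     entry  sp, <total_size>                 # total_size < 32768

   or, for larger frames,

     entry  sp, <MIN_FRAME_SIZE>
     movi   a8, <total_size - MIN_FRAME_SIZE>
     sub    a8, sp, a8
     ...                                     # a stack-pointer move of a8 into sp
*/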


/* Clear variables at function end.  */

void
xtensa_function_epilogue (FILE *file ATTRIBUTE_UNUSED,
                          HOST_WIDE_INT size ATTRIBUTE_UNUSED)
{
  xtensa_current_frame_size = 0;
}


rtx
xtensa_return_addr (int count, rtx frame)
{
  rtx result, retaddr;

  if (count == -1)
    retaddr = gen_rtx_REG (Pmode, A0_REG);
  else
    {
      rtx addr = plus_constant (frame, -4 * UNITS_PER_WORD);
      addr = memory_address (Pmode, addr);
      retaddr = gen_reg_rtx (Pmode);
      emit_move_insn (retaddr, gen_rtx_MEM (Pmode, addr));
    }

  /* The 2 most-significant bits of the return address on Xtensa hold
     the register window size.  To get the real return address, these
     bits must be replaced with the high bits from the current PC.  */
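
  /* For example (addresses made up for illustration): a raw return
     address of 0x80001234 in code running near PC 0x60003000 would be
     fixed up to (0x80001234 & 0x3fffffff) | (0x60003000 & 0xc0000000)
     == 0x40001234.  */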

  result = gen_reg_rtx (Pmode);
  emit_insn (gen_fix_return_addr (result, retaddr));
  return result;
}


/* Create the va_list data type.

   This structure is set up by __builtin_saveregs.  The __va_reg field
   points to a stack-allocated region holding the contents of the
   incoming argument registers.  The __va_ndx field is an index
   initialized to the position of the first unnamed (variable)
   argument.  This same index is also used to address the arguments
   passed in memory.  Thus, the __va_stk field is initialized to point
   to the position of the first argument in memory, offset to account
   for the arguments passed in registers and for the size of the
   argument registers not being 16-byte aligned.  E.g., there are 6
   argument registers of 4 bytes each, but we want the __va_ndx for
   the first stack argument to have the maximal alignment of 16 bytes,
   so we offset the __va_stk address by 32 bytes so that __va_stk[32]
   references the first argument on the stack.  */
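
/* For illustration only, the record built below corresponds roughly to:

     struct __va_list_tag
     {
       void *__va_stk;     -- incoming stack args, biased by -32 bytes
       void *__va_reg;     -- save area for the incoming argument registers
       int   __va_ndx;     -- byte index of the next anonymous argument
     };

   The field types follow from the ptr_type_node and integer_type_node
   declarations used below.  */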

static tree
xtensa_build_builtin_va_list (void)
{
  tree f_stk, f_reg, f_ndx, record, type_decl;

  record = (*lang_hooks.types.make_type) (RECORD_TYPE);
  type_decl = build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);

  f_stk = build_decl (FIELD_DECL, get_identifier ("__va_stk"),
                      ptr_type_node);
  f_reg = build_decl (FIELD_DECL, get_identifier ("__va_reg"),
                      ptr_type_node);
  f_ndx = build_decl (FIELD_DECL, get_identifier ("__va_ndx"),
                      integer_type_node);

  DECL_FIELD_CONTEXT (f_stk) = record;
  DECL_FIELD_CONTEXT (f_reg) = record;
  DECL_FIELD_CONTEXT (f_ndx) = record;

  TREE_CHAIN (record) = type_decl;
  TYPE_NAME (record) = type_decl;
  TYPE_FIELDS (record) = f_stk;
  TREE_CHAIN (f_stk) = f_reg;
  TREE_CHAIN (f_reg) = f_ndx;

  layout_type (record);
  return record;
}


/* Save the incoming argument registers on the stack.  Returns the
   address of the saved registers.  */

static rtx
xtensa_builtin_saveregs (void)
{
  rtx gp_regs, dest;
  int arg_words = current_function_args_info.arg_words;
  int gp_left = MAX_ARGS_IN_REGISTERS - arg_words;

  if (gp_left <= 0)
    return const0_rtx;

  /* Allocate the general-purpose register space.  */
  gp_regs = assign_stack_local
    (BLKmode, MAX_ARGS_IN_REGISTERS * UNITS_PER_WORD, -1);
  set_mem_alias_set (gp_regs, get_varargs_alias_set ());

  /* Now store the incoming registers.  */
  dest = change_address (gp_regs, SImode,
                         plus_constant (XEXP (gp_regs, 0),
                                        arg_words * UNITS_PER_WORD));
  cfun->machine->need_a7_copy = true;
  cfun->machine->vararg_a7 = true;
  move_block_from_reg (GP_ARG_FIRST + arg_words, dest, gp_left);
  gcc_assert (cfun->machine->vararg_a7_copy != 0);
  emit_insn_before (cfun->machine->vararg_a7_copy, get_insns ());

  return XEXP (gp_regs, 0);
}


/* Implement `va_start' for varargs and stdarg.  We look at the
   current function to fill in an initial va_list.  */

void
xtensa_va_start (tree valist, rtx nextarg ATTRIBUTE_UNUSED)
{
  tree f_stk, stk;
  tree f_reg, reg;
  tree f_ndx, ndx;
  tree t, u;
  int arg_words;

  arg_words = current_function_args_info.arg_words;

  f_stk = TYPE_FIELDS (va_list_type_node);
  f_reg = TREE_CHAIN (f_stk);
  f_ndx = TREE_CHAIN (f_reg);

  stk = build3 (COMPONENT_REF, TREE_TYPE (f_stk), valist, f_stk, NULL_TREE);
  reg = build3 (COMPONENT_REF, TREE_TYPE (f_reg), valist, f_reg, NULL_TREE);
  ndx = build3 (COMPONENT_REF, TREE_TYPE (f_ndx), valist, f_ndx, NULL_TREE);

  /* Call __builtin_saveregs; save the result in __va_reg */
  u = make_tree (ptr_type_node, expand_builtin_saveregs ());
  t = build2 (MODIFY_EXPR, ptr_type_node, reg, u);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  /* Set the __va_stk member to ($arg_ptr - 32).  */
  u = make_tree (ptr_type_node, virtual_incoming_args_rtx);
  u = fold_build2 (PLUS_EXPR, ptr_type_node, u,
                   build_int_cst (NULL_TREE, -32));
  t = build2 (MODIFY_EXPR, ptr_type_node, stk, u);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  /* Set the __va_ndx member.  If the first variable argument is on
     the stack, adjust __va_ndx by 2 words to account for the extra
     alignment offset for __va_stk.  */
  if (arg_words >= MAX_ARGS_IN_REGISTERS)
    arg_words += 2;
  u = build_int_cst (NULL_TREE, arg_words * UNITS_PER_WORD);
  t = build2 (MODIFY_EXPR, integer_type_node, ndx, u);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
}
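
/* As an illustration (not a literal dump of the expanded trees), for
   "int f (int a, int b, ...)" the three assignments above amount to:

     ap.__va_reg = __builtin_saveregs ();
     ap.__va_stk = (char *) arg_ptr - 32;     -- arg_ptr: incoming args pointer
     ap.__va_ndx = 2 * UNITS_PER_WORD;        -- two named words already used
*/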


/* Implement `va_arg'.  */

static tree
xtensa_gimplify_va_arg_expr (tree valist, tree type, tree *pre_p,
                             tree *post_p ATTRIBUTE_UNUSED)
{
  tree f_stk, stk;
  tree f_reg, reg;
  tree f_ndx, ndx;
  tree type_size, array, orig_ndx, addr, size, va_size, t;
  tree lab_false, lab_over, lab_false2;
  bool indirect;

  indirect = pass_by_reference (NULL, TYPE_MODE (type), type, false);
  if (indirect)
    type = build_pointer_type (type);

  /* Handle complex values as separate real and imaginary parts.  */
  if (TREE_CODE (type) == COMPLEX_TYPE)
    {
      tree real_part, imag_part;

      real_part = xtensa_gimplify_va_arg_expr (valist, TREE_TYPE (type),
                                               pre_p, NULL);
      real_part = get_initialized_tmp_var (real_part, pre_p, NULL);

      imag_part = xtensa_gimplify_va_arg_expr (valist, TREE_TYPE (type),
                                               pre_p, NULL);
      imag_part = get_initialized_tmp_var (imag_part, pre_p, NULL);

      return build2 (COMPLEX_EXPR, type, real_part, imag_part);
    }

  f_stk = TYPE_FIELDS (va_list_type_node);
  f_reg = TREE_CHAIN (f_stk);
  f_ndx = TREE_CHAIN (f_reg);

  stk = build3 (COMPONENT_REF, TREE_TYPE (f_stk), valist, f_stk, NULL_TREE);
  reg = build3 (COMPONENT_REF, TREE_TYPE (f_reg), valist, f_reg, NULL_TREE);
  ndx = build3 (COMPONENT_REF, TREE_TYPE (f_ndx), valist, f_ndx, NULL_TREE);

  type_size = size_in_bytes (type);
  va_size = round_up (type_size, UNITS_PER_WORD);
  gimplify_expr (&va_size, pre_p, NULL, is_gimple_val, fb_rvalue);


  /* First align __va_ndx if necessary for this arg:

     orig_ndx = (AP).__va_ndx;
     if (__alignof__ (TYPE) > 4 )
       orig_ndx = ((orig_ndx + __alignof__ (TYPE) - 1)
                        & -__alignof__ (TYPE)); */

  orig_ndx = get_initialized_tmp_var (ndx, pre_p, NULL);

  if (TYPE_ALIGN (type) > BITS_PER_WORD)
    {
      int align = MIN (TYPE_ALIGN (type), STACK_BOUNDARY) / BITS_PER_UNIT;

      t = build2 (PLUS_EXPR, integer_type_node, orig_ndx,
                  build_int_cst (NULL_TREE, align - 1));
      t = build2 (BIT_AND_EXPR, integer_type_node, t,
                  build_int_cst (NULL_TREE, -align));
      t = build2 (MODIFY_EXPR, integer_type_node, orig_ndx, t);
      gimplify_and_add (t, pre_p);
    }


  /* Increment __va_ndx to point past the argument:

     (AP).__va_ndx = orig_ndx + __va_size (TYPE); */

  t = fold_convert (integer_type_node, va_size);
  t = build2 (PLUS_EXPR, integer_type_node, orig_ndx, t);
  t = build2 (MODIFY_EXPR, integer_type_node, ndx, t);
  gimplify_and_add (t, pre_p);


  /* Check if the argument is in registers:

     if ((AP).__va_ndx <= __MAX_ARGS_IN_REGISTERS * 4
         && !must_pass_in_stack (type))
        __array = (AP).__va_reg; */

  array = create_tmp_var (ptr_type_node, NULL);

  lab_over = NULL;
  if (!targetm.calls.must_pass_in_stack (TYPE_MODE (type), type))
    {
      lab_false = create_artificial_label ();
      lab_over = create_artificial_label ();

      t = build_int_cst (NULL_TREE, MAX_ARGS_IN_REGISTERS * UNITS_PER_WORD);
      t = build2 (GT_EXPR, boolean_type_node, ndx, t);
      t = build3 (COND_EXPR, void_type_node, t,
                  build1 (GOTO_EXPR, void_type_node, lab_false),
                  NULL_TREE);
      gimplify_and_add (t, pre_p);

      t = build2 (MODIFY_EXPR, void_type_node, array, reg);
      gimplify_and_add (t, pre_p);

      t = build1 (GOTO_EXPR, void_type_node, lab_over);
      gimplify_and_add (t, pre_p);

      t = build1 (LABEL_EXPR, void_type_node, lab_false);
      gimplify_and_add (t, pre_p);
    }


  /* ...otherwise, the argument is on the stack (never split between
     registers and the stack -- change __va_ndx if necessary):

     else
       {
         if (orig_ndx <= __MAX_ARGS_IN_REGISTERS * 4)
             (AP).__va_ndx = 32 + __va_size (TYPE);
         __array = (AP).__va_stk;
       } */

  lab_false2 = create_artificial_label ();

  t = build_int_cst (NULL_TREE, MAX_ARGS_IN_REGISTERS * UNITS_PER_WORD);
  t = build2 (GT_EXPR, boolean_type_node, orig_ndx, t);
  t = build3 (COND_EXPR, void_type_node, t,
              build1 (GOTO_EXPR, void_type_node, lab_false2),
              NULL_TREE);
  gimplify_and_add (t, pre_p);

  t = size_binop (PLUS_EXPR, va_size, size_int (32));
  t = fold_convert (integer_type_node, t);
  t = build2 (MODIFY_EXPR, integer_type_node, ndx, t);
  gimplify_and_add (t, pre_p);

  t = build1 (LABEL_EXPR, void_type_node, lab_false2);
  gimplify_and_add (t, pre_p);

  t = build2 (MODIFY_EXPR, void_type_node, array, stk);
  gimplify_and_add (t, pre_p);

  if (lab_over)
    {
      t = build1 (LABEL_EXPR, void_type_node, lab_over);
      gimplify_and_add (t, pre_p);
    }


  /* Given the base array pointer (__array) and index to the subsequent
     argument (__va_ndx), find the address:

     __array + (AP).__va_ndx - (BYTES_BIG_ENDIAN && sizeof (TYPE) < 4
                                ? sizeof (TYPE)
                                : __va_size (TYPE))

     The results are endian-dependent because values smaller than one word
     are aligned differently.  */


  if (BYTES_BIG_ENDIAN && TREE_CODE (type_size) == INTEGER_CST)
    {
      t = size_int (PARM_BOUNDARY / BITS_PER_UNIT);
      t = fold_build2 (GE_EXPR, boolean_type_node, type_size, t);
      t = fold_build3 (COND_EXPR, sizetype, t, va_size, type_size);
      size = t;
    }
  else
    size = va_size;

  t = fold_convert (ptr_type_node, ndx);
  addr = build2 (PLUS_EXPR, ptr_type_node, array, t);
  t = fold_convert (ptr_type_node, size);
  addr = build2 (MINUS_EXPR, ptr_type_node, addr, t);

  addr = fold_convert (build_pointer_type (type), addr);
  if (indirect)
    addr = build_va_arg_indirect_ref (addr);
  return build_va_arg_indirect_ref (addr);
}
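
/* Worked example (purely illustrative): fetching a 'double' when
   (AP).__va_ndx is 4 on a little-endian configuration with
   UNITS_PER_WORD == 4: the 8-byte alignment bumps orig_ndx from 4 to
   8, __va_ndx becomes 16, which is still within
   MAX_ARGS_IN_REGISTERS * 4 == 24, so the value is read from
   __va_reg + 16 - 8, i.e. the third and fourth saved argument
   words.  */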


enum reg_class
xtensa_preferred_reload_class (rtx x, enum reg_class class, int isoutput)
{
  if (!isoutput && CONSTANT_P (x) && GET_CODE (x) == CONST_DOUBLE)
    return NO_REGS;

  /* Don't use the stack pointer or hard frame pointer for reloads!
     The hard frame pointer would normally be OK except that it may
     briefly hold an incoming argument in the prologue, and reload
     won't know that it is live because the hard frame pointer is
     treated specially.  */

  if (class == AR_REGS || class == GR_REGS)
    return RL_REGS;

  return class;
}


enum reg_class
xtensa_secondary_reload_class (enum reg_class class,
                               enum machine_mode mode ATTRIBUTE_UNUSED,
                               rtx x, int isoutput)
{
  int regno;

  if (GET_CODE (x) == SIGN_EXTEND)
    x = XEXP (x, 0);
  regno = xt_true_regnum (x);

  if (!isoutput)
    {
      if (class == FP_REGS && constantpool_mem_p (x))
        return RL_REGS;
    }

  if (ACC_REG_P (regno))
    return ((class == GR_REGS || class == RL_REGS) ? NO_REGS : RL_REGS);
  if (class == ACC_REG)
    return (GP_REG_P (regno) ? NO_REGS : RL_REGS);

  return NO_REGS;
}


void
order_regs_for_local_alloc (void)
{
  if (!leaf_function_p ())
    {
      memcpy (reg_alloc_order, reg_nonleaf_alloc_order,
              FIRST_PSEUDO_REGISTER * sizeof (int));
    }
  else
    {
      int i, num_arg_regs;
      int nxt = 0;

      /* Use the AR registers in increasing order (skipping a0 and a1)
         but save the incoming argument registers for a last resort.  */
      num_arg_regs = current_function_args_info.arg_words;
      if (num_arg_regs > MAX_ARGS_IN_REGISTERS)
        num_arg_regs = MAX_ARGS_IN_REGISTERS;
      for (i = GP_ARG_FIRST; i < 16 - num_arg_regs; i++)
        reg_alloc_order[nxt++] = i + num_arg_regs;
      for (i = 0; i < num_arg_regs; i++)
        reg_alloc_order[nxt++] = GP_ARG_FIRST + i;

      /* List the coprocessor registers in order.  */
      for (i = 0; i < BR_REG_NUM; i++)
        reg_alloc_order[nxt++] = BR_REG_FIRST + i;

      /* List the FP registers in order for now.  */
      for (i = 0; i < 16; i++)
        reg_alloc_order[nxt++] = FP_REG_FIRST + i;

      /* GCC requires that we list *all* the registers....  */
      reg_alloc_order[nxt++] = 0;       /* a0 = return address */
      reg_alloc_order[nxt++] = 1;       /* a1 = stack pointer */
      reg_alloc_order[nxt++] = 16;      /* pseudo frame pointer */
      reg_alloc_order[nxt++] = 17;      /* pseudo arg pointer */

      reg_alloc_order[nxt++] = ACC_REG_FIRST;   /* MAC16 accumulator */
    }
}
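
/* Example ordering (assuming GP_ARG_FIRST is a2 and two argument
   words are in use): the loops above yield a4..a15 first, then the
   argument registers a2 and a3, then the branch and FP registers,
   and finally a0, a1 (the stack pointer), the two pseudo pointers,
   and the MAC16 accumulator.  */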


/* Some Xtensa targets support multiple bss sections.  If the section
   name ends with ".bss", add SECTION_BSS to the flags.  */

static unsigned int
xtensa_multibss_section_type_flags (tree decl, const char *name, int reloc)
{
  unsigned int flags = default_section_type_flags (decl, name, reloc);
  const char *suffix;

  suffix = strrchr (name, '.');
  if (suffix && strcmp (suffix, ".bss") == 0)
    {
      if (!decl || (TREE_CODE (decl) == VAR_DECL
                    && DECL_INITIAL (decl) == NULL_TREE))
        flags |= SECTION_BSS;  /* @nobits */
      else
        warning (0, "only uninitialized variables can be placed in a "
                 ".bss section");
    }

  return flags;
}
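
/* For example (section name chosen only for illustration), an
   uninitialized variable declared as

     int scratch __attribute__ ((section (".iram.bss")));

   gets SECTION_BSS set because the name's last suffix is ".bss",
   whereas the same attribute on an initialized variable triggers the
   warning above.  */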


/* The literal pool stays with the function.  */

static section *
xtensa_select_rtx_section (enum machine_mode mode ATTRIBUTE_UNUSED,
                           rtx x ATTRIBUTE_UNUSED,
                           unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
{
  return function_section (current_function_decl);
}


/* Compute a (partial) cost for rtx X.  Return true if the complete
   cost has been computed, and false if subexpressions should be
   scanned.  In either case, *TOTAL contains the cost result.  */

static bool
xtensa_rtx_costs (rtx x, int code, int outer_code, int *total)
{
  switch (code)
    {
    case CONST_INT:
      switch (outer_code)
        {
        case SET:
          if (xtensa_simm12b (INTVAL (x)))
            {
              *total = 4;
              return true;
            }
          break;
        case PLUS:
          if (xtensa_simm8 (INTVAL (x))
              || xtensa_simm8x256 (INTVAL (x)))
            {
              *total = 0;
              return true;
            }
          break;
        case AND:
          if (xtensa_mask_immediate (INTVAL (x)))
            {
              *total = 0;
              return true;
            }
          break;
        case COMPARE:
          if ((INTVAL (x) == 0) || xtensa_b4const (INTVAL (x)))
            {
              *total = 0;
              return true;
            }
          break;
        case ASHIFT:
        case ASHIFTRT:
        case LSHIFTRT:
        case ROTATE:
        case ROTATERT:
          /* No way to tell if X is the 2nd operand so be conservative.  */
        default: break;
        }
      if (xtensa_simm12b (INTVAL (x)))
        *total = 5;
      else if (TARGET_CONST16)
        *total = COSTS_N_INSNS (2);
      else
        *total = 6;
      return true;

    case CONST:
    case LABEL_REF:
    case SYMBOL_REF:
      if (TARGET_CONST16)
        *total = COSTS_N_INSNS (2);
      else
        *total = 5;
      return true;

    case CONST_DOUBLE:
      if (TARGET_CONST16)
        *total = COSTS_N_INSNS (4);
      else
        *total = 7;
      return true;

    case MEM:
      {
        int num_words =
          (GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD) ?  2 : 1;

        if (memory_address_p (GET_MODE (x), XEXP ((x), 0)))
          *total = COSTS_N_INSNS (num_words);
        else
          *total = COSTS_N_INSNS (2*num_words);
        return true;
      }

    case FFS:
      *total = COSTS_N_INSNS (TARGET_NSA ? 5 : 50);
      return true;

    case NOT:
      *total = COSTS_N_INSNS ((GET_MODE (x) == DImode) ? 3 : 2);
      return true;

    case AND:
    case IOR:
    case XOR:
      if (GET_MODE (x) == DImode)
        *total = COSTS_N_INSNS (2);
      else
        *total = COSTS_N_INSNS (1);
      return true;

    case ASHIFT:
    case ASHIFTRT:
    case LSHIFTRT:
      if (GET_MODE (x) == DImode)
        *total = COSTS_N_INSNS (50);
      else
        *total = COSTS_N_INSNS (1);
      return true;

    case ABS:
      {
        enum machine_mode xmode = GET_MODE (x);
        if (xmode == SFmode)
          *total = COSTS_N_INSNS (TARGET_HARD_FLOAT ? 1 : 50);
        else if (xmode == DFmode)
          *total = COSTS_N_INSNS (50);
        else
          *total = COSTS_N_INSNS (4);
        return true;
      }

    case PLUS:
    case MINUS:
      {
        enum machine_mode xmode = GET_MODE (x);
        if (xmode == SFmode)
          *total = COSTS_N_INSNS (TARGET_HARD_FLOAT ? 1 : 50);
        else if (xmode == DFmode || xmode == DImode)
          *total = COSTS_N_INSNS (50);
        else
          *total = COSTS_N_INSNS (1);
        return true;
      }

    case NEG:
      *total = COSTS_N_INSNS ((GET_MODE (x) == DImode) ? 4 : 2);
      return true;

    case MULT:
      {
        enum machine_mode xmode = GET_MODE (x);
        if (xmode == SFmode)
          *total = COSTS_N_INSNS (TARGET_HARD_FLOAT ? 4 : 50);
        else if (xmode == DFmode || xmode == DImode)
          *total = COSTS_N_INSNS (50);
        else if (TARGET_MUL32)
          *total = COSTS_N_INSNS (4);
        else if (TARGET_MAC16)
          *total = COSTS_N_INSNS (16);
        else if (TARGET_MUL16)
          *total = COSTS_N_INSNS (12);
        else
          *total = COSTS_N_INSNS (50);
        return true;
      }

    case DIV:
    case MOD:
      {
        enum machine_mode xmode = GET_MODE (x);
        if (xmode == SFmode)
          {
            *total = COSTS_N_INSNS (TARGET_HARD_FLOAT_DIV ? 8 : 50);
            return true;
          }
        else if (xmode == DFmode)
          {
            *total = COSTS_N_INSNS (50);
            return true;
          }
      }
      /* Fall through.  */

    case UDIV:
    case UMOD:
      {
        enum machine_mode xmode = GET_MODE (x);
        if (xmode == DImode)
          *total = COSTS_N_INSNS (50);
        else if (TARGET_DIV32)
          *total = COSTS_N_INSNS (32);
        else
          *total = COSTS_N_INSNS (50);
        return true;
      }

    case SQRT:
      if (GET_MODE (x) == SFmode)
        *total = COSTS_N_INSNS (TARGET_HARD_FLOAT_SQRT ? 8 : 50);
      else
        *total = COSTS_N_INSNS (50);
      return true;

    case SMIN:
    case UMIN:
    case SMAX:
    case UMAX:
      *total = COSTS_N_INSNS (TARGET_MINMAX ? 1 : 50);
      return true;

    case SIGN_EXTRACT:
    case SIGN_EXTEND:
      *total = COSTS_N_INSNS (TARGET_SEXT ? 1 : 2);
      return true;

    case ZERO_EXTRACT:
    case ZERO_EXTEND:
      *total = COSTS_N_INSNS (1);
      return true;

    default:
      return false;
    }
}
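
/* Example of the scheme above (values quoted from the cases): setting
   a register to (const_int 100) is rated 4 because the constant fits
   a 12-bit MOVI immediate (xtensa_simm12b), while a constant such as
   0x12345678 falls through to 6, or to COSTS_N_INSNS (2) when the
   CONST16 option is available.  */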

/* Worker function for TARGET_RETURN_IN_MEMORY.  */

static bool
xtensa_return_in_memory (tree type, tree fntype ATTRIBUTE_UNUSED)
{
  return ((unsigned HOST_WIDE_INT) int_size_in_bytes (type)
          > 4 * UNITS_PER_WORD);
}
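
/* With UNITS_PER_WORD == 4 this means, for example, that a 16-byte
   struct is returned in registers while a 20-byte struct is returned
   in memory.  */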

#include "gt-xtensa.h"
