OpenCores
URL https://opencores.org/ocsvn/openrisc/openrisc/trunk

Subversion Repositories openrisc

[/] [openrisc/] [trunk/] [gnu-dev/] [or1k-gcc/] [gcc/] [config/] [stormy16/] [stormy16.c] - Blame information for rev 709

Details | Compare with Previous | View Log

Line No. Rev Author Line
1 709 jeremybenn
/* Xstormy16 target functions.
2
   Copyright (C) 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005,
3
   2006, 2007, 2008, 2009, 2010, 2011  Free Software Foundation, Inc.
4
   Contributed by Red Hat, Inc.
5
 
6
   This file is part of GCC.
7
 
8
   GCC is free software; you can redistribute it and/or modify
9
   it under the terms of the GNU General Public License as published by
10
   the Free Software Foundation; either version 3, or (at your option)
11
   any later version.
12
 
13
   GCC is distributed in the hope that it will be useful,
14
   but WITHOUT ANY WARRANTY; without even the implied warranty of
15
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
16
   GNU General Public License for more details.
17
 
18
   You should have received a copy of the GNU General Public License
19
   along with GCC; see the file COPYING3.  If not see
20
   <http://www.gnu.org/licenses/>.  */
21
 
22
#include "config.h"
23
#include "system.h"
24
#include "coretypes.h"
25
#include "tm.h"
26
#include "rtl.h"
27
#include "regs.h"
28
#include "hard-reg-set.h"
29
#include "insn-config.h"
30
#include "conditions.h"
31
#include "insn-flags.h"
32
#include "output.h"
33
#include "insn-attr.h"
34
#include "flags.h"
35
#include "recog.h"
36
#include "diagnostic-core.h"
37
#include "obstack.h"
38
#include "tree.h"
39
#include "expr.h"
40
#include "optabs.h"
41
#include "except.h"
42
#include "function.h"
43
#include "target.h"
44
#include "target-def.h"
45
#include "tm_p.h"
46
#include "langhooks.h"
47
#include "gimple.h"
48
#include "df.h"
49
#include "reload.h"
50
#include "ggc.h"
51
 
52
static rtx emit_addhi3_postreload (rtx, rtx, rtx);
53
static void xstormy16_asm_out_constructor (rtx, int);
54
static void xstormy16_asm_out_destructor (rtx, int);
55
static void xstormy16_asm_output_mi_thunk (FILE *, tree, HOST_WIDE_INT,
56
                                           HOST_WIDE_INT, tree);
57
 
58
static void xstormy16_init_builtins (void);
59
static rtx xstormy16_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
60
static bool xstormy16_rtx_costs (rtx, int, int, int, int *, bool);
61
static int xstormy16_address_cost (rtx, bool);
62
static bool xstormy16_return_in_memory (const_tree, const_tree);
63
 
64
static GTY(()) section *bss100_section;
65
 
66
/* Compute a (partial) cost for rtx X.  Return true if the complete
67
   cost has been computed, and false if subexpressions should be
68
   scanned.  In either case, *TOTAL contains the cost result.  */
69
 
70
static bool
71
xstormy16_rtx_costs (rtx x, int code, int outer_code ATTRIBUTE_UNUSED,
72
                     int opno ATTRIBUTE_UNUSED, int *total,
73
                     bool speed ATTRIBUTE_UNUSED)
74
{
75
  switch (code)
76
    {
77
    case CONST_INT:
78
      if (INTVAL (x) < 16 && INTVAL (x) >= 0)
79
        *total = COSTS_N_INSNS (1) / 2;
80
      else if (INTVAL (x) < 256 && INTVAL (x) >= 0)
81
        *total = COSTS_N_INSNS (1);
82
      else
83
        *total = COSTS_N_INSNS (2);
84
      return true;
85
 
86
    case CONST_DOUBLE:
87
    case CONST:
88
    case SYMBOL_REF:
89
    case LABEL_REF:
90
      *total = COSTS_N_INSNS (2);
91
      return true;
92
 
93
    case MULT:
94
      *total = COSTS_N_INSNS (35 + 6);
95
      return true;
96
    case DIV:
97
      *total = COSTS_N_INSNS (51 - 6);
98
      return true;
99
 
100
    default:
101
      return false;
102
    }
103
}
104
 
105
static int
106
xstormy16_address_cost (rtx x, bool speed ATTRIBUTE_UNUSED)
107
{
108
  return (CONST_INT_P (x) ? 2
109
          : GET_CODE (x) == PLUS ? 7
110
          : 5);
111
}
112
 
113
/* Worker function for TARGET_MEMORY_MOVE_COST.  */
114
 
115
static int
116
xstormy16_memory_move_cost (enum machine_mode mode, reg_class_t rclass,
117
                            bool in)
118
{
119
  return (5 + memory_move_secondary_cost (mode, rclass, in));
120
}
121
 
122
/* Branches are handled as follows:
123
 
124
   1. HImode compare-and-branches.  The machine supports these
125
      natively, so the appropriate pattern is emitted directly.
126
 
127
   2. SImode EQ and NE.  These are emitted as pairs of HImode
128
      compare-and-branches.
129
 
130
   3. SImode LT, GE, LTU and GEU.  These are emitted as a sequence
131
      of a SImode subtract followed by a branch (not a compare-and-branch),
132
      like this:
133
      sub
134
      sbc
135
      blt
136
 
137
   4. SImode GT, LE, GTU, LEU.  These are emitted as a sequence like:
138
      sub
139
      sbc
140
      blt
141
      or
142
      bne.  */
143
 
144
/* Emit a branch of kind CODE to location LOC, comparing OP0 against
   OP1.  OP0 must be HImode or SImode.  SImode comparisons that the
   hardware cannot do directly are decomposed into recursive HImode
   compare-and-branches (see the numbered cases in the comment block
   above this function).  */

void
xstormy16_emit_cbranch (enum rtx_code code, rtx op0, rtx op1, rtx loc)
{
  rtx condition_rtx, loc_ref, branch, cy_clobber;
  rtvec vec;
  enum machine_mode mode;

  mode = GET_MODE (op0);
  gcc_assert (mode == HImode || mode == SImode);

  /* Case 4 from the comment above: GT/LE/GTU/LEU are synthesized from
     an LT/LTU branch plus an NE/EQ branch.  */
  if (mode == SImode
      && (code == GT || code == LE || code == GTU || code == LEU))
    {
      int unsigned_p = (code == GTU || code == LEU);
      int gt_p = (code == GT || code == GTU);
      rtx lab = NULL_RTX;

      /* For a "greater than" test the first branch skips past the
         second on the less-than case; for "less or equal" the first
         branch goes straight to LOC.  */
      if (gt_p)
        lab = gen_label_rtx ();
      xstormy16_emit_cbranch (unsigned_p ? LTU : LT, op0, op1, gt_p ? lab : loc);
      /* This should be generated as a comparison against the temporary
         created by the previous insn, but reload can't handle that.  */
      xstormy16_emit_cbranch (gt_p ? NE : EQ, op0, op1, loc);
      if (gt_p)
        emit_label (lab);
      return;
    }
  /* Case 2: SImode EQ/NE against a nonzero value becomes one HImode
     compare-and-branch per word.  */
  else if (mode == SImode
           && (code == NE || code == EQ)
           && op1 != const0_rtx)
    {
      rtx op0_word, op1_word;
      rtx lab = NULL_RTX;
      int num_words = GET_MODE_BITSIZE (mode) / BITS_PER_WORD;
      int i;

      if (code == EQ)
        lab = gen_label_rtx ();

      /* All but the last word: any inequality decides the NE case
         immediately (or, for EQ, skips to the fall-through label).  */
      for (i = 0; i < num_words - 1; i++)
        {
          op0_word = simplify_gen_subreg (word_mode, op0, mode,
                                          i * UNITS_PER_WORD);
          op1_word = simplify_gen_subreg (word_mode, op1, mode,
                                          i * UNITS_PER_WORD);
          xstormy16_emit_cbranch (NE, op0_word, op1_word, code == EQ ? lab : loc);
        }
      /* The last word uses the original condition.  */
      op0_word = simplify_gen_subreg (word_mode, op0, mode,
                                      i * UNITS_PER_WORD);
      op1_word = simplify_gen_subreg (word_mode, op1, mode,
                                      i * UNITS_PER_WORD);
      xstormy16_emit_cbranch (code, op0_word, op1_word, loc);

      if (code == EQ)
        emit_label (lab);
      return;
    }

  /* We can't allow reload to try to generate any reload after a branch,
     so when some register must match we must make the temporary ourselves.  */
  if (mode != HImode)
    {
      rtx tmp;
      tmp = gen_reg_rtx (mode);
      emit_move_insn (tmp, op0);
      op0 = tmp;
    }

  condition_rtx = gen_rtx_fmt_ee (code, mode, op0, op1);
  loc_ref = gen_rtx_LABEL_REF (VOIDmode, loc);
  branch = gen_rtx_SET (VOIDmode, pc_rtx,
                        gen_rtx_IF_THEN_ELSE (VOIDmode, condition_rtx,
                                              loc_ref, pc_rtx));

  /* Every form of the branch clobbers the carry flag.  */
  cy_clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (BImode, CARRY_REGNUM));

  if (mode == HImode)
    vec = gen_rtvec (2, branch, cy_clobber);
  else if (code == NE || code == EQ)
    vec = gen_rtvec (2, branch, gen_rtx_CLOBBER (VOIDmode, op0));
  else
    {
      rtx sub;
#if 0
      sub = gen_rtx_SET (VOIDmode, op0, gen_rtx_MINUS (SImode, op0, op1));
#else
      sub = gen_rtx_CLOBBER (SImode, op0);
#endif
      vec = gen_rtvec (3, branch, sub, cy_clobber);
    }

  emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, vec));
}
239
 
240
/* Take a SImode conditional branch, one of GT/LE/GTU/LEU, and split
   the arithmetic operation.  Most of the work is done by
   xstormy16_expand_arith.  */

void
xstormy16_split_cbranch (enum machine_mode mode, rtx label, rtx comparison,
                         rtx dest)
{
  rtx op0 = XEXP (comparison, 0);
  rtx op1 = XEXP (comparison, 1);
  rtx seq, last_insn;
  rtx compare;

  /* Generate the subtract/compare sequence into a detached insn list
     so it can be patched before being emitted for real.  */
  start_sequence ();
  xstormy16_expand_arith (mode, COMPARE, dest, op0, op1);
  seq = get_insns ();
  end_sequence ();

  gcc_assert (INSN_P (seq));

  /* Find the final insn of the sequence; its first PARALLEL element
     holds the comparison to be rewritten.  */
  last_insn = seq;
  while (NEXT_INSN (last_insn) != NULL_RTX)
    last_insn = NEXT_INSN (last_insn);

  /* Patch the placeholder comparison in place: install the real
     condition code and point the branch at LABEL, then emit.  */
  compare = SET_SRC (XVECEXP (PATTERN (last_insn), 0, 0));
  PUT_CODE (XEXP (compare, 0), GET_CODE (comparison));
  XEXP (compare, 1) = gen_rtx_LABEL_REF (VOIDmode, label);
  emit_insn (seq);
}
269
 
270
 
271
/* Return the string to output a conditional branch to LABEL, which is
   the operand number of the label.

   OP is the conditional expression, or NULL for branch-always.

   REVERSED is nonzero if we should reverse the sense of the comparison.

   INSN is the insn.

   The returned string lives in a static buffer and is overwritten by
   the next call.  When the target is out of range of a short branch
   (NEED_LONGBRANCH), the condition is inverted and a short branch
   around a "jmpf" is emitted instead.  */

char *
xstormy16_output_cbranch_hi (rtx op, const char *label, int reversed, rtx insn)
{
  static char string[64];
  /* Length 8 (conditional) or 4 (unconditional) marks the long form;
     the lengths come from the insn attributes in the .md file.  */
  int need_longbranch = (op != NULL_RTX
                         ? get_attr_length (insn) == 8
                         : get_attr_length (insn) == 4);
  /* A long branch jumps *around* the jmpf, so it needs the opposite
     condition; XOR folds that into the caller's requested reversal.  */
  int really_reversed = reversed ^ need_longbranch;
  const char *ccode;
  const char *templ;
  const char *operands;
  enum rtx_code code;

  /* Unconditional branch: just pick the short or long opcode.  */
  if (! op)
    {
      if (need_longbranch)
        ccode = "jmpf";
      else
        ccode = "br";
      sprintf (string, "%s %s", ccode, label);
      return string;
    }

  code = GET_CODE (op);

  /* If the first operand is not a register, swap the operands and the
     condition so the register ends up first.  */
  if (! REG_P (XEXP (op, 0)))
    {
      code = swap_condition (code);
      operands = "%3,%2";
    }
  else
      operands = "%2,%3";

  /* Work out which way this really branches.  */
  if (really_reversed)
    code = reverse_condition (code);

  switch (code)
    {
    case EQ:   ccode = "z";   break;
    case NE:   ccode = "nz";  break;
    case GE:   ccode = "ge";  break;
    case LT:   ccode = "lt";  break;
    case GT:   ccode = "gt";  break;
    case LE:   ccode = "le";  break;
    case GEU:  ccode = "nc";  break;
    case LTU:  ccode = "c";   break;
    case GTU:  ccode = "hi";  break;
    case LEU:  ccode = "ls";  break;

    default:
      gcc_unreachable ();
    }

  /* Long form: branch on the inverted condition over a far jump.  */
  if (need_longbranch)
    templ = "b%s %s,.+8 | jmpf %s";
  else
    templ = "b%s %s,%s";
  sprintf (string, templ, ccode, operands, label);

  return string;
}
342
 
343
/* Return the string to output a conditional branch to LABEL, which is
   the operand number of the label, but suitable for the tail of a
   SImode branch.

   OP is the conditional expression (OP is never NULL_RTX).

   REVERSED is nonzero if we should reverse the sense of the comparison.

   INSN is the insn.

   Emits a preparatory instruction ("or" of the two halves for EQ/NE,
   "sbc" for the carry-based conditions) followed by the branch; the
   returned string lives in a static buffer.  */

char *
xstormy16_output_cbranch_si (rtx op, const char *label, int reversed, rtx insn)
{
  static char string[64];
  int need_longbranch = get_attr_length (insn) >= 8;
  /* A long branch jumps around the jmpf, so invert the condition.  */
  int really_reversed = reversed ^ need_longbranch;
  const char *ccode;
  const char *templ;
  char prevop[16];
  enum rtx_code code;

  code = GET_CODE (op);

  /* Work out which way this really branches.  */
  if (really_reversed)
    code = reverse_condition (code);

  switch (code)
    {
    case EQ:   ccode = "z";   break;
    case NE:   ccode = "nz";  break;
    case GE:   ccode = "ge";  break;
    case LT:   ccode = "lt";  break;
    case GEU:  ccode = "nc";  break;
    case LTU:  ccode = "c";   break;

      /* The missing codes above should never be generated.  */
    default:
      gcc_unreachable ();
    }

  /* Choose the instruction that sets the flags for the branch.  */
  switch (code)
    {
    case EQ: case NE:
      {
        int regnum;

        gcc_assert (REG_P (XEXP (op, 0)));

        /* OR the two halves of the register pair together so the Z
           flag reflects the whole SImode value.  */
        regnum = REGNO (XEXP (op, 0));
        sprintf (prevop, "or %s,%s", reg_names[regnum], reg_names[regnum+1]);
      }
      break;

    case GE: case LT: case GEU: case LTU:
      /* Finish the multiword subtract; the branch tests the result.  */
      strcpy (prevop, "sbc %2,%3");
      break;

    default:
      gcc_unreachable ();
    }

  if (need_longbranch)
    templ = "%s | b%s .+6 | jmpf %s";
  else
    templ = "%s | b%s %s";
  sprintf (string, templ, prevop, ccode, label);

  return string;
}
413
 
414
/* Many machines have some registers that cannot be copied directly to or from
415
   memory or even from other types of registers.  An example is the `MQ'
416
   register, which on most machines, can only be copied to or from general
417
   registers, but not memory.  Some machines allow copying all registers to and
418
   from memory, but require a scratch register for stores to some memory
419
   locations (e.g., those with symbolic address on the RT, and those with
420
   certain symbolic address on the SPARC when compiling PIC).  In some cases,
421
   both an intermediate and a scratch register are required.
422
 
423
   You should define these macros to indicate to the reload phase that it may
424
   need to allocate at least one register for a reload in addition to the
425
   register to contain the data.  Specifically, if copying X to a register
426
   RCLASS in MODE requires an intermediate register, you should define
427
   `SECONDARY_INPUT_RELOAD_CLASS' to return the largest register class all of
428
   whose registers can be used as intermediate registers or scratch registers.
429
 
430
   If copying a register RCLASS in MODE to X requires an intermediate or scratch
431
   register, `SECONDARY_OUTPUT_RELOAD_CLASS' should be defined to return the
432
   largest register class required.  If the requirements for input and output
433
   reloads are the same, the macro `SECONDARY_RELOAD_CLASS' should be used
434
   instead of defining both macros identically.
435
 
436
   The values returned by these macros are often `GENERAL_REGS'.  Return
437
   `NO_REGS' if no spare register is needed; i.e., if X can be directly copied
438
   to or from a register of RCLASS in MODE without requiring a scratch register.
439
   Do not define this macro if it would always return `NO_REGS'.
440
 
441
   If a scratch register is required (either with or without an intermediate
442
   register), you should define patterns for `reload_inM' or `reload_outM', as
443
   required..  These patterns, which will normally be implemented with a
444
   `define_expand', should be similar to the `movM' patterns, except that
445
   operand 2 is the scratch register.
446
 
447
   Define constraints for the reload register and scratch register that contain
448
   a single register class.  If the original reload register (whose class is
449
   RCLASS) can meet the constraint given in the pattern, the value returned by
450
   these macros is used for the class of the scratch register.  Otherwise, two
451
   additional reload registers are required.  Their classes are obtained from
452
   the constraints in the insn pattern.
453
 
454
   X might be a pseudo-register or a `subreg' of a pseudo-register, which could
455
   either be in a hard register or in memory.  Use `true_regnum' to find out;
456
   it will return -1 if the pseudo is in memory and the hard register number if
457
   it is in a register.
458
 
459
   These macros should not be used in the case where a particular class of
460
   registers can only be copied to memory and not to another class of
461
   registers.  In that case, secondary reload registers are not needed and
462
   would not be helpful.  Instead, a stack location must be used to perform the
463
   copy and the `movM' pattern should use memory as an intermediate storage.
464
   This case often occurs between floating-point and general registers.  */
465
 
466
enum reg_class
467
xstormy16_secondary_reload_class (enum reg_class rclass,
468
                                  enum machine_mode mode ATTRIBUTE_UNUSED,
469
                                  rtx x)
470
{
471
  /* This chip has the interesting property that only the first eight
472
     registers can be moved to/from memory.  */
473
  if ((MEM_P (x)
474
       || ((GET_CODE (x) == SUBREG || REG_P (x))
475
           && (true_regnum (x) == -1
476
               || true_regnum (x) >= FIRST_PSEUDO_REGISTER)))
477
      && ! reg_class_subset_p (rclass, EIGHT_REGS))
478
    return EIGHT_REGS;
479
 
480
  return NO_REGS;
481
}
482
 
483
/* Worker function for TARGET_PREFERRED_RELOAD_CLASS
484
   and TARGET_PREFERRED_OUTPUT_RELOAD_CLASS.  */
485
 
486
static reg_class_t
487
xstormy16_preferred_reload_class (rtx x, reg_class_t rclass)
488
{
489
  if (rclass == GENERAL_REGS && MEM_P (x))
490
    return EIGHT_REGS;
491
 
492
  return rclass;
493
}
494
 
495
/* Predicate for symbols and addresses that reflect special 8-bit
496
   addressing.  */
497
 
498
int
499
xstormy16_below100_symbol (rtx x,
500
                           enum machine_mode mode ATTRIBUTE_UNUSED)
501
{
502
  if (GET_CODE (x) == CONST)
503
    x = XEXP (x, 0);
504
  if (GET_CODE (x) == PLUS && CONST_INT_P (XEXP (x, 1)))
505
    x = XEXP (x, 0);
506
 
507
  if (GET_CODE (x) == SYMBOL_REF)
508
    return (SYMBOL_REF_FLAGS (x) & SYMBOL_FLAG_XSTORMY16_BELOW100) != 0;
509
 
510
  if (CONST_INT_P (x))
511
    {
512
      HOST_WIDE_INT i = INTVAL (x);
513
 
514
      if ((i >= 0x0000 && i <= 0x00ff)
515
          || (i >= 0x7f00 && i <= 0x7fff))
516
        return 1;
517
    }
518
  return 0;
519
}
520
 
521
/* Likewise, but only for non-volatile MEMs, for patterns where the
   MEM will get split into smaller sized accesses.  Splitting a
   volatile access would change the number and width of the memory
   transactions, so those are rejected outright.  */

int
xstormy16_splittable_below100_operand (rtx x, enum machine_mode mode)
{
  if (MEM_P (x) && MEM_VOLATILE_P (x))
    return 0;
  return xstormy16_below100_operand (x, mode);
}
531
 
532
/* Expand an 8-bit IOR.  This either detects the one case we can
   actually do, or uses a 16-bit IOR.

   OPERANDS is {dest, src, value}; on exit the result has been stored
   into operands[0], possibly through a fresh temporary.  */

void
xstormy16_expand_iorqi3 (rtx *operands)
{
  rtx in, out, outsub, val;

  out = operands[0];
  in = operands[1];
  val = operands[2];

  /* Setting a single bit is the only QImode IOR the hardware supports
     directly; it works on a below-100 address or a register.  */
  if (xstormy16_onebit_set_operand (val, QImode))
    {
      if (!xstormy16_below100_or_register (in, QImode))
        in = copy_to_mode_reg (QImode, in);
      if (!xstormy16_below100_or_register (out, QImode))
        out = gen_reg_rtx (QImode);
      emit_insn (gen_iorqi3_internal (out, in, val));
      if (out != operands[0])
        emit_move_insn (operands[0], out);
      return;
    }

  /* General case: force operands into registers and do the operation
     in HImode; only the low byte of the result is meaningful.  */
  if (! REG_P (in))
    in = copy_to_mode_reg (QImode, in);

  if (! REG_P (val) && ! CONST_INT_P (val))
    val = copy_to_mode_reg (QImode, val);

  if (! REG_P (out))
    out = gen_reg_rtx (QImode);

  in = simplify_gen_subreg (HImode, in, QImode, 0);
  outsub = simplify_gen_subreg (HImode, out, QImode, 0);

  if (! CONST_INT_P (val))
    val = simplify_gen_subreg (HImode, val, QImode, 0);

  emit_insn (gen_iorhi3 (outsub, in, val));

  if (out != operands[0])
    emit_move_insn (operands[0], out);
}
576
 
577
/* Expand an 8-bit AND.  This either detects the one case we can
   actually do, or uses a 16-bit AND.

   OPERANDS is {dest, src, value}; mirror image of
   xstormy16_expand_iorqi3, with bit-clear instead of bit-set.  */

void
xstormy16_expand_andqi3 (rtx *operands)
{
  rtx in, out, outsub, val;

  out = operands[0];
  in = operands[1];
  val = operands[2];

  /* Clearing a single bit is the only QImode AND the hardware supports
     directly; it works on a below-100 address or a register.  */
  if (xstormy16_onebit_clr_operand (val, QImode))
    {
      if (!xstormy16_below100_or_register (in, QImode))
        in = copy_to_mode_reg (QImode, in);
      if (!xstormy16_below100_or_register (out, QImode))
        out = gen_reg_rtx (QImode);
      emit_insn (gen_andqi3_internal (out, in, val));
      if (out != operands[0])
        emit_move_insn (operands[0], out);
      return;
    }

  /* General case: force operands into registers and do the operation
     in HImode; only the low byte of the result is meaningful.  */
  if (! REG_P (in))
    in = copy_to_mode_reg (QImode, in);

  if (! REG_P (val) && ! CONST_INT_P (val))
    val = copy_to_mode_reg (QImode, val);

  if (! REG_P (out))
    out = gen_reg_rtx (QImode);

  in = simplify_gen_subreg (HImode, in, QImode, 0);
  outsub = simplify_gen_subreg (HImode, out, QImode, 0);

  if (! CONST_INT_P (val))
    val = simplify_gen_subreg (HImode, val, QImode, 0);

  emit_insn (gen_andhi3 (outsub, in, val));

  if (out != operands[0])
    emit_move_insn (operands[0], out);
}
621
 
622
/* True if X is a CONST_INT that, after adding OFFSET, fits the signed
   12-bit displacement of a register-plus-offset address, i.e. lies in
   [-2048, 2047].  The unsigned-compare trick checks both bounds at
   once.  */
#define LEGITIMATE_ADDRESS_INTEGER_P(X, OFFSET)                         \
  (CONST_INT_P (X)                                                      \
  && (unsigned HOST_WIDE_INT) (INTVAL (X) + (OFFSET) + 2048) < 4096)

/* True if X is a CONST_INT that, after adding OFFSET, is usable as an
   absolute address: below 0x100 (the below-100 area) or within
   0x7F00..0x7FFF.  */
#define LEGITIMATE_ADDRESS_CONST_INT_P(X, OFFSET)                        \
 (CONST_INT_P (X)                                                        \
  && INTVAL (X) + (OFFSET) >= 0                                           \
  && INTVAL (X) + (OFFSET) < 0x8000                                      \
  && (INTVAL (X) + (OFFSET) < 0x100 || INTVAL (X) + (OFFSET) >= 0x7F00))
631
 
632
bool
633
xstormy16_legitimate_address_p (enum machine_mode mode ATTRIBUTE_UNUSED,
634
                                rtx x, bool strict)
635
{
636
  if (LEGITIMATE_ADDRESS_CONST_INT_P (x, 0))
637
    return true;
638
 
639
  if (GET_CODE (x) == PLUS
640
      && LEGITIMATE_ADDRESS_INTEGER_P (XEXP (x, 1), 0))
641
    {
642
      x = XEXP (x, 0);
643
      /* PR 31232: Do not allow INT+INT as an address.  */
644
      if (CONST_INT_P (x))
645
        return false;
646
    }
647
 
648
  if ((GET_CODE (x) == PRE_MODIFY && CONST_INT_P (XEXP (XEXP (x, 1), 1)))
649
      || GET_CODE (x) == POST_INC
650
      || GET_CODE (x) == PRE_DEC)
651
    x = XEXP (x, 0);
652
 
653
  if (REG_P (x)
654
      && REGNO_OK_FOR_BASE_P (REGNO (x))
655
      && (! strict || REGNO (x) < FIRST_PSEUDO_REGISTER))
656
    return true;
657
 
658
  if (xstormy16_below100_symbol (x, mode))
659
    return true;
660
 
661
  return false;
662
}
663
 
664
/* Worker function for TARGET_MODE_DEPENDENT_ADDRESS_P.

   On this chip, this is true if the address is valid with an offset
   of 0 but not of 6, because in that case it cannot be used as an
   address for DImode or DFmode, or if the address is a post-increment
   or pre-decrement address.  */

static bool
xstormy16_mode_dependent_address_p (const_rtx x)
{
  /* Absolute address that is legitimate at offset 0 but would fall
     out of the valid ranges at offset 6 (the last word of an 8-byte
     access).  */
  if (LEGITIMATE_ADDRESS_CONST_INT_P (x, 0)
      && ! LEGITIMATE_ADDRESS_CONST_INT_P (x, 6))
    return true;

  /* Likewise for a base-plus-displacement address whose displacement
     is near the top of the 12-bit range.  */
  if (GET_CODE (x) == PLUS
      && LEGITIMATE_ADDRESS_INTEGER_P (XEXP (x, 1), 0)
      && ! LEGITIMATE_ADDRESS_INTEGER_P (XEXP (x, 1), 6))
    return true;

  /* Auto-increment addresses are now treated generically in recog.c.  */
  return false;
}
686
 
687
int
688
short_memory_operand (rtx x, enum machine_mode mode)
689
{
690
  if (! memory_operand (x, mode))
691
    return 0;
692
  return (GET_CODE (XEXP (x, 0)) != PLUS);
693
}
694
 
695
/* Splitter for the 'move' patterns, for modes not directly implemented
   by hardware.  Emit insns to copy a value of mode MODE from SRC to
   DEST by copying one word at a time, choosing the copy direction so
   that overlapping register/memory operands and auto-modified
   addresses are handled correctly.

   This function is only called when reload_completed.  */

void
xstormy16_split_move (enum machine_mode mode, rtx dest, rtx src)
{
  int num_words = GET_MODE_BITSIZE (mode) / BITS_PER_WORD;
  int direction, end, i;
  int src_modifies = 0;       /* SRC address has side effects.  */
  int dest_modifies = 0;      /* DEST address has side effects.  */
  int src_volatile = 0;
  int dest_volatile = 0;
  rtx mem_operand;            /* Address of whichever operand is a MEM.  */
  rtx auto_inc_reg_rtx = NULL_RTX;

  /* Check initial conditions.  */
  gcc_assert (reload_completed
              && mode != QImode && mode != HImode
              && nonimmediate_operand (dest, mode)
              && general_operand (src, mode));

  /* This case is not supported below, and shouldn't be generated.  */
  gcc_assert (! MEM_P (dest) || ! MEM_P (src));

  /* This case is very very bad after reload, so trap it now.  */
  gcc_assert (GET_CODE (dest) != SUBREG && GET_CODE (src) != SUBREG);

  /* The general idea is to copy by words, offsetting the source and
     destination.  Normally the least-significant word will be copied
     first, but for pre-dec operations it's better to copy the
     most-significant word first.  Only one operand can be a pre-dec
     or post-inc operand.

     It's also possible that the copy overlaps so that the direction
     must be reversed.  */
  direction = 1;

  if (MEM_P (dest))
    {
      mem_operand = XEXP (dest, 0);
      dest_modifies = side_effects_p (mem_operand);
      if (auto_inc_p (mem_operand))
        auto_inc_reg_rtx = XEXP (mem_operand, 0);
      dest_volatile = MEM_VOLATILE_P (dest);
      if (dest_volatile)
        {
          /* Clear the volatile flag on the whole-mode MEM so the
             per-word subregs can be generated; it is re-applied to
             each word below.  */
          dest = copy_rtx (dest);
          MEM_VOLATILE_P (dest) = 0;
        }
    }
  else if (MEM_P (src))
    {
      mem_operand = XEXP (src, 0);
      src_modifies = side_effects_p (mem_operand);
      if (auto_inc_p (mem_operand))
        auto_inc_reg_rtx = XEXP (mem_operand, 0);
      src_volatile = MEM_VOLATILE_P (src);
      if (src_volatile)
        {
          src = copy_rtx (src);
          MEM_VOLATILE_P (src) = 0;
        }
    }
  else
    mem_operand = NULL_RTX;

  if (mem_operand == NULL_RTX)
    {
      /* Register-to-register copy: go high-to-low when the register
         ranges overlap with DEST above SRC.  */
      if (REG_P (src)
          && REG_P (dest)
          && reg_overlap_mentioned_p (dest, src)
          && REGNO (dest) > REGNO (src))
        direction = -1;
    }
  else if (GET_CODE (mem_operand) == PRE_DEC
      || (GET_CODE (mem_operand) == PLUS
          && GET_CODE (XEXP (mem_operand, 0)) == PRE_DEC))
    /* Pre-decrement addressing walks downward through memory.  */
    direction = -1;
  else if (MEM_P (src) && reg_overlap_mentioned_p (dest, src))
    {
      /* Loading into registers that appear in the source address:
         copy in whichever order leaves the address register intact
         until it is no longer needed.  */
      int regno;

      gcc_assert (REG_P (dest));
      regno = REGNO (dest);

      gcc_assert (refers_to_regno_p (regno, regno + num_words,
                                     mem_operand, 0));

      if (refers_to_regno_p (regno, regno + 1, mem_operand, 0))
        direction = -1;
      else if (refers_to_regno_p (regno + num_words - 1, regno + num_words,
                                  mem_operand, 0))
        direction = 1;
      else
        /* This means something like
           (set (reg:DI r0) (mem:DI (reg:HI r1)))
           which we'd need to support by doing the set of the second word
           last.  */
        gcc_unreachable ();
    }

  /* Emit one word-mode move per word, walking in DIRECTION.  */
  end = direction < 0 ? -1 : num_words;
  for (i = direction < 0 ? num_words - 1 : 0; i != end; i += direction)
    {
      rtx w_src, w_dest, insn;

      /* For an auto-modified address, re-use the modifying address
         itself for each word; the side effect supplies the stepping.  */
      if (src_modifies)
        w_src = gen_rtx_MEM (word_mode, mem_operand);
      else
        w_src = simplify_gen_subreg (word_mode, src, mode, i * UNITS_PER_WORD);
      if (src_volatile)
        MEM_VOLATILE_P (w_src) = 1;
      if (dest_modifies)
        w_dest = gen_rtx_MEM (word_mode, mem_operand);
      else
        w_dest = simplify_gen_subreg (word_mode, dest, mode,
                                      i * UNITS_PER_WORD);
      if (dest_volatile)
        MEM_VOLATILE_P (w_dest) = 1;

      /* The simplify_subreg calls must always be able to simplify.  */
      gcc_assert (GET_CODE (w_src) != SUBREG
                  && GET_CODE (w_dest) != SUBREG);

      insn = emit_insn (gen_rtx_SET (VOIDmode, w_dest, w_src));
      if (auto_inc_reg_rtx)
        REG_NOTES (insn) = alloc_EXPR_LIST (REG_INC,
                                            auto_inc_reg_rtx,
                                            REG_NOTES (insn));
    }
}
829
 
830
/* Expander for the 'move' patterns.  Emit insns to copy a value of
   mode MODE from SRC to DEST.  PRE_MODIFY addresses are lowered to an
   explicit address update (which clobbers the carry flag) followed by
   a plain register-indirect access.  */

void
xstormy16_expand_move (enum machine_mode mode, rtx dest, rtx src)
{
  if (MEM_P (dest) && (GET_CODE (XEXP (dest, 0)) == PRE_MODIFY))
    {
      /* Split (mem (pre_modify reg expr)) into "reg := expr" plus a
         store through the updated register.  */
      rtx pmv      = XEXP (dest, 0);
      rtx dest_reg = XEXP (pmv, 0);
      rtx dest_mod = XEXP (pmv, 1);
      rtx set      = gen_rtx_SET (Pmode, dest_reg, dest_mod);
      rtx clobber  = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (BImode, CARRY_REGNUM));

      dest = gen_rtx_MEM (mode, dest_reg);
      emit_insn (gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, set, clobber)));
    }
  else if (MEM_P (src) && (GET_CODE (XEXP (src, 0)) == PRE_MODIFY))
    {
      /* Same lowering on the source side.  */
      rtx pmv     = XEXP (src, 0);
      rtx src_reg = XEXP (pmv, 0);
      rtx src_mod = XEXP (pmv, 1);
      rtx set     = gen_rtx_SET (Pmode, src_reg, src_mod);
      rtx clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (BImode, CARRY_REGNUM));

      src = gen_rtx_MEM (mode, src_reg);
      emit_insn (gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, set, clobber)));
    }

  /* There are only limited immediate-to-memory move instructions.  */
  if (! reload_in_progress
      && ! reload_completed
      && MEM_P (dest)
      && (! CONST_INT_P (XEXP (dest, 0))
          || ! xstormy16_legitimate_address_p (mode, XEXP (dest, 0), 0))
      && ! xstormy16_below100_operand (dest, mode)
      && ! REG_P (src)
      && GET_CODE (src) != SUBREG)
    src = copy_to_mode_reg (mode, src);

  /* Don't emit something we would immediately split.  */
  if (reload_completed
      && mode != HImode && mode != QImode)
    {
      xstormy16_split_move (mode, dest, src);
      return;
    }

  emit_insn (gen_rtx_SET (VOIDmode, dest, src));
}
880
 
881
/* Stack Layout:
882
 
883
   The stack is laid out as follows:
884
 
885
SP->
886
FP->    Local variables
887
        Register save area (up to 4 words)
888
        Argument register save area for stdarg (NUM_ARGUMENT_REGISTERS words)
889
 
890
AP->    Return address (two words)
891
        9th procedure parameter word
892
        10th procedure parameter word
893
        ...
894
        last procedure parameter word
895
 
896
  The frame pointer location is tuned to make it most likely that all
897
  parameters and local variables can be accessed using a load-indexed
898
  instruction.  */
899
 
900
/* A structure to describe the layout.  See the "Stack Layout" diagram
   above for where each area sits between SP, FP and AP.  */
struct xstormy16_stack_layout
{
  /* Size of the topmost three items on the stack.  */
  int locals_size;              /* Local variables.  */
  int register_save_size;       /* Callee-saved register area.  */
  int stdarg_save_size;         /* Argument-register save area for stdarg.  */
  /* Sum of the above items.  */
  int frame_size;
  /* Various offsets.  */
  int first_local_minus_ap;     /* First local relative to the arg pointer.  */
  int sp_minus_fp;              /* Stack pointer relative to frame pointer.  */
  int fp_minus_ap;              /* Frame pointer relative to arg pointer.  */
};
914
 
915
/* Does REGNO need to be saved?  Either it is call-saved and live, or
   (for an interrupt function, IFUN) it is a non-fixed call-used
   register other than the carry flag that might be clobbered — i.e.
   it is live, or the function is not a leaf and a callee could
   clobber it.  */
#define REG_NEEDS_SAVE(REGNUM, IFUN)                                    \
  ((df_regs_ever_live_p (REGNUM) && ! call_used_regs[REGNUM])           \
   || (IFUN && ! fixed_regs[REGNUM] && call_used_regs[REGNUM]           \
       && (REGNUM != CARRY_REGNUM)                                      \
       && (df_regs_ever_live_p (REGNUM) || ! current_function_is_leaf)))
921
 
922
/* Compute the stack layout.  Returns a fully-filled-in
   xstormy16_stack_layout describing the current function's frame.  */

struct xstormy16_stack_layout
xstormy16_compute_stack_layout (void)
{
  struct xstormy16_stack_layout layout;
  int regno;
  const int ifun = xstormy16_interrupt_function_p ();

  /* Space the function needs for its local variables.  */
  layout.locals_size = get_frame_size ();

  /* One word for every register that must be preserved.  */
  layout.register_save_size = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if (REG_NEEDS_SAVE (regno, ifun))
      layout.register_save_size += UNITS_PER_WORD;

  /* A varargs function additionally dumps all argument registers.  */
  if (cfun->stdarg)
    layout.stdarg_save_size = NUM_ARGUMENT_REGISTERS * UNITS_PER_WORD;
  else
    layout.stdarg_save_size = 0;

  layout.frame_size = (layout.locals_size
                       + layout.register_save_size
                       + layout.stdarg_save_size);

  /* Choose fp_minus_ap so that, when possible, both incoming stack
     arguments and locals stay within reach of the load-indexed
     instruction (2048 appears to be that addressing limit — see the
     "Stack Layout" comment above).  */
  if (crtl->args.size <= 2048 && crtl->args.size != -1)
    {
      if (layout.frame_size - INCOMING_FRAME_SP_OFFSET
          + crtl->args.size <= 2048)
        layout.fp_minus_ap = layout.frame_size - INCOMING_FRAME_SP_OFFSET;
      else
        layout.fp_minus_ap = 2048 - crtl->args.size;
    }
  else
    layout.fp_minus_ap = (layout.stdarg_save_size
                          + layout.register_save_size
                          - INCOMING_FRAME_SP_OFFSET);
  /* The remaining offsets follow arithmetically from fp_minus_ap.  */
  layout.sp_minus_fp = (layout.frame_size - INCOMING_FRAME_SP_OFFSET
                        - layout.fp_minus_ap);
  layout.first_local_minus_ap = layout.sp_minus_fp - layout.locals_size;
  return layout;
}
964
 
965
/* Worker function for TARGET_CAN_ELIMINATE.  */
966
 
967
static bool
968
xstormy16_can_eliminate (const int from, const int to)
969
{
970
  return (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM
971
          ? ! frame_pointer_needed
972
          : true);
973
}
974
 
975
/* Determine how all the special registers get eliminated.  */
976
 
977
int
978
xstormy16_initial_elimination_offset (int from, int to)
979
{
980
  struct xstormy16_stack_layout layout;
981
  int result;
982
 
983
  layout = xstormy16_compute_stack_layout ();
984
 
985
  if (from == FRAME_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
986
    result = layout.sp_minus_fp - layout.locals_size;
987
  else if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
988
    result = - layout.locals_size;
989
  else if (from == ARG_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
990
    result = - layout.fp_minus_ap;
991
  else if (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
992
    result = - (layout.sp_minus_fp + layout.fp_minus_ap);
993
  else
994
    gcc_unreachable ();
995
 
996
  return result;
997
}
998
 
999
static rtx
1000
emit_addhi3_postreload (rtx dest, rtx src0, rtx src1)
1001
{
1002
  rtx set, clobber, insn;
1003
 
1004
  set = gen_rtx_SET (VOIDmode, dest, gen_rtx_PLUS (HImode, src0, src1));
1005
  clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (BImode, CARRY_REGNUM));
1006
  insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, set, clobber)));
1007
  return insn;
1008
}
1009
 
1010
/* Called after register allocation to add any instructions needed for
1011
   the prologue.  Using a prologue insn is favored compared to putting
1012
   all of the instructions in the TARGET_ASM_FUNCTION_PROLOGUE macro,
1013
   since it allows the scheduler to intermix instructions with the
1014
   saves of the caller saved registers.  In some cases, it might be
1015
   necessary to emit a barrier instruction as the last insn to prevent
1016
   such scheduling.
1017
 
1018
   Also any insns generated here should have RTX_FRAME_RELATED_P(insn) = 1
1019
   so that the debug info generation code can handle them properly.  */
1020
 
1021
void
1022
xstormy16_expand_prologue (void)
1023
{
1024
  struct xstormy16_stack_layout layout;
1025
  int regno;
1026
  rtx insn;
1027
  rtx mem_push_rtx;
1028
  const int ifun = xstormy16_interrupt_function_p ();
1029
 
1030
  mem_push_rtx = gen_rtx_POST_INC (Pmode, stack_pointer_rtx);
1031
  mem_push_rtx = gen_rtx_MEM (HImode, mem_push_rtx);
1032
 
1033
  layout = xstormy16_compute_stack_layout ();
1034
 
1035
  if (layout.locals_size >= 32768)
1036
    error ("local variable memory requirements exceed capacity");
1037
 
1038
  /* Save the argument registers if necessary.  */
1039
  if (layout.stdarg_save_size)
1040
    for (regno = FIRST_ARGUMENT_REGISTER;
1041
         regno < FIRST_ARGUMENT_REGISTER + NUM_ARGUMENT_REGISTERS;
1042
         regno++)
1043
      {
1044
        rtx dwarf;
1045
        rtx reg = gen_rtx_REG (HImode, regno);
1046
 
1047
        insn = emit_move_insn (mem_push_rtx, reg);
1048
        RTX_FRAME_RELATED_P (insn) = 1;
1049
 
1050
        dwarf = gen_rtx_SEQUENCE (VOIDmode, rtvec_alloc (2));
1051
 
1052
        XVECEXP (dwarf, 0, 0) = gen_rtx_SET (VOIDmode,
1053
                                             gen_rtx_MEM (Pmode, stack_pointer_rtx),
1054
                                             reg);
1055
        XVECEXP (dwarf, 0, 1) = gen_rtx_SET (Pmode, stack_pointer_rtx,
1056
                                             plus_constant (stack_pointer_rtx,
1057
                                                            GET_MODE_SIZE (Pmode)));
1058
        add_reg_note (insn, REG_FRAME_RELATED_EXPR, dwarf);
1059
        RTX_FRAME_RELATED_P (XVECEXP (dwarf, 0, 0)) = 1;
1060
        RTX_FRAME_RELATED_P (XVECEXP (dwarf, 0, 1)) = 1;
1061
      }
1062
 
1063
  /* Push each of the registers to save.  */
1064
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1065
    if (REG_NEEDS_SAVE (regno, ifun))
1066
      {
1067
        rtx dwarf;
1068
        rtx reg = gen_rtx_REG (HImode, regno);
1069
 
1070
        insn = emit_move_insn (mem_push_rtx, reg);
1071
        RTX_FRAME_RELATED_P (insn) = 1;
1072
 
1073
        dwarf = gen_rtx_SEQUENCE (VOIDmode, rtvec_alloc (2));
1074
 
1075
        XVECEXP (dwarf, 0, 0) = gen_rtx_SET (VOIDmode,
1076
                                             gen_rtx_MEM (Pmode, stack_pointer_rtx),
1077
                                             reg);
1078
        XVECEXP (dwarf, 0, 1) = gen_rtx_SET (Pmode, stack_pointer_rtx,
1079
                                             plus_constant (stack_pointer_rtx,
1080
                                                            GET_MODE_SIZE (Pmode)));
1081
        add_reg_note (insn, REG_FRAME_RELATED_EXPR, dwarf);
1082
        RTX_FRAME_RELATED_P (XVECEXP (dwarf, 0, 0)) = 1;
1083
        RTX_FRAME_RELATED_P (XVECEXP (dwarf, 0, 1)) = 1;
1084
      }
1085
 
1086
  /* It's just possible that the SP here might be what we need for
1087
     the new FP...  */
1088
  if (frame_pointer_needed && layout.sp_minus_fp == layout.locals_size)
1089
    {
1090
      insn = emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx);
1091
      RTX_FRAME_RELATED_P (insn) = 1;
1092
    }
1093
 
1094
  /* Allocate space for local variables.  */
1095
  if (layout.locals_size)
1096
    {
1097
      insn = emit_addhi3_postreload (stack_pointer_rtx, stack_pointer_rtx,
1098
                                     GEN_INT (layout.locals_size));
1099
      RTX_FRAME_RELATED_P (insn) = 1;
1100
    }
1101
 
1102
  /* Set up the frame pointer, if required.  */
1103
  if (frame_pointer_needed && layout.sp_minus_fp != layout.locals_size)
1104
    {
1105
      insn = emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx);
1106
      RTX_FRAME_RELATED_P (insn) = 1;
1107
 
1108
      if (layout.sp_minus_fp)
1109
        {
1110
          insn = emit_addhi3_postreload (hard_frame_pointer_rtx,
1111
                                         hard_frame_pointer_rtx,
1112
                                         GEN_INT (- layout.sp_minus_fp));
1113
          RTX_FRAME_RELATED_P (insn) = 1;
1114
        }
1115
    }
1116
}
1117
 
1118
/* Do we need an epilogue at all?  */
1119
 
1120
int
1121
direct_return (void)
1122
{
1123
  return (reload_completed
1124
          && xstormy16_compute_stack_layout ().frame_size == 0
1125
          && ! xstormy16_interrupt_function_p ());
1126
}
1127
 
1128
/* Called after register allocation to add any instructions needed for
   the epilogue.  Using an epilogue insn is favored compared to putting
   all of the instructions in the TARGET_ASM_FUNCTION_PROLOGUE macro,
   since it allows the scheduler to intermix instructions with the
   saves of the caller saved registers.  In some cases, it might be
   necessary to emit a barrier instruction as the last insn to prevent
   such scheduling.  */

void
xstormy16_expand_epilogue (void)
{
  struct xstormy16_stack_layout layout;
  rtx mem_pop_rtx;
  int regno;
  const int ifun = xstormy16_interrupt_function_p ();

  /* A pre-decrement load through SP implements a pop — the mirror of
     the post-increment push used in the prologue.  */
  mem_pop_rtx = gen_rtx_PRE_DEC (Pmode, stack_pointer_rtx);
  mem_pop_rtx = gen_rtx_MEM (HImode, mem_pop_rtx);

  layout = xstormy16_compute_stack_layout ();

  /* Pop the stack for the locals.  */
  if (layout.locals_size)
    {
      /* If FP still points at the bottom of the locals (see the
	 matching test in the prologue), restoring SP from FP is a
	 single move.  */
      if (frame_pointer_needed && layout.sp_minus_fp == layout.locals_size)
	emit_move_insn (stack_pointer_rtx, hard_frame_pointer_rtx);
      else
	emit_addhi3_postreload (stack_pointer_rtx, stack_pointer_rtx,
				GEN_INT (- layout.locals_size));
    }

  /* Restore any call-saved registers.  Iterate in reverse of the
     prologue's push order so the pops match the pushes.  */
  for (regno = FIRST_PSEUDO_REGISTER - 1; regno >= 0; regno--)
    if (REG_NEEDS_SAVE (regno, ifun))
      emit_move_insn (gen_rtx_REG (HImode, regno), mem_pop_rtx);

  /* Pop the stack for the stdarg save area.  The saved argument
     registers themselves are not restored, just the space.  */
  if (layout.stdarg_save_size)
    emit_addhi3_postreload (stack_pointer_rtx, stack_pointer_rtx,
			    GEN_INT (- layout.stdarg_save_size));

  /* Return.  Interrupt functions use a different return insn.  */
  if (ifun)
    emit_jump_insn (gen_return_internal_interrupt ());
  else
    emit_jump_insn (gen_return_internal ());
}
1175
 
1176
int
1177
xstormy16_epilogue_uses (int regno)
1178
{
1179
  if (reload_completed && call_used_regs[regno])
1180
    {
1181
      const int ifun = xstormy16_interrupt_function_p ();
1182
      return REG_NEEDS_SAVE (regno, ifun);
1183
    }
1184
  return 0;
1185
}
1186
 
1187
/* Profiling is not implemented for this target; issue a "sorry"
   diagnostic if the user asks for it.  */

void
xstormy16_function_profiler (void)
{
  sorry ("function_profiler support");
}
1192
 
1193
/* Update CUM to advance past an argument in the argument list.  The
1194
   values MODE, TYPE and NAMED describe that argument.  Once this is
1195
   done, the variable CUM is suitable for analyzing the *following*
1196
   argument with `TARGET_FUNCTION_ARG', etc.
1197
 
1198
   This function need not do anything if the argument in question was
1199
   passed on the stack.  The compiler knows how to track the amount of
1200
   stack space used for arguments without any special help.  However,
1201
   it makes life easier for xstormy16_build_va_list if it does update
1202
   the word count.  */
1203
 
1204
static void
1205
xstormy16_function_arg_advance (cumulative_args_t cum_v, enum machine_mode mode,
1206
                                const_tree type, bool named ATTRIBUTE_UNUSED)
1207
{
1208
  CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
1209
 
1210
  /* If an argument would otherwise be passed partially in registers,
1211
     and partially on the stack, the whole of it is passed on the
1212
     stack.  */
1213
  if (*cum < NUM_ARGUMENT_REGISTERS
1214
      && *cum + XSTORMY16_WORD_SIZE (type, mode) > NUM_ARGUMENT_REGISTERS)
1215
    *cum = NUM_ARGUMENT_REGISTERS;
1216
 
1217
  *cum += XSTORMY16_WORD_SIZE (type, mode);
1218
}
1219
 
1220
static rtx
1221
xstormy16_function_arg (cumulative_args_t cum_v, enum machine_mode mode,
1222
                        const_tree type, bool named ATTRIBUTE_UNUSED)
1223
{
1224
  CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
1225
 
1226
  if (mode == VOIDmode)
1227
    return const0_rtx;
1228
  if (targetm.calls.must_pass_in_stack (mode, type)
1229
      || *cum + XSTORMY16_WORD_SIZE (type, mode) > NUM_ARGUMENT_REGISTERS)
1230
    return NULL_RTX;
1231
  return gen_rtx_REG (mode, *cum + FIRST_ARGUMENT_REGISTER);
1232
}
1233
 
1234
/* Build the va_list type.

   For this chip, va_list is a record containing a counter and a pointer.
   The counter is of type 'int' and indicates how many bytes
   have been used to date.  The pointer indicates the stack position
   for arguments that have not been passed in registers.
   To keep the layout nice, the pointer is first in the structure.  */

static tree
xstormy16_build_builtin_va_list (void)
{
  tree f_1, f_2, record, type_decl;

  record = (*lang_hooks.types.make_type) (RECORD_TYPE);
  type_decl = build_decl (BUILTINS_LOCATION,
			  TYPE_DECL, get_identifier ("__va_list_tag"), record);

  /* Field 1: the stack position pointer ("base").  */
  f_1 = build_decl (BUILTINS_LOCATION,
		    FIELD_DECL, get_identifier ("base"),
		      ptr_type_node);
  /* Field 2: the bytes-consumed counter ("count").  */
  f_2 = build_decl (BUILTINS_LOCATION,
		    FIELD_DECL, get_identifier ("count"),
		      unsigned_type_node);

  DECL_FIELD_CONTEXT (f_1) = record;
  DECL_FIELD_CONTEXT (f_2) = record;

  TYPE_STUB_DECL (record) = type_decl;
  TYPE_NAME (record) = type_decl;
  /* Chain the fields: base first, then count.  */
  TYPE_FIELDS (record) = f_1;
  DECL_CHAIN (f_1) = f_2;

  layout_type (record);

  return record;
}
1270
 
1271
/* Implement the stdarg/varargs va_start macro.  STDARG_P is nonzero if this
   is stdarg.h instead of varargs.h.  VALIST is the tree of the va_list
   variable to initialize.  NEXTARG is the machine independent notion of the
   'next' argument after the variable arguments.  */

static void
xstormy16_expand_builtin_va_start (tree valist, rtx nextarg ATTRIBUTE_UNUSED)
{
  tree f_base, f_count;
  tree base, count;
  tree t,u;

  /* Interrupt functions have no normal argument save area, so
     va_start cannot work in them.  */
  if (xstormy16_interrupt_function_p ())
    error ("cannot use va_start in interrupt function");

  /* Locate the "base" and "count" fields of the va_list record (see
     xstormy16_build_builtin_va_list).  */
  f_base = TYPE_FIELDS (va_list_type_node);
  f_count = DECL_CHAIN (f_base);

  base = build3 (COMPONENT_REF, TREE_TYPE (f_base), valist, f_base, NULL_TREE);
  count = build3 (COMPONENT_REF, TREE_TYPE (f_count), valist, f_count,
		  NULL_TREE);

  /* base = incoming-args pointer, backed up over the return-address
     words (INCOMING_FRAME_SP_OFFSET).  */
  t = make_tree (TREE_TYPE (base), virtual_incoming_args_rtx);
  u = build_int_cst (NULL_TREE, - INCOMING_FRAME_SP_OFFSET);
  u = fold_convert (TREE_TYPE (count), u);
  t = fold_build_pointer_plus (t, u);
  t = build2 (MODIFY_EXPR, TREE_TYPE (base), base, t);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  /* count = number of bytes already consumed by the named
     (register-passed) arguments.  */
  t = build2 (MODIFY_EXPR, TREE_TYPE (count), count,
	      build_int_cst (NULL_TREE,
			     crtl->args.info * UNITS_PER_WORD));
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
}
1307
 
1308
/* Implement the stdarg/varargs va_arg macro.  VALIST is the variable
   of type va_list as a tree, TYPE is the type passed to va_arg.
   Note:  This algorithm is documented in stormy-abi.  */

static tree
xstormy16_gimplify_va_arg_expr (tree valist, tree type, gimple_seq *pre_p,
				gimple_seq *post_p ATTRIBUTE_UNUSED)
{
  tree f_base, f_count;
  tree base, count;
  tree count_tmp, addr, t;
  tree lab_gotaddr, lab_fromstack;
  int size, size_of_reg_args, must_stack;
  tree size_tree;

  /* Locate the "base" and "count" fields of the va_list record.  */
  f_base = TYPE_FIELDS (va_list_type_node);
  f_count = DECL_CHAIN (f_base);

  base = build3 (COMPONENT_REF, TREE_TYPE (f_base), valist, f_base, NULL_TREE);
  count = build3 (COMPONENT_REF, TREE_TYPE (f_count), valist, f_count,
		  NULL_TREE);

  /* size_tree = size of TYPE rounded up to a whole number of words.  */
  must_stack = targetm.calls.must_pass_in_stack (TYPE_MODE (type), type);
  size_tree = round_up (size_in_bytes (type), UNITS_PER_WORD);
  gimplify_expr (&size_tree, pre_p, NULL, is_gimple_val, fb_rvalue);

  size_of_reg_args = NUM_ARGUMENT_REGISTERS * UNITS_PER_WORD;

  count_tmp = get_initialized_tmp_var (count, pre_p, NULL);
  lab_gotaddr = create_artificial_label (UNKNOWN_LOCATION);
  lab_fromstack = create_artificial_label (UNKNOWN_LOCATION);
  addr = create_tmp_var (ptr_type_node, NULL);

  /* Fast path: if the argument could have been passed in registers,
     emit "if (count_tmp + size > size_of_reg_args) goto fromstack;
     addr = base + count_tmp; goto gotaddr; fromstack:".  */
  if (!must_stack)
    {
      tree r;

      t = fold_convert (TREE_TYPE (count), size_tree);
      t = build2 (PLUS_EXPR, TREE_TYPE (count), count_tmp, t);
      r = fold_convert (TREE_TYPE (count), size_int (size_of_reg_args));
      t = build2 (GT_EXPR, boolean_type_node, t, r);
      t = build3 (COND_EXPR, void_type_node, t,
		  build1 (GOTO_EXPR, void_type_node, lab_fromstack),
		  NULL_TREE);
      gimplify_and_add (t, pre_p);

      t = fold_build_pointer_plus (base, count_tmp);
      gimplify_assign (addr, t, pre_p);

      t = build1 (GOTO_EXPR, void_type_node, lab_gotaddr);
      gimplify_and_add (t, pre_p);

      t = build1 (LABEL_EXPR, void_type_node, lab_fromstack);
      gimplify_and_add (t, pre_p);
    }

  /* Arguments larger than a word might need to skip over some
     registers, since arguments are either passed entirely in
     registers or entirely on the stack.  */
  size = PUSH_ROUNDING (int_size_in_bytes (type));
  if (size > 2 || size < 0 || must_stack)
    {
      tree r, u;

      /* If count_tmp is still below the register area, bump it to the
	 end of that area: count_tmp = max (count_tmp, reg-area size).  */
      r = size_int (NUM_ARGUMENT_REGISTERS * UNITS_PER_WORD);
      u = build2 (MODIFY_EXPR, TREE_TYPE (count_tmp), count_tmp, r);

      t = fold_convert (TREE_TYPE (count), r);
      t = build2 (GE_EXPR, boolean_type_node, count_tmp, t);
      t = build3 (COND_EXPR, void_type_node, t, NULL_TREE, u);
      gimplify_and_add (t, pre_p);
    }

  /* Stack-passed argument:
     addr = base - (count_tmp - reg-area size - return-address words
		    + size), per the stormy-abi layout.  */
  t = size_int (NUM_ARGUMENT_REGISTERS * UNITS_PER_WORD
		+ INCOMING_FRAME_SP_OFFSET);
  t = fold_convert (TREE_TYPE (count), t);
  t = build2 (MINUS_EXPR, TREE_TYPE (count), count_tmp, t);
  t = build2 (PLUS_EXPR, TREE_TYPE (count), t,
	      fold_convert (TREE_TYPE (count), size_tree));
  t = fold_convert (TREE_TYPE (t), fold (t));
  t = fold_build1 (NEGATE_EXPR, TREE_TYPE (t), t);
  t = fold_build_pointer_plus (base, t);
  gimplify_assign (addr, t, pre_p);

  t = build1 (LABEL_EXPR, void_type_node, lab_gotaddr);
  gimplify_and_add (t, pre_p);

  /* count += size, committing the consumed bytes back to VALIST.  */
  t = fold_convert (TREE_TYPE (count), size_tree);
  t = build2 (PLUS_EXPR, TREE_TYPE (count), count_tmp, t);
  gimplify_assign (count, t, pre_p);

  addr = fold_convert (build_pointer_type (type), addr);
  return build_va_arg_indirect_ref (addr);
}
1402
 
1403
/* Worker function for TARGET_TRAMPOLINE_INIT.

   Writes four HImode words into the trampoline block M_TRAMP:
   an instruction loading STATIC_CHAIN into the static-chain register
   followed by a jump to FNDECL's address.  The exact opcode values
   (0x3130, 0x0200) are xstormy16 instruction encodings — presumably
   "mov Rchain,#imm16" and "jmpf" — TODO confirm against the ISA
   manual.  */

static void
xstormy16_trampoline_init (rtx m_tramp, tree fndecl, rtx static_chain)
{
  rtx temp = gen_reg_rtx (HImode);
  rtx reg_fnaddr = gen_reg_rtx (HImode);
  rtx reg_addr, reg_addr_mem;

  /* reg_addr walks through the trampoline; reg_addr_mem is the word
     it currently points at.  */
  reg_addr = copy_to_reg (XEXP (m_tramp, 0));
  reg_addr_mem = adjust_automodify_address (m_tramp, HImode, reg_addr, 0);

  /* Word 0: opcode with the static-chain register number folded in.  */
  emit_move_insn (temp, GEN_INT (0x3130 | STATIC_CHAIN_REGNUM));
  emit_move_insn (reg_addr_mem, temp);
  emit_insn (gen_addhi3 (reg_addr, reg_addr, const2_rtx));
  reg_addr_mem = adjust_automodify_address (reg_addr_mem, VOIDmode, NULL, 2);

  /* Word 1: the static chain value itself (immediate operand).  */
  emit_move_insn (temp, static_chain);
  emit_move_insn (reg_addr_mem, temp);
  emit_insn (gen_addhi3 (reg_addr, reg_addr, const2_rtx));
  reg_addr_mem = adjust_automodify_address (reg_addr_mem, VOIDmode, NULL, 2);

  /* Word 2: low byte of the function address merged into 0x0200.  */
  emit_move_insn (reg_fnaddr, XEXP (DECL_RTL (fndecl), 0));
  emit_move_insn (temp, reg_fnaddr);
  emit_insn (gen_andhi3 (temp, temp, GEN_INT (0xFF)));
  emit_insn (gen_iorhi3 (temp, temp, GEN_INT (0x0200)));
  emit_move_insn (reg_addr_mem, temp);
  emit_insn (gen_addhi3 (reg_addr, reg_addr, const2_rtx));
  reg_addr_mem = adjust_automodify_address (reg_addr_mem, VOIDmode, NULL, 2);

  /* Word 3: remaining high bits of the function address.  */
  emit_insn (gen_lshrhi3 (reg_fnaddr, reg_fnaddr, GEN_INT (8)));
  emit_move_insn (reg_addr_mem, reg_fnaddr);
}
1436
 
1437
/* Worker function for TARGET_FUNCTION_VALUE.  */
1438
 
1439
static rtx
1440
xstormy16_function_value (const_tree valtype,
1441
                          const_tree func ATTRIBUTE_UNUSED,
1442
                          bool outgoing ATTRIBUTE_UNUSED)
1443
{
1444
  enum machine_mode mode;
1445
  mode = TYPE_MODE (valtype);
1446
  PROMOTE_MODE (mode, 0, valtype);
1447
  return gen_rtx_REG (mode, RETURN_VALUE_REGNUM);
1448
}
1449
 
1450
/* Worker function for TARGET_LIBCALL_VALUE.  Library-call results
   come back in the same register as ordinary function values.  */

static rtx
xstormy16_libcall_value (enum machine_mode mode,
			 const_rtx fun ATTRIBUTE_UNUSED)
{
  return gen_rtx_REG (mode, RETURN_VALUE_REGNUM);
}
1458
 
1459
/* Worker function for TARGET_FUNCTION_VALUE_REGNO_P.  Only the single
   return-value register ever holds a function result.  */

static bool
xstormy16_function_value_regno_p (const unsigned int regno)
{
  return (regno == RETURN_VALUE_REGNUM);
}
1466
 
1467
/* A C compound statement that outputs the assembler code for a thunk function,
   used to implement C++ virtual function calls with multiple inheritance.  The
   thunk acts as a wrapper around a virtual function, adjusting the implicit
   object parameter before handing control off to the real function.

   First, emit code to add the integer DELTA to the location that contains the
   incoming first argument.  Assume that this argument contains a pointer, and
   is the one used to pass the `this' pointer in C++.  This is the incoming
   argument *before* the function prologue, e.g. `%o0' on a sparc.  The
   addition must preserve the values of all other incoming arguments.

   After the addition, emit code to jump to FUNCTION, which is a
   `FUNCTION_DECL'.  This is a direct pure jump, not a call, and does not touch
   the return address.  Hence returning from FUNCTION will return to whoever
   called the current `thunk'.

   The effect must be as if @var{function} had been called directly
   with the adjusted first argument.  This macro is responsible for
   emitting all of the code for a thunk function;
   TARGET_ASM_FUNCTION_PROLOGUE and TARGET_ASM_FUNCTION_EPILOGUE are
   not invoked.

   The THUNK_FNDECL is redundant.  (DELTA and FUNCTION have already been
   extracted from it.)  It might possibly be useful on some targets, but
   probably not.  */

static void
xstormy16_asm_output_mi_thunk (FILE *file,
			       tree thunk_fndecl ATTRIBUTE_UNUSED,
			       HOST_WIDE_INT delta,
			       HOST_WIDE_INT vcall_offset ATTRIBUTE_UNUSED,
			       tree function)
{
  int regnum = FIRST_ARGUMENT_REGISTER;

  /* There might be a hidden first argument for a returned structure.  */
  if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function)), function))
    regnum += 1;

  /* this += delta (16-bit wraparound), then a far jump to FUNCTION.  */
  fprintf (file, "\tadd %s,#0x%x\n", reg_names[regnum], (int) delta & 0xFFFF);
  fputs ("\tjmpf ", file);
  assemble_name (file, XSTR (XEXP (DECL_RTL (function), 0), 0));
  putc ('\n', file);
}
1511
 
1512
/* The purpose of this function is to override the default behavior of
   BSS objects.  Normally, they go into .bss or .sbss via ".common"
   directives, but we need to override that and put them in
   .bss_below100.  We can't just use a section override (like we do
   for .data_below100), because that makes them initialized rather
   than uninitialized.  */

void
xstormy16_asm_output_aligned_common (FILE *stream,
				     tree decl,
				     const char *name,
				     int size,
				     int align,
				     int global)
{
  rtx mem = decl == NULL_TREE ? NULL_RTX : DECL_RTL (decl);
  rtx symbol;

  /* "below100" objects get hand-emitted into the special
     .bss_below100 section instead of a .comm directive.  */
  if (mem != NULL_RTX
      && MEM_P (mem)
      && GET_CODE (symbol = XEXP (mem, 0)) == SYMBOL_REF
      && SYMBOL_REF_FLAGS (symbol) & SYMBOL_FLAG_XSTORMY16_BELOW100)
    {
      const char *name2;
      int p2align = 0;

      switch_to_section (bss100_section);

      /* Convert ALIGN (in bits) to a power-of-two byte alignment
	 for .p2align.  */
      while (align > 8)
	{
	  align /= 2;
	  p2align ++;
	}

      name2 = default_strip_name_encoding (name);
      if (global)
	fprintf (stream, "\t.globl\t%s\n", name2);
      if (p2align)
	fprintf (stream, "\t.p2align %d\n", p2align);
      fprintf (stream, "\t.type\t%s, @object\n", name2);
      fprintf (stream, "\t.size\t%s, %d\n", name2, size);
      fprintf (stream, "%s:\n\t.space\t%d\n", name2, size);
      return;
    }

  /* Otherwise fall back to a conventional (.local +) .comm.  */
  if (!global)
    {
      fprintf (stream, "\t.local\t");
      assemble_name (stream, name);
      fprintf (stream, "\n");
    }
  fprintf (stream, "\t.comm\t");
  assemble_name (stream, name);
  fprintf (stream, ",%u,%u\n", size, align / BITS_PER_UNIT);
}
1567
 
1568
/* Implement TARGET_ASM_INIT_SECTIONS.  Creates the special
   .bss_below100 section used for uninitialized "below100" objects.  */

static void
xstormy16_asm_init_sections (void)
{
  bss100_section
    = get_unnamed_section (SECTION_WRITE | SECTION_BSS,
			   output_section_asm_op,
			   "\t.section \".bss_below100\",\"aw\",@nobits");
}
1578
 
1579
/* Mark symbols with the "below100" attribute so that we can use the
1580
   special addressing modes for them.  */
1581
 
1582
static void
1583
xstormy16_encode_section_info (tree decl, rtx r, int first)
1584
{
1585
  default_encode_section_info (decl, r, first);
1586
 
1587
   if (TREE_CODE (decl) == VAR_DECL
1588
      && (lookup_attribute ("below100", DECL_ATTRIBUTES (decl))
1589
          || lookup_attribute ("BELOW100", DECL_ATTRIBUTES (decl))))
1590
    {
1591
      rtx symbol = XEXP (r, 0);
1592
 
1593
      gcc_assert (GET_CODE (symbol) == SYMBOL_REF);
1594
      SYMBOL_REF_FLAGS (symbol) |= SYMBOL_FLAG_XSTORMY16_BELOW100;
1595
    }
1596
}
1597
 
1598
/* Install the xstormy16-specific constructor/destructor output
   routines in place of the defaults.  */
#undef  TARGET_ASM_CONSTRUCTOR
#define TARGET_ASM_CONSTRUCTOR  xstormy16_asm_out_constructor
#undef  TARGET_ASM_DESTRUCTOR
#define TARGET_ASM_DESTRUCTOR   xstormy16_asm_out_destructor
1602
 
1603
/* Output constructors and destructors.  Just like
1604
   default_named_section_asm_out_* but don't set the sections writable.  */
1605
 
1606
static void
1607
xstormy16_asm_out_destructor (rtx symbol, int priority)
1608
{
1609
  const char *section = ".dtors";
1610
  char buf[16];
1611
 
1612
  /* ??? This only works reliably with the GNU linker.  */
1613
  if (priority != DEFAULT_INIT_PRIORITY)
1614
    {
1615
      sprintf (buf, ".dtors.%.5u",
1616
               /* Invert the numbering so the linker puts us in the proper
1617
                  order; constructors are run from right to left, and the
1618
                  linker sorts in increasing order.  */
1619
               MAX_INIT_PRIORITY - priority);
1620
      section = buf;
1621
    }
1622
 
1623
  switch_to_section (get_section (section, 0, NULL));
1624
  assemble_align (POINTER_SIZE);
1625
  assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
1626
}
1627
 
1628
static void
1629
xstormy16_asm_out_constructor (rtx symbol, int priority)
1630
{
1631
  const char *section = ".ctors";
1632
  char buf[16];
1633
 
1634
  /* ??? This only works reliably with the GNU linker.  */
1635
  if (priority != DEFAULT_INIT_PRIORITY)
1636
    {
1637
      sprintf (buf, ".ctors.%.5u",
1638
               /* Invert the numbering so the linker puts us in the proper
1639
                  order; constructors are run from right to left, and the
1640
                  linker sorts in increasing order.  */
1641
               MAX_INIT_PRIORITY - priority);
1642
      section = buf;
1643
    }
1644
 
1645
  switch_to_section (get_section (section, 0, NULL));
1646
  assemble_align (POINTER_SIZE);
1647
  assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
1648
}
1649
 
1650
/* Worker function for TARGET_PRINT_OPERAND_ADDRESS.

   Print a memory address as an operand to reference that memory location.  */

static void
xstormy16_print_operand_address (FILE *file, rtx address)
{
  HOST_WIDE_INT offset;
  int pre_dec, post_inc;

  /* There are a few easy cases.  */
  if (CONST_INT_P (address))
    {
      /* Absolute 16-bit address.  */
      fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (address) & 0xFFFF);
      return;
    }

  if (CONSTANT_P (address) || LABEL_P (address))
    {
      /* Symbolic address.  */
      output_addr_const (file, address);
      return;
    }

  /* Otherwise, it's hopefully something of the form
     (plus:HI (pre_dec:HI (reg:HI ...)) (const_int ...)).  */
  if (GET_CODE (address) == PLUS)
    {
      gcc_assert (CONST_INT_P (XEXP (address, 1)));
      offset = INTVAL (XEXP (address, 1));
      address = XEXP (address, 0);
    }
  else
    offset = 0;

  pre_dec = (GET_CODE (address) == PRE_DEC);
  post_inc = (GET_CODE (address) == POST_INC);
  if (pre_dec || post_inc)
    address = XEXP (address, 0);

  /* After stripping any PLUS and auto-modify wrapper, a register must
     remain.  */
  gcc_assert (REG_P (address));

  /* Emit "(reg)", "(--reg)", "(reg++)" or "(reg,offset)" as
     appropriate.  */
  fputc ('(', file);
  if (pre_dec)
    fputs ("--", file);
  fputs (reg_names [REGNO (address)], file);
  if (post_inc)
    fputs ("++", file);
  if (offset != 0)
    fprintf (file, "," HOST_WIDE_INT_PRINT_DEC, offset);
  fputc (')', file);
}
1701
 
1702
/* Worker function for TARGET_PRINT_OPERAND.
1703
 
1704
   Print an operand to an assembler instruction.  */
1705
 
1706
static void
1707
xstormy16_print_operand (FILE *file, rtx x, int code)
1708
{
1709
  switch (code)
1710
    {
1711
    case 'B':
1712
        /* There is either one bit set, or one bit clear, in X.
1713
           Print it preceded by '#'.  */
1714
      {
1715
        static int bits_set[8] = { 0, 1, 1, 2, 1, 2, 2, 3 };
1716
        HOST_WIDE_INT xx = 1;
1717
        HOST_WIDE_INT l;
1718
 
1719
        if (CONST_INT_P (x))
1720
          xx = INTVAL (x);
1721
        else
1722
          output_operand_lossage ("'B' operand is not constant");
1723
 
1724
        /* GCC sign-extends masks with the MSB set, so we have to
1725
           detect all the cases that differ only in sign extension
1726
           beyond the bits we care about.  Normally, the predicates
1727
           and constraints ensure that we have the right values.  This
1728
           works correctly for valid masks.  */
1729
        if (bits_set[xx & 7] <= 1)
1730
          {
1731
            /* Remove sign extension bits.  */
1732
            if ((~xx & ~(HOST_WIDE_INT)0xff) == 0)
1733
              xx &= 0xff;
1734
            else if ((~xx & ~(HOST_WIDE_INT)0xffff) == 0)
1735
              xx &= 0xffff;
1736
            l = exact_log2 (xx);
1737
          }
1738
        else
1739
          {
1740
            /* Add sign extension bits.  */
1741
            if ((xx & ~(HOST_WIDE_INT)0xff) == 0)
1742
              xx |= ~(HOST_WIDE_INT)0xff;
1743
            else if ((xx & ~(HOST_WIDE_INT)0xffff) == 0)
1744
              xx |= ~(HOST_WIDE_INT)0xffff;
1745
            l = exact_log2 (~xx);
1746
          }
1747
 
1748
        if (l == -1)
1749
          output_operand_lossage ("'B' operand has multiple bits set");
1750
 
1751
        fprintf (file, IMMEDIATE_PREFIX HOST_WIDE_INT_PRINT_DEC, l);
1752
        return;
1753
      }
1754
 
1755
    case 'C':
1756
      /* Print the symbol without a surrounding @fptr().  */
1757
      if (GET_CODE (x) == SYMBOL_REF)
1758
        assemble_name (file, XSTR (x, 0));
1759
      else if (LABEL_P (x))
1760
        output_asm_label (x);
1761
      else
1762
        xstormy16_print_operand_address (file, x);
1763
      return;
1764
 
1765
    case 'o':
1766
    case 'O':
1767
      /* Print the immediate operand less one, preceded by '#'.
1768
         For 'O', negate it first.  */
1769
      {
1770
        HOST_WIDE_INT xx = 0;
1771
 
1772
        if (CONST_INT_P (x))
1773
          xx = INTVAL (x);
1774
        else
1775
          output_operand_lossage ("'o' operand is not constant");
1776
 
1777
        if (code == 'O')
1778
          xx = -xx;
1779
 
1780
        fprintf (file, IMMEDIATE_PREFIX HOST_WIDE_INT_PRINT_DEC, xx - 1);
1781
        return;
1782
      }
1783
 
1784
    case 'b':
1785
      /* Print the shift mask for bp/bn.  */
1786
      {
1787
        HOST_WIDE_INT xx = 1;
1788
        HOST_WIDE_INT l;
1789
 
1790
        if (CONST_INT_P (x))
1791
          xx = INTVAL (x);
1792
        else
1793
          output_operand_lossage ("'B' operand is not constant");
1794
 
1795
        l = 7 - xx;
1796
 
1797
        fputs (IMMEDIATE_PREFIX, file);
1798
        fprintf (file, HOST_WIDE_INT_PRINT_DEC, l);
1799
        return;
1800
      }
1801
 
1802
    case 0:
1803
      /* Handled below.  */
1804
      break;
1805
 
1806
    default:
1807
      output_operand_lossage ("xstormy16_print_operand: unknown code");
1808
      return;
1809
    }
1810
 
1811
  switch (GET_CODE (x))
1812
    {
1813
    case REG:
1814
      fputs (reg_names [REGNO (x)], file);
1815
      break;
1816
 
1817
    case MEM:
1818
      xstormy16_print_operand_address (file, XEXP (x, 0));
1819
      break;
1820
 
1821
    default:
1822
      /* Some kind of constant or label; an immediate operand,
1823
         so prefix it with '#' for the assembler.  */
1824
      fputs (IMMEDIATE_PREFIX, file);
1825
      output_addr_const (file, x);
1826
      break;
1827
    }
1828
 
1829
  return;
1830
}
1831
 
1832
/* Expander for the `casesi' pattern.
   INDEX is the index of the switch statement.
   LOWER_BOUND is a CONST_INT that is the value of INDEX corresponding
     to the first table entry.
   RANGE is the number of table entries.
   TABLE is an ADDR_VEC that is the jump table.
   DEFAULT_LABEL is the address to branch to if INDEX is outside the
     range LOWER_BOUND to LOWER_BOUND + RANGE - 1.  */

void
xstormy16_expand_casesi (rtx index, rtx lower_bound, rtx range,
                         rtx table, rtx default_label)
{
  HOST_WIDE_INT range_i = INTVAL (range);
  rtx int_index;

  /* This code uses 'br', so it can deal only with tables of size up to
     8192 entries.  */
  if (range_i >= 8192)
    sorry ("switch statement of size %lu entries too large",
           (unsigned long) range_i);

  /* Rebase INDEX to zero, then branch to DEFAULT_LABEL if the rebased
     value exceeds RANGE (an unsigned compare also catches values below
     LOWER_BOUND, which wrap to large unsigned numbers).  */
  index = expand_binop (SImode, sub_optab, index, lower_bound, NULL_RTX, 0,
                        OPTAB_LIB_WIDEN);
  emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, SImode, 1,
                           default_label);
  /* Each table entry (a 'jmpf', see xstormy16_output_addr_vec) is four
     bytes, so scale the HImode index by 4 before the dispatch jump.  */
  int_index = gen_lowpart_common (HImode, index);
  emit_insn (gen_ashlhi3 (int_index, int_index, const2_rtx));
  emit_jump_insn (gen_tablejump_pcrel (int_index, table));
}
1862
 
1863
/* Output an ADDR_VEC.  It is output as a sequence of 'jmpf'
1864
   instructions, without label or alignment or any other special
1865
   constructs.  We know that the previous instruction will be the
1866
   `tablejump_pcrel' output above.
1867
 
1868
   TODO: it might be nice to output 'br' instructions if they could
1869
   all reach.  */
1870
 
1871
void
1872
xstormy16_output_addr_vec (FILE *file, rtx label ATTRIBUTE_UNUSED, rtx table)
1873
{
1874
  int vlen, idx;
1875
 
1876
  switch_to_section (current_function_section ());
1877
 
1878
  vlen = XVECLEN (table, 0);
1879
  for (idx = 0; idx < vlen; idx++)
1880
    {
1881
      fputs ("\tjmpf ", file);
1882
      output_asm_label (XEXP (XVECEXP (table, 0, idx), 0));
1883
      fputc ('\n', file);
1884
    }
1885
}
1886
 
1887
/* Expander for the `call' patterns.
1888
   RETVAL is the RTL for the return register or NULL for void functions.
1889
   DEST is the function to call, expressed as a MEM.
1890
   COUNTER is ignored.  */
1891
 
1892
void
1893
xstormy16_expand_call (rtx retval, rtx dest, rtx counter)
1894
{
1895
  rtx call, temp;
1896
  enum machine_mode mode;
1897
 
1898
  gcc_assert (MEM_P (dest));
1899
  dest = XEXP (dest, 0);
1900
 
1901
  if (! CONSTANT_P (dest) && ! REG_P (dest))
1902
    dest = force_reg (Pmode, dest);
1903
 
1904
  if (retval == NULL)
1905
    mode = VOIDmode;
1906
  else
1907
    mode = GET_MODE (retval);
1908
 
1909
  call = gen_rtx_CALL (mode, gen_rtx_MEM (FUNCTION_MODE, dest),
1910
                       counter);
1911
  if (retval)
1912
    call = gen_rtx_SET (VOIDmode, retval, call);
1913
 
1914
  if (! CONSTANT_P (dest))
1915
    {
1916
      temp = gen_reg_rtx (HImode);
1917
      emit_move_insn (temp, const0_rtx);
1918
    }
1919
  else
1920
    temp = const0_rtx;
1921
 
1922
  call = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, call,
1923
                                                gen_rtx_USE (VOIDmode, temp)));
1924
  emit_call_insn (call);
1925
}
1926
 
1927
/* Expanders for multiword computational operations.  */

/* Expander for arithmetic operations; emit insns to compute

   (set DEST (CODE:MODE SRC0 SRC1))

   When CODE is COMPARE, a branch template is generated
   (this saves duplicating code in xstormy16_split_cbranch).  */

void
xstormy16_expand_arith (enum machine_mode mode, enum rtx_code code,
                        rtx dest, rtx src0, rtx src1)
{
  int num_words = GET_MODE_BITSIZE (mode) / BITS_PER_WORD;
  int i;
  /* Nonzero until the first insn of the chain has been emitted; the
     first word uses a different pattern (no carry input).  */
  int firstloop = 1;

  /* Negation is expanded as 0 - SRC1 by overwriting SRC0.  */
  if (code == NEG)
    emit_move_insn (src0, const0_rtx);

  /* Emit one word-sized insn per machine word, low word first, so the
     carry produced by each insn feeds the next.  */
  for (i = 0; i < num_words; i++)
    {
      rtx w_src0, w_src1, w_dest;
      rtx insn;

      /* Word-sized views of the operands at this word offset.  */
      w_src0 = simplify_gen_subreg (word_mode, src0, mode,
                                    i * UNITS_PER_WORD);
      w_src1 = simplify_gen_subreg (word_mode, src1, mode, i * UNITS_PER_WORD);
      w_dest = simplify_gen_subreg (word_mode, dest, mode, i * UNITS_PER_WORD);

      switch (code)
        {
        case PLUS:
          /* Adding zero in the lowest word is a no-op; skip it.  Only
             safe on the first word, before any carry is live.  */
          if (firstloop
              && CONST_INT_P (w_src1)
              && INTVAL (w_src1) == 0)
            continue;

          /* addchi4 sets carry; addchi5 also consumes the carry from
             the previous word.  */
          if (firstloop)
            insn = gen_addchi4 (w_dest, w_src0, w_src1);
          else
            insn = gen_addchi5 (w_dest, w_src0, w_src1);
          break;

        case NEG:
        case MINUS:
        case COMPARE:
          /* For COMPARE, the final word emits a combined
             subtract-and-branch template instead of a plain subtract.  */
          if (code == COMPARE && i == num_words - 1)
            {
              rtx branch, sub, clobber, sub_1;

              sub_1 = gen_rtx_MINUS (HImode, w_src0,
                                     gen_rtx_ZERO_EXTEND (HImode, gen_rtx_REG (BImode, CARRY_REGNUM)));
              sub = gen_rtx_SET (VOIDmode, w_dest,
                                 gen_rtx_MINUS (HImode, sub_1, w_src1));
              clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (BImode, CARRY_REGNUM));
              branch = gen_rtx_SET (VOIDmode, pc_rtx,
                                    gen_rtx_IF_THEN_ELSE (VOIDmode,
                                                          gen_rtx_EQ (HImode,
                                                                      sub_1,
                                                                      w_src1),
                                                          pc_rtx,
                                                          pc_rtx));
              insn = gen_rtx_PARALLEL (VOIDmode,
                                       gen_rtvec (3, branch, sub, clobber));
            }
          else if (firstloop
                   && code != COMPARE
                   && CONST_INT_P (w_src1)
                   && INTVAL (w_src1) == 0)
            continue;
          else if (firstloop)
            insn = gen_subchi4 (w_dest, w_src0, w_src1);
          else
            insn = gen_subchi5 (w_dest, w_src0, w_src1);
          break;

        case IOR:
        case XOR:
        case AND:
          /* Skip words where the operation is an identity: OR/XOR with
             0, or AND with -1 (all bits set).  */
          if (CONST_INT_P (w_src1)
              && INTVAL (w_src1) == -(code == AND))
            continue;

          insn = gen_rtx_SET (VOIDmode, w_dest, gen_rtx_fmt_ee (code, mode,
                                                                w_src0, w_src1));
          break;

        case NOT:
          insn = gen_rtx_SET (VOIDmode, w_dest, gen_rtx_NOT (mode, w_src0));
          break;

        default:
          gcc_unreachable ();
        }

      firstloop = 0;
      emit (insn);
    }

  /* If we emit nothing, try_split() will think we failed.  So emit
     something that does nothing and can be optimized away.  */
  if (firstloop)
    emit (gen_nop ());
}
2032
 
2033
/* The shift operations are split at output time for constant values;
   variable-width shifts get handed off to a library routine.

   Generate an output string to do (set X (CODE:MODE X SIZE_R))
   SIZE_R will be a CONST_INT, X will be a hard register.

   Returns a pointer to a static buffer holding the assembler template,
   so the result must be consumed before the next call.  */

const char *
xstormy16_output_shift (enum machine_mode mode, enum rtx_code code,
                        rtx x, rtx size_r, rtx temp)
{
  HOST_WIDE_INT size;
  /* r0/r1: names of the low and high words of X; rt: scratch register.  */
  const char *r0, *r1, *rt;
  /* Shared static buffer for the returned template (see comment above).  */
  static char r[64];

  gcc_assert (CONST_INT_P (size_r)
              && REG_P (x)
              && mode == SImode);

  /* Reduce the shift count modulo the 32-bit operand width.  */
  size = INTVAL (size_r) & (GET_MODE_BITSIZE (mode) - 1);

  if (size == 0)
    return "";

  r0 = reg_names [REGNO (x)];
  r1 = reg_names [REGNO (x) + 1];

  /* For shifts of size 1, we can use the rotate instructions.  */
  if (size == 1)
    {
      switch (code)
        {
        case ASHIFT:
          sprintf (r, "shl %s,#1 | rlc %s,#1", r0, r1);
          break;
        case ASHIFTRT:
          sprintf (r, "asr %s,#1 | rrc %s,#1", r1, r0);
          break;
        case LSHIFTRT:
          sprintf (r, "shr %s,#1 | rrc %s,#1", r1, r0);
          break;
        default:
          gcc_unreachable ();
        }
      return r;
    }

  /* For large shifts, there are easy special cases.  */
  /* A shift by exactly one word: just move words and fill.  */
  if (size == 16)
    {
      switch (code)
        {
        case ASHIFT:
          sprintf (r, "mov %s,%s | mov %s,#0", r1, r0, r0);
          break;
        case ASHIFTRT:
          sprintf (r, "mov %s,%s | asr %s,#15", r0, r1, r1);
          break;
        case LSHIFTRT:
          sprintf (r, "mov %s,%s | mov %s,#0", r0, r1, r1);
          break;
        default:
          gcc_unreachable ();
        }
      return r;
    }
  /* More than one word: word move plus a single-word shift by the
     remainder (size - 16).  */
  if (size > 16)
    {
      switch (code)
        {
        case ASHIFT:
          sprintf (r, "mov %s,%s | mov %s,#0 | shl %s,#%d",
                   r1, r0, r0, r1, (int) size - 16);
          break;
        case ASHIFTRT:
          sprintf (r, "mov %s,%s | asr %s,#15 | asr %s,#%d",
                   r0, r1, r1, r0, (int) size - 16);
          break;
        case LSHIFTRT:
          sprintf (r, "mov %s,%s | mov %s,#0 | shr %s,#%d",
                   r0, r1, r1, r0, (int) size - 16);
          break;
        default:
          gcc_unreachable ();
        }
      return r;
    }

  /* For the rest, we have to do more work.  In particular, we
     need a temporary.  */
  /* Shift both words independently, then OR the bits that crossed the
     word boundary (saved in TEMP, shifted the opposite way) back in.  */
  rt = reg_names [REGNO (temp)];
  switch (code)
    {
    case ASHIFT:
      sprintf (r,
               "mov %s,%s | shl %s,#%d | shl %s,#%d | shr %s,#%d | or %s,%s",
               rt, r0, r0, (int) size, r1, (int) size, rt, (int) (16 - size),
               r1, rt);
      break;
    case ASHIFTRT:
      sprintf (r,
               "mov %s,%s | asr %s,#%d | shr %s,#%d | shl %s,#%d | or %s,%s",
               rt, r1, r1, (int) size, r0, (int) size, rt, (int) (16 - size),
               r0, rt);
      break;
    case LSHIFTRT:
      sprintf (r,
               "mov %s,%s | shr %s,#%d | shr %s,#%d | shl %s,#%d | or %s,%s",
               rt, r1, r1, (int) size, r0, (int) size, rt, (int) (16 - size),
               r0, rt);
      break;
    default:
      gcc_unreachable ();
    }
  return r;
}
2148
 
2149
/* Attribute handling.  */
2150
 
2151
/* Return nonzero if the function is an interrupt function.  */
2152
 
2153
int
2154
xstormy16_interrupt_function_p (void)
2155
{
2156
  tree attributes;
2157
 
2158
  /* The dwarf2 mechanism asks for INCOMING_FRAME_SP_OFFSET before
2159
     any functions are declared, which is demonstrably wrong, but
2160
     it is worked around here.  FIXME.  */
2161
  if (!cfun)
2162
    return 0;
2163
 
2164
  attributes = TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl));
2165
  return lookup_attribute ("interrupt", attributes) != NULL_TREE;
2166
}
2167
 
2168
#undef  TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE  xstormy16_attribute_table

/* Forward declarations for the attribute handlers referenced by the
   table below.  */
static tree xstormy16_handle_interrupt_attribute
  (tree *, tree, tree, int, bool *);
static tree xstormy16_handle_below100_attribute
  (tree *, tree, tree, int, bool *);

/* Machine-specific attributes.  Both spellings of below100 share one
   handler; the table is terminated by the NULL entry.  */
static const struct attribute_spec xstormy16_attribute_table[] =
{
  /* name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
     affects_type_identity.  */
  { "interrupt", 0, 0, false, true,  true,
    xstormy16_handle_interrupt_attribute , false },
  { "BELOW100",  0, 0, false, false, false,
    xstormy16_handle_below100_attribute, false },
  { "below100",  0, 0, false, false, false,
    xstormy16_handle_below100_attribute, false },
  { NULL,        0, 0, false, false, false, NULL, false }
};
2188
 
2189
/* Handle an "interrupt" attribute;
2190
   arguments as in struct attribute_spec.handler.  */
2191
 
2192
static tree
2193
xstormy16_handle_interrupt_attribute (tree *node, tree name,
2194
                                      tree args ATTRIBUTE_UNUSED,
2195
                                      int flags ATTRIBUTE_UNUSED,
2196
                                      bool *no_add_attrs)
2197
{
2198
  if (TREE_CODE (*node) != FUNCTION_TYPE)
2199
    {
2200
      warning (OPT_Wattributes, "%qE attribute only applies to functions",
2201
               name);
2202
      *no_add_attrs = true;
2203
    }
2204
 
2205
  return NULL_TREE;
2206
}
2207
 
2208
/* Handle an "below" attribute;
2209
   arguments as in struct attribute_spec.handler.  */
2210
 
2211
static tree
2212
xstormy16_handle_below100_attribute (tree *node,
2213
                                     tree name ATTRIBUTE_UNUSED,
2214
                                     tree args ATTRIBUTE_UNUSED,
2215
                                     int flags ATTRIBUTE_UNUSED,
2216
                                     bool *no_add_attrs)
2217
{
2218
  if (TREE_CODE (*node) != VAR_DECL
2219
      && TREE_CODE (*node) != POINTER_TYPE
2220
      && TREE_CODE (*node) != TYPE_DECL)
2221
    {
2222
      warning (OPT_Wattributes,
2223
               "%<__BELOW100__%> attribute only applies to variables");
2224
      *no_add_attrs = true;
2225
    }
2226
  else if (args == NULL_TREE && TREE_CODE (*node) == VAR_DECL)
2227
    {
2228
      if (! (TREE_PUBLIC (*node) || TREE_STATIC (*node)))
2229
        {
2230
          warning (OPT_Wattributes, "__BELOW100__ attribute not allowed "
2231
                   "with auto storage class");
2232
          *no_add_attrs = true;
2233
        }
2234
    }
2235
 
2236
  return NULL_TREE;
2237
}
2238
 
2239
#undef  TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS   xstormy16_init_builtins
#undef  TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN  xstormy16_expand_builtin

/* Table describing the machine-specific builtins: divide/modulo of a
   long by a short.  Divide and modulo pairs share one md pattern; the
   arg_ops string picks which pattern operand is the returned value.  */
static struct
{
  const char * name;
  int          md_code;
  const char * arg_ops;   /* 0..9, t for temp register, r for return value.  */
  const char * arg_types; /* s=short,l=long, upper case for unsigned.  */
}
  s16builtins[] =
{
  { "__sdivlh", CODE_FOR_sdivlh, "rt01", "sls" },
  { "__smodlh", CODE_FOR_sdivlh, "tr01", "sls" },
  { "__udivlh", CODE_FOR_udivlh, "rt01", "SLS" },
  { "__umodlh", CODE_FOR_udivlh, "tr01", "SLS" },
  { NULL, 0, NULL, NULL }
};
2259
 
2260
/* Register the builtins described in s16builtins[].  The first
   character of each arg_types string is the return type, the rest are
   the parameter types.  */
static void
xstormy16_init_builtins (void)
{
  tree args[2], ret_type, arg = NULL_TREE, ftype;
  int i, a, n_args;

  ret_type = void_type_node;

  for (i = 0; s16builtins[i].name; i++)
    {
      /* Number of parameters, excluding the leading return-type char.  */
      n_args = strlen (s16builtins[i].arg_types) - 1;

      gcc_assert (n_args <= (int) ARRAY_SIZE (args));

      for (a = n_args - 1; a >= 0; a--)
        args[a] = NULL_TREE;

      /* Walk arg_types backwards; index 0 is the return type, the
         others fill args[] shifted down by one.  */
      for (a = n_args; a >= 0; a--)
        {
          switch (s16builtins[i].arg_types[a])
            {
            case 's': arg = short_integer_type_node; break;
            case 'S': arg = short_unsigned_type_node; break;
            case 'l': arg = long_integer_type_node; break;
            case 'L': arg = long_unsigned_type_node; break;
            default: gcc_unreachable ();
            }
          if (a == 0)
            ret_type = arg;
          else
            args[a-1] = arg;
        }
      /* The builtin's function code is its index I into s16builtins[],
         used again by xstormy16_expand_builtin.  */
      ftype = build_function_type_list (ret_type, args[0], args[1], NULL_TREE);
      add_builtin_function (s16builtins[i].name, ftype,
                            i, BUILT_IN_MD, NULL, NULL_TREE);
    }
}
2297
 
2298
/* Worker for TARGET_EXPAND_BUILTIN: expand a call to one of the
   builtins registered in s16builtins[] into its md pattern, mapping
   pattern operands via the table's arg_ops string ('r' = return value,
   't' = scratch register, digit = Nth call argument).  */
static rtx
xstormy16_expand_builtin (tree exp, rtx target,
                          rtx subtarget ATTRIBUTE_UNUSED,
                          enum machine_mode mode ATTRIBUTE_UNUSED,
                          int ignore ATTRIBUTE_UNUSED)
{
  rtx op[10], args[10], pat, copyto[10], retval = 0;
  tree fndecl, argtree;
  int i, a, o, code;

  /* The DECL_FUNCTION_CODE is the index into s16builtins[] that
     xstormy16_init_builtins registered.  */
  fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
  argtree = TREE_OPERAND (exp, 1);
  i = DECL_FUNCTION_CODE (fndecl);
  code = s16builtins[i].md_code;

  /* Expand each actual argument to RTL.  */
  for (a = 0; a < 10 && argtree; a++)
    {
      args[a] = expand_normal (TREE_VALUE (argtree));
      argtree = TREE_CHAIN (argtree);
    }

  /* Build the pattern operand vector op[] per arg_ops.  */
  for (o = 0; s16builtins[i].arg_ops[o]; o++)
    {
      char ao = s16builtins[i].arg_ops[o];
      char c = insn_data[code].operand[o].constraint[0];
      enum machine_mode omode;

      copyto[o] = 0;

      omode = (enum machine_mode) insn_data[code].operand[o].mode;
      if (ao == 'r')
        op[o] = target ? target : gen_reg_rtx (omode);
      else if (ao == 't')
        op[o] = gen_reg_rtx (omode);
      else
        op[o] = args[(int) hex_value (ao)];

      /* If the operand does not satisfy the pattern's predicate, put it
         in a fresh register.  For output operands ('=' or '+'
         constraint) remember the original in copyto[] so the result can
         be copied back after the insn.  */
      if (! (*insn_data[code].operand[o].predicate) (op[o], GET_MODE (op[o])))
        {
          if (c == '+' || c == '=')
            {
              copyto[o] = op[o];
              op[o] = gen_reg_rtx (omode);
            }
          else
            op[o] = copy_to_mode_reg (omode, op[o]);
        }

      if (ao == 'r')
        retval = op[o];
    }

  pat = GEN_FCN (code) (op[0], op[1], op[2], op[3], op[4],
                        op[5], op[6], op[7], op[8], op[9]);
  emit_insn (pat);

  /* Copy back any output operands that were redirected through a
     temporary, keeping retval pointing at the caller-visible copy.  */
  for (o = 0; s16builtins[i].arg_ops[o]; o++)
    if (copyto[o])
      {
        emit_move_insn (copyto[o], op[o]);
        if (op[o] == retval)
          retval = copyto[o];
      }

  return retval;
}
2364
 
2365
/* Look for combinations of insns that can be converted to BN or BP
   opcodes.  This is, unfortunately, too complex to do with MD
   patterns.

   INSN is a conditional branch; if it is a compare of a dead register
   against zero whose value came from a below-100 memory load (possibly
   via an AND / sign-extend / right-shift), rewrite the branch condition
   to test the memory bit directly and delete the feeding insns.  */

static void
combine_bnp (rtx insn)
{
  int insn_code, regno, need_extend;
  unsigned int mask;
  rtx cond, reg, and_insn, load, qireg, mem;
  enum machine_mode load_mode = QImode;
  enum machine_mode and_mode = QImode;
  rtx shift = NULL_RTX;

  insn_code = recog_memoized (insn);
  if (insn_code != CODE_FOR_cbranchhi
      && insn_code != CODE_FOR_cbranchhi_neg)
    return;

  /* Dig the comparison out of the branch pattern.  */
  cond = XVECEXP (PATTERN (insn), 0, 0); /* set */
  cond = XEXP (cond, 1); /* if */
  cond = XEXP (cond, 0); /* cond */
  switch (GET_CODE (cond))
    {
    case NE:
    case EQ:
      /* EQ/NE test a bit isolated by an AND.  */
      need_extend = 0;
      break;
    case LT:
    case GE:
      /* LT/GE against zero test the sign bit, via a sign extend.  */
      need_extend = 1;
      break;
    default:
      return;
    }

  /* The transformation only applies to "reg <cond> 0" where REG dies
     at the branch (so deleting the feeding insns is safe).  */
  reg = XEXP (cond, 0);
  if (! REG_P (reg))
    return;
  regno = REGNO (reg);
  if (XEXP (cond, 1) != const0_rtx)
    return;
  if (! find_regno_note (insn, REG_DEAD, regno))
    return;
  qireg = gen_rtx_REG (QImode, regno);

  if (need_extend)
    {
      /* LT and GE conditionals should have a sign extend before
         them.  */
      for (and_insn = prev_real_insn (insn);
           and_insn != NULL_RTX;
           and_insn = prev_real_insn (and_insn))
        {
          int and_code = recog_memoized (and_insn);

          if (and_code == CODE_FOR_extendqihi2
              && rtx_equal_p (SET_DEST (PATTERN (and_insn)), reg)
              && rtx_equal_p (XEXP (SET_SRC (PATTERN (and_insn)), 0), qireg))
            break;

          if (and_code == CODE_FOR_movhi_internal
              && rtx_equal_p (SET_DEST (PATTERN (and_insn)), reg))
            {
              /* This is for testing bit 15.  */
              and_insn = insn;
              break;
            }

          /* Any other use of REG, or anything that is not a NOTE or
             plain INSN, defeats the transformation.  */
          if (reg_mentioned_p (reg, and_insn))
            return;

          if (GET_CODE (and_insn) != NOTE
              && GET_CODE (and_insn) != INSN)
            return;
        }
    }
  else
    {
      /* EQ and NE conditionals have an AND before them.  */
      for (and_insn = prev_real_insn (insn);
           and_insn != NULL_RTX;
           and_insn = prev_real_insn (and_insn))
        {
          if (recog_memoized (and_insn) == CODE_FOR_andhi3
              && rtx_equal_p (SET_DEST (PATTERN (and_insn)), reg)
              && rtx_equal_p (XEXP (SET_SRC (PATTERN (and_insn)), 0), reg))
            break;

          if (reg_mentioned_p (reg, and_insn))
            return;

          if (GET_CODE (and_insn) != NOTE
              && GET_CODE (and_insn) != INSN)
            return;
        }

      if (and_insn)
        {
          /* Some mis-optimizations by GCC can generate a RIGHT-SHIFT
             followed by an AND like this:

               (parallel [(set (reg:HI r7) (lshiftrt:HI (reg:HI r7) (const_int 3)))
                          (clobber (reg:BI carry))]

               (set (reg:HI r7) (and:HI (reg:HI r7) (const_int 1)))

             Attempt to detect this here.  */
          for (shift = prev_real_insn (and_insn); shift;
               shift = prev_real_insn (shift))
            {
              if (recog_memoized (shift) == CODE_FOR_lshrhi3
                  && rtx_equal_p (SET_DEST (XVECEXP (PATTERN (shift), 0, 0)), reg)
                  && rtx_equal_p (XEXP (SET_SRC (XVECEXP (PATTERN (shift), 0, 0)), 0), reg))
                break;

              if (reg_mentioned_p (reg, shift)
                  || (GET_CODE (shift) != NOTE
                      && GET_CODE (shift) != INSN))
                {
                  shift = NULL_RTX;
                  break;
                }
            }
        }
    }

  if (and_insn == NULL_RTX)
    return;

  /* Search further back for the load that put the value in REG; it must
     be a non-volatile below-100 memory reference.  */
  for (load = shift ? prev_real_insn (shift) : prev_real_insn (and_insn);
       load;
       load = prev_real_insn (load))
    {
      int load_code = recog_memoized (load);

      if (load_code == CODE_FOR_movhi_internal
          && rtx_equal_p (SET_DEST (PATTERN (load)), reg)
          && xstormy16_below100_operand (SET_SRC (PATTERN (load)), HImode)
          && ! MEM_VOLATILE_P (SET_SRC (PATTERN (load))))
        {
          load_mode = HImode;
          break;
        }

      if (load_code == CODE_FOR_movqi_internal
          && rtx_equal_p (SET_DEST (PATTERN (load)), qireg)
          && xstormy16_below100_operand (SET_SRC (PATTERN (load)), QImode))
        {
          load_mode = QImode;
          break;
        }

      if (load_code == CODE_FOR_zero_extendqihi2
          && rtx_equal_p (SET_DEST (PATTERN (load)), reg)
          && xstormy16_below100_operand (XEXP (SET_SRC (PATTERN (load)), 0), QImode))
        {
          load_mode = QImode;
          and_mode = HImode;
          break;
        }

      if (reg_mentioned_p (reg, load))
        return;

      if (GET_CODE (load) != NOTE
          && GET_CODE (load) != INSN)
        return;
    }
  if (!load)
    return;

  mem = SET_SRC (PATTERN (load));

  if (need_extend)
    {
      /* Sign-bit test: the mask is the MSB of the loaded mode.  */
      mask = (load_mode == HImode) ? 0x8000 : 0x80;

      /* If the mem includes a zero-extend operation and we are
         going to generate a sign-extend operation then move the
         mem inside the zero-extend.  */
      if (GET_CODE (mem) == ZERO_EXTEND)
        mem = XEXP (mem, 0);
    }
  else
    {
      /* Bit test: the AND's constant must have exactly one bit set.  */
      if (!xstormy16_onebit_set_operand (XEXP (SET_SRC (PATTERN (and_insn)), 1),
                                         load_mode))
        return;

      mask = (int) INTVAL (XEXP (SET_SRC (PATTERN (and_insn)), 1));

      /* Undo the effect of an intervening right shift on the mask.  */
      if (shift)
        mask <<= INTVAL (XEXP (SET_SRC (XVECEXP (PATTERN (shift), 0, 0)), 1));
    }

  /* Narrow an HImode reference to the byte that holds the tested bit.  */
  if (load_mode == HImode)
    {
      rtx addr = XEXP (mem, 0);

      if (! (mask & 0xff))
        {
          addr = plus_constant (addr, 1);
          mask >>= 8;
        }
      mem = gen_rtx_MEM (QImode, addr);
    }

  /* Rewrite the branch condition to reference memory directly, then
     force re-recognition and delete the now-dead feeding insns.  */
  if (need_extend)
    XEXP (cond, 0) = gen_rtx_SIGN_EXTEND (HImode, mem);
  else
    XEXP (cond, 0) = gen_rtx_AND (and_mode, mem, GEN_INT (mask));

  INSN_CODE (insn) = -1;
  delete_insn (load);

  if (and_insn != insn)
    delete_insn (and_insn);

  if (shift != NULL_RTX)
    delete_insn (shift);
}
2587
 
2588
static void
2589
xstormy16_reorg (void)
2590
{
2591
  rtx insn;
2592
 
2593
  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2594
    {
2595
      if (! JUMP_P (insn))
2596
        continue;
2597
      combine_bnp (insn);
2598
    }
2599
}
2600
 
2601
/* Worker function for TARGET_RETURN_IN_MEMORY.  */
2602
 
2603
static bool
2604
xstormy16_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
2605
{
2606
  const HOST_WIDE_INT size = int_size_in_bytes (type);
2607
  return (size == -1 || size > UNITS_PER_WORD * NUM_ARGUMENT_REGISTERS);
2608
}
2609
 
2610
/* Initialize the target hook vector: override the defaults with the
   xstormy16-specific implementations defined in this file.  */

#undef  TARGET_ASM_ALIGNED_HI_OP
#define TARGET_ASM_ALIGNED_HI_OP "\t.hword\t"
#undef  TARGET_ASM_ALIGNED_SI_OP
#define TARGET_ASM_ALIGNED_SI_OP "\t.word\t"
#undef  TARGET_ENCODE_SECTION_INFO
#define TARGET_ENCODE_SECTION_INFO xstormy16_encode_section_info

/* Select_section doesn't handle .bss_below100.  */
#undef  TARGET_HAVE_SWITCHABLE_BSS_SECTIONS
#define TARGET_HAVE_SWITCHABLE_BSS_SECTIONS false

#undef  TARGET_ASM_OUTPUT_MI_THUNK
#define TARGET_ASM_OUTPUT_MI_THUNK xstormy16_asm_output_mi_thunk
#undef  TARGET_ASM_CAN_OUTPUT_MI_THUNK
#define TARGET_ASM_CAN_OUTPUT_MI_THUNK default_can_output_mi_thunk_no_vcall

#undef  TARGET_PRINT_OPERAND
#define TARGET_PRINT_OPERAND xstormy16_print_operand
#undef  TARGET_PRINT_OPERAND_ADDRESS
#define TARGET_PRINT_OPERAND_ADDRESS xstormy16_print_operand_address

#undef  TARGET_MEMORY_MOVE_COST
#define TARGET_MEMORY_MOVE_COST xstormy16_memory_move_cost
#undef  TARGET_RTX_COSTS
#define TARGET_RTX_COSTS xstormy16_rtx_costs
#undef  TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST xstormy16_address_cost

#undef  TARGET_BUILD_BUILTIN_VA_LIST
#define TARGET_BUILD_BUILTIN_VA_LIST xstormy16_build_builtin_va_list
#undef  TARGET_EXPAND_BUILTIN_VA_START
#define TARGET_EXPAND_BUILTIN_VA_START xstormy16_expand_builtin_va_start
#undef  TARGET_GIMPLIFY_VA_ARG_EXPR
#define TARGET_GIMPLIFY_VA_ARG_EXPR xstormy16_gimplify_va_arg_expr

#undef  TARGET_PROMOTE_FUNCTION_MODE
#define TARGET_PROMOTE_FUNCTION_MODE default_promote_function_mode_always_promote
#undef  TARGET_PROMOTE_PROTOTYPES
#define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true

#undef  TARGET_FUNCTION_ARG
#define TARGET_FUNCTION_ARG xstormy16_function_arg
#undef  TARGET_FUNCTION_ARG_ADVANCE
#define TARGET_FUNCTION_ARG_ADVANCE xstormy16_function_arg_advance

#undef  TARGET_RETURN_IN_MEMORY
#define TARGET_RETURN_IN_MEMORY xstormy16_return_in_memory
#undef TARGET_FUNCTION_VALUE
#define TARGET_FUNCTION_VALUE xstormy16_function_value
#undef TARGET_LIBCALL_VALUE
#define TARGET_LIBCALL_VALUE xstormy16_libcall_value
#undef TARGET_FUNCTION_VALUE_REGNO_P
#define TARGET_FUNCTION_VALUE_REGNO_P xstormy16_function_value_regno_p

#undef  TARGET_MACHINE_DEPENDENT_REORG
#define TARGET_MACHINE_DEPENDENT_REORG xstormy16_reorg

#undef  TARGET_PREFERRED_RELOAD_CLASS
#define TARGET_PREFERRED_RELOAD_CLASS xstormy16_preferred_reload_class
#undef  TARGET_PREFERRED_OUTPUT_RELOAD_CLASS
#define TARGET_PREFERRED_OUTPUT_RELOAD_CLASS xstormy16_preferred_reload_class

#undef TARGET_LEGITIMATE_ADDRESS_P
#define TARGET_LEGITIMATE_ADDRESS_P     xstormy16_legitimate_address_p
#undef TARGET_MODE_DEPENDENT_ADDRESS_P
#define TARGET_MODE_DEPENDENT_ADDRESS_P xstormy16_mode_dependent_address_p

#undef TARGET_CAN_ELIMINATE
#define TARGET_CAN_ELIMINATE xstormy16_can_eliminate

#undef TARGET_TRAMPOLINE_INIT
#define TARGET_TRAMPOLINE_INIT xstormy16_trampoline_init

struct gcc_target targetm = TARGET_INITIALIZER;

/* Garbage-collector roots generated for this file.  */
#include "gt-stormy16.h"

powered by: WebSVN 2.1.0

© copyright 1999-2024 OpenCores.org, equivalent to Oliscience, all rights reserved. OpenCores®, registered trademark.