OpenCores
URL https://opencores.org/ocsvn/openrisc_me/openrisc_me/trunk

Subversion Repositories openrisc_me

[/] [openrisc/] [trunk/] [gnu-src/] [gcc-4.2.2/] [gcc/] [config/] [stormy16/] [stormy16.c] - Blame information for rev 310

Go to most recent revision | Details | Compare with Previous | View Log

Line No. Rev Author Line
1 38 julius
/* Xstormy16 target functions.
2
   Copyright (C) 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005,
3
   2007 Free Software Foundation, Inc.
4
   Contributed by Red Hat, Inc.
5
 
6
This file is part of GCC.
7
 
8
GCC is free software; you can redistribute it and/or modify
9
it under the terms of the GNU General Public License as published by
10
the Free Software Foundation; either version 3, or (at your option)
11
any later version.
12
 
13
GCC is distributed in the hope that it will be useful,
14
but WITHOUT ANY WARRANTY; without even the implied warranty of
15
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
16
GNU General Public License for more details.
17
 
18
You should have received a copy of the GNU General Public License
19
along with GCC; see the file COPYING3.  If not see
20
<http://www.gnu.org/licenses/>.  */
21
 
22
#include "config.h"
23
#include "system.h"
24
#include "coretypes.h"
25
#include "tm.h"
26
#include "rtl.h"
27
#include "regs.h"
28
#include "hard-reg-set.h"
29
#include "real.h"
30
#include "insn-config.h"
31
#include "conditions.h"
32
#include "insn-flags.h"
33
#include "output.h"
34
#include "insn-attr.h"
35
#include "flags.h"
36
#include "recog.h"
37
#include "toplev.h"
38
#include "obstack.h"
39
#include "tree.h"
40
#include "expr.h"
41
#include "optabs.h"
42
#include "except.h"
43
#include "function.h"
44
#include "target.h"
45
#include "target-def.h"
46
#include "tm_p.h"
47
#include "langhooks.h"
48
#include "tree-gimple.h"
49
#include "ggc.h"
50
 
51
static rtx emit_addhi3_postreload (rtx, rtx, rtx);
52
static void xstormy16_asm_out_constructor (rtx, int);
53
static void xstormy16_asm_out_destructor (rtx, int);
54
static void xstormy16_asm_output_mi_thunk (FILE *, tree, HOST_WIDE_INT,
55
                                           HOST_WIDE_INT, tree);
56
 
57
static void xstormy16_init_builtins (void);
58
static rtx xstormy16_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
59
static bool xstormy16_rtx_costs (rtx, int, int, int *);
60
static int xstormy16_address_cost (rtx);
61
static bool xstormy16_return_in_memory (tree, tree);
62
 
63
/* Define the information needed to generate branch and scc insns.  This is
   stored from the compare operation.  The compare expander saves its two
   operands here; xstormy16_emit_cbranch reads them back when the branch
   is emitted.  */
struct rtx_def * xstormy16_compare_op0;
struct rtx_def * xstormy16_compare_op1;

/* Section used for objects with the special 8-bit ("below100")
   addressing.  NOTE(review): its initialization is not visible in this
   chunk — presumably set up in the target's asm-out init code.  */
static GTY(()) section *bss100_section;
69
 
70
/* Compute a (partial) cost for rtx X.  Return true if the complete
71
   cost has been computed, and false if subexpressions should be
72
   scanned.  In either case, *TOTAL contains the cost result.  */
73
 
74
static bool
75
xstormy16_rtx_costs (rtx x, int code, int outer_code ATTRIBUTE_UNUSED,
76
                     int *total)
77
{
78
  switch (code)
79
    {
80
    case CONST_INT:
81
      if (INTVAL (x) < 16 && INTVAL (x) >= 0)
82
        *total = COSTS_N_INSNS (1) / 2;
83
      else if (INTVAL (x) < 256 && INTVAL (x) >= 0)
84
        *total = COSTS_N_INSNS (1);
85
      else
86
        *total = COSTS_N_INSNS (2);
87
      return true;
88
 
89
    case CONST_DOUBLE:
90
    case CONST:
91
    case SYMBOL_REF:
92
    case LABEL_REF:
93
      *total = COSTS_N_INSNS(2);
94
      return true;
95
 
96
    case MULT:
97
      *total = COSTS_N_INSNS (35 + 6);
98
      return true;
99
    case DIV:
100
      *total = COSTS_N_INSNS (51 - 6);
101
      return true;
102
 
103
    default:
104
      return false;
105
    }
106
}
107
 
108
static int
109
xstormy16_address_cost (rtx x)
110
{
111
  return (GET_CODE (x) == CONST_INT ? 2
112
          : GET_CODE (x) == PLUS ? 7
113
          : 5);
114
}
115
 
116
/* Branches are handled as follows:
117
 
118
   1. HImode compare-and-branches.  The machine supports these
119
      natively, so the appropriate pattern is emitted directly.
120
 
121
   2. SImode EQ and NE.  These are emitted as pairs of HImode
122
      compare-and-branches.
123
 
124
   3. SImode LT, GE, LTU and GEU.  These are emitted as a sequence
125
      of a SImode subtract followed by a branch (not a compare-and-branch),
126
      like this:
127
      sub
128
      sbc
129
      blt
130
 
131
   4. SImode GT, LE, GTU, LEU.  These are emitted as a sequence like:
132
      sub
133
      sbc
134
      blt
135
      or
136
      bne
137
*/
138
 
139
/* Emit a branch of kind CODE to location LOC.  The operands being
   compared are taken from the globals xstormy16_compare_op0/op1, which
   the compare expander stored earlier.  HImode compares emit a single
   compare-and-branch; SImode compares are decomposed into HImode pieces
   as described in the table above.  */

void
xstormy16_emit_cbranch (enum rtx_code code, rtx loc)
{
  rtx op0 = xstormy16_compare_op0;
  rtx op1 = xstormy16_compare_op1;
  rtx condition_rtx, loc_ref, branch, cy_clobber;
  rtvec vec;
  enum machine_mode mode;

  mode = GET_MODE (op0);
  gcc_assert (mode == HImode || mode == SImode);

  /* Case 4 from the table above: GT/LE/GTU/LEU in SImode are built out
     of an LT-style branch plus an EQ/NE-style branch (recursively).  */
  if (mode == SImode
      && (code == GT || code == LE || code == GTU || code == LEU))
    {
      int unsigned_p = (code == GTU || code == LEU);
      int gt_p = (code == GT || code == GTU);
      rtx lab = NULL_RTX;

      if (gt_p)
        lab = gen_label_rtx ();
      xstormy16_emit_cbranch (unsigned_p ? LTU : LT, gt_p ? lab : loc);
      /* This should be generated as a comparison against the temporary
         created by the previous insn, but reload can't handle that.  */
      xstormy16_emit_cbranch (gt_p ? NE : EQ, loc);
      if (gt_p)
        emit_label (lab);
      return;
    }
  /* Case 2: SImode EQ/NE against a nonzero value is done word by word.  */
  else if (mode == SImode
           && (code == NE || code == EQ)
           && op1 != const0_rtx)
    {
      rtx lab = NULL_RTX;
      int num_words = GET_MODE_BITSIZE (mode) / BITS_PER_WORD;
      int i;

      if (code == EQ)
        lab = gen_label_rtx ();

      /* For all but the last word: if the words differ, the overall
         result is known (NE taken, or EQ falls through to LAB).  */
      for (i = 0; i < num_words - 1; i++)
        {
          xstormy16_compare_op0 = simplify_gen_subreg (word_mode, op0, mode,
                                                      i * UNITS_PER_WORD);
          xstormy16_compare_op1 = simplify_gen_subreg (word_mode, op1, mode,
                                                      i * UNITS_PER_WORD);
          xstormy16_emit_cbranch (NE, code == EQ ? lab : loc);
        }
      /* The last word decides with the original condition.  */
      xstormy16_compare_op0 = simplify_gen_subreg (word_mode, op0, mode,
                                                  i * UNITS_PER_WORD);
      xstormy16_compare_op1 = simplify_gen_subreg (word_mode, op1, mode,
                                                  i * UNITS_PER_WORD);
      xstormy16_emit_cbranch (code, loc);

      if (code == EQ)
        emit_label (lab);
      return;
    }

  /* We can't allow reload to try to generate any reload after a branch,
     so when some register must match we must make the temporary ourselves.  */
  if (mode != HImode)
    {
      rtx tmp;
      tmp = gen_reg_rtx (mode);
      emit_move_insn (tmp, op0);
      op0 = tmp;
    }

  condition_rtx = gen_rtx_fmt_ee (code, mode, op0, op1);
  loc_ref = gen_rtx_LABEL_REF (VOIDmode, loc);
  branch = gen_rtx_SET (VOIDmode, pc_rtx,
                        gen_rtx_IF_THEN_ELSE (VOIDmode, condition_rtx,
                                              loc_ref, pc_rtx));

  /* All forms clobber the carry bit.  */
  cy_clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (BImode));

  if (mode == HImode)
    vec = gen_rtvec (2, branch, cy_clobber);
  else if (code == NE || code == EQ)
    vec = gen_rtvec (2, branch, gen_rtx_CLOBBER (VOIDmode, op0));
  else
    {
      rtx sub;
#if 0
      sub = gen_rtx_SET (VOIDmode, op0, gen_rtx_MINUS (SImode, op0, op1));
#else
      sub = gen_rtx_CLOBBER (SImode, op0);
#endif
      vec = gen_rtvec (3, branch, sub, cy_clobber);
    }

  emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, vec));
}
235
 
236
/* Take a SImode conditional branch, one of GT/LE/GTU/LEU, and split
   the arithmetic operation.  Most of the work is done by
   xstormy16_expand_arith.

   MODE is the mode of the comparison; LABEL is the branch target;
   COMPARISON is the (code op0 op1) rtx; DEST and CARRY are the
   destination and carry operands passed on to the arith expander.  */

void
xstormy16_split_cbranch (enum machine_mode mode, rtx label, rtx comparison,
                         rtx dest, rtx carry)
{
  rtx op0 = XEXP (comparison, 0);
  rtx op1 = XEXP (comparison, 1);
  rtx seq, last_insn;
  rtx compare;

  /* Generate the compare as a multi-word subtract sequence, collected
     into SEQ rather than emitted directly.  */
  start_sequence ();
  xstormy16_expand_arith (mode, COMPARE, dest, op0, op1, carry);
  seq = get_insns ();
  end_sequence ();

  gcc_assert (INSN_P (seq));

  /* Find the final insn of the sequence; its first parallel element is
     the SET whose source we patch into a branch.  */
  last_insn = seq;
  while (NEXT_INSN (last_insn) != NULL_RTX)
    last_insn = NEXT_INSN (last_insn);

  /* Rewrite the last insn in place: give it the original comparison
     code and point it at LABEL.  */
  compare = SET_SRC (XVECEXP (PATTERN (last_insn), 0, 0));
  PUT_CODE (XEXP (compare, 0), GET_CODE (comparison));
  XEXP (compare, 1) = gen_rtx_LABEL_REF (VOIDmode, label);
  emit_insn (seq);
}
265
 
266
 
267
/* Return the string to output a conditional branch to LABEL, which is
   the operand number of the label.

   OP is the conditional expression, or NULL for branch-always.

   REVERSED is nonzero if we should reverse the sense of the comparison.

   INSN is the insn.

   The returned string lives in a static buffer and is overwritten on
   the next call.  Long branches are synthesized as a reversed short
   branch around a "jmpf".  */

char *
xstormy16_output_cbranch_hi (rtx op, const char *label, int reversed, rtx insn)
{
  static char string[64];
  /* Attr lengths 8 (conditional) / 4 (unconditional) indicate the
     target is out of short-branch range.  */
  int need_longbranch = (op != NULL_RTX
                         ? get_attr_length (insn) == 8
                         : get_attr_length (insn) == 4);
  /* A long branch inverts the condition so the short branch can skip
     over the jmpf.  */
  int really_reversed = reversed ^ need_longbranch;
  const char *ccode;
  const char *template;
  const char *operands;
  enum rtx_code code;

  /* Branch-always: plain "br" or far "jmpf".  */
  if (! op)
    {
      if (need_longbranch)
        ccode = "jmpf";
      else
        ccode = "br";
      sprintf (string, "%s %s", ccode, label);
      return string;
    }

  code = GET_CODE (op);

  /* The hardware compares register-first; if operand 0 is not a
     register, swap the operands and the condition.  */
  if (GET_CODE (XEXP (op, 0)) != REG)
    {
      code = swap_condition (code);
      operands = "%3,%2";
    }
  else
      operands = "%2,%3";

  /* Work out which way this really branches.  */
  if (really_reversed)
    code = reverse_condition (code);

  switch (code)
    {
    case EQ:   ccode = "z";   break;
    case NE:   ccode = "nz";  break;
    case GE:   ccode = "ge";  break;
    case LT:   ccode = "lt";  break;
    case GT:   ccode = "gt";  break;
    case LE:   ccode = "le";  break;
    case GEU:  ccode = "nc";  break;
    case LTU:  ccode = "c";   break;
    case GTU:  ccode = "hi";  break;
    case LEU:  ccode = "ls";  break;

    default:
      gcc_unreachable ();
    }

  if (need_longbranch)
    template = "b%s %s,.+8 | jmpf %s";
  else
    template = "b%s %s,%s";
  sprintf (string, template, ccode, operands, label);

  return string;
}
338
 
339
/* Return the string to output a conditional branch to LABEL, which is
   the operand number of the label, but suitable for the tail of a
   SImode branch.

   OP is the conditional expression (OP is never NULL_RTX).

   REVERSED is nonzero if we should reverse the sense of the comparison.

   INSN is the insn.

   The result is "<prevop> | b<cc> <label>", where PREVOP finishes the
   32-bit comparison (an "or" of the two halves for EQ/NE, an "sbc" for
   the ordered codes).  The string lives in a static buffer.  */

char *
xstormy16_output_cbranch_si (rtx op, const char *label, int reversed, rtx insn)
{
  static char string[64];
  int need_longbranch = get_attr_length (insn) >= 8;
  /* Long branches invert the condition and skip over a "jmpf".  */
  int really_reversed = reversed ^ need_longbranch;
  const char *ccode;
  const char *template;
  char prevop[16];
  enum rtx_code code;

  code = GET_CODE (op);

  /* Work out which way this really branches.  */
  if (really_reversed)
    code = reverse_condition (code);

  switch (code)
    {
    case EQ:   ccode = "z";   break;
    case NE:   ccode = "nz";  break;
    case GE:   ccode = "ge";  break;
    case LT:   ccode = "lt";  break;
    case GEU:  ccode = "nc";  break;
    case LTU:  ccode = "c";   break;

      /* The missing codes above should never be generated.  */
    default:
      gcc_unreachable ();
    }

  switch (code)
    {
    case EQ: case NE:
      {
        int regnum;

        gcc_assert (GET_CODE (XEXP (op, 0)) == REG);

        /* OR the two halves together; the branch then tests the
           zero/nonzero result.  */
        regnum = REGNO (XEXP (op, 0));
        sprintf (prevop, "or %s,%s", reg_names[regnum], reg_names[regnum+1]);
      }
      break;

    case GE: case LT: case GEU: case LTU:
      /* Finish the 32-bit subtract started by the earlier "sub".  */
      strcpy (prevop, "sbc %2,%3");
      break;

    default:
      gcc_unreachable ();
    }

  if (need_longbranch)
    template = "%s | b%s .+6 | jmpf %s";
  else
    template = "%s | b%s %s";
  sprintf (string, template, prevop, ccode, label);

  return string;
}
409
 
410
/* Many machines have some registers that cannot be copied directly to or from
411
   memory or even from other types of registers.  An example is the `MQ'
412
   register, which on most machines, can only be copied to or from general
413
   registers, but not memory.  Some machines allow copying all registers to and
414
   from memory, but require a scratch register for stores to some memory
415
   locations (e.g., those with symbolic address on the RT, and those with
416
   certain symbolic address on the SPARC when compiling PIC).  In some cases,
417
   both an intermediate and a scratch register are required.
418
 
419
   You should define these macros to indicate to the reload phase that it may
420
   need to allocate at least one register for a reload in addition to the
421
   register to contain the data.  Specifically, if copying X to a register
422
   CLASS in MODE requires an intermediate register, you should define
423
   `SECONDARY_INPUT_RELOAD_CLASS' to return the largest register class all of
424
   whose registers can be used as intermediate registers or scratch registers.
425
 
426
   If copying a register CLASS in MODE to X requires an intermediate or scratch
427
   register, `SECONDARY_OUTPUT_RELOAD_CLASS' should be defined to return the
428
   largest register class required.  If the requirements for input and output
429
   reloads are the same, the macro `SECONDARY_RELOAD_CLASS' should be used
430
   instead of defining both macros identically.
431
 
432
   The values returned by these macros are often `GENERAL_REGS'.  Return
433
   `NO_REGS' if no spare register is needed; i.e., if X can be directly copied
434
   to or from a register of CLASS in MODE without requiring a scratch register.
435
   Do not define this macro if it would always return `NO_REGS'.
436
 
437
   If a scratch register is required (either with or without an intermediate
438
   register), you should define patterns for `reload_inM' or `reload_outM', as
439
   required..  These patterns, which will normally be implemented with a
440
   `define_expand', should be similar to the `movM' patterns, except that
441
   operand 2 is the scratch register.
442
 
443
   Define constraints for the reload register and scratch register that contain
444
   a single register class.  If the original reload register (whose class is
445
   CLASS) can meet the constraint given in the pattern, the value returned by
446
   these macros is used for the class of the scratch register.  Otherwise, two
447
   additional reload registers are required.  Their classes are obtained from
448
   the constraints in the insn pattern.
449
 
450
   X might be a pseudo-register or a `subreg' of a pseudo-register, which could
451
   either be in a hard register or in memory.  Use `true_regnum' to find out;
452
   it will return -1 if the pseudo is in memory and the hard register number if
453
   it is in a register.
454
 
455
   These macros should not be used in the case where a particular class of
456
   registers can only be copied to memory and not to another class of
457
   registers.  In that case, secondary reload registers are not needed and
458
   would not be helpful.  Instead, a stack location must be used to perform the
459
   copy and the `movM' pattern should use memory as an intermediate storage.
460
   This case often occurs between floating-point and general registers.  */
461
 
462
/* Implement SECONDARY_RELOAD_CLASS (see the long comment above): return
   the class of an extra register needed to copy X to/from a register of
   CLASS in MODE, or NO_REGS if no such register is needed.  */
enum reg_class
xstormy16_secondary_reload_class (enum reg_class class,
                                  enum machine_mode mode,
                                  rtx x)
{
  /* This chip has the interesting property that only the first eight
     registers can be moved to/from memory.  */
  if ((GET_CODE (x) == MEM
       || ((GET_CODE (x) == SUBREG || GET_CODE (x) == REG)
           && (true_regnum (x) == -1
               || true_regnum (x) >= FIRST_PSEUDO_REGISTER)))
      && ! reg_class_subset_p (class, EIGHT_REGS))
    return EIGHT_REGS;

  /* When reloading a PLUS, the carry register will be required
     unless the inc or dec instructions can be used.  */
  if (xstormy16_carry_plus_operand (x, mode))
    return CARRY_REGS;

  return NO_REGS;
}
483
 
484
/* Recognize a PLUS that needs the carry register.  */
485
int
486
xstormy16_carry_plus_operand (rtx x, enum machine_mode mode ATTRIBUTE_UNUSED)
487
{
488
  return (GET_CODE (x) == PLUS
489
          && GET_CODE (XEXP (x, 1)) == CONST_INT
490
          && (INTVAL (XEXP (x, 1)) < -4 || INTVAL (XEXP (x, 1)) > 4));
491
}
492
 
493
/* Detect and error out on out-of-range constants for movhi.  */
494
int
495
xs_hi_general_operand (rtx x, enum machine_mode mode ATTRIBUTE_UNUSED)
496
{
497
  if ((GET_CODE (x) == CONST_INT)
498
   && ((INTVAL (x) >= 32768) || (INTVAL (x) < -32768)))
499
    error ("constant halfword load operand out of range");
500
  return general_operand (x, mode);
501
}
502
 
503
/* Detect and error out on out-of-range constants for addhi and subhi.  */
504
int
505
xs_hi_nonmemory_operand (rtx x, enum machine_mode mode ATTRIBUTE_UNUSED)
506
{
507
  if ((GET_CODE (x) == CONST_INT)
508
   && ((INTVAL (x) >= 32768) || (INTVAL (x) < -32768)))
509
    error ("constant arithmetic operand out of range");
510
  return nonmemory_operand (x, mode);
511
}
512
 
513
enum reg_class
514
xstormy16_preferred_reload_class (rtx x, enum reg_class class)
515
{
516
  if (class == GENERAL_REGS
517
      && GET_CODE (x) == MEM)
518
    return EIGHT_REGS;
519
 
520
  return class;
521
}
522
 
523
/* Predicate for symbols and addresses that reflect special 8-bit
524
   addressing.  */
525
int
526
xstormy16_below100_symbol (rtx x,
527
                           enum machine_mode mode ATTRIBUTE_UNUSED)
528
{
529
  if (GET_CODE (x) == CONST)
530
    x = XEXP (x, 0);
531
  if (GET_CODE (x) == PLUS
532
      && GET_CODE (XEXP (x, 1)) == CONST_INT)
533
    x = XEXP (x, 0);
534
 
535
  if (GET_CODE (x) == SYMBOL_REF)
536
    return (SYMBOL_REF_FLAGS (x) & SYMBOL_FLAG_XSTORMY16_BELOW100) != 0;
537
 
538
  if (GET_CODE (x) == CONST_INT)
539
    {
540
      HOST_WIDE_INT i = INTVAL (x);
541
      if ((i >= 0x0000 && i <= 0x00ff)
542
          || (i >= 0x7f00 && i <= 0x7fff))
543
        return 1;
544
    }
545
  return 0;
546
}
547
 
548
/* Likewise, but only for non-volatile MEMs, for patterns where the
549
   MEM will get split into smaller sized accesses.  */
550
int
551
xstormy16_splittable_below100_operand (rtx x, enum machine_mode mode)
552
{
553
  if (GET_CODE (x) == MEM && MEM_VOLATILE_P (x))
554
    return 0;
555
  return xstormy16_below100_operand (x, mode);
556
}
557
 
558
/* Expand an 8-bit IOR.  This either detects the one case we can
   actually do (setting a single bit), or uses a 16-bit IOR on the
   containing halfword.  OPERANDS is {dest, src, value}.  */
void
xstormy16_expand_iorqi3 (rtx *operands)
{
  rtx in, out, outsub, val;

  out = operands[0];
  in = operands[1];
  val = operands[2];

  /* Single-bit set: use the native QImode pattern, forcing operands
     into forms it accepts (below-100 memory or register).  */
  if (xstormy16_onebit_set_operand (val, QImode))
    {
      if (!xstormy16_below100_or_register (in, QImode))
        in = copy_to_mode_reg (QImode, in);
      if (!xstormy16_below100_or_register (out, QImode))
        out = gen_reg_rtx (QImode);
      emit_insn (gen_iorqi3_internal (out, in, val));
      if (out != operands[0])
        emit_move_insn (operands[0], out);
      return;
    }

  /* General case: widen everything to HImode and use iorhi3.  */
  if (GET_CODE (in) != REG)
    in = copy_to_mode_reg (QImode, in);
  if (GET_CODE (val) != REG
      && GET_CODE (val) != CONST_INT)
    val = copy_to_mode_reg (QImode, val);
  if (GET_CODE (out) != REG)
    out = gen_reg_rtx (QImode);

  in = simplify_gen_subreg (HImode, in, QImode, 0);
  outsub = simplify_gen_subreg (HImode, out, QImode, 0);
  if (GET_CODE (val) != CONST_INT)
    val = simplify_gen_subreg (HImode, val, QImode, 0);

  emit_insn (gen_iorhi3 (outsub, in, val));

  /* If we substituted a temporary for the destination, copy back.  */
  if (out != operands[0])
    emit_move_insn (operands[0], out);
}
599
 
600
/* Likewise, for AND: the native pattern handles clearing a single bit;
   anything else is widened to a 16-bit AND.  OPERANDS is
   {dest, src, value}.  */
void
xstormy16_expand_andqi3 (rtx *operands)
{
  rtx in, out, outsub, val;

  out = operands[0];
  in = operands[1];
  val = operands[2];

  /* Single-bit clear: use the native QImode pattern, forcing operands
     into forms it accepts (below-100 memory or register).  */
  if (xstormy16_onebit_clr_operand (val, QImode))
    {
      if (!xstormy16_below100_or_register (in, QImode))
        in = copy_to_mode_reg (QImode, in);
      if (!xstormy16_below100_or_register (out, QImode))
        out = gen_reg_rtx (QImode);
      emit_insn (gen_andqi3_internal (out, in, val));
      if (out != operands[0])
        emit_move_insn (operands[0], out);
      return;
    }

  /* General case: widen everything to HImode and use andhi3.  */
  if (GET_CODE (in) != REG)
    in = copy_to_mode_reg (QImode, in);
  if (GET_CODE (val) != REG
      && GET_CODE (val) != CONST_INT)
    val = copy_to_mode_reg (QImode, val);
  if (GET_CODE (out) != REG)
    out = gen_reg_rtx (QImode);

  in = simplify_gen_subreg (HImode, in, QImode, 0);
  outsub = simplify_gen_subreg (HImode, out, QImode, 0);
  if (GET_CODE (val) != CONST_INT)
    val = simplify_gen_subreg (HImode, val, QImode, 0);

  emit_insn (gen_andhi3 (outsub, in, val));

  /* If we substituted a temporary for the destination, copy back.  */
  if (out != operands[0])
    emit_move_insn (operands[0], out);
}
640
 
641
/* Nonzero if X is a CONST_INT that, with OFFSET added, fits in the
   signed 12-bit displacement range -2048 .. 2047.  */
#define LEGITIMATE_ADDRESS_INTEGER_P(X, OFFSET)                         \
 (GET_CODE (X) == CONST_INT                                             \
  && (unsigned HOST_WIDE_INT) (INTVAL (X) + (OFFSET) + 2048) < 4096)

/* Nonzero if X is a CONST_INT that, with OFFSET added, is a valid
   absolute address: nonnegative, below 0x8000, and either below 0x100
   or at/above 0x7F00.  */
#define LEGITIMATE_ADDRESS_CONST_INT_P(X, OFFSET)                        \
 (GET_CODE (X) == CONST_INT                                              \
  && INTVAL (X) + (OFFSET) >= 0                                           \
  && INTVAL (X) + (OFFSET) < 0x8000                                      \
  && (INTVAL (X) + (OFFSET) < 0x100 || INTVAL (X) + (OFFSET) >= 0x7F00))
650
 
651
int
652
xstormy16_legitimate_address_p (enum machine_mode mode ATTRIBUTE_UNUSED,
653
                                rtx x, int strict)
654
{
655
  if (LEGITIMATE_ADDRESS_CONST_INT_P (x, 0))
656
    return 1;
657
 
658
  if (GET_CODE (x) == PLUS
659
      && LEGITIMATE_ADDRESS_INTEGER_P (XEXP (x, 1), 0))
660
    x = XEXP (x, 0);
661
 
662
  if ((GET_CODE (x) == PRE_MODIFY
663
       && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT)
664
      || GET_CODE (x) == POST_INC
665
      || GET_CODE (x) == PRE_DEC)
666
    x = XEXP (x, 0);
667
 
668
  if (GET_CODE (x) == REG && REGNO_OK_FOR_BASE_P (REGNO (x))
669
      && (! strict || REGNO (x) < FIRST_PSEUDO_REGISTER))
670
    return 1;
671
 
672
  if (xstormy16_below100_symbol(x, mode))
673
    return 1;
674
 
675
  return 0;
676
}
677
 
678
/* Return nonzero if memory address X (an RTX) can have different
679
   meanings depending on the machine mode of the memory reference it
680
   is used for or if the address is valid for some modes but not
681
   others.
682
 
683
   Autoincrement and autodecrement addresses typically have mode-dependent
684
   effects because the amount of the increment or decrement is the size of the
685
   operand being addressed.  Some machines have other mode-dependent addresses.
686
   Many RISC machines have no mode-dependent addresses.
687
 
688
   You may assume that ADDR is a valid address for the machine.
689
 
690
   On this chip, this is true if the address is valid with an offset
691
   of 0 but not of 6, because in that case it cannot be used as an
692
   address for DImode or DFmode, or if the address is a post-increment
693
   or pre-decrement address.  */
694
int
695
xstormy16_mode_dependent_address_p (rtx x)
696
{
697
  if (LEGITIMATE_ADDRESS_CONST_INT_P (x, 0)
698
      && ! LEGITIMATE_ADDRESS_CONST_INT_P (x, 6))
699
    return 1;
700
 
701
  if (GET_CODE (x) == PLUS
702
      && LEGITIMATE_ADDRESS_INTEGER_P (XEXP (x, 1), 0)
703
      && ! LEGITIMATE_ADDRESS_INTEGER_P (XEXP (x, 1), 6))
704
    return 1;
705
 
706
  if (GET_CODE (x) == PLUS)
707
    x = XEXP (x, 0);
708
 
709
  if (GET_CODE (x) == POST_INC
710
      || GET_CODE (x) == PRE_DEC)
711
    return 1;
712
 
713
  return 0;
714
}
715
 
716
/* A C expression that defines the optional machine-dependent constraint
717
   letters (`Q', `R', `S', `T', `U') that can be used to segregate specific
718
   types of operands, usually memory references, for the target machine.
719
   Normally this macro will not be defined.  If it is required for a particular
720
   target machine, it should return 1 if VALUE corresponds to the operand type
721
   represented by the constraint letter C.  If C is not defined as an extra
722
   constraint, the value returned should be 0 regardless of VALUE.  */
723
/* Implement EXTRA_CONSTRAINT (see the comment above): return 1 if X
   matches the extra constraint letter C, 0 otherwise (including for any
   letter not handled here).  */
int
xstormy16_extra_constraint_p (rtx x, int c)
{
  switch (c)
    {
      /* 'Q' is for pushes: a post-increment store through SP.  */
    case 'Q':
      return (GET_CODE (x) == MEM
              && GET_CODE (XEXP (x, 0)) == POST_INC
              && XEXP (XEXP (x, 0), 0) == stack_pointer_rtx);

      /* 'R' is for pops: a pre-decrement load through SP.  */
    case 'R':
      return (GET_CODE (x) == MEM
              && GET_CODE (XEXP (x, 0)) == PRE_DEC
              && XEXP (XEXP (x, 0), 0) == stack_pointer_rtx);

      /* 'S' is for immediate memory addresses.  */
    case 'S':
      return (GET_CODE (x) == MEM
              && GET_CODE (XEXP (x, 0)) == CONST_INT
              && xstormy16_legitimate_address_p (VOIDmode, XEXP (x, 0), 0));

      /* 'T' is for Rx.  */
    case 'T':
      /* Not implemented yet.  */
      return 0;

      /* 'U' is for CONST_INT values not between 2 and 15 inclusive,
         for allocating a scratch register for 32-bit shifts.  */
    case 'U':
      return (GET_CODE (x) == CONST_INT
              && (INTVAL (x) < 2 || INTVAL (x) > 15));

      /* 'Z' is for CONST_INT value zero.  This is for adding zero to
         a register in addhi3, which would otherwise require a carry.  */
    case 'Z':
      return (GET_CODE (x) == CONST_INT
              && (INTVAL (x) == 0));

      /* 'W' is for operands with below-100 addressing.  */
    case 'W':
      return xstormy16_below100_operand(x, GET_MODE(x));

    default:
      return 0;
    }
}
770
 
771
int
772
short_memory_operand (rtx x, enum machine_mode mode)
773
{
774
  if (! memory_operand (x, mode))
775
    return 0;
776
  return (GET_CODE (XEXP (x, 0)) != PLUS);
777
}
778
 
779
/* Splitter for the 'move' patterns, for modes not directly implemented
   by hardware.  Emit insns to copy a value of mode MODE from SRC to
   DEST by copying one word at a time, choosing the copy direction so
   that overlapping register copies and auto-modified addresses come out
   right.

   This function is only called when reload_completed.
   */

void
xstormy16_split_move (enum machine_mode mode, rtx dest, rtx src)
{
  int num_words = GET_MODE_BITSIZE (mode) / BITS_PER_WORD;
  int direction, end, i;
  int src_modifies = 0;
  int dest_modifies = 0;
  int src_volatile = 0;
  int dest_volatile = 0;
  rtx mem_operand;
  rtx auto_inc_reg_rtx = NULL_RTX;

  /* Check initial conditions.  */
  gcc_assert (reload_completed
              && mode != QImode && mode != HImode
              && nonimmediate_operand (dest, mode)
              && general_operand (src, mode));

  /* This case is not supported below, and shouldn't be generated.  */
  gcc_assert (GET_CODE (dest) != MEM || GET_CODE (src) != MEM);

  /* This case is very very bad after reload, so trap it now.  */
  gcc_assert (GET_CODE (dest) != SUBREG && GET_CODE (src) != SUBREG);

  /* The general idea is to copy by words, offsetting the source and
     destination.  Normally the least-significant word will be copied
     first, but for pre-dec operations it's better to copy the
     most-significant word first.  Only one operand can be a pre-dec
     or post-inc operand.

     It's also possible that the copy overlaps so that the direction
     must be reversed.  */
  direction = 1;

  /* Record the side-effect and volatility properties of whichever
     operand is a MEM; the volatile bit is cleared on a copy so the
     per-word accesses can be marked individually below.  */
  if (GET_CODE (dest) == MEM)
    {
      mem_operand = XEXP (dest, 0);
      dest_modifies = side_effects_p (mem_operand);
      if (auto_inc_p (mem_operand))
        auto_inc_reg_rtx = XEXP (mem_operand, 0);
      dest_volatile = MEM_VOLATILE_P (dest);
      if (dest_volatile)
        {
          dest = copy_rtx (dest);
          MEM_VOLATILE_P (dest) = 0;
        }
    }
  else if (GET_CODE (src) == MEM)
    {
      mem_operand = XEXP (src, 0);
      src_modifies = side_effects_p (mem_operand);
      if (auto_inc_p (mem_operand))
        auto_inc_reg_rtx = XEXP (mem_operand, 0);
      src_volatile = MEM_VOLATILE_P (src);
      if (src_volatile)
        {
          src = copy_rtx (src);
          MEM_VOLATILE_P (src) = 0;
        }
    }
  else
    mem_operand = NULL_RTX;

  /* Choose the copy direction.  */
  if (mem_operand == NULL_RTX)
    {
      /* Register-to-register with overlap: copy high-to-low when the
         destination starts above the source.  */
      if (GET_CODE (src) == REG
          && GET_CODE (dest) == REG
          && reg_overlap_mentioned_p (dest, src)
          && REGNO (dest) > REGNO (src))
        direction = -1;
    }
  else if (GET_CODE (mem_operand) == PRE_DEC
      || (GET_CODE (mem_operand) == PLUS
          && GET_CODE (XEXP (mem_operand, 0)) == PRE_DEC))
    /* Pre-decrement addresses walk downward through memory.  */
    direction = -1;
  else if (GET_CODE (src) == MEM
           && reg_overlap_mentioned_p (dest, src))
    {
      /* A load whose address uses a destination register: copy in the
         order that consumes the address register last.  */
      int regno;

      gcc_assert (GET_CODE (dest) == REG);
      regno = REGNO (dest);

      gcc_assert (refers_to_regno_p (regno, regno + num_words,
                                     mem_operand, 0));

      if (refers_to_regno_p (regno, regno + 1, mem_operand, 0))
        direction = -1;
      else if (refers_to_regno_p (regno + num_words - 1, regno + num_words,
                                  mem_operand, 0))
        direction = 1;
      else
        /* This means something like
           (set (reg:DI r0) (mem:DI (reg:HI r1)))
           which we'd need to support by doing the set of the second word
           last.  */
        gcc_unreachable ();
    }

  /* Emit one word-mode move per word, in the chosen direction.  */
  end = direction < 0 ? -1 : num_words;
  for (i = direction < 0 ? num_words - 1 : 0; i != end; i += direction)
    {
      rtx w_src, w_dest, insn;

      /* Auto-modified addresses are re-used as-is each iteration; plain
         operands get a word-sized subreg at the right offset.  */
      if (src_modifies)
        w_src = gen_rtx_MEM (word_mode, mem_operand);
      else
        w_src = simplify_gen_subreg (word_mode, src, mode, i * UNITS_PER_WORD);
      if (src_volatile)
        MEM_VOLATILE_P (w_src) = 1;
      if (dest_modifies)
        w_dest = gen_rtx_MEM (word_mode, mem_operand);
      else
        w_dest = simplify_gen_subreg (word_mode, dest, mode,
                                      i * UNITS_PER_WORD);
      if (dest_volatile)
        MEM_VOLATILE_P (w_dest) = 1;

      /* The simplify_subreg calls must always be able to simplify.  */
      gcc_assert (GET_CODE (w_src) != SUBREG
                  && GET_CODE (w_dest) != SUBREG);

      insn = emit_insn (gen_rtx_SET (VOIDmode, w_dest, w_src));
      if (auto_inc_reg_rtx)
        REG_NOTES (insn) = alloc_EXPR_LIST (REG_INC,
                                            auto_inc_reg_rtx,
                                            REG_NOTES (insn));
    }
}
915
 
916
/* Expander for the 'move' patterns.  Emit insns to copy a value of
   mode MODE from SRC to DEST.  PRE_MODIFY addresses are expanded into
   an explicit address update (clobbering the carry, register 16)
   followed by a plain MEM access.  */

void
xstormy16_expand_move (enum machine_mode mode, rtx dest, rtx src)
{
  /* Rewrite a PRE_MODIFY destination: update the base register first,
     then store through it.  */
  if ((GET_CODE (dest) == MEM) && (GET_CODE (XEXP (dest, 0)) == PRE_MODIFY))
    {
      rtx pmv      = XEXP (dest, 0);
      rtx dest_reg = XEXP (pmv, 0);
      rtx dest_mod = XEXP (pmv, 1);
      rtx set      = gen_rtx_SET (Pmode, dest_reg, dest_mod);
      rtx clobber  = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (BImode, 16));

      dest = gen_rtx_MEM (mode, dest_reg);
      emit_insn (gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, set, clobber)));
    }
  /* Likewise for a PRE_MODIFY source.  */
  else if ((GET_CODE (src) == MEM) && (GET_CODE (XEXP (src, 0)) == PRE_MODIFY))
    {
      rtx pmv     = XEXP (src, 0);
      rtx src_reg = XEXP (pmv, 0);
      rtx src_mod = XEXP (pmv, 1);
      rtx set     = gen_rtx_SET (Pmode, src_reg, src_mod);
      rtx clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (BImode, 16));

      src = gen_rtx_MEM (mode, src_reg);
      emit_insn (gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, set, clobber)));
    }

  /* There are only limited immediate-to-memory move instructions.  */
  if (! reload_in_progress
      && ! reload_completed
      && GET_CODE (dest) == MEM
      && (GET_CODE (XEXP (dest, 0)) != CONST_INT
          || ! xstormy16_legitimate_address_p (mode, XEXP (dest, 0), 0))
      && ! xstormy16_below100_operand (dest, mode)
      && GET_CODE (src) != REG
      && GET_CODE (src) != SUBREG)
    src = copy_to_mode_reg (mode, src);

  /* Don't emit something we would immediately split.  */
  if (reload_completed
      && mode != HImode && mode != QImode)
    {
      xstormy16_split_move (mode, dest, src);
      return;
    }

  emit_insn (gen_rtx_SET (VOIDmode, dest, src));
}
966
 
967
 
968
/* Stack Layout:
969
 
970
   The stack is laid out as follows:
971
 
972
SP->
973
FP->    Local variables
974
        Register save area (up to 4 words)
975
        Argument register save area for stdarg (NUM_ARGUMENT_REGISTERS words)
976
 
977
AP->    Return address (two words)
978
        9th procedure parameter word
979
        10th procedure parameter word
980
        ...
981
        last procedure parameter word
982
 
983
  The frame pointer location is tuned to make it most likely that all
984
  parameters and local variables can be accessed using a load-indexed
985
  instruction.  */
986
 
987
/* A structure to describe the layout (see the stack diagram above).  */
struct xstormy16_stack_layout
{
  /* Size of the topmost three items on the stack.  */
  int locals_size;              /* Local variables.  */
  int register_save_size;       /* Callee-saved register area.  */
  int stdarg_save_size;         /* Argument-register save area for stdarg.  */
  /* Sum of the above items.  */
  int frame_size;
  /* Various offsets.  */
  int first_local_minus_ap;
  int sp_minus_fp;
  int fp_minus_ap;
};
1001
 
1002
/* Does REGNUM need to be saved in the prologue?  True for a call-saved
   register that is ever live, or -- in an interrupt function (IFUN
   nonzero) -- for any non-fixed call-used register except the carry
   flag, when it is live or the function might make a call that would
   clobber it.  */
#define REG_NEEDS_SAVE(REGNUM, IFUN)                                    \
  ((regs_ever_live[REGNUM] && ! call_used_regs[REGNUM])                 \
   || (IFUN && ! fixed_regs[REGNUM] && call_used_regs[REGNUM]           \
       && (REGNO_REG_CLASS (REGNUM) != CARRY_REGS)                      \
       && (regs_ever_live[REGNUM] || ! current_function_is_leaf)))
1008
 
1009
/* Compute the stack layout, as described in the "Stack Layout"
   comment above.  Returns the layout by value.  */
struct xstormy16_stack_layout
xstormy16_compute_stack_layout (void)
{
  struct xstormy16_stack_layout layout;
  int regno;
  const int ifun = xstormy16_interrupt_function_p ();

  layout.locals_size = get_frame_size ();

  /* One word for every register the prologue will push.  */
  layout.register_save_size = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if (REG_NEEDS_SAVE (regno, ifun))
      layout.register_save_size += UNITS_PER_WORD;

  /* Varargs functions additionally dump all the argument registers.  */
  if (current_function_stdarg)
    layout.stdarg_save_size = NUM_ARGUMENT_REGISTERS * UNITS_PER_WORD;
  else
    layout.stdarg_save_size = 0;

  layout.frame_size = (layout.locals_size
                       + layout.register_save_size
                       + layout.stdarg_save_size);

  /* Place the frame pointer so that frame and arguments stay within
     2048 bytes of it when possible -- presumably the reach of the
     load-indexed addressing mode (see the "Stack Layout" comment);
     TODO confirm against the ISA.  The != -1 test guards the
     unknown-argument-size case.  */
  if (current_function_args_size <= 2048 && current_function_args_size != -1)
    {
      if (layout.frame_size + INCOMING_FRAME_SP_OFFSET
          + current_function_args_size <= 2048)
        layout.fp_minus_ap = layout.frame_size + INCOMING_FRAME_SP_OFFSET;
      else
        layout.fp_minus_ap = 2048 - current_function_args_size;
    }
  else
    layout.fp_minus_ap = (layout.stdarg_save_size
                          + layout.register_save_size
                          + INCOMING_FRAME_SP_OFFSET);
  /* The remaining offsets follow arithmetically from fp_minus_ap.  */
  layout.sp_minus_fp = (layout.frame_size + INCOMING_FRAME_SP_OFFSET
                        - layout.fp_minus_ap);
  layout.first_local_minus_ap = layout.sp_minus_fp - layout.locals_size;
  return layout;
}
1050
 
1051
/* Determine how all the special registers get eliminated.  */
1052
int
1053
xstormy16_initial_elimination_offset (int from, int to)
1054
{
1055
  struct xstormy16_stack_layout layout;
1056
  int result;
1057
 
1058
  layout = xstormy16_compute_stack_layout ();
1059
 
1060
  if (from == FRAME_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
1061
    result = layout.sp_minus_fp - layout.locals_size;
1062
  else if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
1063
    result = -layout.locals_size;
1064
  else if (from == ARG_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
1065
    result = -layout.fp_minus_ap;
1066
  else if (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
1067
    result = -(layout.sp_minus_fp + layout.fp_minus_ap);
1068
  else
1069
    gcc_unreachable ();
1070
 
1071
  return result;
1072
}
1073
 
1074
static rtx
1075
emit_addhi3_postreload (rtx dest, rtx src0, rtx src1)
1076
{
1077
  rtx set, clobber, insn;
1078
 
1079
  set = gen_rtx_SET (VOIDmode, dest, gen_rtx_PLUS (HImode, src0, src1));
1080
  clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (BImode, 16));
1081
  insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, set, clobber)));
1082
  return insn;
1083
}
1084
 
1085
/* Called after register allocation to add any instructions needed for
1086
   the prologue.  Using a prologue insn is favored compared to putting
1087
   all of the instructions in the TARGET_ASM_FUNCTION_PROLOGUE macro,
1088
   since it allows the scheduler to intermix instructions with the
1089
   saves of the caller saved registers.  In some cases, it might be
1090
   necessary to emit a barrier instruction as the last insn to prevent
1091
   such scheduling.
1092
 
1093
   Also any insns generated here should have RTX_FRAME_RELATED_P(insn) = 1
1094
   so that the debug info generation code can handle them properly.  */
1095
void
1096
xstormy16_expand_prologue (void)
1097
{
1098
  struct xstormy16_stack_layout layout;
1099
  int regno;
1100
  rtx insn;
1101
  rtx mem_push_rtx;
1102
  const int ifun = xstormy16_interrupt_function_p ();
1103
 
1104
  mem_push_rtx = gen_rtx_POST_INC (Pmode, stack_pointer_rtx);
1105
  mem_push_rtx = gen_rtx_MEM (HImode, mem_push_rtx);
1106
 
1107
  layout = xstormy16_compute_stack_layout ();
1108
 
1109
  if (layout.locals_size >= 32768)
1110
    error ("local variable memory requirements exceed capacity");
1111
 
1112
  /* Save the argument registers if necessary.  */
1113
  if (layout.stdarg_save_size)
1114
    for (regno = FIRST_ARGUMENT_REGISTER;
1115
         regno < FIRST_ARGUMENT_REGISTER + NUM_ARGUMENT_REGISTERS;
1116
         regno++)
1117
      {
1118
        rtx dwarf;
1119
        rtx reg = gen_rtx_REG (HImode, regno);
1120
 
1121
        insn = emit_move_insn (mem_push_rtx, reg);
1122
        RTX_FRAME_RELATED_P (insn) = 1;
1123
 
1124
        dwarf = gen_rtx_SEQUENCE (VOIDmode, rtvec_alloc (2));
1125
 
1126
        XVECEXP (dwarf, 0, 0) = gen_rtx_SET (VOIDmode,
1127
                                             gen_rtx_MEM (Pmode, stack_pointer_rtx),
1128
                                             reg);
1129
        XVECEXP (dwarf, 0, 1) = gen_rtx_SET (Pmode, stack_pointer_rtx,
1130
                                             plus_constant (stack_pointer_rtx,
1131
                                                            GET_MODE_SIZE (Pmode)));
1132
        REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
1133
                                              dwarf,
1134
                                              REG_NOTES (insn));
1135
        RTX_FRAME_RELATED_P (XVECEXP (dwarf, 0, 0)) = 1;
1136
        RTX_FRAME_RELATED_P (XVECEXP (dwarf, 0, 1)) = 1;
1137
      }
1138
 
1139
  /* Push each of the registers to save.  */
1140
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1141
    if (REG_NEEDS_SAVE (regno, ifun))
1142
      {
1143
        rtx dwarf;
1144
        rtx reg = gen_rtx_REG (HImode, regno);
1145
 
1146
        insn = emit_move_insn (mem_push_rtx, reg);
1147
        RTX_FRAME_RELATED_P (insn) = 1;
1148
 
1149
        dwarf = gen_rtx_SEQUENCE (VOIDmode, rtvec_alloc (2));
1150
 
1151
        XVECEXP (dwarf, 0, 0) = gen_rtx_SET (VOIDmode,
1152
                                             gen_rtx_MEM (Pmode, stack_pointer_rtx),
1153
                                             reg);
1154
        XVECEXP (dwarf, 0, 1) = gen_rtx_SET (Pmode, stack_pointer_rtx,
1155
                                             plus_constant (stack_pointer_rtx,
1156
                                                            GET_MODE_SIZE (Pmode)));
1157
        REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
1158
                                              dwarf,
1159
                                              REG_NOTES (insn));
1160
        RTX_FRAME_RELATED_P (XVECEXP (dwarf, 0, 0)) = 1;
1161
        RTX_FRAME_RELATED_P (XVECEXP (dwarf, 0, 1)) = 1;
1162
      }
1163
 
1164
  /* It's just possible that the SP here might be what we need for
1165
     the new FP...  */
1166
  if (frame_pointer_needed && layout.sp_minus_fp == layout.locals_size)
1167
    emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx);
1168
 
1169
  /* Allocate space for local variables.  */
1170
  if (layout.locals_size)
1171
    {
1172
      insn = emit_addhi3_postreload (stack_pointer_rtx, stack_pointer_rtx,
1173
                                     GEN_INT (layout.locals_size));
1174
      RTX_FRAME_RELATED_P (insn) = 1;
1175
    }
1176
 
1177
  /* Set up the frame pointer, if required.  */
1178
  if (frame_pointer_needed && layout.sp_minus_fp != layout.locals_size)
1179
    {
1180
      insn = emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx);
1181
 
1182
      if (layout.sp_minus_fp)
1183
        emit_addhi3_postreload (hard_frame_pointer_rtx,
1184
                                hard_frame_pointer_rtx,
1185
                                GEN_INT (-layout.sp_minus_fp));
1186
    }
1187
}
1188
 
1189
/* Do we need an epilogue at all?  */
1190
int
1191
direct_return (void)
1192
{
1193
  return (reload_completed
1194
          && xstormy16_compute_stack_layout ().frame_size == 0);
1195
}
1196
 
1197
/* Called after register allocation to add any instructions needed for
   the epilogue.  Using an epilogue insn is favored compared to putting
   all of the instructions in the TARGET_ASM_FUNCTION_PROLOGUE macro,
   since it allows the scheduler to intermix instructions with the
   saves of the caller saved registers.  In some cases, it might be
   necessary to emit a barrier instruction as the last insn to prevent
   such scheduling.  */

void
xstormy16_expand_epilogue (void)
{
  struct xstormy16_stack_layout layout;
  rtx mem_pop_rtx, insn;
  int regno;
  const int ifun = xstormy16_interrupt_function_p ();

  /* A (mem (pre_dec SP)): each move from this pops one word.  */
  mem_pop_rtx = gen_rtx_PRE_DEC (Pmode, stack_pointer_rtx);
  mem_pop_rtx = gen_rtx_MEM (HImode, mem_pop_rtx);

  layout = xstormy16_compute_stack_layout ();

  /* Pop the stack for the locals.  */
  if (layout.locals_size)
    {
      /* When FP still points just below the locals, restoring SP from
         it is cheaper than an explicit adjustment.  */
      if (frame_pointer_needed && layout.sp_minus_fp == layout.locals_size)
        emit_move_insn (stack_pointer_rtx, hard_frame_pointer_rtx);
      else
        {
          insn = emit_addhi3_postreload (stack_pointer_rtx, stack_pointer_rtx,
                                         GEN_INT (- layout.locals_size));
          RTX_FRAME_RELATED_P (insn) = 1;
        }
    }

  /* Restore any call-saved registers.  Walk in decreasing regno order,
     the reverse of the prologue's pushes.  */
  for (regno = FIRST_PSEUDO_REGISTER - 1; regno >= 0; regno--)
    if (REG_NEEDS_SAVE (regno, ifun))
      {
        rtx dwarf;

        insn = emit_move_insn (gen_rtx_REG (HImode, regno), mem_pop_rtx);
        RTX_FRAME_RELATED_P (insn) = 1;
        /* Record the stack-pointer adjustment for the unwinder.  */
        dwarf = gen_rtx_SET (Pmode, stack_pointer_rtx,
                             plus_constant (stack_pointer_rtx,
                                            -GET_MODE_SIZE (Pmode)));
        REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
                                              dwarf,
                                              REG_NOTES (insn));
      }

  /* Pop the stack for the stdarg save area.  */
  if (layout.stdarg_save_size)
    {
      insn = emit_addhi3_postreload (stack_pointer_rtx, stack_pointer_rtx,
                                     GEN_INT (- layout.stdarg_save_size));
      RTX_FRAME_RELATED_P (insn) = 1;
    }

  /* Return.  Interrupt functions use a distinct return insn.  */
  if (ifun)
    emit_jump_insn (gen_return_internal_interrupt ());
  else
    emit_jump_insn (gen_return_internal ());
}
1261
 
1262
int
1263
xstormy16_epilogue_uses (int regno)
1264
{
1265
  if (reload_completed && call_used_regs[regno])
1266
    {
1267
      const int ifun = xstormy16_interrupt_function_p ();
1268
      return REG_NEEDS_SAVE (regno, ifun);
1269
    }
1270
  return 0;
1271
}
1272
 
1273
/* Profiling is not implemented for this target; issue a "sorry"
   diagnostic rather than silently generating wrong code.  */
void
xstormy16_function_profiler (void)
{
  sorry ("function_profiler support");
}
1278
 
1279
 
1280
/* Return an updated summarizer variable CUM to advance past an
1281
   argument in the argument list.  The values MODE, TYPE and NAMED
1282
   describe that argument.  Once this is done, the variable CUM is
1283
   suitable for analyzing the *following* argument with
1284
   `FUNCTION_ARG', etc.
1285
 
1286
   This function need not do anything if the argument in question was
1287
   passed on the stack.  The compiler knows how to track the amount of
1288
   stack space used for arguments without any special help.  However,
1289
   it makes life easier for xstormy16_build_va_list if it does update
1290
   the word count.  */
1291
CUMULATIVE_ARGS
1292
xstormy16_function_arg_advance (CUMULATIVE_ARGS cum, enum machine_mode mode,
1293
                                tree type, int named ATTRIBUTE_UNUSED)
1294
{
1295
  /* If an argument would otherwise be passed partially in registers,
1296
     and partially on the stack, the whole of it is passed on the
1297
     stack.  */
1298
  if (cum < NUM_ARGUMENT_REGISTERS
1299
      && cum + XSTORMY16_WORD_SIZE (type, mode) > NUM_ARGUMENT_REGISTERS)
1300
    cum = NUM_ARGUMENT_REGISTERS;
1301
 
1302
  cum += XSTORMY16_WORD_SIZE (type, mode);
1303
 
1304
  return cum;
1305
}
1306
 
1307
rtx
1308
xstormy16_function_arg (CUMULATIVE_ARGS cum, enum machine_mode mode,
1309
                        tree type, int named ATTRIBUTE_UNUSED)
1310
{
1311
  if (mode == VOIDmode)
1312
    return const0_rtx;
1313
  if (targetm.calls.must_pass_in_stack (mode, type)
1314
      || cum + XSTORMY16_WORD_SIZE (type, mode) > NUM_ARGUMENT_REGISTERS)
1315
    return 0;
1316
  return gen_rtx_REG (mode, cum + 2);
1317
}
1318
 
1319
/* Build the va_list type.

   For this chip, va_list is a record containing a counter and a pointer.
   The counter is of type 'int' and indicates how many bytes
   have been used to date.  The pointer indicates the stack position
   for arguments that have not been passed in registers.
   To keep the layout nice, the pointer is first in the structure.  */

static tree
xstormy16_build_builtin_va_list (void)
{
  tree f_1, f_2, record, type_decl;

  record = (*lang_hooks.types.make_type) (RECORD_TYPE);
  type_decl = build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);

  /* Field 1 is the stack-position pointer; field 2 the byte counter.  */
  f_1 = build_decl (FIELD_DECL, get_identifier ("base"),
                      ptr_type_node);
  f_2 = build_decl (FIELD_DECL, get_identifier ("count"),
                      unsigned_type_node);

  DECL_FIELD_CONTEXT (f_1) = record;
  DECL_FIELD_CONTEXT (f_2) = record;

  TREE_CHAIN (record) = type_decl;
  TYPE_NAME (record) = type_decl;
  /* Chain the fields onto the record, then lay it out.  */
  TYPE_FIELDS (record) = f_1;
  TREE_CHAIN (f_1) = f_2;

  layout_type (record);

  return record;
}
1352
 
1353
/* Implement the stdarg/varargs va_start macro.  STDARG_P is nonzero if this
   is stdarg.h instead of varargs.h.  VALIST is the tree of the va_list
   variable to initialize.  NEXTARG is the machine independent notion of the
   'next' argument after the variable arguments.  */
void
xstormy16_expand_builtin_va_start (tree valist, rtx nextarg ATTRIBUTE_UNUSED)
{
  tree f_base, f_count;
  tree base, count;
  tree t;

  if (xstormy16_interrupt_function_p ())
    error ("cannot use va_start in interrupt function");

  /* Extract the 'base' and 'count' fields of the va_list record
     (layout built in xstormy16_build_builtin_va_list).  */
  f_base = TYPE_FIELDS (va_list_type_node);
  f_count = TREE_CHAIN (f_base);

  base = build3 (COMPONENT_REF, TREE_TYPE (f_base), valist, f_base, NULL_TREE);
  count = build3 (COMPONENT_REF, TREE_TYPE (f_count), valist, f_count,
                  NULL_TREE);

  /* base = incoming argument pointer + INCOMING_FRAME_SP_OFFSET.  */
  t = make_tree (TREE_TYPE (base), virtual_incoming_args_rtx);
  t = build2 (PLUS_EXPR, TREE_TYPE (base), t,
              build_int_cst (NULL_TREE, INCOMING_FRAME_SP_OFFSET));
  t = build2 (MODIFY_EXPR, TREE_TYPE (base), base, t);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  /* count = bytes of named arguments already consumed.  */
  t = build2 (MODIFY_EXPR, TREE_TYPE (count), count,
              build_int_cst (NULL_TREE,
                             current_function_args_info * UNITS_PER_WORD));
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
}
1387
 
1388
/* Implement the stdarg/varargs va_arg macro.  VALIST is the variable
   of type va_list as a tree, TYPE is the type passed to va_arg.
   Note:  This algorithm is documented in stormy-abi.  */

static tree
xstormy16_expand_builtin_va_arg (tree valist, tree type, tree *pre_p,
                                 tree *post_p ATTRIBUTE_UNUSED)
{
  tree f_base, f_count;
  tree base, count;
  tree count_tmp, addr, t;
  tree lab_gotaddr, lab_fromstack;
  int size, size_of_reg_args, must_stack;
  tree size_tree;

  /* Extract the 'base' and 'count' fields of the va_list record.  */
  f_base = TYPE_FIELDS (va_list_type_node);
  f_count = TREE_CHAIN (f_base);

  base = build3 (COMPONENT_REF, TREE_TYPE (f_base), valist, f_base, NULL_TREE);
  count = build3 (COMPONENT_REF, TREE_TYPE (f_count), valist, f_count,
                  NULL_TREE);

  must_stack = targetm.calls.must_pass_in_stack (TYPE_MODE (type), type);
  /* Size of TYPE rounded up to a whole word.  */
  size_tree = round_up (size_in_bytes (type), UNITS_PER_WORD);
  gimplify_expr (&size_tree, pre_p, NULL, is_gimple_val, fb_rvalue);

  size_of_reg_args = NUM_ARGUMENT_REGISTERS * UNITS_PER_WORD;

  count_tmp = get_initialized_tmp_var (count, pre_p, NULL);
  lab_gotaddr = create_artificial_label ();
  lab_fromstack = create_artificial_label ();
  addr = create_tmp_var (ptr_type_node, NULL);

  if (!must_stack)
    {
      tree r;

      /* If count + size would run past the register save area, the
         argument was passed on the stack: branch to lab_fromstack.  */
      t = fold_convert (TREE_TYPE (count), size_tree);
      t = build2 (PLUS_EXPR, TREE_TYPE (count), count_tmp, t);
      r = fold_convert (TREE_TYPE (count), size_int (size_of_reg_args));
      t = build2 (GT_EXPR, boolean_type_node, t, r);
      t = build3 (COND_EXPR, void_type_node, t,
                  build1 (GOTO_EXPR, void_type_node, lab_fromstack),
                  NULL_TREE);
      gimplify_and_add (t, pre_p);

      /* Register case: addr = base + count.  */
      t = fold_convert (ptr_type_node, count_tmp);
      t = build2 (PLUS_EXPR, ptr_type_node, base, t);
      t = build2 (MODIFY_EXPR, void_type_node, addr, t);
      gimplify_and_add (t, pre_p);

      t = build1 (GOTO_EXPR, void_type_node, lab_gotaddr);
      gimplify_and_add (t, pre_p);

      t = build1 (LABEL_EXPR, void_type_node, lab_fromstack);
      gimplify_and_add (t, pre_p);
    }

  /* Arguments larger than a word might need to skip over some
     registers, since arguments are either passed entirely in
     registers or entirely on the stack.  */
  size = PUSH_ROUNDING (int_size_in_bytes (type));
  if (size > 2 || size < 0 || must_stack)
    {
      tree r, u;

      /* count_tmp = max (count_tmp, size_of_reg_args): skip the rest
         of the register save area.  */
      r = size_int (NUM_ARGUMENT_REGISTERS * UNITS_PER_WORD);
      u = build2 (MODIFY_EXPR, void_type_node, count_tmp, r);

      t = fold_convert (TREE_TYPE (count), r);
      t = build2 (GE_EXPR, boolean_type_node, count_tmp, t);
      t = build3 (COND_EXPR, void_type_node, t, NULL_TREE, u);
      gimplify_and_add (t, pre_p);
    }

  /* Stack case: compute the argument's address relative to BASE from
     the adjusted count and size (layout per the stormy-abi document).  */
  t = size_int (NUM_ARGUMENT_REGISTERS * UNITS_PER_WORD
                - INCOMING_FRAME_SP_OFFSET);
  t = fold_convert (TREE_TYPE (count), t);
  t = build2 (MINUS_EXPR, TREE_TYPE (count), count_tmp, t);
  t = build2 (PLUS_EXPR, TREE_TYPE (count), t,
              fold_convert (TREE_TYPE (count), size_tree));
  t = fold_convert (TREE_TYPE (base), fold (t));
  t = build2 (MINUS_EXPR, TREE_TYPE (base), base, t);
  t = build2 (MODIFY_EXPR, void_type_node, addr, t);
  gimplify_and_add (t, pre_p);

  t = build1 (LABEL_EXPR, void_type_node, lab_gotaddr);
  gimplify_and_add (t, pre_p);

  /* Advance the stored counter past this argument.  */
  t = fold_convert (TREE_TYPE (count), size_tree);
  t = build2 (PLUS_EXPR, TREE_TYPE (count), count_tmp, t);
  t = build2 (MODIFY_EXPR, TREE_TYPE (count), count, t);
  gimplify_and_add (t, pre_p);

  addr = fold_convert (build_pointer_type (type), addr);
  return build_va_arg_indirect_ref (addr);
}
1485
 
1486
/* Initialize the variable parts of a trampoline.  ADDR is an RTX for
   the address of the trampoline; FNADDR is an RTX for the address of
   the nested function; STATIC_CHAIN is an RTX for the static chain
   value that should be passed to the function when it is called.

   Four 16-bit words are written at ADDR, advancing by 2 bytes each
   time.  */
void
xstormy16_initialize_trampoline (rtx addr, rtx fnaddr, rtx static_chain)
{
  rtx reg_addr = gen_reg_rtx (Pmode);
  rtx temp = gen_reg_rtx (HImode);
  rtx reg_fnaddr = gen_reg_rtx (HImode);
  rtx reg_addr_mem;

  reg_addr_mem = gen_rtx_MEM (HImode, reg_addr);

  emit_move_insn (reg_addr, addr);
  /* Word 1: 0x3130 | STATIC_CHAIN_REGNUM -- presumably the encoding of
     a load-immediate into the static chain register; confirm against
     the xstormy16 opcode map.  */
  emit_move_insn (temp, GEN_INT (0x3130 | STATIC_CHAIN_REGNUM));
  emit_move_insn (reg_addr_mem, temp);
  emit_insn (gen_addhi3 (reg_addr, reg_addr, const2_rtx));
  /* Word 2: the static chain value (the instruction's immediate).  */
  emit_move_insn (temp, static_chain);
  emit_move_insn (reg_addr_mem, temp);
  emit_insn (gen_addhi3 (reg_addr, reg_addr, const2_rtx));
  emit_move_insn (reg_fnaddr, fnaddr);
  /* Word 3: low byte of FNADDR combined with 0x0200 -- NOTE(review):
     looks like the first word of a jump to the target; verify against
     the ISA documentation.  */
  emit_move_insn (temp, reg_fnaddr);
  emit_insn (gen_andhi3 (temp, temp, GEN_INT (0xFF)));
  emit_insn (gen_iorhi3 (temp, temp, GEN_INT (0x0200)));
  emit_move_insn (reg_addr_mem, temp);
  emit_insn (gen_addhi3 (reg_addr, reg_addr, const2_rtx));
  /* Word 4: the remaining high bits of FNADDR.  */
  emit_insn (gen_lshrhi3 (reg_fnaddr, reg_fnaddr, GEN_INT (8)));
  emit_move_insn (reg_addr_mem, reg_fnaddr);
}
1516
 
1517
/* Worker function for FUNCTION_VALUE.  */
1518
 
1519
rtx
1520
xstormy16_function_value (tree valtype, tree func ATTRIBUTE_UNUSED)
1521
{
1522
  enum machine_mode mode;
1523
  mode = TYPE_MODE (valtype);
1524
  PROMOTE_MODE (mode, 0, valtype);
1525
  return gen_rtx_REG (mode, RETURN_VALUE_REGNUM);
1526
}
1527
 
1528
/* A C compound statement that outputs the assembler code for a thunk function,
1529
   used to implement C++ virtual function calls with multiple inheritance.  The
1530
   thunk acts as a wrapper around a virtual function, adjusting the implicit
1531
   object parameter before handing control off to the real function.
1532
 
1533
   First, emit code to add the integer DELTA to the location that contains the
1534
   incoming first argument.  Assume that this argument contains a pointer, and
1535
   is the one used to pass the `this' pointer in C++.  This is the incoming
1536
   argument *before* the function prologue, e.g. `%o0' on a sparc.  The
1537
   addition must preserve the values of all other incoming arguments.
1538
 
1539
   After the addition, emit code to jump to FUNCTION, which is a
1540
   `FUNCTION_DECL'.  This is a direct pure jump, not a call, and does not touch
1541
   the return address.  Hence returning from FUNCTION will return to whoever
1542
   called the current `thunk'.
1543
 
1544
   The effect must be as if @var{function} had been called directly
1545
   with the adjusted first argument.  This macro is responsible for
1546
   emitting all of the code for a thunk function;
1547
   TARGET_ASM_FUNCTION_PROLOGUE and TARGET_ASM_FUNCTION_EPILOGUE are
1548
   not invoked.
1549
 
1550
   The THUNK_FNDECL is redundant.  (DELTA and FUNCTION have already been
1551
   extracted from it.)  It might possibly be useful on some targets, but
1552
   probably not.  */
1553
 
1554
static void
xstormy16_asm_output_mi_thunk (FILE *file,
                               tree thunk_fndecl ATTRIBUTE_UNUSED,
                               HOST_WIDE_INT delta,
                               HOST_WIDE_INT vcall_offset ATTRIBUTE_UNUSED,
                               tree function)
{
  /* The register holding the 'this' pointer.  */
  int regnum = FIRST_ARGUMENT_REGISTER;

  /* There might be a hidden first argument for a returned structure.  */
  if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function)), function))
    regnum += 1;

  /* Adjust 'this' by DELTA (truncated to 16 bits), then tail-jump to
     the real function.  */
  fprintf (file, "\tadd %s,#0x%x\n", reg_names[regnum], (int) delta & 0xFFFF);
  fputs ("\tjmpf ", file);
  assemble_name (file, XSTR (XEXP (DECL_RTL (function), 0), 0));
  putc ('\n', file);
}
1572
 
1573
/* The purpose of this function is to override the default behavior of
1574
   BSS objects.  Normally, they go into .bss or .sbss via ".common"
1575
   directives, but we need to override that and put them in
1576
   .bss_below100.  We can't just use a section override (like we do
1577
   for .data_below100), because that makes them initialized rather
1578
   than uninitialized.  */
1579
void
1580
xstormy16_asm_output_aligned_common (FILE *stream,
1581
                                     tree decl,
1582
                                     const char *name,
1583
                                     int size,
1584
                                     int align,
1585
                                     int global)
1586
{
1587
  rtx mem = DECL_RTL (decl);
1588
  rtx symbol;
1589
 
1590
  if (mem != NULL_RTX
1591
      && GET_CODE (mem) == MEM
1592
      && GET_CODE (symbol = XEXP (mem, 0)) == SYMBOL_REF
1593
      && SYMBOL_REF_FLAGS (symbol) & SYMBOL_FLAG_XSTORMY16_BELOW100)
1594
    {
1595
      const char *name2;
1596
      int p2align = 0;
1597
 
1598
      switch_to_section (bss100_section);
1599
 
1600
      while (align > 8)
1601
        {
1602
          align /= 2;
1603
          p2align ++;
1604
        }
1605
 
1606
      name2 = default_strip_name_encoding (name);
1607
      if (global)
1608
        fprintf (stream, "\t.globl\t%s\n", name2);
1609
      if (p2align)
1610
        fprintf (stream, "\t.p2align %d\n", p2align);
1611
      fprintf (stream, "\t.type\t%s, @object\n", name2);
1612
      fprintf (stream, "\t.size\t%s, %d\n", name2, size);
1613
      fprintf (stream, "%s:\n\t.space\t%d\n", name2, size);
1614
      return;
1615
    }
1616
 
1617
  if (!global)
1618
    {
1619
      fprintf (stream, "\t.local\t");
1620
      assemble_name (stream, name);
1621
      fprintf (stream, "\n");
1622
    }
1623
  fprintf (stream, "\t.comm\t");
1624
  assemble_name (stream, name);
1625
  fprintf (stream, ",%u,%u\n", size, align / BITS_PER_UNIT);
1626
}
1627
 
1628
/* Implement TARGET_ASM_INIT_SECTIONS.  Create the special
   .bss_below100 section used by xstormy16_asm_output_aligned_common
   for uninitialized below-100 objects.  */

static void
xstormy16_asm_init_sections (void)
{
  bss100_section
    = get_unnamed_section (SECTION_WRITE | SECTION_BSS,
                           output_section_asm_op,
                           "\t.section \".bss_below100\",\"aw\",@nobits");
}
1638
 
1639
/* Mark symbols with the "below100" attribute so that we can use the
1640
   special addressing modes for them.  */
1641
 
1642
static void
1643
xstormy16_encode_section_info (tree decl, rtx r, int first)
1644
{
1645
  default_encode_section_info (decl, r, first);
1646
 
1647
   if (TREE_CODE (decl) == VAR_DECL
1648
      && (lookup_attribute ("below100", DECL_ATTRIBUTES (decl))
1649
          || lookup_attribute ("BELOW100", DECL_ATTRIBUTES (decl))))
1650
    {
1651
      rtx symbol = XEXP (r, 0);
1652
 
1653
      gcc_assert (GET_CODE (symbol) == SYMBOL_REF);
1654
      SYMBOL_REF_FLAGS (symbol) |= SYMBOL_FLAG_XSTORMY16_BELOW100;
1655
    }
1656
}
1657
 
1658
/* Output constructors and destructors.  Just like
1659
   default_named_section_asm_out_* but don't set the sections writable.  */
1660
#undef  TARGET_ASM_CONSTRUCTOR
1661
#define TARGET_ASM_CONSTRUCTOR xstormy16_asm_out_constructor
1662
#undef  TARGET_ASM_DESTRUCTOR
1663
#define TARGET_ASM_DESTRUCTOR xstormy16_asm_out_destructor
1664
 
1665
static void
1666
xstormy16_asm_out_destructor (rtx symbol, int priority)
1667
{
1668
  const char *section = ".dtors";
1669
  char buf[16];
1670
 
1671
  /* ??? This only works reliably with the GNU linker.  */
1672
  if (priority != DEFAULT_INIT_PRIORITY)
1673
    {
1674
      sprintf (buf, ".dtors.%.5u",
1675
               /* Invert the numbering so the linker puts us in the proper
1676
                  order; constructors are run from right to left, and the
1677
                  linker sorts in increasing order.  */
1678
               MAX_INIT_PRIORITY - priority);
1679
      section = buf;
1680
    }
1681
 
1682
  switch_to_section (get_section (section, 0, NULL));
1683
  assemble_align (POINTER_SIZE);
1684
  assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
1685
}
1686
 
1687
static void
1688
xstormy16_asm_out_constructor (rtx symbol, int priority)
1689
{
1690
  const char *section = ".ctors";
1691
  char buf[16];
1692
 
1693
  /* ??? This only works reliably with the GNU linker.  */
1694
  if (priority != DEFAULT_INIT_PRIORITY)
1695
    {
1696
      sprintf (buf, ".ctors.%.5u",
1697
               /* Invert the numbering so the linker puts us in the proper
1698
                  order; constructors are run from right to left, and the
1699
                  linker sorts in increasing order.  */
1700
               MAX_INIT_PRIORITY - priority);
1701
      section = buf;
1702
    }
1703
 
1704
  switch_to_section (get_section (section, 0, NULL));
1705
  assemble_align (POINTER_SIZE);
1706
  assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
1707
}
1708
 
1709
/* Print a memory address as an operand to reference that memory location.  */
void
xstormy16_print_operand_address (FILE *file, rtx address)
{
  HOST_WIDE_INT offset;
  int pre_dec, post_inc;

  /* There are a few easy cases.  */
  if (GET_CODE (address) == CONST_INT)
    {
      /* Absolute address: print it truncated to the 16-bit address space.  */
      fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (address) & 0xFFFF);
      return;
    }

  if (CONSTANT_P (address) || GET_CODE (address) == CODE_LABEL)
    {
      output_addr_const (file, address);
      return;
    }

  /* Otherwise, it's hopefully something of the form
     (plus:HI (pre_dec:HI (reg:HI ...)) (const_int ...))
  */

  /* Peel off a constant displacement, if any.  */
  if (GET_CODE (address) == PLUS)
    {
      gcc_assert (GET_CODE (XEXP (address, 1)) == CONST_INT);
      offset = INTVAL (XEXP (address, 1));
      address = XEXP (address, 0);
    }
  else
    offset = 0;

  /* Peel off a pre-decrement or post-increment wrapper, remembering
     which one we saw so it can be rendered as "--" / "++" below.  */
  pre_dec = (GET_CODE (address) == PRE_DEC);
  post_inc = (GET_CODE (address) == POST_INC);
  if (pre_dec || post_inc)
    address = XEXP (address, 0);

  /* What remains must be a bare base register.  */
  gcc_assert (GET_CODE (address) == REG);

  /* Emit "(--reg)", "(reg++)", "(reg,offset)" or combinations thereof.  */
  fputc ('(', file);
  if (pre_dec)
    fputs ("--", file);
  fputs (reg_names [REGNO (address)], file);
  if (post_inc)
    fputs ("++", file);
  if (offset != 0)
    fprintf (file, "," HOST_WIDE_INT_PRINT_DEC, offset);
  fputc (')', file);
}
1759
 
1760
/* Print an operand to an assembler instruction.  */
1761
void
1762
xstormy16_print_operand (FILE *file, rtx x, int code)
1763
{
1764
  switch (code)
1765
    {
1766
    case 'B':
1767
        /* There is either one bit set, or one bit clear, in X.
1768
           Print it preceded by '#'.  */
1769
      {
1770
        static int bits_set[8] = { 0, 1, 1, 2, 1, 2, 2, 3 };
1771
        HOST_WIDE_INT xx = 1;
1772
        HOST_WIDE_INT l;
1773
 
1774
        if (GET_CODE (x) == CONST_INT)
1775
          xx = INTVAL (x);
1776
        else
1777
          output_operand_lossage ("'B' operand is not constant");
1778
 
1779
        /* GCC sign-extends masks with the MSB set, so we have to
1780
           detect all the cases that differ only in sign extension
1781
           beyond the bits we care about.  Normally, the predicates
1782
           and constraints ensure that we have the right values.  This
1783
           works correctly for valid masks.  */
1784
        if (bits_set[xx & 7] <= 1)
1785
          {
1786
            /* Remove sign extension bits.  */
1787
            if ((~xx & ~(HOST_WIDE_INT)0xff) == 0)
1788
              xx &= 0xff;
1789
            else if ((~xx & ~(HOST_WIDE_INT)0xffff) == 0)
1790
              xx &= 0xffff;
1791
            l = exact_log2 (xx);
1792
          }
1793
        else
1794
          {
1795
            /* Add sign extension bits.  */
1796
            if ((xx & ~(HOST_WIDE_INT)0xff) == 0)
1797
              xx |= ~(HOST_WIDE_INT)0xff;
1798
            else if ((xx & ~(HOST_WIDE_INT)0xffff) == 0)
1799
              xx |= ~(HOST_WIDE_INT)0xffff;
1800
            l = exact_log2 (~xx);
1801
          }
1802
 
1803
        if (l == -1)
1804
          output_operand_lossage ("'B' operand has multiple bits set");
1805
 
1806
        fprintf (file, IMMEDIATE_PREFIX HOST_WIDE_INT_PRINT_DEC, l);
1807
        return;
1808
      }
1809
 
1810
    case 'C':
1811
      /* Print the symbol without a surrounding @fptr().  */
1812
      if (GET_CODE (x) == SYMBOL_REF)
1813
        assemble_name (file, XSTR (x, 0));
1814
      else if (GET_CODE (x) == LABEL_REF)
1815
        output_asm_label (x);
1816
      else
1817
        xstormy16_print_operand_address (file, x);
1818
      return;
1819
 
1820
    case 'o':
1821
    case 'O':
1822
      /* Print the immediate operand less one, preceded by '#'.
1823
         For 'O', negate it first.  */
1824
      {
1825
        HOST_WIDE_INT xx = 0;
1826
 
1827
        if (GET_CODE (x) == CONST_INT)
1828
          xx = INTVAL (x);
1829
        else
1830
          output_operand_lossage ("'o' operand is not constant");
1831
 
1832
        if (code == 'O')
1833
          xx = -xx;
1834
 
1835
        fprintf (file, IMMEDIATE_PREFIX HOST_WIDE_INT_PRINT_DEC, xx - 1);
1836
        return;
1837
      }
1838
 
1839
    case 'b':
1840
      /* Print the shift mask for bp/bn.  */
1841
      {
1842
        HOST_WIDE_INT xx = 1;
1843
        HOST_WIDE_INT l;
1844
 
1845
        if (GET_CODE (x) == CONST_INT)
1846
          xx = INTVAL (x);
1847
        else
1848
          output_operand_lossage ("'B' operand is not constant");
1849
 
1850
        l = 7 - xx;
1851
 
1852
        fputs (IMMEDIATE_PREFIX, file);
1853
        fprintf (file, HOST_WIDE_INT_PRINT_DEC, l);
1854
        return;
1855
      }
1856
 
1857
    case 0:
1858
      /* Handled below.  */
1859
      break;
1860
 
1861
    default:
1862
      output_operand_lossage ("xstormy16_print_operand: unknown code");
1863
      return;
1864
    }
1865
 
1866
  switch (GET_CODE (x))
1867
    {
1868
    case REG:
1869
      fputs (reg_names [REGNO (x)], file);
1870
      break;
1871
 
1872
    case MEM:
1873
      xstormy16_print_operand_address (file, XEXP (x, 0));
1874
      break;
1875
 
1876
    default:
1877
      /* Some kind of constant or label; an immediate operand,
1878
         so prefix it with '#' for the assembler.  */
1879
      fputs (IMMEDIATE_PREFIX, file);
1880
      output_addr_const (file, x);
1881
      break;
1882
    }
1883
 
1884
  return;
1885
}
1886
 
1887
 
1888
/* Expander for the `casesi' pattern.
   INDEX is the index of the switch statement.
   LOWER_BOUND is a CONST_INT that is the value of INDEX corresponding
     to the first table entry.
   RANGE is the number of table entries.
   TABLE is an ADDR_VEC that is the jump table.
   DEFAULT_LABEL is the address to branch to if INDEX is outside the
     range LOWER_BOUND to LOWER_BOUND+RANGE-1.
*/

void
xstormy16_expand_casesi (rtx index, rtx lower_bound, rtx range,
                         rtx table, rtx default_label)
{
  HOST_WIDE_INT range_i = INTVAL (range);
  rtx int_index;

  /* This code uses 'br', so it can deal only with tables of size up to
     8192 entries.  */
  if (range_i >= 8192)
    sorry ("switch statement of size %lu entries too large",
           (unsigned long) range_i);

  /* Rebase INDEX to zero, then bounds-check it: a single unsigned
     compare catches both index < LOWER_BOUND and index out of range.  */
  index = expand_binop (SImode, sub_optab, index, lower_bound, NULL_RTX, 0,
                        OPTAB_LIB_WIDEN);
  emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, SImode, 1,
                           default_label);
  /* NOTE(review): gen_lowpart_common can return NULL_RTX for operands
     it cannot narrow — assumed not to happen for the rebased index;
     confirm against the casesi operand predicates.  */
  int_index = gen_lowpart_common (HImode, index);
  /* Scale by 4 (shift left 2) — presumably the size of each table
     entry emitted by xstormy16_output_addr_vec's `jmpf'; TODO confirm.  */
  emit_insn (gen_ashlhi3 (int_index, int_index, const2_rtx));
  emit_jump_insn (gen_tablejump_pcrel (int_index, table));
}
1919
 
1920
/* Output an ADDR_VEC.  It is output as a sequence of 'jmpf'
1921
   instructions, without label or alignment or any other special
1922
   constructs.  We know that the previous instruction will be the
1923
   `tablejump_pcrel' output above.
1924
 
1925
   TODO: it might be nice to output 'br' instructions if they could
1926
   all reach.  */
1927
 
1928
void
1929
xstormy16_output_addr_vec (FILE *file, rtx label ATTRIBUTE_UNUSED, rtx table)
1930
{
1931
  int vlen, idx;
1932
 
1933
  switch_to_section (current_function_section ());
1934
 
1935
  vlen = XVECLEN (table, 0);
1936
  for (idx = 0; idx < vlen; idx++)
1937
    {
1938
      fputs ("\tjmpf ", file);
1939
      output_asm_label (XEXP (XVECEXP (table, 0, idx), 0));
1940
      fputc ('\n', file);
1941
    }
1942
}
1943
 
1944
 
1945
/* Expander for the `call' patterns.
   RETVAL is the RTL for the return register or NULL for void functions.
   DEST is the function to call, typically a MEM.
   COUNTER is ignored.  */
1954
 
1955
void
xstormy16_expand_call (rtx retval, rtx dest, rtx counter)
{
  rtx call, temp;
  enum machine_mode mode;

  /* The call target arrives wrapped in a MEM; strip it to get the
     address expression.  */
  gcc_assert (GET_CODE (dest) == MEM);
  dest = XEXP (dest, 0);

  /* Non-constant, non-register targets must be forced into a register.  */
  if (! CONSTANT_P (dest)
      && GET_CODE (dest) != REG)
    dest = force_reg (Pmode, dest);

  /* The CALL rtx takes the mode of the value it produces.  */
  if (retval == NULL)
    mode = VOIDmode;
  else
    mode = GET_MODE (retval);

  call = gen_rtx_CALL (mode, gen_rtx_MEM (FUNCTION_MODE, dest),
                       counter);
  if (retval)
    call = gen_rtx_SET (VOIDmode, retval, call);

  /* For indirect calls, materialize a zero in a fresh register and
     attach a USE of it to the call — presumably required by the call
     patterns in the md file for the indirect case; TODO confirm.  */
  if (! CONSTANT_P (dest))
    {
      temp = gen_reg_rtx (HImode);
      emit_move_insn (temp, const0_rtx);
    }
  else
    temp = const0_rtx;

  call = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, call,
                                                gen_rtx_USE (VOIDmode, temp)));
  emit_call_insn (call);
}
1990
 
1991
/* Expanders for multiword computational operations.  */

/* Expander for arithmetic operations; emit insns to compute

   (set DEST (CODE:MODE SRC0 SRC1))

   using CARRY as a temporary.  When CODE is COMPARE, a branch
   template is generated (this saves duplicating code in
   xstormy16_split_cbranch).  */

void
xstormy16_expand_arith (enum machine_mode mode, enum rtx_code code,
                        rtx dest, rtx src0, rtx src1, rtx carry)
{
  int num_words = GET_MODE_BITSIZE (mode) / BITS_PER_WORD;
  int i;
  int firstloop = 1;

  /* NEG is computed as (0 - SRC1): overwrite SRC0 with zero and fall
     through to the subtraction machinery below.  */
  if (code == NEG)
    emit_move_insn (src0, const0_rtx);

  /* Process the multiword value one machine word at a time, from
     least-significant word upward, propagating carry/borrow.  */
  for (i = 0; i < num_words; i++)
    {
      rtx w_src0, w_src1, w_dest;
      rtx insn;

      w_src0 = simplify_gen_subreg (word_mode, src0, mode,
                                    i * UNITS_PER_WORD);
      w_src1 = simplify_gen_subreg (word_mode, src1, mode, i * UNITS_PER_WORD);
      w_dest = simplify_gen_subreg (word_mode, dest, mode, i * UNITS_PER_WORD);

      switch (code)
        {
        case PLUS:
          /* Adding zero in the lowest word is a no-op; skip it (the
             carry flag has not been set up yet, so this is safe).  */
          if (firstloop
              && GET_CODE (w_src1) == CONST_INT && INTVAL (w_src1) == 0)
            continue;

          /* First word sets the carry; later words consume and set it.  */
          if (firstloop)
            insn = gen_addchi4 (w_dest, w_src0, w_src1, carry);
          else
            insn = gen_addchi5 (w_dest, w_src0, w_src1, carry, carry);
          break;

        case NEG:
        case MINUS:
        case COMPARE:
          /* For COMPARE, the final (most significant) word is emitted
             as a combined subtract-and-branch template so that
             xstormy16_split_cbranch need not duplicate this logic.  */
          if (code == COMPARE && i == num_words - 1)
            {
              rtx branch, sub, clobber, sub_1;

              sub_1 = gen_rtx_MINUS (HImode, w_src0,
                                     gen_rtx_ZERO_EXTEND (HImode, carry));
              sub = gen_rtx_SET (VOIDmode, w_dest,
                                 gen_rtx_MINUS (HImode, sub_1, w_src1));
              clobber = gen_rtx_CLOBBER (VOIDmode, carry);
              branch = gen_rtx_SET (VOIDmode, pc_rtx,
                                    gen_rtx_IF_THEN_ELSE (VOIDmode,
                                                          gen_rtx_EQ (HImode,
                                                                      sub_1,
                                                                      w_src1),
                                                          pc_rtx,
                                                          pc_rtx));
              insn = gen_rtx_PARALLEL (VOIDmode,
                                       gen_rtvec (3, branch, sub, clobber));
            }
          else if (firstloop
                   && code != COMPARE
                   && GET_CODE (w_src1) == CONST_INT && INTVAL (w_src1) == 0)
            continue;
          else if (firstloop)
            insn = gen_subchi4 (w_dest, w_src0, w_src1, carry);
          else
            insn = gen_subchi5 (w_dest, w_src0, w_src1, carry, carry);
          break;

        case IOR:
        case XOR:
        case AND:
          /* Skip words where SRC1 is the identity element: -1 for AND,
             0 for IOR/XOR (the expression -(code == AND) yields those).  */
          if (GET_CODE (w_src1) == CONST_INT
              && INTVAL (w_src1) == -(code == AND))
            continue;

          /* NOTE(review): the logical op is built in MODE rather than
             word_mode even though the operands are word-sized subregs —
             looks suspicious but preserved as-is; confirm against the
             andhi3/iorhi3/xorhi3 patterns.  */
          insn = gen_rtx_SET (VOIDmode, w_dest, gen_rtx_fmt_ee (code, mode,
                                                                w_src0, w_src1));
          break;

        case NOT:
          /* NOTE(review): same MODE-vs-word_mode question as above.  */
          insn = gen_rtx_SET (VOIDmode, w_dest, gen_rtx_NOT (mode, w_src0));
          break;

        default:
          gcc_unreachable ();
        }

      firstloop = 0;
      emit (insn);
    }

  /* If we emit nothing, try_split() will think we failed.  So emit
     something that does nothing and can be optimized away.  */
  if (firstloop)
    emit (gen_nop ());
}
2095
 
2096
/* The shift operations are split at output time for constant values;
   variable-width shifts get handed off to a library routine.

   Generate an output string to do (set X (CODE:MODE X SIZE_R))
   SIZE_R will be a CONST_INT, X will be a hard register.  */

const char *
xstormy16_output_shift (enum machine_mode mode, enum rtx_code code,
                        rtx x, rtx size_r, rtx temp)
{
  HOST_WIDE_INT size;
  const char *r0, *r1, *rt;
  /* Template is built in a static buffer; the returned pointer is only
     valid until the next call.  */
  static char r[64];

  gcc_assert (GET_CODE (size_r) == CONST_INT
              && GET_CODE (x) == REG && mode == SImode);
  /* Reduce the shift count modulo the operand width (32 for SImode).  */
  size = INTVAL (size_r) & (GET_MODE_BITSIZE (mode) - 1);

  if (size == 0)
    return "";

  /* r0 is the low word of the SImode pair, r1 the high word.  */
  r0 = reg_names [REGNO (x)];
  r1 = reg_names [REGNO (x) + 1];

  /* For shifts of size 1, we can use the rotate instructions.  */
  if (size == 1)
    {
      switch (code)
        {
        case ASHIFT:
          sprintf (r, "shl %s,#1 | rlc %s,#1", r0, r1);
          break;
        case ASHIFTRT:
          sprintf (r, "asr %s,#1 | rrc %s,#1", r1, r0);
          break;
        case LSHIFTRT:
          sprintf (r, "shr %s,#1 | rrc %s,#1", r1, r0);
          break;
        default:
          gcc_unreachable ();
        }
      return r;
    }

  /* For large shifts, there are easy special cases.  */
  /* Exactly 16: a whole-word move plus zero/sign fill.  */
  if (size == 16)
    {
      switch (code)
        {
        case ASHIFT:
          sprintf (r, "mov %s,%s | mov %s,#0", r1, r0, r0);
          break;
        case ASHIFTRT:
          /* asr #15 replicates the sign bit across the high word.  */
          sprintf (r, "mov %s,%s | asr %s,#15", r0, r1, r1);
          break;
        case LSHIFTRT:
          sprintf (r, "mov %s,%s | mov %s,#0", r0, r1, r1);
          break;
        default:
          gcc_unreachable ();
        }
      return r;
    }
  /* More than 16: move a word, fill, then shift the rest within one word.  */
  if (size > 16)
    {
      switch (code)
        {
        case ASHIFT:
          sprintf (r, "mov %s,%s | mov %s,#0 | shl %s,#%d",
                   r1, r0, r0, r1, (int) size - 16);
          break;
        case ASHIFTRT:
          sprintf (r, "mov %s,%s | asr %s,#15 | asr %s,#%d",
                   r0, r1, r1, r0, (int) size - 16);
          break;
        case LSHIFTRT:
          sprintf (r, "mov %s,%s | mov %s,#0 | shr %s,#%d",
                   r0, r1, r1, r0, (int) size - 16);
          break;
        default:
          gcc_unreachable ();
        }
      return r;
    }

  /* For the rest, we have to do more work.  In particular, we
     need a temporary.  */
  /* General case (2..15): save the bits that cross the word boundary in
     TEMP, shift both words, then OR the saved bits into the other word.  */
  rt = reg_names [REGNO (temp)];
  switch (code)
    {
    case ASHIFT:
      sprintf (r,
               "mov %s,%s | shl %s,#%d | shl %s,#%d | shr %s,#%d | or %s,%s",
               rt, r0, r0, (int) size, r1, (int) size, rt, (int) (16-size),
               r1, rt);
      break;
    case ASHIFTRT:
      sprintf (r,
               "mov %s,%s | asr %s,#%d | shr %s,#%d | shl %s,#%d | or %s,%s",
               rt, r1, r1, (int) size, r0, (int) size, rt, (int) (16-size),
               r0, rt);
      break;
    case LSHIFTRT:
      sprintf (r,
               "mov %s,%s | shr %s,#%d | shr %s,#%d | shl %s,#%d | or %s,%s",
               rt, r1, r1, (int) size, r0, (int) size, rt, (int) (16-size),
               r0, rt);
      break;
    default:
      gcc_unreachable ();
    }
  return r;
}
2209
 
2210
/* Attribute handling.  */
2211
 
2212
/* Return nonzero if the function is an interrupt function.  */
2213
int
2214
xstormy16_interrupt_function_p (void)
2215
{
2216
  tree attributes;
2217
 
2218
  /* The dwarf2 mechanism asks for INCOMING_FRAME_SP_OFFSET before
2219
     any functions are declared, which is demonstrably wrong, but
2220
     it is worked around here.  FIXME.  */
2221
  if (!cfun)
2222
    return 0;
2223
 
2224
  attributes = TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl));
2225
  return lookup_attribute ("interrupt", attributes) != NULL_TREE;
2226
}
2227
 
2228
#undef TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE xstormy16_attribute_table
static tree xstormy16_handle_interrupt_attribute
  (tree *, tree, tree, int, bool *);
static tree xstormy16_handle_below100_attribute
  (tree *, tree, tree, int, bool *);

/* Machine-specific attributes: "interrupt" on function types, and
   "below100"/"BELOW100" (both spellings accepted) on variables/types.  */
static const struct attribute_spec xstormy16_attribute_table[] =
{
  /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
  { "interrupt", 0, 0, false, true,  true,  xstormy16_handle_interrupt_attribute },
  { "BELOW100",  0, 0, false, false, false, xstormy16_handle_below100_attribute },
  { "below100",  0, 0, false, false, false, xstormy16_handle_below100_attribute },
  /* Sentinel terminating the table.  */
  { NULL,        0, 0, false, false, false, NULL }
};
2243
 
2244
/* Handle an "interrupt" attribute;
2245
   arguments as in struct attribute_spec.handler.  */
2246
static tree
2247
xstormy16_handle_interrupt_attribute (tree *node, tree name,
2248
                                      tree args ATTRIBUTE_UNUSED,
2249
                                      int flags ATTRIBUTE_UNUSED,
2250
                                      bool *no_add_attrs)
2251
{
2252
  if (TREE_CODE (*node) != FUNCTION_TYPE)
2253
    {
2254
      warning (OPT_Wattributes, "%qs attribute only applies to functions",
2255
               IDENTIFIER_POINTER (name));
2256
      *no_add_attrs = true;
2257
    }
2258
 
2259
  return NULL_TREE;
2260
}
2261
 
2262
/* Handle a "below100" attribute;
   arguments as in struct attribute_spec.handler.  */
2264
static tree
2265
xstormy16_handle_below100_attribute (tree *node,
2266
                                     tree name ATTRIBUTE_UNUSED,
2267
                                     tree args ATTRIBUTE_UNUSED,
2268
                                     int flags ATTRIBUTE_UNUSED,
2269
                                     bool *no_add_attrs)
2270
{
2271
  if (TREE_CODE (*node) != VAR_DECL
2272
      && TREE_CODE (*node) != POINTER_TYPE
2273
      && TREE_CODE (*node) != TYPE_DECL)
2274
    {
2275
      warning (OPT_Wattributes,
2276
               "%<__BELOW100__%> attribute only applies to variables");
2277
      *no_add_attrs = true;
2278
    }
2279
  else if (args == NULL_TREE && TREE_CODE (*node) == VAR_DECL)
2280
    {
2281
      if (! (TREE_PUBLIC (*node) || TREE_STATIC (*node)))
2282
        {
2283
          warning (OPT_Wattributes, "__BELOW100__ attribute not allowed "
2284
                   "with auto storage class");
2285
          *no_add_attrs = true;
2286
        }
2287
    }
2288
 
2289
  return NULL_TREE;
2290
}
2291
 
2292
#undef TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS xstormy16_init_builtins
#undef TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN xstormy16_expand_builtin

/* Table of machine-dependent builtins: combined 32/16-bit divide and
   modulo.  sdivlh/udivlh produce both quotient and remainder; the four
   entries map the quotient or remainder to the return value via
   arg_ops.  */
static struct {
  const char *name;
  int md_code;
  const char *arg_ops; /* 0..9, t for temp register, r for return value */
  const char *arg_types; /* s=short,l=long, upper case for unsigned */
} s16builtins[] = {
  { "__sdivlh", CODE_FOR_sdivlh, "rt01", "sls" },
  { "__smodlh", CODE_FOR_sdivlh, "tr01", "sls" },
  { "__udivlh", CODE_FOR_udivlh, "rt01", "SLS" },
  { "__umodlh", CODE_FOR_udivlh, "tr01", "SLS" },
  /* Sentinel terminating the table.  */
  { 0, 0, 0, 0 }
};
2309
 
2310
/* Register the machine-dependent builtins from s16builtins with the
   front end.  The first character of arg_types encodes the return
   type; the remaining characters encode the argument types.  */
static void
xstormy16_init_builtins (void)
{
  tree args, ret_type, arg;
  int i, a;

  ret_type = void_type_node;

  for (i=0; s16builtins[i].name; i++)
    {
      args = void_list_node;
      /* Walk arg_types right-to-left so the TREE_LIST built with
         tree_cons ends up in left-to-right argument order.  */
      for (a=strlen (s16builtins[i].arg_types)-1; a>=0; a--)
        {
          switch (s16builtins[i].arg_types[a])
            {
            case 's': arg = short_integer_type_node; break;
            case 'S': arg = short_unsigned_type_node; break;
            case 'l': arg = long_integer_type_node; break;
            case 'L': arg = long_unsigned_type_node; break;
            default: gcc_unreachable ();
            }
          /* Position 0 is the return type, not an argument.  */
          if (a == 0)
            ret_type = arg;
          else
            args = tree_cons (NULL_TREE, arg, args);
        }
      /* The builtin's function code is its index into s16builtins,
         which xstormy16_expand_builtin uses to look it back up.  */
      lang_hooks.builtin_function (s16builtins[i].name,
                                   build_function_type (ret_type, args),
                                   i, BUILT_IN_MD, NULL, NULL);
    }
}
2341
 
2342
/* Expand a call to one of the builtins registered above.  EXP is the
   CALL_EXPR; TARGET is a suggested destination for the result.
   Returns the rtx holding the result, or 0 for a void builtin.  */
static rtx
xstormy16_expand_builtin(tree exp, rtx target,
                         rtx subtarget ATTRIBUTE_UNUSED,
                         enum machine_mode mode ATTRIBUTE_UNUSED,
                         int ignore ATTRIBUTE_UNUSED)
{
  rtx op[10], args[10], pat, copyto[10], retval = 0;
  tree fndecl, argtree;
  int i, a, o, code;

  /* Recover the s16builtins index (stored as the function code at
     registration time) and the corresponding insn code.  */
  fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
  argtree = TREE_OPERAND (exp, 1);
  i = DECL_FUNCTION_CODE (fndecl);
  code = s16builtins[i].md_code;

  /* Expand every actual argument into rtl, in call order.  */
  for (a = 0; a < 10 && argtree; a++)
    {
      args[a] = expand_expr (TREE_VALUE (argtree), NULL_RTX, VOIDmode, 0);
      argtree = TREE_CHAIN (argtree);
    }

  /* Build the insn's operand vector as described by arg_ops:
     'r' = the return value, 't' = a scratch register, a digit = the
     corresponding expanded argument.  */
  for (o = 0; s16builtins[i].arg_ops[o]; o++)
    {
      char ao = s16builtins[i].arg_ops[o];
      char c = insn_data[code].operand[o].constraint[0];
      int omode;

      copyto[o] = 0;

      omode = insn_data[code].operand[o].mode;
      if (ao == 'r')
        op[o] = target ? target : gen_reg_rtx (omode);
      else if (ao == 't')
        op[o] = gen_reg_rtx (omode);
      else
        op[o] = args[(int) hex_value (ao)];

      /* If the operand doesn't satisfy the insn's predicate, route it
         through a fresh register: outputs ('+'/'=' constraints) get a
         temporary that is copied back afterwards; inputs are simply
         copied into a register first.  */
      if (! (*insn_data[code].operand[o].predicate) (op[o], GET_MODE (op[o])))
        {
          if (c == '+' || c == '=')
            {
              copyto[o] = op[o];
              op[o] = gen_reg_rtx (omode);
            }
          else
            op[o] = copy_to_mode_reg (omode, op[o]);
        }

      if (ao == 'r')
        retval = op[o];
    }

  /* Unused trailing operand slots are passed as garbage; GEN_FCN only
     reads as many as the pattern declares.  */
  pat = GEN_FCN (code) (op[0], op[1], op[2], op[3], op[4],
                        op[5], op[6], op[7], op[8], op[9]);
  emit_insn (pat);

  /* Copy back any outputs that were redirected through temporaries,
     fixing up RETVAL if it was one of them.  */
  for (o = 0; s16builtins[i].arg_ops[o]; o++)
    if (copyto[o])
      {
        emit_move_insn (copyto[o], op[o]);
        if (op[o] == retval)
          retval = copyto[o];
      }

  return retval;
}
2408
 
2409
 
2410
/* Look for combinations of insns that can be converted to BN or BP
   opcodes.  This is, unfortunately, too complex to do with MD
   patterns.

   INSN is a conditional branch.  If it is a compare-with-zero of a
   register that is (a) loaded from a below-100 address, and (b)
   reduced to a single interesting bit via an AND (for EQ/NE) or a
   sign extension (for LT/GE), rewrite the branch condition to test
   that bit in memory directly, and delete the now-dead feeder insns.  */
static void
combine_bnp (rtx insn)
{
  int insn_code, regno, need_extend;
  unsigned int mask;
  rtx cond, reg, and, load, qireg, mem;
  enum machine_mode load_mode = QImode;
  enum machine_mode and_mode = QImode;
  rtx shift = NULL_RTX;

  insn_code = recog_memoized (insn);
  if (insn_code != CODE_FOR_cbranchhi
      && insn_code != CODE_FOR_cbranchhi_neg)
    return;

  /* Dig the comparison out of the branch pattern.  */
  cond = XVECEXP (PATTERN (insn), 0, 0); /* set */
  cond = XEXP (cond, 1); /* if */
  cond = XEXP (cond, 0); /* cond */
  /* EQ/NE test an AND-isolated bit; LT/GE test the sign bit after an
     extension.  Other comparisons cannot become bn/bp.  */
  switch (GET_CODE (cond))
    {
    case NE:
    case EQ:
      need_extend = 0;
      break;
    case LT:
    case GE:
      need_extend = 1;
      break;
    default:
      return;
    }

  /* Only handle (cmp (reg) 0) where the register dies here, so the
     feeder insns can be deleted safely.  */
  reg = XEXP (cond, 0);
  if (GET_CODE (reg) != REG)
    return;
  regno = REGNO (reg);
  if (XEXP (cond, 1) != const0_rtx)
    return;
  if (! find_regno_note (insn, REG_DEAD, regno))
    return;
  qireg = gen_rtx_REG (QImode, regno);

  if (need_extend)
    {
      /* LT and GE conditionals should have a sign extend before
         them.  */
      /* Scan backwards for the extension, giving up if REG is touched
         or anything other than a plain insn/note intervenes.  */
      for (and = prev_real_insn (insn); and; and = prev_real_insn (and))
        {
          int and_code = recog_memoized (and);

          if (and_code == CODE_FOR_extendqihi2
              && rtx_equal_p (SET_DEST (PATTERN (and)), reg)
              && rtx_equal_p (XEXP (SET_SRC (PATTERN (and)), 0), qireg))
            break;

          if (and_code == CODE_FOR_movhi_internal
              && rtx_equal_p (SET_DEST (PATTERN (and)), reg))
            {
              /* This is for testing bit 15.  */
              /* A plain HImode load means the sign bit of the full
                 word; treat the branch itself as the "and" so it is
                 not deleted below.  */
              and = insn;
              break;
            }

          if (reg_mentioned_p (reg, and))
            return;

          if (GET_CODE (and) != NOTE
              && GET_CODE (and) != INSN)
            return;
        }
    }
  else
    {
      /* EQ and NE conditionals have an AND before them.  */
      for (and = prev_real_insn (insn); and; and = prev_real_insn (and))
        {
          if (recog_memoized (and) == CODE_FOR_andhi3
              && rtx_equal_p (SET_DEST (PATTERN (and)), reg)
              && rtx_equal_p (XEXP (SET_SRC (PATTERN (and)), 0), reg))
            break;

          if (reg_mentioned_p (reg, and))
            return;

          if (GET_CODE (and) != NOTE
              && GET_CODE (and) != INSN)
            return;
        }

      if (and)
        {
          /* Some mis-optimizations by GCC can generate a RIGHT-SHIFT
             followed by an AND like this:

               (parallel [(set (reg:HI r7) (lshiftrt:HI (reg:HI r7) (const_int 3)))
                          (clobber (reg:BI carry))]

               (set (reg:HI r7) (and:HI (reg:HI r7) (const_int 1)))

             Attempt to detect this here.  */
          for (shift = prev_real_insn (and); shift; shift = prev_real_insn (shift))
            {
              if (recog_memoized (shift) == CODE_FOR_lshrhi3
                  && rtx_equal_p (SET_DEST (XVECEXP (PATTERN (shift), 0, 0)), reg)
                  && rtx_equal_p (XEXP (SET_SRC (XVECEXP (PATTERN (shift), 0, 0)), 0), reg))
                break;

              /* No matching shift found before REG is disturbed: that
                 is fine, just proceed without one.  */
              if (reg_mentioned_p (reg, shift)
                  || (GET_CODE (shift) != NOTE
                      && GET_CODE (shift) != INSN))
                {
                  shift = NULL_RTX;
                  break;
                }
            }
        }
    }
  if (!and)
    return;

  /* Now scan further back for the load of REG from a below-100
     address (HImode move, QImode move, or zero-extending QI load).  */
  for (load = shift ? prev_real_insn (shift) : prev_real_insn (and);
       load;
       load = prev_real_insn (load))
    {
      int load_code = recog_memoized (load);

      if (load_code == CODE_FOR_movhi_internal
          && rtx_equal_p (SET_DEST (PATTERN (load)), reg)
          && xstormy16_below100_operand (SET_SRC (PATTERN (load)), HImode)
          && ! MEM_VOLATILE_P (SET_SRC (PATTERN (load))))
        {
          load_mode = HImode;
          break;
        }

      if (load_code == CODE_FOR_movqi_internal
          && rtx_equal_p (SET_DEST (PATTERN (load)), qireg)
          && xstormy16_below100_operand (SET_SRC (PATTERN (load)), QImode))
        {
          load_mode = QImode;
          break;
        }

      if (load_code == CODE_FOR_zero_extendqihi2
          && rtx_equal_p (SET_DEST (PATTERN (load)), reg)
          && xstormy16_below100_operand (XEXP (SET_SRC (PATTERN (load)), 0), QImode))
        {
          load_mode = QImode;
          and_mode = HImode;
          break;
        }

      if (reg_mentioned_p (reg, load))
        return;

      if (GET_CODE (load) != NOTE
          && GET_CODE (load) != INSN)
        return;
    }
  if (!load)
    return;

  mem = SET_SRC (PATTERN (load));

  if (need_extend)
    {
      /* For sign tests, the interesting bit is the MSB of the loaded
         quantity.  */
      mask = (load_mode == HImode) ? 0x8000 : 0x80;

      /* If the mem includes a zero-extend operation and we are
         going to generate a sign-extend operation then move the
         mem inside the zero-extend.  */
      if (GET_CODE (mem) == ZERO_EXTEND)
        mem = XEXP (mem, 0);
    }
  else
    {
      /* For EQ/NE, the AND's constant must isolate exactly one bit;
         fold any preceding right-shift back into the mask.  */
      if (!xstormy16_onebit_set_operand (XEXP (SET_SRC (PATTERN (and)), 1), load_mode))
        return;

      mask = (int) INTVAL (XEXP (SET_SRC (PATTERN (and)), 1));

      if (shift)
        mask <<= INTVAL (XEXP (SET_SRC (XVECEXP (PATTERN (shift), 0, 0)), 1));
    }

  /* bn/bp test a byte: if the bit lives in the upper byte of an HImode
     load, address the following byte and shift the mask down.  */
  if (load_mode == HImode)
    {
      rtx addr = XEXP (mem, 0);

      if (! (mask & 0xff))
        {
          addr = plus_constant (addr, 1);
          mask >>= 8;
        }
      mem = gen_rtx_MEM (QImode, addr);
    }

  /* Rewrite the branch condition in place to test memory directly,
     and force re-recognition of the modified insn.  */
  if (need_extend)
    XEXP (cond, 0) = gen_rtx_SIGN_EXTEND (HImode, mem);
  else
    XEXP (cond, 0) = gen_rtx_AND (and_mode, mem, GEN_INT (mask));

  INSN_CODE (insn) = -1;
  delete_insn (load);

  /* "and" may be the branch itself (the bit-15 case above).  */
  if (and != insn)
    delete_insn (and);

  if (shift != NULL_RTX)
    delete_insn (shift);
}
2624
 
2625
static void
2626
xstormy16_reorg (void)
2627
{
2628
  rtx insn;
2629
 
2630
  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2631
    {
2632
      if (! JUMP_P (insn))
2633
        continue;
2634
      combine_bnp (insn);
2635
    }
2636
}
2637
 
2638
 
2639
/* Worker function for TARGET_RETURN_IN_MEMORY.  */
2640
 
2641
static bool
2642
xstormy16_return_in_memory (tree type, tree fntype ATTRIBUTE_UNUSED)
2643
{
2644
  HOST_WIDE_INT size = int_size_in_bytes (type);
2645
  return (size == -1 || size > UNITS_PER_WORD * NUM_ARGUMENT_REGISTERS);
2646
}
2647
 
2648
#undef TARGET_ASM_ALIGNED_HI_OP
2649
#define TARGET_ASM_ALIGNED_HI_OP "\t.hword\t"
2650
#undef TARGET_ASM_ALIGNED_SI_OP
2651
#define TARGET_ASM_ALIGNED_SI_OP "\t.word\t"
2652
#undef TARGET_ENCODE_SECTION_INFO
2653
#define TARGET_ENCODE_SECTION_INFO xstormy16_encode_section_info
2654
 
2655
/* select_section doesn't handle .bss_below100.  */
2656
#undef  TARGET_HAVE_SWITCHABLE_BSS_SECTIONS
2657
#define TARGET_HAVE_SWITCHABLE_BSS_SECTIONS false
2658
 
2659
#undef TARGET_ASM_OUTPUT_MI_THUNK
2660
#define TARGET_ASM_OUTPUT_MI_THUNK xstormy16_asm_output_mi_thunk
2661
#undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
2662
#define TARGET_ASM_CAN_OUTPUT_MI_THUNK default_can_output_mi_thunk_no_vcall
2663
 
2664
#undef TARGET_RTX_COSTS
2665
#define TARGET_RTX_COSTS xstormy16_rtx_costs
2666
#undef TARGET_ADDRESS_COST
2667
#define TARGET_ADDRESS_COST xstormy16_address_cost
2668
 
2669
#undef TARGET_BUILD_BUILTIN_VA_LIST
2670
#define TARGET_BUILD_BUILTIN_VA_LIST xstormy16_build_builtin_va_list
2671
#undef TARGET_GIMPLIFY_VA_ARG_EXPR
2672
#define TARGET_GIMPLIFY_VA_ARG_EXPR xstormy16_expand_builtin_va_arg
2673
 
2674
#undef TARGET_PROMOTE_FUNCTION_ARGS
2675
#define TARGET_PROMOTE_FUNCTION_ARGS hook_bool_tree_true
2676
#undef TARGET_PROMOTE_FUNCTION_RETURN
2677
#define TARGET_PROMOTE_FUNCTION_RETURN hook_bool_tree_true
2678
#undef TARGET_PROMOTE_PROTOTYPES
2679
#define TARGET_PROMOTE_PROTOTYPES hook_bool_tree_true
2680
 
2681
#undef TARGET_RETURN_IN_MEMORY
2682
#define TARGET_RETURN_IN_MEMORY xstormy16_return_in_memory
2683
 
2684
#undef TARGET_MACHINE_DEPENDENT_REORG
2685
#define TARGET_MACHINE_DEPENDENT_REORG xstormy16_reorg
2686
 
2687
struct gcc_target targetm = TARGET_INITIALIZER;
2688
 
2689
#include "gt-stormy16.h"

powered by: WebSVN 2.1.0

© copyright 1999-2024 OpenCores.org, equivalent to Oliscience, all rights reserved. OpenCores®, registered trademark.