OpenCores Subversion repository: openrisc_2011-10-31
URL: https://opencores.org/ocsvn/openrisc_2011-10-31/openrisc_2011-10-31/trunk
File: openrisc/trunk/gnu-src/gcc-4.5.1/gcc/config/vax/vax.c (rev 378)

/* Subroutines for insn-output.c for VAX.
   Copyright (C) 1987, 1994, 1995, 1997, 1998, 1999, 2000, 2001, 2002,
   2004, 2005, 2006, 2007, 2008, 2009
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "df.h"
#include "tree.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "real.h"
#include "insn-config.h"
#include "conditions.h"
#include "function.h"
#include "output.h"
#include "insn-attr.h"
#include "recog.h"
#include "expr.h"
#include "optabs.h"
#include "flags.h"
#include "debug.h"
#include "toplev.h"
#include "tm-preds.h"
#include "tm-constrs.h"
#include "tm_p.h"
#include "target.h"
#include "target-def.h"

static bool vax_legitimate_address_p (enum machine_mode, rtx, bool);
static void vax_output_function_prologue (FILE *, HOST_WIDE_INT);
static void vax_file_start (void);
static void vax_init_libfuncs (void);
static void vax_output_mi_thunk (FILE *, tree, HOST_WIDE_INT,
                                 HOST_WIDE_INT, tree);
static int vax_address_cost_1 (rtx);
static int vax_address_cost (rtx, bool);
static bool vax_rtx_costs (rtx, int, int, int *, bool);
static rtx vax_struct_value_rtx (tree, int);
static rtx vax_builtin_setjmp_frame_value (void);
static void vax_asm_trampoline_template (FILE *);
static void vax_trampoline_init (rtx, tree, rtx);

/* Initialize the GCC target structure.  */
#undef TARGET_ASM_ALIGNED_HI_OP
#define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"

#undef TARGET_ASM_FUNCTION_PROLOGUE
#define TARGET_ASM_FUNCTION_PROLOGUE vax_output_function_prologue

#undef TARGET_ASM_FILE_START
#define TARGET_ASM_FILE_START vax_file_start
#undef TARGET_ASM_FILE_START_APP_OFF
#define TARGET_ASM_FILE_START_APP_OFF true

#undef TARGET_INIT_LIBFUNCS
#define TARGET_INIT_LIBFUNCS vax_init_libfuncs

#undef TARGET_ASM_OUTPUT_MI_THUNK
#define TARGET_ASM_OUTPUT_MI_THUNK vax_output_mi_thunk
#undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
#define TARGET_ASM_CAN_OUTPUT_MI_THUNK default_can_output_mi_thunk_no_vcall

#undef TARGET_DEFAULT_TARGET_FLAGS
#define TARGET_DEFAULT_TARGET_FLAGS TARGET_DEFAULT

#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS vax_rtx_costs
#undef TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST vax_address_cost

#undef TARGET_PROMOTE_PROTOTYPES
#define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true

#undef TARGET_STRUCT_VALUE_RTX
#define TARGET_STRUCT_VALUE_RTX vax_struct_value_rtx

#undef TARGET_BUILTIN_SETJMP_FRAME_VALUE
#define TARGET_BUILTIN_SETJMP_FRAME_VALUE vax_builtin_setjmp_frame_value

#undef TARGET_LEGITIMATE_ADDRESS_P
#define TARGET_LEGITIMATE_ADDRESS_P vax_legitimate_address_p

#undef TARGET_FRAME_POINTER_REQUIRED
#define TARGET_FRAME_POINTER_REQUIRED hook_bool_void_true

#undef TARGET_ASM_TRAMPOLINE_TEMPLATE
#define TARGET_ASM_TRAMPOLINE_TEMPLATE vax_asm_trampoline_template
#undef TARGET_TRAMPOLINE_INIT
#define TARGET_TRAMPOLINE_INIT vax_trampoline_init

struct gcc_target targetm = TARGET_INITIALIZER;

/* Set global variables as needed for the options enabled.  */

void
override_options (void)
{
  /* We're VAX floating point, not IEEE floating point.  */
  if (TARGET_G_FLOAT)
    REAL_MODE_FORMAT (DFmode) = &vax_g_format;
}

/* Generate the assembly code for function entry.  FILE is a stdio
   stream to output the code to.  SIZE is an int: how many units of
   temporary storage to allocate.

   Refer to the array `regs_ever_live' to determine which registers to
   save; `regs_ever_live[I]' is nonzero if register number I is ever
   used in the function.  This function is responsible for knowing
   which registers should not be saved even if used.  */

static void
vax_output_function_prologue (FILE * file, HOST_WIDE_INT size)
{
  int regno;
  int mask = 0;

  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if (df_regs_ever_live_p (regno) && !call_used_regs[regno])
      mask |= 1 << regno;

  fprintf (file, "\t.word 0x%x\n", mask);

  if (dwarf2out_do_frame ())
    {
      const char *label = dwarf2out_cfi_label (false);
      int offset = 0;

      for (regno = FIRST_PSEUDO_REGISTER-1; regno >= 0; --regno)
        if (df_regs_ever_live_p (regno) && !call_used_regs[regno])
          dwarf2out_reg_save (label, regno, offset -= 4);

      dwarf2out_reg_save (label, PC_REGNUM, offset -= 4);
      dwarf2out_reg_save (label, FRAME_POINTER_REGNUM, offset -= 4);
      dwarf2out_reg_save (label, ARG_POINTER_REGNUM, offset -= 4);
      dwarf2out_def_cfa (label, FRAME_POINTER_REGNUM, -(offset - 4));
    }

  size -= STARTING_FRAME_OFFSET;
  if (size >= 64)
    asm_fprintf (file, "\tmovab %wd(%Rsp),%Rsp\n", -size);
  else if (size)
    asm_fprintf (file, "\tsubl2 $%wd,%Rsp\n", size);
}

/* When debugging with stabs, we want to output an extra dummy label
   so that gas can distinguish between D_float and G_float prior to
   processing the .stabs directive identifying type double.  */
static void
vax_file_start (void)
{
  default_file_start ();

  if (write_symbols == DBX_DEBUG)
    fprintf (asm_out_file, "___vax_%c_doubles:\n", ASM_DOUBLE_CHAR);
}

/* We can use the BSD C library routines for the libgcc calls that are
   still generated, since that's what they boil down to anyway.  When
   targeting ELF, avoid the user's namespace.  */

static void
vax_init_libfuncs (void)
{
  if (TARGET_BSD_DIVMOD)
    {
      set_optab_libfunc (udiv_optab, SImode, TARGET_ELF ? "*__udiv" : "*udiv");
      set_optab_libfunc (umod_optab, SImode, TARGET_ELF ? "*__urem" : "*urem");
    }
}

/* Split the quadword operands OPERANDS[0..N-1] into word-sized pieces,
   placing the low-order words in LOW and the high-order words back in
   OPERANDS, with autoincrement and autodecrement addresses handled
   specially.  */

static void
split_quadword_operands (rtx insn, enum rtx_code code, rtx * operands,
                         rtx * low, int n)
{
  int i;

  for (i = 0; i < n; i++)
    low[i] = 0;

  for (i = 0; i < n; i++)
    {
      if (MEM_P (operands[i])
          && (GET_CODE (XEXP (operands[i], 0)) == PRE_DEC
              || GET_CODE (XEXP (operands[i], 0)) == POST_INC))
        {
          rtx addr = XEXP (operands[i], 0);
          operands[i] = low[i] = gen_rtx_MEM (SImode, addr);
        }
      else if (optimize_size && MEM_P (operands[i])
               && REG_P (XEXP (operands[i], 0))
               && (code != MINUS || operands[1] != const0_rtx)
               && find_regno_note (insn, REG_DEAD,
                                   REGNO (XEXP (operands[i], 0))))
        {
          low[i] = gen_rtx_MEM (SImode,
                                gen_rtx_POST_INC (Pmode,
                                                  XEXP (operands[i], 0)));
          operands[i] = gen_rtx_MEM (SImode, XEXP (operands[i], 0));
        }
      else
        {
          low[i] = operand_subword (operands[i], 0, 0, DImode);
          operands[i] = operand_subword (operands[i], 1, 0, DImode);
        }
    }
}
 
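/* Output to FILE the VAX assembler syntax for the memory address ADDR.  */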
void
print_operand_address (FILE * file, rtx addr)
{
  rtx orig = addr;
  rtx reg1, breg, ireg;
  rtx offset;

 retry:
  switch (GET_CODE (addr))
    {
    case MEM:
      fprintf (file, "*");
      addr = XEXP (addr, 0);
      goto retry;

    case REG:
      fprintf (file, "(%s)", reg_names[REGNO (addr)]);
      break;

    case PRE_DEC:
      fprintf (file, "-(%s)", reg_names[REGNO (XEXP (addr, 0))]);
      break;

    case POST_INC:
      fprintf (file, "(%s)+", reg_names[REGNO (XEXP (addr, 0))]);
      break;

    case PLUS:
      /* There can be either two or three things added here.  One must be a
         REG.  One can be either a REG or a MULT of a REG and an appropriate
         constant, and the third can only be a constant or a MEM.

         We get these two or three things and put the constant or MEM in
         OFFSET, the MULT or REG in IREG, and the REG in BREG.  If we have
         a register and can't tell yet if it is a base or index register,
         put it into REG1.  */

      reg1 = 0; ireg = 0; breg = 0; offset = 0;

      if (CONSTANT_ADDRESS_P (XEXP (addr, 0))
          || MEM_P (XEXP (addr, 0)))
        {
          offset = XEXP (addr, 0);
          addr = XEXP (addr, 1);
        }
      else if (CONSTANT_ADDRESS_P (XEXP (addr, 1))
               || MEM_P (XEXP (addr, 1)))
        {
          offset = XEXP (addr, 1);
          addr = XEXP (addr, 0);
        }
      else if (GET_CODE (XEXP (addr, 1)) == MULT)
        {
          ireg = XEXP (addr, 1);
          addr = XEXP (addr, 0);
        }
      else if (GET_CODE (XEXP (addr, 0)) == MULT)
        {
          ireg = XEXP (addr, 0);
          addr = XEXP (addr, 1);
        }
      else if (REG_P (XEXP (addr, 1)))
        {
          reg1 = XEXP (addr, 1);
          addr = XEXP (addr, 0);
        }
      else if (REG_P (XEXP (addr, 0)))
        {
          reg1 = XEXP (addr, 0);
          addr = XEXP (addr, 1);
        }
      else
        gcc_unreachable ();

      if (REG_P (addr))
        {
          if (reg1)
            ireg = addr;
          else
            reg1 = addr;
        }
      else if (GET_CODE (addr) == MULT)
        ireg = addr;
      else
        {
          gcc_assert (GET_CODE (addr) == PLUS);
          if (CONSTANT_ADDRESS_P (XEXP (addr, 0))
              || MEM_P (XEXP (addr, 0)))
            {
              if (offset)
                {
                  if (CONST_INT_P (offset))
                    offset = plus_constant (XEXP (addr, 0), INTVAL (offset));
                  else
                    {
                      gcc_assert (CONST_INT_P (XEXP (addr, 0)));
                      offset = plus_constant (offset, INTVAL (XEXP (addr, 0)));
                    }
                }
              offset = XEXP (addr, 0);
            }
          else if (REG_P (XEXP (addr, 0)))
            {
              if (reg1)
                ireg = reg1, breg = XEXP (addr, 0), reg1 = 0;
              else
                reg1 = XEXP (addr, 0);
            }
          else
            {
              gcc_assert (GET_CODE (XEXP (addr, 0)) == MULT);
              gcc_assert (!ireg);
              ireg = XEXP (addr, 0);
            }

          if (CONSTANT_ADDRESS_P (XEXP (addr, 1))
              || MEM_P (XEXP (addr, 1)))
            {
              if (offset)
                {
                  if (CONST_INT_P (offset))
                    offset = plus_constant (XEXP (addr, 1), INTVAL (offset));
                  else
                    {
                      gcc_assert (CONST_INT_P (XEXP (addr, 1)));
                      offset = plus_constant (offset, INTVAL (XEXP (addr, 1)));
                    }
                }
              offset = XEXP (addr, 1);
            }
          else if (REG_P (XEXP (addr, 1)))
            {
              if (reg1)
                ireg = reg1, breg = XEXP (addr, 1), reg1 = 0;
              else
                reg1 = XEXP (addr, 1);
            }
          else
            {
              gcc_assert (GET_CODE (XEXP (addr, 1)) == MULT);
              gcc_assert (!ireg);
              ireg = XEXP (addr, 1);
            }
        }

      /* If REG1 is nonzero, figure out if it is a base or index register.  */
      if (reg1)
        {
          if (breg
              || (flag_pic && GET_CODE (addr) == SYMBOL_REF)
              || (offset
                  && (MEM_P (offset)
                      || (flag_pic && symbolic_operand (offset, SImode)))))
            {
              gcc_assert (!ireg);
              ireg = reg1;
            }
          else
            breg = reg1;
        }

      if (offset != 0)
        {
          if (flag_pic && symbolic_operand (offset, SImode))
            {
              if (breg && ireg)
                {
                  debug_rtx (orig);
                  output_operand_lossage ("symbol used with both base and indexed registers");
                }

#ifdef NO_EXTERNAL_INDIRECT_ADDRESS
              if (flag_pic > 1 && GET_CODE (offset) == CONST
                  && GET_CODE (XEXP (XEXP (offset, 0), 0)) == SYMBOL_REF
                  && !SYMBOL_REF_LOCAL_P (XEXP (XEXP (offset, 0), 0)))
                {
                  debug_rtx (orig);
                  output_operand_lossage ("symbol with offset used in PIC mode");
                }
#endif

              /* symbol(reg) isn't PIC, but symbol[reg] is.  */
              if (breg)
                {
                  ireg = breg;
                  breg = 0;
                }

            }

          output_address (offset);
        }

      if (breg != 0)
        fprintf (file, "(%s)", reg_names[REGNO (breg)]);

      if (ireg != 0)
        {
          if (GET_CODE (ireg) == MULT)
            ireg = XEXP (ireg, 0);
          gcc_assert (REG_P (ireg));
          fprintf (file, "[%s]", reg_names[REGNO (ireg)]);
        }
      break;

    default:
      output_addr_const (file, addr);
    }
}
 
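/* Output to FILE the VAX assembler representation of operand X, as
   modified by the template code CODE.  */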
void
print_operand (FILE *file, rtx x, int code)
{
  if (code == '#')
    fputc (ASM_DOUBLE_CHAR, file);
  else if (code == '|')
    fputs (REGISTER_PREFIX, file);
  else if (code == 'c')
    fputs (cond_name (x), file);
  else if (code == 'C')
    fputs (rev_cond_name (x), file);
  else if (code == 'D' && CONST_INT_P (x) && INTVAL (x) < 0)
    fprintf (file, "$" NEG_HWI_PRINT_HEX16, INTVAL (x));
  else if (code == 'P' && CONST_INT_P (x))
    fprintf (file, "$" HOST_WIDE_INT_PRINT_DEC, INTVAL (x) + 1);
  else if (code == 'N' && CONST_INT_P (x))
    fprintf (file, "$" HOST_WIDE_INT_PRINT_DEC, ~ INTVAL (x));
  /* rotl instruction cannot deal with negative arguments.  */
  else if (code == 'R' && CONST_INT_P (x))
    fprintf (file, "$" HOST_WIDE_INT_PRINT_DEC, 32 - INTVAL (x));
  else if (code == 'H' && CONST_INT_P (x))
    fprintf (file, "$%d", (int) (0xffff & ~ INTVAL (x)));
  else if (code == 'h' && CONST_INT_P (x))
    fprintf (file, "$%d", (short) - INTVAL (x));
  else if (code == 'B' && CONST_INT_P (x))
    fprintf (file, "$%d", (int) (0xff & ~ INTVAL (x)));
  else if (code == 'b' && CONST_INT_P (x))
    fprintf (file, "$%d", (int) (0xff & - INTVAL (x)));
  else if (code == 'M' && CONST_INT_P (x))
    fprintf (file, "$%d", ~((1 << INTVAL (x)) - 1));
  else if (REG_P (x))
    fprintf (file, "%s", reg_names[REGNO (x)]);
  else if (MEM_P (x))
    output_address (XEXP (x, 0));
  else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == SFmode)
    {
      char dstr[30];
      real_to_decimal (dstr, CONST_DOUBLE_REAL_VALUE (x),
                       sizeof (dstr), 0, 1);
      fprintf (file, "$0f%s", dstr);
    }
  else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == DFmode)
    {
      char dstr[30];
      real_to_decimal (dstr, CONST_DOUBLE_REAL_VALUE (x),
                       sizeof (dstr), 0, 1);
      fprintf (file, "$0%c%s", ASM_DOUBLE_CHAR, dstr);
    }
  else
    {
      if (flag_pic > 1 && symbolic_operand (x, SImode))
        {
          debug_rtx (x);
          output_operand_lossage ("symbol used as immediate operand");
        }
      putc ('$', file);
      output_addr_const (file, x);
    }
}
 
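/* Return the VAX condition suffix for comparison operator OP.  */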
const char *
cond_name (rtx op)
{
  switch (GET_CODE (op))
    {
    case NE:
      return "neq";
    case EQ:
      return "eql";
    case GE:
      return "geq";
    case GT:
      return "gtr";
    case LE:
      return "leq";
    case LT:
      return "lss";
    case GEU:
      return "gequ";
    case GTU:
      return "gtru";
    case LEU:
      return "lequ";
    case LTU:
      return "lssu";

    default:
      gcc_unreachable ();
    }
}
 
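/* Return the VAX condition suffix for the reverse of comparison operator OP.  */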
const char *
rev_cond_name (rtx op)
{
  switch (GET_CODE (op))
    {
    case EQ:
      return "neq";
    case NE:
      return "eql";
    case LT:
      return "geq";
    case LE:
      return "gtr";
    case GT:
      return "leq";
    case GE:
      return "lss";
    case LTU:
      return "gequ";
    case LEU:
      return "gtru";
    case GTU:
      return "lequ";
    case GEU:
      return "lssu";

    default:
      gcc_unreachable ();
    }
}
 
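/* Return true if C is a floating-point constant that can be used as a VAX
   short literal operand (zero, a small power of two, or its reciprocal).  */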
static bool
vax_float_literal (rtx c)
{
  enum machine_mode mode;
  REAL_VALUE_TYPE r, s;
  int i;

  if (GET_CODE (c) != CONST_DOUBLE)
    return false;

  mode = GET_MODE (c);

  if (c == const_tiny_rtx[(int) mode][0]
      || c == const_tiny_rtx[(int) mode][1]
      || c == const_tiny_rtx[(int) mode][2])
    return true;

  REAL_VALUE_FROM_CONST_DOUBLE (r, c);

  for (i = 0; i < 7; i++)
    {
      int x = 1 << i;
      bool ok;
      REAL_VALUE_FROM_INT (s, x, 0, mode);

      if (REAL_VALUES_EQUAL (r, s))
        return true;
      ok = exact_real_inverse (mode, &s);
      gcc_assert (ok);
      if (REAL_VALUES_EQUAL (r, s))
        return true;
    }
  return false;
}


/* Return the cost in cycles of a memory address, relative to register
   indirect.

   Each of the following adds the indicated number of cycles:

   1 - symbolic address
   1 - pre-decrement
   1 - indexing and/or offset(register)
   2 - indirect */


static int
vax_address_cost_1 (rtx addr)
{
  int reg = 0, indexed = 0, indir = 0, offset = 0, predec = 0;
  rtx plus_op0 = 0, plus_op1 = 0;
 restart:
  switch (GET_CODE (addr))
    {
    case PRE_DEC:
      predec = 1;
    case REG:
    case SUBREG:
    case POST_INC:
      reg = 1;
      break;
    case MULT:
      indexed = 1;      /* 2 on VAX 2 */
      break;
    case CONST_INT:
      /* byte offsets cost nothing (on a VAX 2, they cost 1 cycle) */
      if (offset == 0)
        offset = (unsigned HOST_WIDE_INT)(INTVAL(addr)+128) > 256;
      break;
    case CONST:
    case SYMBOL_REF:
      offset = 1;       /* 2 on VAX 2 */
      break;
    case LABEL_REF:     /* this is probably a byte offset from the pc */
      if (offset == 0)
        offset = 1;
      break;
    case PLUS:
      if (plus_op0)
        plus_op1 = XEXP (addr, 0);
      else
        plus_op0 = XEXP (addr, 0);
      addr = XEXP (addr, 1);
      goto restart;
    case MEM:
      indir = 2;        /* 3 on VAX 2 */
      addr = XEXP (addr, 0);
      goto restart;
    default:
      break;
    }

  /* Up to 3 things can be added in an address.  They are stored in
     plus_op0, plus_op1, and addr.  */

  if (plus_op0)
    {
      addr = plus_op0;
      plus_op0 = 0;
      goto restart;
    }
  if (plus_op1)
    {
      addr = plus_op1;
      plus_op1 = 0;
      goto restart;
    }
  /* Indexing and register+offset can both be used (except on a VAX 2)
     without increasing execution time over either one alone.  */
  if (reg && indexed && offset)
    return reg + indir + offset + predec;
  return reg + indexed + indir + offset + predec;
}
 
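/* Worker function for TARGET_ADDRESS_COST.  */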
static int
vax_address_cost (rtx x, bool speed ATTRIBUTE_UNUSED)
{
  return (1 + (REG_P (x) ? 0 : vax_address_cost_1 (x)));
}

/* Cost of an expression on a VAX.  This version has costs tuned for the
   CVAX chip (found in the VAX 3 series) with comments for variations on
   other models.

   FIXME: The costs need review, particularly for TRUNCATE, FLOAT_EXTEND
   and FLOAT_TRUNCATE.  We need a -mcpu option to allow provision of
   costs on a per cpu basis.  */

static bool
vax_rtx_costs (rtx x, int code, int outer_code, int *total,
               bool speed ATTRIBUTE_UNUSED)
{
  enum machine_mode mode = GET_MODE (x);
  int i = 0;                                /* may be modified in switch */
  const char *fmt = GET_RTX_FORMAT (code); /* may be modified in switch */

  switch (code)
    {
      /* On a VAX, constants from 0..63 are cheap because they can use the
         1 byte literal constant format.  Compare to -1 should be made cheap
         so that decrement-and-branch insns can be formed more easily (if
         the value -1 is copied to a register some decrement-and-branch
         patterns will not match).  */
    case CONST_INT:
      if (INTVAL (x) == 0)
        {
          *total = 0;
          return true;
        }
      if (outer_code == AND)
        {
          *total = ((unsigned HOST_WIDE_INT) ~INTVAL (x) <= 077) ? 1 : 2;
          return true;
        }
      if ((unsigned HOST_WIDE_INT) INTVAL (x) <= 077
          || (outer_code == COMPARE
              && INTVAL (x) == -1)
          || ((outer_code == PLUS || outer_code == MINUS)
              && (unsigned HOST_WIDE_INT) -INTVAL (x) <= 077))
        {
          *total = 1;
          return true;
        }
      /* FALLTHRU */

    case CONST:
    case LABEL_REF:
    case SYMBOL_REF:
      *total = 3;
      return true;

    case CONST_DOUBLE:
      if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
        *total = vax_float_literal (x) ? 5 : 8;
      else
        *total = ((CONST_DOUBLE_HIGH (x) == 0
                   && (unsigned HOST_WIDE_INT) CONST_DOUBLE_LOW (x) < 64)
                  || (outer_code == PLUS
                      && CONST_DOUBLE_HIGH (x) == -1
                      && (unsigned HOST_WIDE_INT)-CONST_DOUBLE_LOW (x) < 64))
                 ? 2 : 5;
      return true;

    case POST_INC:
      *total = 2;
      return true;              /* Implies register operand.  */

    case PRE_DEC:
      *total = 3;
      return true;              /* Implies register operand.  */

    case MULT:
      switch (mode)
        {
        case DFmode:
          *total = 16;          /* 4 on VAX 9000 */
          break;
        case SFmode:
          *total = 9;           /* 4 on VAX 9000, 12 on VAX 2 */
          break;
        case DImode:
          *total = 16;          /* 6 on VAX 9000, 28 on VAX 2 */
          break;
        case SImode:
        case HImode:
        case QImode:
          *total = 10;          /* 3-4 on VAX 9000, 20-28 on VAX 2 */
          break;
        default:
          *total = MAX_COST;    /* Mode is not supported.  */
          return true;
        }
      break;

    case UDIV:
      if (mode != SImode)
        {
          *total = MAX_COST;    /* Mode is not supported.  */
          return true;
        }
      *total = 17;
      break;

    case DIV:
      if (mode == DImode)
        *total = 30;            /* Highly variable.  */
      else if (mode == DFmode)
        /* divide takes 28 cycles if the result is not zero, 13 otherwise */
        *total = 24;
      else
        *total = 11;            /* 25 on VAX 2 */
      break;

    case MOD:
      *total = 23;
      break;

    case UMOD:
      if (mode != SImode)
        {
          *total = MAX_COST;    /* Mode is not supported.  */
          return true;
        }
      *total = 29;
      break;

    case FLOAT:
      *total = (6               /* 4 on VAX 9000 */
                + (mode == DFmode) + (GET_MODE (XEXP (x, 0)) != SImode));
      break;

    case FIX:
      *total = 7;               /* 17 on VAX 2 */
      break;

    case ASHIFT:
    case LSHIFTRT:
    case ASHIFTRT:
      if (mode == DImode)
        *total = 12;
      else
        *total = 10;            /* 6 on VAX 9000 */
      break;

    case ROTATE:
    case ROTATERT:
      *total = 6;               /* 5 on VAX 2, 4 on VAX 9000 */
      if (CONST_INT_P (XEXP (x, 1)))
        fmt = "e";              /* all constant rotate counts are short */
      break;

    case PLUS:
    case MINUS:
      *total = (mode == DFmode) ? 13 : 8; /* 6/8 on VAX 9000, 16/15 on VAX 2 */
      /* Small integer operands can use subl2 and addl2.  */
      if ((CONST_INT_P (XEXP (x, 1)))
          && (unsigned HOST_WIDE_INT)(INTVAL (XEXP (x, 1)) + 63) < 127)
        fmt = "e";
      break;

    case IOR:
    case XOR:
      *total = 3;
      break;

    case AND:
      /* AND is special because the first operand is complemented.  */
      *total = 3;
      if (CONST_INT_P (XEXP (x, 0)))
        {
          if ((unsigned HOST_WIDE_INT)~INTVAL (XEXP (x, 0)) > 63)
            *total = 4;
          fmt = "e";
          i = 1;
        }
      break;

    case NEG:
      if (mode == DFmode)
        *total = 9;
      else if (mode == SFmode)
        *total = 6;
      else if (mode == DImode)
        *total = 4;
      else
        *total = 2;
      break;

    case NOT:
      *total = 2;
      break;

    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
      *total = 15;
      break;

    case MEM:
      if (mode == DImode || mode == DFmode)
        *total = 5;             /* 7 on VAX 2 */
      else
        *total = 3;             /* 4 on VAX 2 */
      x = XEXP (x, 0);
      if (!REG_P (x) && GET_CODE (x) != POST_INC)
        *total += vax_address_cost_1 (x);
      return true;

    case FLOAT_EXTEND:
    case FLOAT_TRUNCATE:
    case TRUNCATE:
      *total = 3;               /* FIXME: Costs need to be checked  */
      break;

    default:
      return false;
    }

  /* Now look inside the expression.  Operands which are not registers or
     short constants add to the cost.

     FMT and I may have been adjusted in the switch above for instructions
     which require special handling.  */

  while (*fmt++ == 'e')
    {
      rtx op = XEXP (x, i);

      i += 1;
      code = GET_CODE (op);

      /* A NOT is likely to be found as the first operand of an AND
         (in which case the relevant cost is of the operand inside
         the not) and not likely to be found anywhere else.  */
      if (code == NOT)
        op = XEXP (op, 0), code = GET_CODE (op);

      switch (code)
        {
        case CONST_INT:
          if ((unsigned HOST_WIDE_INT)INTVAL (op) > 63
              && GET_MODE (x) != QImode)
            *total += 1;        /* 2 on VAX 2 */
          break;
        case CONST:
        case LABEL_REF:
        case SYMBOL_REF:
          *total += 1;          /* 2 on VAX 2 */
          break;
        case CONST_DOUBLE:
          if (GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT)
            {
              /* Registers are faster than floating point constants -- even
                 those constants which can be encoded in a single byte.  */
              if (vax_float_literal (op))
                *total += 1;
              else
                *total += (GET_MODE (x) == DFmode) ? 3 : 2;
            }
          else
            {
              if (CONST_DOUBLE_HIGH (op) != 0
                  || (unsigned HOST_WIDE_INT)CONST_DOUBLE_LOW (op) > 63)
                *total += 2;
            }
          break;
        case MEM:
          *total += 1;          /* 2 on VAX 2 */
          if (!REG_P (XEXP (op, 0)))
            *total += vax_address_cost_1 (XEXP (op, 0));
          break;
        case REG:
        case SUBREG:
          break;
        default:
          *total += 1;
          break;
        }
    }
  return true;
}

/* Output code to add DELTA to the first argument, and then jump to FUNCTION.
   Used for C++ multiple inheritance.
        .mask   ^m<r2,r3,r4,r5,r6,r7,r8,r9,r10,r11>  #conservative entry mask
        addl2   $DELTA, 4(ap)   #adjust first argument
        jmp     FUNCTION+2      #jump beyond FUNCTION's entry mask
*/

static void
vax_output_mi_thunk (FILE * file,
                     tree thunk ATTRIBUTE_UNUSED,
                     HOST_WIDE_INT delta,
                     HOST_WIDE_INT vcall_offset ATTRIBUTE_UNUSED,
                     tree function)
{
  fprintf (file, "\t.word 0x0ffc\n\taddl2 $" HOST_WIDE_INT_PRINT_DEC, delta);
  asm_fprintf (file, ",4(%Rap)\n");
  fprintf (file, "\tjmp ");
  assemble_name (file,  XSTR (XEXP (DECL_RTL (function), 0), 0));
  fprintf (file, "+2\n");
}
 
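/* Worker function for TARGET_STRUCT_VALUE_RTX.  */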
static rtx
vax_struct_value_rtx (tree fntype ATTRIBUTE_UNUSED,
                      int incoming ATTRIBUTE_UNUSED)
{
  return gen_rtx_REG (Pmode, VAX_STRUCT_VALUE_REGNUM);
}
 
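/* Worker function for TARGET_BUILTIN_SETJMP_FRAME_VALUE.  */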
static rtx
vax_builtin_setjmp_frame_value (void)
{
  return hard_frame_pointer_rtx;
}

/* Worker function for NOTICE_UPDATE_CC.  */

void
vax_notice_update_cc (rtx exp, rtx insn ATTRIBUTE_UNUSED)
{
  if (GET_CODE (exp) == SET)
    {
      if (GET_CODE (SET_SRC (exp)) == CALL)
        CC_STATUS_INIT;
      else if (GET_CODE (SET_DEST (exp)) != ZERO_EXTRACT
               && GET_CODE (SET_DEST (exp)) != PC)
        {
          cc_status.flags = 0;
          /* The integer operations below don't set carry or
             set it in an incompatible way.  That's ok though
             as the Z bit is all we need when doing unsigned
             comparisons on the result of these insns (since
             they're always with 0).  Set CC_NO_OVERFLOW to
             generate the correct unsigned branches.  */
          switch (GET_CODE (SET_SRC (exp)))
            {
            case NEG:
              if (GET_MODE_CLASS (GET_MODE (exp)) == MODE_FLOAT)
                break;
            case AND:
            case IOR:
            case XOR:
            case NOT:
            case MEM:
            case REG:
              cc_status.flags = CC_NO_OVERFLOW;
              break;
            default:
              break;
            }
          cc_status.value1 = SET_DEST (exp);
          cc_status.value2 = SET_SRC (exp);
        }
    }
  else if (GET_CODE (exp) == PARALLEL
           && GET_CODE (XVECEXP (exp, 0, 0)) == SET)
    {
      if (GET_CODE (SET_SRC (XVECEXP (exp, 0, 0))) == CALL)
        CC_STATUS_INIT;
      else if (GET_CODE (SET_DEST (XVECEXP (exp, 0, 0))) != PC)
        {
          cc_status.flags = 0;
          cc_status.value1 = SET_DEST (XVECEXP (exp, 0, 0));
          cc_status.value2 = SET_SRC (XVECEXP (exp, 0, 0));
        }
      else
        /* PARALLELs whose first element sets the PC are aob,
           sob insns.  They do change the cc's.  */
        CC_STATUS_INIT;
    }
  else
    CC_STATUS_INIT;
  if (cc_status.value1 && REG_P (cc_status.value1)
      && cc_status.value2
      && reg_overlap_mentioned_p (cc_status.value1, cc_status.value2))
    cc_status.value2 = 0;
  if (cc_status.value1 && MEM_P (cc_status.value1)
      && cc_status.value2
      && MEM_P (cc_status.value2))
    cc_status.value2 = 0;
  /* Actual condition, one line up, should be that value2's address
     depends on value1, but that is too much of a pain.  */
}

/* Output integer move instructions.  */

const char *
vax_output_int_move (rtx insn ATTRIBUTE_UNUSED, rtx *operands,
                     enum machine_mode mode)
{
  rtx hi[3], lo[3];
  const char *pattern_hi, *pattern_lo;

  switch (mode)
    {
    case DImode:
      if (operands[1] == const0_rtx)
        return "clrq %0";
      if (TARGET_QMATH && optimize_size
          && (CONST_INT_P (operands[1])
              || GET_CODE (operands[1]) == CONST_DOUBLE))
        {
          unsigned HOST_WIDE_INT hval, lval;
          int n;

          if (GET_CODE (operands[1]) == CONST_DOUBLE)
            {
              gcc_assert (HOST_BITS_PER_WIDE_INT != 64);

              /* Make sure only the low 32 bits are valid.  */
              lval = CONST_DOUBLE_LOW (operands[1]) & 0xffffffff;
              hval = CONST_DOUBLE_HIGH (operands[1]) & 0xffffffff;
            }
          else
            {
              lval = INTVAL (operands[1]);
              hval = 0;
            }

          /* Here we check whether the 64bit value is really a 6bit value
             shifted left by some arbitrary amount.  If so, we can use ashq
             to shift it to the correct value, saving 7 bytes (1 addr-mode
             byte + 8 bytes - 1 shift byte - 1 short literal byte).  */
          if (lval != 0
              && (n = exact_log2 (lval & (- lval))) != -1
              && (lval >> n) < 64)
            {
              lval >>= n;

#if HOST_BITS_PER_WIDE_INT == 32
              /* On 32bit platforms, if the 6bits didn't overflow into the
                 upper 32bit value that value better be 0.  If we have
                 overflowed, make sure it wasn't too much.  */
              if (hval != 0)
                {
                  if (n <= 26 || hval >= ((unsigned)1 << (n - 26)))
                    n = 0;       /* failure */
                  else
                    lval |= hval << (32 - n);
                }
#endif
              /*  If n is 0, then ashq is not the best way to emit this.  */
              if (n > 0)
                {
                  operands[1] = GEN_INT (lval);
                  operands[2] = GEN_INT (n);
                  return "ashq %2,%1,%0";
                }
#if HOST_BITS_PER_WIDE_INT == 32
            }
          /* On 32bit platforms, if the low 32bit value is 0, check out the
             upper 32bit value.  */
          else if (hval != 0
                   && (n = exact_log2 (hval & (- hval)) - 1) != -1
                   && (hval >> n) < 64)
            {
              operands[1] = GEN_INT (hval >> n);
              operands[2] = GEN_INT (n + 32);
              return "ashq %2,%1,%0";
#endif
            }
        }

      if (TARGET_QMATH
          && (!MEM_P (operands[0])
              || GET_CODE (XEXP (operands[0], 0)) == PRE_DEC
              || GET_CODE (XEXP (operands[0], 0)) == POST_INC
              || !illegal_addsub_di_memory_operand (operands[0], DImode))
          && ((CONST_INT_P (operands[1])
               && (unsigned HOST_WIDE_INT) INTVAL (operands[1]) >= 64)
              || GET_CODE (operands[1]) == CONST_DOUBLE))
        {
          hi[0] = operands[0];
          hi[1] = operands[1];

          split_quadword_operands (insn, SET, hi, lo, 2);

          pattern_lo = vax_output_int_move (NULL, lo, SImode);
          pattern_hi = vax_output_int_move (NULL, hi, SImode);

          /* If the patterns are just movl/movl or pushl/pushl then a movq
             will be shorter (1 opcode byte + 1 addrmode byte + 8 immediate
             value bytes vs. 2 opcode bytes + 2 addrmode bytes + 8 immediate
             value bytes).  */
          if ((!strncmp (pattern_lo, "movl", 4)
              && !strncmp (pattern_hi, "movl", 4))
              || (!strncmp (pattern_lo, "pushl", 5)
                  && !strncmp (pattern_hi, "pushl", 5)))
            return "movq %1,%0";

          if (MEM_P (operands[0])
              && GET_CODE (XEXP (operands[0], 0)) == PRE_DEC)
            {
              output_asm_insn (pattern_hi, hi);
              operands[0] = lo[0];
              operands[1] = lo[1];
              operands[2] = lo[2];
              return pattern_lo;
            }
          else
            {
              output_asm_insn (pattern_lo, lo);
              operands[0] = hi[0];
              operands[1] = hi[1];
              operands[2] = hi[2];
              return pattern_hi;
            }
        }
      return "movq %1,%0";

    case SImode:
      if (symbolic_operand (operands[1], SImode))
        {
          if (push_operand (operands[0], SImode))
            return "pushab %a1";
          return "movab %a1,%0";
        }

      if (operands[1] == const0_rtx)
        {
          if (push_operand (operands[1], SImode))
            return "pushl %1";
          return "clrl %0";
        }

      if (CONST_INT_P (operands[1])
          && (unsigned HOST_WIDE_INT) INTVAL (operands[1]) >= 64)
        {
          HOST_WIDE_INT i = INTVAL (operands[1]);
          int n;
          if ((unsigned HOST_WIDE_INT)(~i) < 64)
            return "mcoml %N1,%0";
          if ((unsigned HOST_WIDE_INT)i < 0x100)
            return "movzbl %1,%0";
          if (i >= -0x80 && i < 0)
            return "cvtbl %1,%0";
          if (optimize_size
              && (n = exact_log2 (i & (-i))) != -1
              && ((unsigned HOST_WIDE_INT)i >> n) < 64)
            {
              operands[1] = GEN_INT ((unsigned HOST_WIDE_INT)i >> n);
              operands[2] = GEN_INT (n);
              return "ashl %2,%1,%0";
            }
          if ((unsigned HOST_WIDE_INT)i < 0x10000)
            return "movzwl %1,%0";
          if (i >= -0x8000 && i < 0)
            return "cvtwl %1,%0";
        }
      if (push_operand (operands[0], SImode))
        return "pushl %1";
      return "movl %1,%0";

    case HImode:
      if (CONST_INT_P (operands[1]))
        {
          HOST_WIDE_INT i = INTVAL (operands[1]);
          if (i == 0)
            return "clrw %0";
          else if ((unsigned HOST_WIDE_INT)i < 64)
            return "movw %1,%0";
          else if ((unsigned HOST_WIDE_INT)~i < 64)
            return "mcomw %H1,%0";
          else if ((unsigned HOST_WIDE_INT)i < 256)
            return "movzbw %1,%0";
          else if (i >= -0x80 && i < 0)
            return "cvtbw %1,%0";
        }
      return "movw %1,%0";

    case QImode:
      if (CONST_INT_P (operands[1]))
        {
          HOST_WIDE_INT i = INTVAL (operands[1]);
          if (i == 0)
            return "clrb %0";
          else if ((unsigned HOST_WIDE_INT)~i < 64)
            return "mcomb %B1,%0";
        }
      return "movb %1,%0";

    default:
      gcc_unreachable ();
    }
}

/* Output integer add instructions.

   The space-time-opcode tradeoffs for addition vary by model of VAX.

   On a VAX 3 "movab (r1)[r2],r3" is faster than "addl3 r1,r2,r3",
   but it is not faster on other models.

   "movab #(r1),r2" is usually shorter than "addl3 #,r1,r2", and is
   faster on a VAX 3, but some VAXen (e.g. VAX 9000) will stall if
   a register is used in an address too soon after it is set.
   Compromise by using movab only when it is shorter than the add
   or the base register in the address is one of sp, ap, and fp,
   which are not modified very often.  */

const char *
vax_output_int_add (rtx insn, rtx *operands, enum machine_mode mode)
{
  switch (mode)
    {
    case DImode:
      {
        rtx low[3];
        const char *pattern;
        int carry = 1;
        bool sub;

        if (TARGET_QMATH && 0)
          debug_rtx (insn);

        split_quadword_operands (insn, PLUS, operands, low, 3);

        if (TARGET_QMATH)
          {
            gcc_assert (rtx_equal_p (operands[0], operands[1]));
#ifdef NO_EXTERNAL_INDIRECT_ADDRESS
            gcc_assert (!flag_pic || !external_memory_operand (low[2], SImode));
            gcc_assert (!flag_pic || !external_memory_operand (low[0], SImode));
#endif

            /* No reason to add a 0 to the low part and thus no carry, so just
               emit the appropriate add/sub instruction.  */
            if (low[2] == const0_rtx)
              return vax_output_int_add (NULL, operands, SImode);

            /* Are we doing addition or subtraction?  */
            sub = CONST_INT_P (operands[2]) && INTVAL (operands[2]) < 0;

            /* We can't use vax_output_int_add since some of the patterns
               don't modify the carry bit.  */
            if (sub)
              {
                if (low[2] == constm1_rtx)
                  pattern = "decl %0";
                else
                  pattern = "subl2 $%n2,%0";
              }
            else
              {
                if (low[2] == const1_rtx)
                  pattern = "incl %0";
                else
                  pattern = "addl2 %2,%0";
              }
            output_asm_insn (pattern, low);

            /* In 2's complement, -n = ~n + 1.  Since we are dealing with
               two 32bit parts, we complement each and then add one to the
               low part.  We know that the low part can't overflow since
               its value can never be 0.  */
            if (sub)
                return "sbwc %N2,%0";
            return "adwc %2,%0";
          }

        /* Add low parts.  */
        if (rtx_equal_p (operands[0], operands[1]))
          {
            if (low[2] == const0_rtx)
              /* Should examine operand, punt if not POST_INC.  */
              pattern = "tstl %0", carry = 0;
            else if (low[2] == const1_rtx)
              pattern = "incl %0";
            else
              pattern = "addl2 %2,%0";
          }
        else
          {
            if (low[2] == const0_rtx)
              pattern = "movl %1,%0", carry = 0;
            else
              pattern = "addl3 %2,%1,%0";
          }
        if (pattern)
          output_asm_insn (pattern, low);
        if (!carry)
          /* If CARRY is 0, we don't have any carry value to worry about.  */
          return get_insn_template (CODE_FOR_addsi3, insn);
        /* %0 = C + %1 + %2 */
        if (!rtx_equal_p (operands[0], operands[1]))
          output_asm_insn ((operands[1] == const0_rtx
                            ? "clrl %0"
                            : "movl %1,%0"), operands);
        return "adwc %2,%0";
      }

    case SImode:
      if (rtx_equal_p (operands[0], operands[1]))
        {
          if (operands[2] == const1_rtx)
            return "incl %0";
          if (operands[2] == constm1_rtx)
            return "decl %0";
          if (CONST_INT_P (operands[2])
              && (unsigned HOST_WIDE_INT) (- INTVAL (operands[2])) < 64)
            return "subl2 $%n2,%0";
          if (CONST_INT_P (operands[2])
              && (unsigned HOST_WIDE_INT) INTVAL (operands[2]) >= 64
              && REG_P (operands[1])
              && ((INTVAL (operands[2]) < 32767 && INTVAL (operands[2]) > -32768)
                   || REGNO (operands[1]) > 11))
            return "movab %c2(%1),%0";
          if (REG_P (operands[0]) && symbolic_operand (operands[2], SImode))
            return "movab %a2[%0],%0";
          return "addl2 %2,%0";
        }

      if (rtx_equal_p (operands[0], operands[2]))
        {
          if (REG_P (operands[0]) && symbolic_operand (operands[1], SImode))
            return "movab %a1[%0],%0";
          return "addl2 %1,%0";
        }

      if (CONST_INT_P (operands[2])
          && INTVAL (operands[2]) < 32767
          && INTVAL (operands[2]) > -32768
          && REG_P (operands[1])
          && push_operand (operands[0], SImode))
        return "pushab %c2(%1)";

      if (CONST_INT_P (operands[2])
          && (unsigned HOST_WIDE_INT) (- INTVAL (operands[2])) < 64)
        return "subl3 $%n2,%1,%0";

      if (CONST_INT_P (operands[2])
          && (unsigned HOST_WIDE_INT) INTVAL (operands[2]) >= 64
          && REG_P (operands[1])
          && ((INTVAL (operands[2]) < 32767 && INTVAL (operands[2]) > -32768)
               || REGNO (operands[1]) > 11))
        return "movab %c2(%1),%0";

      /* Add this if using gcc on a VAX 3xxx:
      if (REG_P (operands[1]) && REG_P (operands[2]))
        return "movab (%1)[%2],%0";
      */

      if (REG_P (operands[1]) && symbolic_operand (operands[2], SImode))
        {
          if (push_operand (operands[0], SImode))
            return "pushab %a2[%1]";
          return "movab %a2[%1],%0";
        }

      if (REG_P (operands[2]) && symbolic_operand (operands[1], SImode))
        {
          if (push_operand (operands[0], SImode))
            return "pushab %a1[%2]";
          return "movab %a1[%2],%0";
        }

      if (flag_pic && REG_P (operands[0])
          && symbolic_operand (operands[2], SImode))
        return "movab %a2,%0;addl2 %1,%0";

      if (flag_pic
          && (symbolic_operand (operands[1], SImode)
              || symbolic_operand (operands[2], SImode)))
        debug_rtx (insn);

      return "addl3 %1,%2,%0";

    case HImode:
      if (rtx_equal_p (operands[0], operands[1]))
        {
          if (operands[2] == const1_rtx)
            return "incw %0";
          if (operands[2] == constm1_rtx)
            return "decw %0";
          if (CONST_INT_P (operands[2])
              && (unsigned HOST_WIDE_INT) (- INTVAL (operands[2])) < 64)
            return "subw2 $%n2,%0";
          return "addw2 %2,%0";
        }
      if (rtx_equal_p (operands[0], operands[2]))
        return "addw2 %1,%0";
      if (CONST_INT_P (operands[2])
          && (unsigned HOST_WIDE_INT) (- INTVAL (operands[2])) < 64)
        return "subw3 $%n2,%1,%0";
      return "addw3 %1,%2,%0";

    case QImode:
      if (rtx_equal_p (operands[0], operands[1]))
        {
          if (operands[2] == const1_rtx)
            return "incb %0";
          if (operands[2] == constm1_rtx)
            return "decb %0";
          if (CONST_INT_P (operands[2])
              && (unsigned HOST_WIDE_INT) (- INTVAL (operands[2])) < 64)
            return "subb2 $%n2,%0";
          return "addb2 %2,%0";
        }
      if (rtx_equal_p (operands[0], operands[2]))
        return "addb2 %1,%0";
      if (CONST_INT_P (operands[2])
          && (unsigned HOST_WIDE_INT) (- INTVAL (operands[2])) < 64)
        return "subb3 $%n2,%1,%0";
      return "addb3 %1,%2,%0";

    default:
      gcc_unreachable ();
    }
}
 
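/* Output integer subtract instructions.  */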
1494
const char *
1495
vax_output_int_subtract (rtx insn, rtx *operands, enum machine_mode mode)
1496
{
1497
  switch (mode)
1498
    {
1499
    case DImode:
1500
      {
1501
        rtx low[3];
1502
        const char *pattern;
1503
        int carry = 1;
1504
 
1505
        if (TARGET_QMATH && 0)
1506
          debug_rtx (insn);
1507
 
1508
        split_quadword_operands (insn, MINUS, operands, low, 3);
1509
 
1510
        if (TARGET_QMATH)
1511
          {
1512
            if (operands[1] == const0_rtx && low[1] == const0_rtx)
1513
              {
1514
                /* Negation is tricky.  It's basically complement and increment.
1515
                   Negate hi, then lo, and subtract the carry back.  */
1516
                if ((MEM_P (low[0]) && GET_CODE (XEXP (low[0], 0)) == POST_INC)
1517
                    || (MEM_P (operands[0])
1518
                        && GET_CODE (XEXP (operands[0], 0)) == POST_INC))
1519
                  fatal_insn ("illegal operand detected", insn);
1520
                output_asm_insn ("mnegl %2,%0", operands);
1521
                output_asm_insn ("mnegl %2,%0", low);
1522
                return "sbwc $0,%0";
1523
              }
1524
            gcc_assert (rtx_equal_p (operands[0], operands[1]));
1525
            gcc_assert (rtx_equal_p (low[0], low[1]));
1526
            if (low[2] == const1_rtx)
1527
              output_asm_insn ("decl %0", low);
1528
            else
1529
              output_asm_insn ("subl2 %2,%0", low);
1530
            return "sbwc %2,%0";
1531
          }
1532
 
1533
        /* Subtract low parts.  */
1534
        if (rtx_equal_p (operands[0], operands[1]))
1535
          {
1536
            if (low[2] == const0_rtx)
1537
              pattern = 0, carry = 0;
1538
            else if (low[2] == constm1_rtx)
1539
              pattern = "decl %0";
1540
            else
1541
              pattern = "subl2 %2,%0";
1542
          }
1543
        else
1544
          {
1545
            if (low[2] == constm1_rtx)
1546
              pattern = "decl %0";
1547
            else if (low[2] == const0_rtx)
1548
              pattern = get_insn_template (CODE_FOR_movsi, insn), carry = 0;
1549
            else
1550
              pattern = "subl3 %2,%1,%0";
1551
          }
1552
        if (pattern)
1553
          output_asm_insn (pattern, low);
1554
        if (carry)
1555
          {
1556
            if (!rtx_equal_p (operands[0], operands[1]))
1557
              return "movl %1,%0;sbwc %2,%0";
1558
            return "sbwc %2,%0";
1559
            /* %0 = %2 - %1 - C */
1560
          }
1561
        return get_insn_template (CODE_FOR_subsi3, insn);
1562
      }
1563
 
1564
    default:
1565
      gcc_unreachable ();
1566
  }
1567
}
1568
 
1569
/* True if X is an rtx for a constant that is a valid address.  */
1570
 
1571
bool
1572
legitimate_constant_address_p (rtx x)
1573
{
1574
  if (GET_CODE (x) == LABEL_REF || GET_CODE (x) == SYMBOL_REF
1575
          || CONST_INT_P (x) || GET_CODE (x) == HIGH)
1576
    return true;
1577
  if (GET_CODE (x) != CONST)
1578
    return false;
1579
#ifdef NO_EXTERNAL_INDIRECT_ADDRESS
1580
  if (flag_pic
1581
      && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
1582
      && !SYMBOL_REF_LOCAL_P (XEXP (XEXP (x, 0), 0)))
1583
    return false;
1584
#endif
1585
   return true;
1586
}
1587
 
1588
/* True if the constant value X is a legitimate general operand.
   It is given that X satisfies CONSTANT_P or is a CONST_DOUBLE.  */

bool
legitimate_constant_p (rtx x ATTRIBUTE_UNUSED)
{
  return true;
}

/* The other macros defined here are used only in legitimate_address_p ().  */

/* Nonzero if X is a hard reg that can be used as an index
   or, if not strict, if it is a pseudo reg.  */
#define INDEX_REGISTER_P(X, STRICT) \
(REG_P (X) && (!(STRICT) || REGNO_OK_FOR_INDEX_P (REGNO (X))))

/* Nonzero if X is a hard reg that can be used as a base reg
   or, if not strict, if it is a pseudo reg.  */
#define BASE_REGISTER_P(X, STRICT) \
(REG_P (X) && (!(STRICT) || REGNO_OK_FOR_BASE_P (REGNO (X))))

#ifdef NO_EXTERNAL_INDIRECT_ADDRESS

/* Re-definition of CONSTANT_ADDRESS_P, which is true only when there
   are no SYMBOL_REFs for external symbols present.  */

static bool
indirectable_constant_address_p (rtx x, bool indirect)
{
  if (GET_CODE (x) == SYMBOL_REF)
    return !flag_pic || SYMBOL_REF_LOCAL_P (x) || !indirect;

  if (GET_CODE (x) == CONST)
    return !flag_pic
           || GET_CODE (XEXP (XEXP (x, 0), 0)) != SYMBOL_REF
           || SYMBOL_REF_LOCAL_P (XEXP (XEXP (x, 0), 0));

  return CONSTANT_ADDRESS_P (x);
}

#else /* not NO_EXTERNAL_INDIRECT_ADDRESS */

static bool
indirectable_constant_address_p (rtx x, bool indirect ATTRIBUTE_UNUSED)
{
  return CONSTANT_ADDRESS_P (x);
}

#endif /* not NO_EXTERNAL_INDIRECT_ADDRESS */

/* True if X is an address which can be indirected.  External symbols
   could be in a sharable image library, so we disallow those.  */

static bool
indirectable_address_p (rtx x, bool strict, bool indirect)
{
  if (indirectable_constant_address_p (x, indirect)
      || BASE_REGISTER_P (x, strict))
    return true;
  if (GET_CODE (x) != PLUS
      || !BASE_REGISTER_P (XEXP (x, 0), strict)
      || (flag_pic && !CONST_INT_P (XEXP (x, 1))))
    return false;
  return indirectable_constant_address_p (XEXP (x, 1), indirect);
}

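/* Illustrative forms accepted by indirectable_address_p: a base register,
   an indirectable constant address, or a sum such as
   (plus (reg) (const_int 4)); with -fPIC the added term must be a
   CONST_INT rather than a symbolic constant.  */
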
/* Return true if x is a valid address not using indexing.
   (This much is the easy part.)  */
static bool
nonindexed_address_p (rtx x, bool strict)
{
  rtx xfoo0;
  if (REG_P (x))
    {
      extern rtx *reg_equiv_mem;
      if (! reload_in_progress
          || reg_equiv_mem[REGNO (x)] == 0
          || indirectable_address_p (reg_equiv_mem[REGNO (x)], strict, false))
        return true;
    }
  if (indirectable_constant_address_p (x, false))
    return true;
  if (indirectable_address_p (x, strict, false))
    return true;
  xfoo0 = XEXP (x, 0);
  if (MEM_P (x) && indirectable_address_p (xfoo0, strict, true))
    return true;
  if ((GET_CODE (x) == PRE_DEC || GET_CODE (x) == POST_INC)
      && BASE_REGISTER_P (xfoo0, strict))
    return true;
  return false;
}

/* True if PROD is either a reg times size of mode MODE and MODE is less
   than or equal to 8 bytes, or just a reg if MODE is one byte.  */

static bool
index_term_p (rtx prod, enum machine_mode mode, bool strict)
{
  rtx xfoo0, xfoo1;

  if (GET_MODE_SIZE (mode) == 1)
    return BASE_REGISTER_P (prod, strict);

  if (GET_CODE (prod) != MULT || GET_MODE_SIZE (mode) > 8)
    return false;

  xfoo0 = XEXP (prod, 0);
  xfoo1 = XEXP (prod, 1);

  if (CONST_INT_P (xfoo0)
      && INTVAL (xfoo0) == (int)GET_MODE_SIZE (mode)
      && INDEX_REGISTER_P (xfoo1, strict))
    return true;

  if (CONST_INT_P (xfoo1)
      && INTVAL (xfoo1) == (int)GET_MODE_SIZE (mode)
      && INDEX_REGISTER_P (xfoo0, strict))
    return true;

  return false;
}

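/* Example: for SImode (size 4), (mult (reg) (const_int 4)) is a valid
   index term, matching the implicit scaling of the VAX indexed mode;
   for QImode a bare register suffices, since the scale factor is 1.  */
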
/* Return true if X is the sum of a register
   and a valid index term for mode MODE.  */
static bool
reg_plus_index_p (rtx x, enum machine_mode mode, bool strict)
{
  rtx xfoo0, xfoo1;

  if (GET_CODE (x) != PLUS)
    return false;

  xfoo0 = XEXP (x, 0);
  xfoo1 = XEXP (x, 1);

  if (BASE_REGISTER_P (xfoo0, strict) && index_term_p (xfoo1, mode, strict))
    return true;

  if (BASE_REGISTER_P (xfoo1, strict) && index_term_p (xfoo0, mode, strict))
    return true;

  return false;
}

/* Return true if xfoo0 and xfoo1 constitute a valid indexed address.  */
static bool
indexable_address_p (rtx xfoo0, rtx xfoo1, enum machine_mode mode, bool strict)
{
  if (!CONSTANT_ADDRESS_P (xfoo0))
    return false;
  if (BASE_REGISTER_P (xfoo1, strict))
    return !flag_pic || mode == QImode;
  if (flag_pic && symbolic_operand (xfoo0, SImode))
    return false;
  return reg_plus_index_p (xfoo1, mode, strict);
}

/* legitimate_address_p returns true if it recognizes an RTL expression "x"
   that is a valid memory address for an instruction.
   The MODE argument is the machine mode for the MEM expression
   that wants to use this address.  */
bool
vax_legitimate_address_p (enum machine_mode mode, rtx x, bool strict)
{
  rtx xfoo0, xfoo1;

  if (nonindexed_address_p (x, strict))
    return true;

  if (GET_CODE (x) != PLUS)
    return false;

  /* Handle <address>[index] represented with index-sum outermost.  */

  xfoo0 = XEXP (x, 0);
  xfoo1 = XEXP (x, 1);

  if (index_term_p (xfoo0, mode, strict)
      && nonindexed_address_p (xfoo1, strict))
    return true;

  if (index_term_p (xfoo1, mode, strict)
      && nonindexed_address_p (xfoo0, strict))
    return true;

  /* Handle offset(reg)[index] with offset added outermost.  */

  if (indexable_address_p (xfoo0, xfoo1, mode, strict)
      || indexable_address_p (xfoo1, xfoo0, mode, strict))
    return true;

  return false;
}

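/* Example of an SImode address accepted here:
     (plus (mult (reg rI) (const_int 4)) (plus (reg rB) (const_int 12)))
   i.e. the assembler form 12(rB)[rI], an indexed displacement address.  */
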
/* Return true if x (a legitimate address expression) has an effect that
   depends on the machine mode it is used for.  On the VAX, the predecrement
   and postincrement addresses depend thus (the amount of decrement or
   increment being the length of the operand) and all indexed addresses
   depend thus (because the index scale factor is the length of the
   operand).  */

bool
vax_mode_dependent_address_p (rtx x)
{
  rtx xfoo0, xfoo1;

  /* Auto-increment cases are now dealt with generically in recog.c.  */
  if (GET_CODE (x) != PLUS)
    return false;

  xfoo0 = XEXP (x, 0);
  xfoo1 = XEXP (x, 1);

  if (CONST_INT_P (xfoo0) && REG_P (xfoo1))
    return false;
  if (CONST_INT_P (xfoo1) && REG_P (xfoo0))
    return false;
  if (!flag_pic && CONSTANT_ADDRESS_P (xfoo0) && REG_P (xfoo1))
    return false;
  if (!flag_pic && CONSTANT_ADDRESS_P (xfoo1) && REG_P (xfoo0))
    return false;

  return true;
}

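/* For instance, (plus (reg) (const_int 4)) is not mode dependent, while
   an indexed sum such as (plus (reg) (mult (reg) (const_int 4))) is,
   because the hardware's index scale factor equals the operand size.  */
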
static rtx
fixup_mathdi_operand (rtx x, enum machine_mode mode)
{
  if (illegal_addsub_di_memory_operand (x, mode))
    {
      rtx addr = XEXP (x, 0);
      rtx temp = gen_reg_rtx (Pmode);
      rtx offset = 0;
#ifdef NO_EXTERNAL_INDIRECT_ADDRESS
      if (GET_CODE (addr) == CONST && flag_pic)
        {
          offset = XEXP (XEXP (addr, 0), 1);
          addr = XEXP (XEXP (addr, 0), 0);
        }
#endif
      emit_move_insn (temp, addr);
      if (offset)
        temp = gen_rtx_PLUS (Pmode, temp, offset);
      x = gen_rtx_MEM (DImode, temp);
    }
  return x;
}

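/* In effect, fixup_mathdi_operand above rewrites a DImode memory operand
   whose address the add/subtract-with-carry patterns cannot use: the
   address (split from any PIC offset) is loaded into a fresh pseudo and
   the operand becomes (mem:DI (reg)) or (mem:DI (plus (reg) offset)).  */
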
void
vax_expand_addsub_di_operands (rtx * operands, enum rtx_code code)
{
  int hi_only = operand_subword (operands[2], 0, 0, DImode) == const0_rtx;
  rtx temp;

  rtx (*gen_old_insn)(rtx, rtx, rtx);
  rtx (*gen_si_insn)(rtx, rtx, rtx);
  rtx (*gen_insn)(rtx, rtx, rtx);

  if (code == PLUS)
    {
      gen_old_insn = gen_adddi3_old;
      gen_si_insn = gen_addsi3;
      gen_insn = gen_adcdi3;
    }
  else if (code == MINUS)
    {
      gen_old_insn = gen_subdi3_old;
      gen_si_insn = gen_subsi3;
      gen_insn = gen_sbcdi3;
    }
  else
    gcc_unreachable ();

  /* If this is addition (thus operands are commutative) and if there is one
     addend that duplicates the destination, we want that addend to be the
     first addend.  */
  if (code == PLUS
      && rtx_equal_p (operands[0], operands[2])
      && !rtx_equal_p (operands[1], operands[2]))
    {
      temp = operands[2];
      operands[2] = operands[1];
      operands[1] = temp;
    }

  if (!TARGET_QMATH)
    {
      emit_insn ((*gen_old_insn) (operands[0], operands[1], operands[2]));
    }
  else if (hi_only)
    {
      if (!rtx_equal_p (operands[0], operands[1])
          && (REG_P (operands[0]) && MEM_P (operands[1])))
        {
          emit_move_insn (operands[0], operands[1]);
          operands[1] = operands[0];
        }

      operands[0] = fixup_mathdi_operand (operands[0], DImode);
      operands[1] = fixup_mathdi_operand (operands[1], DImode);
      operands[2] = fixup_mathdi_operand (operands[2], DImode);

      if (!rtx_equal_p (operands[0], operands[1]))
        emit_move_insn (operand_subword (operands[0], 0, 0, DImode),
                        operand_subword (operands[1], 0, 0, DImode));

      emit_insn ((*gen_si_insn) (operand_subword (operands[0], 1, 0, DImode),
                                 operand_subword (operands[1], 1, 0, DImode),
                                 operand_subword (operands[2], 1, 0, DImode)));
    }
  else
    {
      /* If we are adding the same value together, that's really a multiply
         by 2, and that's just a left shift of 1.  */
      if (rtx_equal_p (operands[1], operands[2]))
        {
          gcc_assert (code != MINUS);
          emit_insn (gen_ashldi3 (operands[0], operands[1], const1_rtx));
          return;
        }

      operands[0] = fixup_mathdi_operand (operands[0], DImode);

      /* If an operand is the same as operand[0], use the operand[0] rtx
         because fixup will return an equivalent rtx but not an equal one.  */

      if (rtx_equal_p (operands[0], operands[1]))
        operands[1] = operands[0];
      else
        operands[1] = fixup_mathdi_operand (operands[1], DImode);

      if (rtx_equal_p (operands[0], operands[2]))
        operands[2] = operands[0];
      else
        operands[2] = fixup_mathdi_operand (operands[2], DImode);

      /* If we are subtracting not from ourselves [d = a - b], and because the
         carry ops are two operand only, we would need to do a move prior to
         the subtract.  And if d == b, we would need a temp otherwise
         [d = a, d -= d] and we end up with 0.  Instead we rewrite d = a - b
         into d = -b, d += a.  Since -b can never overflow, even if b == d,
         no temp is needed.

         If we are doing addition, since the carry ops are two operand, if
         we aren't adding to ourselves, move the first addend to the
         destination first.  */

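      /* Worked example of that rewrite: for d = 5 - b, even when d and b
         are the same location, the code below first emits d = 0 - b
         (sbcdi3) and then d = d + 5 (adcdi3), so no scratch is needed.  */
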
      gcc_assert (operands[1] != const0_rtx || code == MINUS);
      if (!rtx_equal_p (operands[0], operands[1]) && operands[1] != const0_rtx)
        {
          if (code == MINUS && CONSTANT_P (operands[1]))
            {
              temp = gen_reg_rtx (DImode);
              emit_insn (gen_sbcdi3 (operands[0], const0_rtx, operands[2]));
              code = PLUS;
              gen_insn = gen_adcdi3;
              operands[2] = operands[1];
              operands[1] = operands[0];
            }
          else
            emit_move_insn (operands[0], operands[1]);
        }

      /* Subtracting a constant will have been rewritten to an addition of the
         negative of that constant before we get here.  */
      gcc_assert (!CONSTANT_P (operands[2]) || code == PLUS);
      emit_insn ((*gen_insn) (operands[0], operands[1], operands[2]));
    }
}

bool
adjacent_operands_p (rtx lo, rtx hi, enum machine_mode mode)
{
  HOST_WIDE_INT lo_offset;
  HOST_WIDE_INT hi_offset;

  if (GET_CODE (lo) != GET_CODE (hi))
    return false;

  if (REG_P (lo))
    return mode == SImode && REGNO (lo) + 1 == REGNO (hi);
  if (CONST_INT_P (lo))
    return INTVAL (hi) == 0 && 0 <= INTVAL (lo) && INTVAL (lo) < 64;
  if (CONST_INT_P (lo))
    return mode != SImode;

  if (!MEM_P (lo))
    return false;

  if (MEM_VOLATILE_P (lo) || MEM_VOLATILE_P (hi))
    return false;

  lo = XEXP (lo, 0);
  hi = XEXP (hi, 0);

  if (GET_CODE (lo) == POST_INC /* || GET_CODE (lo) == PRE_DEC */)
    return rtx_equal_p (lo, hi);

  switch (GET_CODE (lo))
    {
    case REG:
    case SYMBOL_REF:
      lo_offset = 0;
      break;
    case CONST:
      lo = XEXP (lo, 0);
      /* FALLTHROUGH */
    case PLUS:
      if (!CONST_INT_P (XEXP (lo, 1)))
        return false;
      lo_offset = INTVAL (XEXP (lo, 1));
      lo = XEXP (lo, 0);
      break;
    default:
      return false;
    }

  switch (GET_CODE (hi))
    {
    case REG:
    case SYMBOL_REF:
      hi_offset = 0;
      break;
    case CONST:
      hi = XEXP (hi, 0);
      /* FALLTHROUGH */
    case PLUS:
      if (!CONST_INT_P (XEXP (hi, 1)))
        return false;
      hi_offset = INTVAL (XEXP (hi, 1));
      hi = XEXP (hi, 0);
      break;
    default:
      return false;
    }

  if (GET_CODE (lo) == MULT || GET_CODE (lo) == PLUS)
    return false;

  return rtx_equal_p (lo, hi)
         && hi_offset - lo_offset == GET_MODE_SIZE (mode);
}

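/* Example: (mem:SI (reg rB)) and (mem:SI (plus (reg rB) (const_int 4)))
   are adjacent for SImode, since the second address is exactly
   GET_MODE_SIZE (SImode) bytes beyond the first.  */
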
/* Output assembler code for a block containing the constant parts
   of a trampoline, leaving space for the variable parts.  */

/* On the VAX, the trampoline contains an entry mask and two instructions:
     .word NN
     movl $STATIC,r0   (store the function's static chain)
     jmp  *$FUNCTION   (jump to function code at address FUNCTION)  */

static void
vax_asm_trampoline_template (FILE *f ATTRIBUTE_UNUSED)
{
  assemble_aligned_integer (2, const0_rtx);
  assemble_aligned_integer (2, GEN_INT (0x8fd0));
  assemble_aligned_integer (4, const0_rtx);
  assemble_aligned_integer (1, GEN_INT (0x50 + STATIC_CHAIN_REGNUM));
  assemble_aligned_integer (2, GEN_INT (0x9f17));
  assemble_aligned_integer (4, const0_rtx);
}

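/* As laid out above, the template bytes appear to decode as: the 2-byte
   entry mask at offset 0, MOVL with an immediate operand (bytes 0xd0 0x8f)
   at offsets 2-3, the 4-byte static chain immediate at offset 4, a
   register operand specifier (0x50 + regno) at offset 8, JMP absolute
   (bytes 0x17 0x9f) at offsets 9-10, and the 4-byte target address at
   offset 11.  Offsets 0, 4 and 11 are the slots vax_trampoline_init
   fills in below.  */
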
/* We copy the register-mask from the function's pure code
   to the start of the trampoline.  */

static void
vax_trampoline_init (rtx m_tramp, tree fndecl, rtx cxt)
{
  rtx fnaddr = XEXP (DECL_RTL (fndecl), 0);
  rtx mem;

  emit_block_move (m_tramp, assemble_trampoline_template (),
                   GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);

  mem = adjust_address (m_tramp, HImode, 0);
  emit_move_insn (mem, gen_const_mem (HImode, fnaddr));

  mem = adjust_address (m_tramp, SImode, 4);
  emit_move_insn (mem, cxt);
  mem = adjust_address (m_tramp, SImode, 11);
  emit_move_insn (mem, plus_constant (fnaddr, 2));
  emit_insn (gen_sync_istream ());
}
 
