OpenCores Subversion repository "openrisc": https://opencores.org/ocsvn/openrisc/openrisc/trunk

openrisc/trunk/gnu-dev/or1k-gcc/gcc/config/vax/vax.c - blame information for rev 848


Line No. Rev Author Line
1 709 jeremybenn
/* Subroutines for insn-output.c for VAX.
2
   Copyright (C) 1987, 1994, 1995, 1997, 1998, 1999, 2000, 2001, 2002,
3
   2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011
4
   Free Software Foundation, Inc.
5
 
6
This file is part of GCC.
7
 
8
GCC is free software; you can redistribute it and/or modify
9
it under the terms of the GNU General Public License as published by
10
the Free Software Foundation; either version 3, or (at your option)
11
any later version.
12
 
13
GCC is distributed in the hope that it will be useful,
14
but WITHOUT ANY WARRANTY; without even the implied warranty of
15
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
16
GNU General Public License for more details.
17
 
18
You should have received a copy of the GNU General Public License
19
along with GCC; see the file COPYING3.  If not see
20
<http://www.gnu.org/licenses/>.  */
21
 
22
#include "config.h"
23
#include "system.h"
24
#include "coretypes.h"
25
#include "tm.h"
26
#include "rtl.h"
27
#include "df.h"
28
#include "tree.h"
29
#include "regs.h"
30
#include "hard-reg-set.h"
31
#include "insn-config.h"
32
#include "conditions.h"
33
#include "function.h"
34
#include "output.h"
35
#include "insn-attr.h"
36
#include "recog.h"
37
#include "expr.h"
38
#include "optabs.h"
39
#include "flags.h"
40
#include "debug.h"
41
#include "diagnostic-core.h"
42
#include "reload.h"
43
#include "tm-preds.h"
44
#include "tm-constrs.h"
45
#include "tm_p.h"
46
#include "target.h"
47
#include "target-def.h"
48
 
49
static void vax_option_override (void);
50
static bool vax_legitimate_address_p (enum machine_mode, rtx, bool);
51
static void vax_file_start (void);
52
static void vax_init_libfuncs (void);
53
static void vax_output_mi_thunk (FILE *, tree, HOST_WIDE_INT,
54
                                 HOST_WIDE_INT, tree);
55
static int vax_address_cost_1 (rtx);
56
static int vax_address_cost (rtx, bool);
57
static bool vax_rtx_costs (rtx, int, int, int, int *, bool);
58
static rtx vax_function_arg (cumulative_args_t, enum machine_mode,
59
                             const_tree, bool);
60
static void vax_function_arg_advance (cumulative_args_t, enum machine_mode,
61
                                      const_tree, bool);
62
static rtx vax_struct_value_rtx (tree, int);
63
static rtx vax_builtin_setjmp_frame_value (void);
64
static void vax_asm_trampoline_template (FILE *);
65
static void vax_trampoline_init (rtx, tree, rtx);
66
static int vax_return_pops_args (tree, tree, int);
67
 
68
/* Initialize the GCC target structure.  */
69
#undef TARGET_ASM_ALIGNED_HI_OP
70
#define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
71
 
72
#undef TARGET_ASM_FILE_START
73
#define TARGET_ASM_FILE_START vax_file_start
74
#undef TARGET_ASM_FILE_START_APP_OFF
75
#define TARGET_ASM_FILE_START_APP_OFF true
76
 
77
#undef TARGET_INIT_LIBFUNCS
78
#define TARGET_INIT_LIBFUNCS vax_init_libfuncs
79
 
80
#undef TARGET_ASM_OUTPUT_MI_THUNK
81
#define TARGET_ASM_OUTPUT_MI_THUNK vax_output_mi_thunk
82
#undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
83
#define TARGET_ASM_CAN_OUTPUT_MI_THUNK default_can_output_mi_thunk_no_vcall
84
 
85
#undef TARGET_RTX_COSTS
86
#define TARGET_RTX_COSTS vax_rtx_costs
87
#undef TARGET_ADDRESS_COST
88
#define TARGET_ADDRESS_COST vax_address_cost
89
 
90
#undef TARGET_PROMOTE_PROTOTYPES
91
#define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true
92
 
93
#undef TARGET_FUNCTION_ARG
94
#define TARGET_FUNCTION_ARG vax_function_arg
95
#undef TARGET_FUNCTION_ARG_ADVANCE
96
#define TARGET_FUNCTION_ARG_ADVANCE vax_function_arg_advance
97
 
98
#undef TARGET_STRUCT_VALUE_RTX
99
#define TARGET_STRUCT_VALUE_RTX vax_struct_value_rtx
100
 
101
#undef TARGET_BUILTIN_SETJMP_FRAME_VALUE
102
#define TARGET_BUILTIN_SETJMP_FRAME_VALUE vax_builtin_setjmp_frame_value
103
 
104
#undef TARGET_LEGITIMATE_ADDRESS_P
105
#define TARGET_LEGITIMATE_ADDRESS_P vax_legitimate_address_p
106
 
107
#undef TARGET_FRAME_POINTER_REQUIRED
108
#define TARGET_FRAME_POINTER_REQUIRED hook_bool_void_true
109
 
110
#undef TARGET_ASM_TRAMPOLINE_TEMPLATE
111
#define TARGET_ASM_TRAMPOLINE_TEMPLATE vax_asm_trampoline_template
112
#undef TARGET_TRAMPOLINE_INIT
113
#define TARGET_TRAMPOLINE_INIT vax_trampoline_init
114
#undef TARGET_RETURN_POPS_ARGS
115
#define TARGET_RETURN_POPS_ARGS vax_return_pops_args
116
 
117
#undef TARGET_OPTION_OVERRIDE
118
#define TARGET_OPTION_OVERRIDE vax_option_override
119
 
120
struct gcc_target targetm = TARGET_INITIALIZER;
121
 
122
/* Set global variables as needed for the options enabled.  */
123
 
124
static void
125
vax_option_override (void)
126
{
127
  /* We're VAX floating point, not IEEE floating point.  */
128
  if (TARGET_G_FLOAT)
129
    REAL_MODE_FORMAT (DFmode) = &vax_g_format;
130
 
131
#ifdef SUBTARGET_OVERRIDE_OPTIONS
132
  SUBTARGET_OVERRIDE_OPTIONS;
133
#endif
134
}
135
 
136
static void
137
vax_add_reg_cfa_offset (rtx insn, int offset, rtx src)
138
{
139
  rtx x;
140
 
141
  x = plus_constant (frame_pointer_rtx, offset);
142
  x = gen_rtx_MEM (SImode, x);
143
  x = gen_rtx_SET (VOIDmode, x, src);
144
  add_reg_note (insn, REG_CFA_OFFSET, x);
145
}
146
 
147
/* Generate the assembly code for function entry.  FILE is a stdio
148
   stream to output the code to.  SIZE is an int: how many units of
149
   temporary storage to allocate.
150
 
151
   Refer to the array `regs_ever_live' to determine which registers to
152
   save; `regs_ever_live[I]' is nonzero if register number I is ever
153
   used in the function.  This function is responsible for knowing
154
   which registers should not be saved even if used.  */
155
 
156
void
157
vax_expand_prologue (void)
158
{
159
  int regno, offset;
160
  int mask = 0;
161
  HOST_WIDE_INT size;
162
  rtx insn;
163
 
164
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
165
    if (df_regs_ever_live_p (regno) && !call_used_regs[regno])
166
      mask |= 1 << regno;
167
 
168
  insn = emit_insn (gen_procedure_entry_mask (GEN_INT (mask)));
169
  RTX_FRAME_RELATED_P (insn) = 1;
170
 
171
  /* The layout of the CALLG/S stack frame is as follows:
172
 
173
                <- CFA, AP
174
        r11
175
        r10
176
        ...     Registers saved as specified by MASK
177
        r3
178
        r2
179
        return-addr
180
        old fp
181
        old ap
182
        old psw
183
        zero
184
                <- FP, SP
185
 
186
     The rest of the prologue will adjust the SP for the local frame.  */
187
 
188
  vax_add_reg_cfa_offset (insn, 4, arg_pointer_rtx);
189
  vax_add_reg_cfa_offset (insn, 8, frame_pointer_rtx);
190
  vax_add_reg_cfa_offset (insn, 12, pc_rtx);
191
 
192
  offset = 16;
193
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
194
    if (mask & (1 << regno))
195
      {
196
        vax_add_reg_cfa_offset (insn, offset, gen_rtx_REG (SImode, regno));
197
        offset += 4;
198
      }
199
 
200
  /* Because add_reg_note pushes the notes, adding this last means that
201
     it will be processed first.  This is required to allow the other
202
     notes to be interpreted properly.  */
203
  add_reg_note (insn, REG_CFA_DEF_CFA,
204
                plus_constant (frame_pointer_rtx, offset));
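  /* For example, if MASK selects only r2 and r3, the loop above records
     them at CFA offsets 16 and 20, and the CFA itself is defined here
     as FP + 24.  */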
205
 
206
  /* Allocate the local stack frame.  */
207
  size = get_frame_size ();
208
  size -= STARTING_FRAME_OFFSET;
209
  emit_insn (gen_addsi3 (stack_pointer_rtx,
210
                         stack_pointer_rtx, GEN_INT (-size)));
211
 
212
  /* Do not allow instructions referencing local stack memory to be
213
     scheduled before the frame is allocated.  This is more pedantic
214
     than anything else, given that VAX does not currently have a
215
     scheduling description.  */
216
  emit_insn (gen_blockage ());
217
}
218
 
219
/* When debugging with stabs, we want to output an extra dummy label
220
   so that gas can distinguish between D_float and G_float prior to
221
   processing the .stabs directive identifying type double.  */
222
static void
223
vax_file_start (void)
224
{
225
  default_file_start ();
226
 
227
  if (write_symbols == DBX_DEBUG)
228
    fprintf (asm_out_file, "___vax_%c_doubles:\n", ASM_DOUBLE_CHAR);
229
}
230
 
231
/* We can use the BSD C library routines for the libgcc calls that are
232
   still generated, since that's what they boil down to anyway.  When
233
   targeting ELF, avoid the user's namespace.  */
234
 
235
static void
236
vax_init_libfuncs (void)
237
{
238
  if (TARGET_BSD_DIVMOD)
239
    {
240
      set_optab_libfunc (udiv_optab, SImode, TARGET_ELF ? "*__udiv" : "*udiv");
241
      set_optab_libfunc (umod_optab, SImode, TARGET_ELF ? "*__urem" : "*urem");
242
    }
243
}
244
 
245
/* Split the N quadword OPERANDS of INSN (an operation of rtx code CODE)
   into SImode halves, leaving the high parts in OPERANDS and the
   corresponding low parts in LOW.  */
246
 
247
static void
248
split_quadword_operands (rtx insn, enum rtx_code code, rtx * operands,
249
                         rtx * low, int n)
250
{
251
  int i;
252
 
253
  for (i = 0; i < n; i++)
254
    low[i] = 0;
255
 
256
  for (i = 0; i < n; i++)
257
    {
258
      if (MEM_P (operands[i])
259
          && (GET_CODE (XEXP (operands[i], 0)) == PRE_DEC
260
              || GET_CODE (XEXP (operands[i], 0)) == POST_INC))
261
        {
262
          rtx addr = XEXP (operands[i], 0);
263
          operands[i] = low[i] = gen_rtx_MEM (SImode, addr);
264
        }
265
      else if (optimize_size && MEM_P (operands[i])
266
               && REG_P (XEXP (operands[i], 0))
267
               && (code != MINUS || operands[1] != const0_rtx)
268
               && find_regno_note (insn, REG_DEAD,
269
                                   REGNO (XEXP (operands[i], 0))))
270
        {
271
          low[i] = gen_rtx_MEM (SImode,
272
                                gen_rtx_POST_INC (Pmode,
273
                                                  XEXP (operands[i], 0)));
274
          operands[i] = gen_rtx_MEM (SImode, XEXP (operands[i], 0));
275
        }
276
      else
277
        {
278
          low[i] = operand_subword (operands[i], 0, 0, DImode);
279
          operands[i] = operand_subword (operands[i], 1, 0, DImode);
280
        }
281
    }
282
}
283
 
284
void
285
print_operand_address (FILE * file, rtx addr)
286
{
287
  rtx orig = addr;
288
  rtx reg1, breg, ireg;
289
  rtx offset;
290
 
291
 retry:
292
  switch (GET_CODE (addr))
293
    {
294
    case MEM:
295
      fprintf (file, "*");
296
      addr = XEXP (addr, 0);
297
      goto retry;
298
 
299
    case REG:
300
      fprintf (file, "(%s)", reg_names[REGNO (addr)]);
301
      break;
302
 
303
    case PRE_DEC:
304
      fprintf (file, "-(%s)", reg_names[REGNO (XEXP (addr, 0))]);
305
      break;
306
 
307
    case POST_INC:
308
      fprintf (file, "(%s)+", reg_names[REGNO (XEXP (addr, 0))]);
309
      break;
310
 
311
    case PLUS:
312
      /* There can be either two or three things added here.  One must be a
313
         REG.  One can be either a REG or a MULT of a REG and an appropriate
314
         constant, and the third can only be a constant or a MEM.
315
 
316
         We get these two or three things and put the constant or MEM in
317
         OFFSET, the MULT or REG in IREG, and the REG in BREG.  If we have
318
         a register and can't tell yet if it is a base or index register,
319
         put it into REG1.  */
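      /* For example, (plus (reg R1) (const_int 4)) is printed as "4(r1)",
         while (plus (mult (reg R2) (const_int 4)) (reg R1)) comes out as
         "(r1)[r2]".  */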
320
 
321
      reg1 = 0; ireg = 0; breg = 0; offset = 0;
322
 
323
      if (CONSTANT_ADDRESS_P (XEXP (addr, 0))
324
          || MEM_P (XEXP (addr, 0)))
325
        {
326
          offset = XEXP (addr, 0);
327
          addr = XEXP (addr, 1);
328
        }
329
      else if (CONSTANT_ADDRESS_P (XEXP (addr, 1))
330
               || MEM_P (XEXP (addr, 1)))
331
        {
332
          offset = XEXP (addr, 1);
333
          addr = XEXP (addr, 0);
334
        }
335
      else if (GET_CODE (XEXP (addr, 1)) == MULT)
336
        {
337
          ireg = XEXP (addr, 1);
338
          addr = XEXP (addr, 0);
339
        }
340
      else if (GET_CODE (XEXP (addr, 0)) == MULT)
341
        {
342
          ireg = XEXP (addr, 0);
343
          addr = XEXP (addr, 1);
344
        }
345
      else if (REG_P (XEXP (addr, 1)))
346
        {
347
          reg1 = XEXP (addr, 1);
348
          addr = XEXP (addr, 0);
349
        }
350
      else if (REG_P (XEXP (addr, 0)))
351
        {
352
          reg1 = XEXP (addr, 0);
353
          addr = XEXP (addr, 1);
354
        }
355
      else
356
        gcc_unreachable ();
357
 
358
      if (REG_P (addr))
359
        {
360
          if (reg1)
361
            ireg = addr;
362
          else
363
            reg1 = addr;
364
        }
365
      else if (GET_CODE (addr) == MULT)
366
        ireg = addr;
367
      else
368
        {
369
          gcc_assert (GET_CODE (addr) == PLUS);
370
          if (CONSTANT_ADDRESS_P (XEXP (addr, 0))
371
              || MEM_P (XEXP (addr, 0)))
372
            {
373
              if (offset)
374
                {
375
                  if (CONST_INT_P (offset))
376
                    offset = plus_constant (XEXP (addr, 0), INTVAL (offset));
377
                  else
378
                    {
379
                      gcc_assert (CONST_INT_P (XEXP (addr, 0)));
380
                      offset = plus_constant (offset, INTVAL (XEXP (addr, 0)));
381
                    }
382
                }
383
              offset = XEXP (addr, 0);
384
            }
385
          else if (REG_P (XEXP (addr, 0)))
386
            {
387
              if (reg1)
388
                ireg = reg1, breg = XEXP (addr, 0), reg1 = 0;
389
              else
390
                reg1 = XEXP (addr, 0);
391
            }
392
          else
393
            {
394
              gcc_assert (GET_CODE (XEXP (addr, 0)) == MULT);
395
              gcc_assert (!ireg);
396
              ireg = XEXP (addr, 0);
397
            }
398
 
399
          if (CONSTANT_ADDRESS_P (XEXP (addr, 1))
400
              || MEM_P (XEXP (addr, 1)))
401
            {
402
              if (offset)
403
                {
404
                  if (CONST_INT_P (offset))
405
                    offset = plus_constant (XEXP (addr, 1), INTVAL (offset));
406
                  else
407
                    {
408
                      gcc_assert (CONST_INT_P (XEXP (addr, 1)));
409
                      offset = plus_constant (offset, INTVAL (XEXP (addr, 1)));
410
                    }
411
                }
412
              offset = XEXP (addr, 1);
413
            }
414
          else if (REG_P (XEXP (addr, 1)))
415
            {
416
              if (reg1)
417
                ireg = reg1, breg = XEXP (addr, 1), reg1 = 0;
418
              else
419
                reg1 = XEXP (addr, 1);
420
            }
421
          else
422
            {
423
              gcc_assert (GET_CODE (XEXP (addr, 1)) == MULT);
424
              gcc_assert (!ireg);
425
              ireg = XEXP (addr, 1);
426
            }
427
        }
428
 
429
      /* If REG1 is nonzero, figure out if it is a base or index register.  */
430
      if (reg1)
431
        {
432
          if (breg
433
              || (flag_pic && GET_CODE (addr) == SYMBOL_REF)
434
              || (offset
435
                  && (MEM_P (offset)
436
                      || (flag_pic && symbolic_operand (offset, SImode)))))
437
            {
438
              gcc_assert (!ireg);
439
              ireg = reg1;
440
            }
441
          else
442
            breg = reg1;
443
        }
444
 
445
      if (offset != 0)
446
        {
447
          if (flag_pic && symbolic_operand (offset, SImode))
448
            {
449
              if (breg && ireg)
450
                {
451
                  debug_rtx (orig);
452
                  output_operand_lossage ("symbol used with both base and indexed registers");
453
                }
454
 
455
#ifdef NO_EXTERNAL_INDIRECT_ADDRESS
456
              if (flag_pic > 1 && GET_CODE (offset) == CONST
457
                  && GET_CODE (XEXP (XEXP (offset, 0), 0)) == SYMBOL_REF
458
                  && !SYMBOL_REF_LOCAL_P (XEXP (XEXP (offset, 0), 0)))
459
                {
460
                  debug_rtx (orig);
461
                  output_operand_lossage ("symbol with offset used in PIC mode");
462
                }
463
#endif
464
 
465
              /* symbol(reg) isn't PIC, but symbol[reg] is.  */
466
              if (breg)
467
                {
468
                  ireg = breg;
469
                  breg = 0;
470
                }
471
 
472
            }
473
 
474
          output_address (offset);
475
        }
476
 
477
      if (breg != 0)
478
        fprintf (file, "(%s)", reg_names[REGNO (breg)]);
479
 
480
      if (ireg != 0)
481
        {
482
          if (GET_CODE (ireg) == MULT)
483
            ireg = XEXP (ireg, 0);
484
          gcc_assert (REG_P (ireg));
485
          fprintf (file, "[%s]", reg_names[REGNO (ireg)]);
486
        }
487
      break;
488
 
489
    default:
490
      output_addr_const (file, addr);
491
    }
492
}
493
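/* Print operand X to FILE, honouring the modifier CODE ('#', '|', 'c',
   'C', or one of the CONST_INT transformations handled below); with no
   special code, registers, memory references, float constants and
   '$'-prefixed immediates are printed directly.  */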
 
494
void
495
print_operand (FILE *file, rtx x, int code)
496
{
497
  if (code == '#')
498
    fputc (ASM_DOUBLE_CHAR, file);
499
  else if (code == '|')
500
    fputs (REGISTER_PREFIX, file);
501
  else if (code == 'c')
502
    fputs (cond_name (x), file);
503
  else if (code == 'C')
504
    fputs (rev_cond_name (x), file);
505
  else if (code == 'D' && CONST_INT_P (x) && INTVAL (x) < 0)
506
    fprintf (file, "$" NEG_HWI_PRINT_HEX16, INTVAL (x));
507
  else if (code == 'P' && CONST_INT_P (x))
508
    fprintf (file, "$" HOST_WIDE_INT_PRINT_DEC, INTVAL (x) + 1);
509
  else if (code == 'N' && CONST_INT_P (x))
510
    fprintf (file, "$" HOST_WIDE_INT_PRINT_DEC, ~ INTVAL (x));
511
  /* rotl instruction cannot deal with negative arguments.  */
512
  else if (code == 'R' && CONST_INT_P (x))
513
    fprintf (file, "$" HOST_WIDE_INT_PRINT_DEC, 32 - INTVAL (x));
514
  else if (code == 'H' && CONST_INT_P (x))
515
    fprintf (file, "$%d", (int) (0xffff & ~ INTVAL (x)));
516
  else if (code == 'h' && CONST_INT_P (x))
517
    fprintf (file, "$%d", (short) - INTVAL (x));
518
  else if (code == 'B' && CONST_INT_P (x))
519
    fprintf (file, "$%d", (int) (0xff & ~ INTVAL (x)));
520
  else if (code == 'b' && CONST_INT_P (x))
521
    fprintf (file, "$%d", (int) (0xff & - INTVAL (x)));
522
  else if (code == 'M' && CONST_INT_P (x))
523
    fprintf (file, "$%d", ~((1 << INTVAL (x)) - 1));
524
  else if (code == 'x' && CONST_INT_P (x))
525
    fprintf (file, HOST_WIDE_INT_PRINT_HEX, INTVAL (x));
526
  else if (REG_P (x))
527
    fprintf (file, "%s", reg_names[REGNO (x)]);
528
  else if (MEM_P (x))
529
    output_address (XEXP (x, 0));
530
  else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == SFmode)
531
    {
532
      char dstr[30];
533
      real_to_decimal (dstr, CONST_DOUBLE_REAL_VALUE (x),
534
                       sizeof (dstr), 0, 1);
535
      fprintf (file, "$0f%s", dstr);
536
    }
537
  else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == DFmode)
538
    {
539
      char dstr[30];
540
      real_to_decimal (dstr, CONST_DOUBLE_REAL_VALUE (x),
541
                       sizeof (dstr), 0, 1);
542
      fprintf (file, "$0%c%s", ASM_DOUBLE_CHAR, dstr);
543
    }
544
  else
545
    {
546
      if (flag_pic > 1 && symbolic_operand (x, SImode))
547
        {
548
          debug_rtx (x);
549
          output_operand_lossage ("symbol used as immediate operand");
550
        }
551
      putc ('$', file);
552
      output_addr_const (file, x);
553
    }
554
}
555
 
556
const char *
557
cond_name (rtx op)
558
{
559
  switch (GET_CODE (op))
560
    {
561
    case NE:
562
      return "neq";
563
    case EQ:
564
      return "eql";
565
    case GE:
566
      return "geq";
567
    case GT:
568
      return "gtr";
569
    case LE:
570
      return "leq";
571
    case LT:
572
      return "lss";
573
    case GEU:
574
      return "gequ";
575
    case GTU:
576
      return "gtru";
577
    case LEU:
578
      return "lequ";
579
    case LTU:
580
      return "lssu";
581
 
582
    default:
583
      gcc_unreachable ();
584
    }
585
}
586
 
587
const char *
588
rev_cond_name (rtx op)
589
{
590
  switch (GET_CODE (op))
591
    {
592
    case EQ:
593
      return "neq";
594
    case NE:
595
      return "eql";
596
    case LT:
597
      return "geq";
598
    case LE:
599
      return "gtr";
600
    case GT:
601
      return "leq";
602
    case GE:
603
      return "lss";
604
    case LTU:
605
      return "gequ";
606
    case LEU:
607
      return "gtru";
608
    case GTU:
609
      return "lequ";
610
    case GEU:
611
      return "lssu";
612
 
613
    default:
614
      gcc_unreachable ();
615
    }
616
}
617
 
618
static bool
619
vax_float_literal (rtx c)
620
{
621
  enum machine_mode mode;
622
  REAL_VALUE_TYPE r, s;
623
  int i;
624
 
625
  if (GET_CODE (c) != CONST_DOUBLE)
626
    return false;
627
 
628
  mode = GET_MODE (c);
629
 
630
  if (c == const_tiny_rtx[(int) mode][0]
631
      || c == const_tiny_rtx[(int) mode][1]
632
      || c == const_tiny_rtx[(int) mode][2])
633
    return true;
634
 
635
  REAL_VALUE_FROM_CONST_DOUBLE (r, c);
636
 
637
  for (i = 0; i < 7; i++)
638
    {
639
      int x = 1 << i;
640
      bool ok;
641
      REAL_VALUE_FROM_INT (s, x, 0, mode);
642
 
643
      if (REAL_VALUES_EQUAL (r, s))
644
        return true;
645
      ok = exact_real_inverse (mode, &s);
646
      gcc_assert (ok);
647
      if (REAL_VALUES_EQUAL (r, s))
648
        return true;
649
    }
650
  return false;
651
}
652
 
653
 
654
/* Return the cost in cycles of a memory address, relative to register
655
   indirect.
656
 
657
   Each of the following adds the indicated number of cycles:
658
 
659
   1 - symbolic address
660
   1 - pre-decrement
661
   1 - indexing and/or offset(register)
662
   2 - indirect */
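/* For example, vax_address_cost_1 returns 1 for "(r1)" or "8(r1)", 2 for
   "-(r1)" or an indexed "(r1)[r2]", and 3 for an indirect symbolic
   "*foo".  */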
663
 
664
 
665
static int
666
vax_address_cost_1 (rtx addr)
667
{
668
  int reg = 0, indexed = 0, indir = 0, offset = 0, predec = 0;
669
  rtx plus_op0 = 0, plus_op1 = 0;
670
 restart:
671
  switch (GET_CODE (addr))
672
    {
673
    case PRE_DEC:
674
      predec = 1;
675
    case REG:
676
    case SUBREG:
677
    case POST_INC:
678
      reg = 1;
679
      break;
680
    case MULT:
681
      indexed = 1;      /* 2 on VAX 2 */
682
      break;
683
    case CONST_INT:
684
      /* byte offsets cost nothing (on a VAX 2, they cost 1 cycle) */
685
      if (offset == 0)
686
        offset = (unsigned HOST_WIDE_INT)(INTVAL(addr)+128) > 256;
687
      break;
688
    case CONST:
689
    case SYMBOL_REF:
690
      offset = 1;       /* 2 on VAX 2 */
691
      break;
692
    case LABEL_REF:     /* this is probably a byte offset from the pc */
693
      if (offset == 0)
694
        offset = 1;
695
      break;
696
    case PLUS:
697
      if (plus_op0)
698
        plus_op1 = XEXP (addr, 0);
699
      else
700
        plus_op0 = XEXP (addr, 0);
701
      addr = XEXP (addr, 1);
702
      goto restart;
703
    case MEM:
704
      indir = 2;        /* 3 on VAX 2 */
705
      addr = XEXP (addr, 0);
706
      goto restart;
707
    default:
708
      break;
709
    }
710
 
711
  /* Up to 3 things can be added in an address.  They are stored in
712
     plus_op0, plus_op1, and addr.  */
713
 
714
  if (plus_op0)
715
    {
716
      addr = plus_op0;
717
      plus_op0 = 0;
718
      goto restart;
719
    }
720
  if (plus_op1)
721
    {
722
      addr = plus_op1;
723
      plus_op1 = 0;
724
      goto restart;
725
    }
726
  /* Indexing and register+offset can both be used (except on a VAX 2)
727
     without increasing execution time over either one alone.  */
728
  if (reg && indexed && offset)
729
    return reg + indir + offset + predec;
730
  return reg + indexed + indir + offset + predec;
731
}
732
 
733
static int
734
vax_address_cost (rtx x, bool speed ATTRIBUTE_UNUSED)
735
{
736
  return (1 + (REG_P (x) ? 0 : vax_address_cost_1 (x)));
737
}
738
 
739
/* Cost of an expression on a VAX.  This version has costs tuned for the
740
   CVAX chip (found in the VAX 3 series) with comments for variations on
741
   other models.
742
 
743
   FIXME: The costs need review, particularly for TRUNCATE, FLOAT_EXTEND
744
   and FLOAT_TRUNCATE.  We need a -mcpu option to allow provision of
745
   costs on a per cpu basis.  */
746
 
747
static bool
748
vax_rtx_costs (rtx x, int code, int outer_code, int opno ATTRIBUTE_UNUSED,
749
               int *total, bool speed ATTRIBUTE_UNUSED)
750
{
751
  enum machine_mode mode = GET_MODE (x);
752
  int i = 0;                                /* may be modified in switch */
753
  const char *fmt = GET_RTX_FORMAT (code); /* may be modified in switch */
754
 
755
  switch (code)
756
    {
757
      /* On a VAX, constants from 0..63 are cheap because they can use the
758
         1 byte literal constant format.  Compare to -1 should be made cheap
759
         so that decrement-and-branch insns can be formed more easily (if
760
         the value -1 is copied to a register some decrement-and-branch
761
         patterns will not match).  */
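      /* For example, "addl2 $5,r0" encodes the 5 in a single short-literal
         byte, while "addl2 $100,r0" needs a full longword immediate
         operand.  */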
762
    case CONST_INT:
763
      if (INTVAL (x) == 0)
764
        {
765
          *total = 0;
766
          return true;
767
        }
768
      if (outer_code == AND)
769
        {
770
          *total = ((unsigned HOST_WIDE_INT) ~INTVAL (x) <= 077) ? 1 : 2;
771
          return true;
772
        }
773
      if ((unsigned HOST_WIDE_INT) INTVAL (x) <= 077
774
          || (outer_code == COMPARE
775
              && INTVAL (x) == -1)
776
          || ((outer_code == PLUS || outer_code == MINUS)
777
              && (unsigned HOST_WIDE_INT) -INTVAL (x) <= 077))
778
        {
779
          *total = 1;
780
          return true;
781
        }
782
      /* FALLTHRU */
783
 
784
    case CONST:
785
    case LABEL_REF:
786
    case SYMBOL_REF:
787
      *total = 3;
788
      return true;
789
 
790
    case CONST_DOUBLE:
791
      if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
792
        *total = vax_float_literal (x) ? 5 : 8;
793
      else
794
        *total = ((CONST_DOUBLE_HIGH (x) == 0
795
                   && (unsigned HOST_WIDE_INT) CONST_DOUBLE_LOW (x) < 64)
796
                  || (outer_code == PLUS
797
                      && CONST_DOUBLE_HIGH (x) == -1
798
                      && (unsigned HOST_WIDE_INT)-CONST_DOUBLE_LOW (x) < 64))
799
                 ? 2 : 5;
800
      return true;
801
 
802
    case POST_INC:
803
      *total = 2;
804
      return true;              /* Implies register operand.  */
805
 
806
    case PRE_DEC:
807
      *total = 3;
808
      return true;              /* Implies register operand.  */
809
 
810
    case MULT:
811
      switch (mode)
812
        {
813
        case DFmode:
814
          *total = 16;          /* 4 on VAX 9000 */
815
          break;
816
        case SFmode:
817
          *total = 9;           /* 4 on VAX 9000, 12 on VAX 2 */
818
          break;
819
        case DImode:
820
          *total = 16;          /* 6 on VAX 9000, 28 on VAX 2 */
821
          break;
822
        case SImode:
823
        case HImode:
824
        case QImode:
825
          *total = 10;          /* 3-4 on VAX 9000, 20-28 on VAX 2 */
826
          break;
827
        default:
828
          *total = MAX_COST;    /* Mode is not supported.  */
829
          return true;
830
        }
831
      break;
832
 
833
    case UDIV:
834
      if (mode != SImode)
835
        {
836
          *total = MAX_COST;    /* Mode is not supported.  */
837
          return true;
838
        }
839
      *total = 17;
840
      break;
841
 
842
    case DIV:
843
      if (mode == DImode)
844
        *total = 30;            /* Highly variable.  */
845
      else if (mode == DFmode)
846
        /* divide takes 28 cycles if the result is not zero, 13 otherwise */
847
        *total = 24;
848
      else
849
        *total = 11;            /* 25 on VAX 2 */
850
      break;
851
 
852
    case MOD:
853
      *total = 23;
854
      break;
855
 
856
    case UMOD:
857
      if (mode != SImode)
858
        {
859
          *total = MAX_COST;    /* Mode is not supported.  */
860
          return true;
861
        }
862
      *total = 29;
863
      break;
864
 
865
    case FLOAT:
866
      *total = (6               /* 4 on VAX 9000 */
867
                + (mode == DFmode) + (GET_MODE (XEXP (x, 0)) != SImode));
868
      break;
869
 
870
    case FIX:
871
      *total = 7;               /* 17 on VAX 2 */
872
      break;
873
 
874
    case ASHIFT:
875
    case LSHIFTRT:
876
    case ASHIFTRT:
877
      if (mode == DImode)
878
        *total = 12;
879
      else
880
        *total = 10;            /* 6 on VAX 9000 */
881
      break;
882
 
883
    case ROTATE:
884
    case ROTATERT:
885
      *total = 6;               /* 5 on VAX 2, 4 on VAX 9000 */
886
      if (CONST_INT_P (XEXP (x, 1)))
887
        fmt = "e";              /* all constant rotate counts are short */
888
      break;
889
 
890
    case PLUS:
891
    case MINUS:
892
      *total = (mode == DFmode) ? 13 : 8; /* 6/8 on VAX 9000, 16/15 on VAX 2 */
893
      /* Small integer operands can use subl2 and addl2.  */
894
      if ((CONST_INT_P (XEXP (x, 1)))
895
          && (unsigned HOST_WIDE_INT)(INTVAL (XEXP (x, 1)) + 63) < 127)
896
        fmt = "e";
897
      break;
898
 
899
    case IOR:
900
    case XOR:
901
      *total = 3;
902
      break;
903
 
904
    case AND:
905
      /* AND is special because the first operand is complemented.  */
906
      *total = 3;
907
      if (CONST_INT_P (XEXP (x, 0)))
908
        {
909
          if ((unsigned HOST_WIDE_INT)~INTVAL (XEXP (x, 0)) > 63)
910
            *total = 4;
911
          fmt = "e";
912
          i = 1;
913
        }
914
      break;
915
 
916
    case NEG:
917
      if (mode == DFmode)
918
        *total = 9;
919
      else if (mode == SFmode)
920
        *total = 6;
921
      else if (mode == DImode)
922
        *total = 4;
923
      else
924
        *total = 2;
925
      break;
926
 
927
    case NOT:
928
      *total = 2;
929
      break;
930
 
931
    case ZERO_EXTRACT:
932
    case SIGN_EXTRACT:
933
      *total = 15;
934
      break;
935
 
936
    case MEM:
937
      if (mode == DImode || mode == DFmode)
938
        *total = 5;             /* 7 on VAX 2 */
939
      else
940
        *total = 3;             /* 4 on VAX 2 */
941
      x = XEXP (x, 0);
942
      if (!REG_P (x) && GET_CODE (x) != POST_INC)
943
        *total += vax_address_cost_1 (x);
944
      return true;
945
 
946
    case FLOAT_EXTEND:
947
    case FLOAT_TRUNCATE:
948
    case TRUNCATE:
949
      *total = 3;               /* FIXME: Costs need to be checked  */
950
      break;
951
 
952
    default:
953
      return false;
954
    }
955
 
956
  /* Now look inside the expression.  Operands which are not registers or
957
     short constants add to the cost.
958
 
959
     FMT and I may have been adjusted in the switch above for instructions
960
     which require special handling.  */
961
 
962
  while (*fmt++ == 'e')
963
    {
964
      rtx op = XEXP (x, i);
965
 
966
      i += 1;
967
      code = GET_CODE (op);
968
 
969
      /* A NOT is likely to be found as the first operand of an AND
970
         (in which case the relevant cost is of the operand inside
971
         the not) and not likely to be found anywhere else.  */
972
      if (code == NOT)
973
        op = XEXP (op, 0), code = GET_CODE (op);
974
 
975
      switch (code)
976
        {
977
        case CONST_INT:
978
          if ((unsigned HOST_WIDE_INT)INTVAL (op) > 63
979
              && GET_MODE (x) != QImode)
980
            *total += 1;        /* 2 on VAX 2 */
981
          break;
982
        case CONST:
983
        case LABEL_REF:
984
        case SYMBOL_REF:
985
          *total += 1;          /* 2 on VAX 2 */
986
          break;
987
        case CONST_DOUBLE:
988
          if (GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT)
989
            {
990
              /* Registers are faster than floating point constants -- even
991
                 those constants which can be encoded in a single byte.  */
992
              if (vax_float_literal (op))
993
                *total += 1;
994
              else
995
                *total += (GET_MODE (x) == DFmode) ? 3 : 2;
996
            }
997
          else
998
            {
999
              if (CONST_DOUBLE_HIGH (op) != 0
1000
                  || (unsigned HOST_WIDE_INT)CONST_DOUBLE_LOW (op) > 63)
1001
                *total += 2;
1002
            }
1003
          break;
1004
        case MEM:
1005
          *total += 1;          /* 2 on VAX 2 */
1006
          if (!REG_P (XEXP (op, 0)))
1007
            *total += vax_address_cost_1 (XEXP (op, 0));
1008
          break;
1009
        case REG:
1010
        case SUBREG:
1011
          break;
1012
        default:
1013
          *total += 1;
1014
          break;
1015
        }
1016
    }
1017
  return true;
1018
}
1019
 
1020
/* Output code to add DELTA to the first argument, and then jump to FUNCTION.
1021
   Used for C++ multiple inheritance.
1022
        .mask   ^m<r2,r3,r4,r5,r6,r7,r8,r9,r10,r11>  #conservative entry mask
1023
        addl2   $DELTA, 4(ap)   #adjust first argument
1024
        jmp     FUNCTION+2      #jump beyond FUNCTION's entry mask
1025
*/
1026
 
1027
static void
1028
vax_output_mi_thunk (FILE * file,
1029
                     tree thunk ATTRIBUTE_UNUSED,
1030
                     HOST_WIDE_INT delta,
1031
                     HOST_WIDE_INT vcall_offset ATTRIBUTE_UNUSED,
1032
                     tree function)
1033
{
1034
  fprintf (file, "\t.word 0x0ffc\n\taddl2 $" HOST_WIDE_INT_PRINT_DEC, delta);
1035
  asm_fprintf (file, ",4(%Rap)\n");
1036
  fprintf (file, "\tjmp ");
1037
  assemble_name (file,  XSTR (XEXP (DECL_RTL (function), 0), 0));
1038
  fprintf (file, "+2\n");
1039
}
1040
 
1041
static rtx
1042
vax_struct_value_rtx (tree fntype ATTRIBUTE_UNUSED,
1043
                      int incoming ATTRIBUTE_UNUSED)
1044
{
1045
  return gen_rtx_REG (Pmode, VAX_STRUCT_VALUE_REGNUM);
1046
}
1047
 
1048
static rtx
1049
vax_builtin_setjmp_frame_value (void)
1050
{
1051
  return hard_frame_pointer_rtx;
1052
}
1053
 
1054
/* Worker function for NOTICE_UPDATE_CC.  */
1055
 
1056
void
1057
vax_notice_update_cc (rtx exp, rtx insn ATTRIBUTE_UNUSED)
1058
{
1059
  if (GET_CODE (exp) == SET)
1060
    {
1061
      if (GET_CODE (SET_SRC (exp)) == CALL)
1062
        CC_STATUS_INIT;
1063
      else if (GET_CODE (SET_DEST (exp)) != ZERO_EXTRACT
1064
               && GET_CODE (SET_DEST (exp)) != PC)
1065
        {
1066
          cc_status.flags = 0;
1067
          /* The integer operations below don't set carry or
1068
             set it in an incompatible way.  That's ok though
1069
             as the Z bit is all we need when doing unsigned
1070
             comparisons on the result of these insns (since
1071
             they're always with 0).  Set CC_NO_OVERFLOW to
1072
             generate the correct unsigned branches.  */
1073
          switch (GET_CODE (SET_SRC (exp)))
1074
            {
1075
            case NEG:
1076
              if (GET_MODE_CLASS (GET_MODE (exp)) == MODE_FLOAT)
1077
                break;
1078
            case AND:
1079
            case IOR:
1080
            case XOR:
1081
            case NOT:
1082
            case MEM:
1083
            case REG:
1084
              cc_status.flags = CC_NO_OVERFLOW;
1085
              break;
1086
            default:
1087
              break;
1088
            }
1089
          cc_status.value1 = SET_DEST (exp);
1090
          cc_status.value2 = SET_SRC (exp);
1091
        }
1092
    }
1093
  else if (GET_CODE (exp) == PARALLEL
1094
           && GET_CODE (XVECEXP (exp, 0, 0)) == SET)
1095
    {
1096
      if (GET_CODE (SET_SRC (XVECEXP (exp, 0, 0))) == CALL)
1097
        CC_STATUS_INIT;
1098
      else if (GET_CODE (SET_DEST (XVECEXP (exp, 0, 0))) != PC)
1099
        {
1100
          cc_status.flags = 0;
1101
          cc_status.value1 = SET_DEST (XVECEXP (exp, 0, 0));
1102
          cc_status.value2 = SET_SRC (XVECEXP (exp, 0, 0));
1103
        }
1104
      else
1105
        /* PARALLELs whose first element sets the PC are aob,
1106
           sob insns.  They do change the cc's.  */
1107
        CC_STATUS_INIT;
1108
    }
1109
  else
1110
    CC_STATUS_INIT;
1111
  if (cc_status.value1 && REG_P (cc_status.value1)
1112
      && cc_status.value2
1113
      && reg_overlap_mentioned_p (cc_status.value1, cc_status.value2))
1114
    cc_status.value2 = 0;
1115
  if (cc_status.value1 && MEM_P (cc_status.value1)
1116
      && cc_status.value2
1117
      && MEM_P (cc_status.value2))
1118
    cc_status.value2 = 0;
1119
  /* Actual condition, one line up, should be that value2's address
1120
     depends on value1, but that is too much of a pain.  */
1121
}
1122
 
1123
/* Output integer move instructions.  */
1124
 
1125
const char *
1126
vax_output_int_move (rtx insn ATTRIBUTE_UNUSED, rtx *operands,
1127
                     enum machine_mode mode)
1128
{
1129
  rtx hi[3], lo[3];
1130
  const char *pattern_hi, *pattern_lo;
1131
 
1132
  switch (mode)
1133
    {
1134
    case DImode:
1135
      if (operands[1] == const0_rtx)
1136
        return "clrq %0";
1137
      if (TARGET_QMATH && optimize_size
1138
          && (CONST_INT_P (operands[1])
1139
              || GET_CODE (operands[1]) == CONST_DOUBLE))
1140
        {
1141
          unsigned HOST_WIDE_INT hval, lval;
1142
          int n;
1143
 
1144
          if (GET_CODE (operands[1]) == CONST_DOUBLE)
1145
            {
1146
              gcc_assert (HOST_BITS_PER_WIDE_INT != 64);
1147
 
1148
              /* Make sure only the low 32 bits are valid.  */
1149
              lval = CONST_DOUBLE_LOW (operands[1]) & 0xffffffff;
1150
              hval = CONST_DOUBLE_HIGH (operands[1]) & 0xffffffff;
1151
            }
1152
          else
1153
            {
1154
              lval = INTVAL (operands[1]);
1155
              hval = 0;
1156
            }
1157
 
1158
          /* Here we check whether the 64-bit value is really a 6-bit value
1159
             shifted left by some arbitrary amount.  If so, we can use ashq
1160
             to shift it into place, saving 7 bytes (1 addr-mode byte +
1161
             8 immediate bytes - 1 shift byte - 1 short-literal byte).  */
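          /* For example, the quadword constant 640 (0x280) gives
             lval & -lval == 0x80, so n == 7 and lval >> 7 == 5 < 64, and
             we emit "ashq $7,$5,%0" instead of an 8-byte immediate.  */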
1162
          if (lval != 0
1163
              && (n = exact_log2 (lval & (- lval))) != -1
1164
              && (lval >> n) < 64)
1165
            {
1166
              lval >>= n;
1167
 
1168
              /* On 32bit platforms, if the 6bits didn't overflow into the
1169
                 upper 32bit value, that value had better be 0.  If we have
1170
                 overflowed, make sure it wasn't by too much.  */
1171
              if (HOST_BITS_PER_WIDE_INT == 32 && hval != 0)
1172
                {
1173
                  if (n <= 26 || hval >= ((unsigned)1 << (n - 26)))
1174
                    n = 0;       /* failure */
1175
                  else
1176
                    lval |= hval << (32 - n);
1177
                }
1178
              /*  If n is 0, then ashq is not the best way to emit this.  */
1179
              if (n > 0)
1180
                {
1181
                  operands[1] = GEN_INT (lval);
1182
                  operands[2] = GEN_INT (n);
1183
                  return "ashq %2,%1,%0";
1184
                }
1185
#if HOST_BITS_PER_WIDE_INT == 32
1186
            }
1187
          /* On 32bit platforms, if the low 32bit value is 0, check the
1188
             upper 32bit value.  */
1189
          else if (hval != 0
1190
                   && (n = exact_log2 (hval & (- hval)) - 1) != -1
1191
                   && (hval >> n) < 64)
1192
            {
1193
              operands[1] = GEN_INT (hval >> n);
1194
              operands[2] = GEN_INT (n + 32);
1195
              return "ashq %2,%1,%0";
1196
#endif
1197
            }
1198
        }
1199
 
1200
      if (TARGET_QMATH
1201
          && (!MEM_P (operands[0])
1202
              || GET_CODE (XEXP (operands[0], 0)) == PRE_DEC
1203
              || GET_CODE (XEXP (operands[0], 0)) == POST_INC
1204
              || !illegal_addsub_di_memory_operand (operands[0], DImode))
1205
          && ((CONST_INT_P (operands[1])
1206
               && (unsigned HOST_WIDE_INT) INTVAL (operands[1]) >= 64)
1207
              || GET_CODE (operands[1]) == CONST_DOUBLE))
1208
        {
1209
          hi[0] = operands[0];
1210
          hi[1] = operands[1];
1211
 
1212
          split_quadword_operands (insn, SET, hi, lo, 2);
1213
 
1214
          pattern_lo = vax_output_int_move (NULL, lo, SImode);
1215
          pattern_hi = vax_output_int_move (NULL, hi, SImode);
1216
 
1217
          /* If the patterns are just movl/movl or pushl/pushl, then a movq
1218
             will be shorter (1 opcode byte + 1 addr-mode byte + 8 immediate
1219
             value bytes vs. 2 opcode bytes + 2 addr-mode bytes + 8 immediate
1220
             value bytes).  */
1221
          if ((!strncmp (pattern_lo, "movl", 4)
1222
              && !strncmp (pattern_hi, "movl", 4))
1223
              || (!strncmp (pattern_lo, "pushl", 5)
1224
                  && !strncmp (pattern_hi, "pushl", 5)))
1225
            return "movq %1,%0";
1226
 
1227
          if (MEM_P (operands[0])
1228
              && GET_CODE (XEXP (operands[0], 0)) == PRE_DEC)
1229
            {
1230
              output_asm_insn (pattern_hi, hi);
1231
              operands[0] = lo[0];
1232
              operands[1] = lo[1];
1233
              operands[2] = lo[2];
1234
              return pattern_lo;
1235
            }
1236
          else
1237
            {
1238
              output_asm_insn (pattern_lo, lo);
1239
              operands[0] = hi[0];
1240
              operands[1] = hi[1];
1241
              operands[2] = hi[2];
1242
              return pattern_hi;
1243
            }
1244
        }
1245
      return "movq %1,%0";
1246
 
1247
    case SImode:
1248
      if (symbolic_operand (operands[1], SImode))
1249
        {
1250
          if (push_operand (operands[0], SImode))
1251
            return "pushab %a1";
1252
          return "movab %a1,%0";
1253
        }
1254
 
1255
      if (operands[1] == const0_rtx)
1256
        {
1257
          if (push_operand (operands[1], SImode))
1258
            return "pushl %1";
1259
          return "clrl %0";
1260
        }
1261
 
1262
      if (CONST_INT_P (operands[1])
1263
          && (unsigned HOST_WIDE_INT) INTVAL (operands[1]) >= 64)
1264
        {
1265
          HOST_WIDE_INT i = INTVAL (operands[1]);
1266
          int n;
1267
          if ((unsigned HOST_WIDE_INT)(~i) < 64)
1268
            return "mcoml %N1,%0";
1269
          if ((unsigned HOST_WIDE_INT)i < 0x100)
1270
            return "movzbl %1,%0";
1271
          if (i >= -0x80 && i < 0)
1272
            return "cvtbl %1,%0";
1273
          if (optimize_size
1274
              && (n = exact_log2 (i & (-i))) != -1
1275
              && ((unsigned HOST_WIDE_INT)i >> n) < 64)
1276
            {
1277
              operands[1] = GEN_INT ((unsigned HOST_WIDE_INT)i >> n);
1278
              operands[2] = GEN_INT (n);
1279
              return "ashl %2,%1,%0";
1280
            }
1281
          if ((unsigned HOST_WIDE_INT)i < 0x10000)
1282
            return "movzwl %1,%0";
1283
          if (i >= -0x8000 && i < 0)
1284
            return "cvtwl %1,%0";
1285
        }
1286
      if (push_operand (operands[0], SImode))
1287
        return "pushl %1";
1288
      return "movl %1,%0";
1289
 
1290
    case HImode:
1291
      if (CONST_INT_P (operands[1]))
1292
        {
1293
          HOST_WIDE_INT i = INTVAL (operands[1]);
1294
          if (i == 0)
1295
            return "clrw %0";
1296
          else if ((unsigned HOST_WIDE_INT)i < 64)
1297
            return "movw %1,%0";
1298
          else if ((unsigned HOST_WIDE_INT)~i < 64)
1299
            return "mcomw %H1,%0";
1300
          else if ((unsigned HOST_WIDE_INT)i < 256)
1301
            return "movzbw %1,%0";
1302
          else if (i >= -0x80 && i < 0)
1303
            return "cvtbw %1,%0";
1304
        }
1305
      return "movw %1,%0";
1306
 
1307
    case QImode:
1308
      if (CONST_INT_P (operands[1]))
1309
        {
1310
          HOST_WIDE_INT i = INTVAL (operands[1]);
1311
          if (i == 0)
1312
            return "clrb %0";
1313
          else if ((unsigned HOST_WIDE_INT)~i < 64)
1314
            return "mcomb %B1,%0";
1315
        }
1316
      return "movb %1,%0";
1317
 
1318
    default:
1319
      gcc_unreachable ();
1320
    }
1321
}
1322
 
1323
/* Output integer add instructions.
1324
 
1325
   The space-time-opcode tradeoffs for addition vary by model of VAX.
1326
 
1327
   On a VAX 3 "movab (r1)[r2],r3" is faster than "addl3 r1,r2,r3",
1328
   but it is not faster on other models.
1329
 
1330
   "movab #(r1),r2" is usually shorter than "addl3 #,r1,r2", and is
1331
   faster on a VAX 3, but some VAXen (e.g. VAX 9000) will stall if
1332
   a register is used in an address too soon after it is set.
1333
   Compromise by using movab only when it is shorter than the add
1334
   or the base register in the address is one of sp, ap, and fp,
1335
   which are not modified very often.  */
1336
 
1337
const char *
1338
vax_output_int_add (rtx insn, rtx *operands, enum machine_mode mode)
1339
{
1340
  switch (mode)
1341
    {
1342
    case DImode:
1343
      {
1344
        rtx low[3];
1345
        const char *pattern;
1346
        int carry = 1;
1347
        bool sub;
1348
 
1349
        if (TARGET_QMATH && 0)
1350
          debug_rtx (insn);
1351
 
1352
        split_quadword_operands (insn, PLUS, operands, low, 3);
1353
 
1354
        if (TARGET_QMATH)
1355
          {
1356
            gcc_assert (rtx_equal_p (operands[0], operands[1]));
1357
#ifdef NO_EXTERNAL_INDIRECT_ADDRESS
1358
            gcc_assert (!flag_pic || !external_memory_operand (low[2], SImode));
1359
            gcc_assert (!flag_pic || !external_memory_operand (low[0], SImode));
1360
#endif
1361
 
1362
            /* No reason to add a 0 to the low part and thus no carry, so just
1363
               emit the appropriate add/sub instruction.  */
1364
            if (low[2] == const0_rtx)
1365
              return vax_output_int_add (NULL, operands, SImode);
1366
 
1367
            /* Are we doing addition or subtraction?  */
1368
            sub = CONST_INT_P (operands[2]) && INTVAL (operands[2]) < 0;
1369
 
1370
            /* We can't use vax_output_int_add since some of the patterns
1371
               don't modify the carry bit.  */
1372
            if (sub)
1373
              {
1374
                if (low[2] == constm1_rtx)
1375
                  pattern = "decl %0";
1376
                else
1377
                  pattern = "subl2 $%n2,%0";
1378
              }
1379
            else
1380
              {
1381
                if (low[2] == const1_rtx)
1382
                  pattern = "incl %0";
1383
                else
1384
                  pattern = "addl2 %2,%0";
1385
              }
1386
            output_asm_insn (pattern, low);
1387
 
1388
            /* In 2's complement, -n = ~n + 1.  Since we are dealing with
1389
               two 32-bit parts, we complement each and then add one to the
1390
               low part.  We know that the low part can't overflow since
1391
               its value can never be 0.  */
1392
            if (sub)
1393
                return "sbwc %N2,%0";
1394
            return "adwc %2,%0";
1395
          }
1396
 
1397
        /* Add low parts.  */
1398
        if (rtx_equal_p (operands[0], operands[1]))
1399
          {
1400
            if (low[2] == const0_rtx)
1401
        /* Should examine operand, punt if not POST_INC.  */
1402
              pattern = "tstl %0", carry = 0;
1403
            else if (low[2] == const1_rtx)
1404
              pattern = "incl %0";
1405
            else
1406
              pattern = "addl2 %2,%0";
1407
          }
1408
        else
1409
          {
1410
            if (low[2] == const0_rtx)
1411
              pattern = "movl %1,%0", carry = 0;
1412
            else
1413
              pattern = "addl3 %2,%1,%0";
1414
          }
1415
        if (pattern)
1416
          output_asm_insn (pattern, low);
1417
        if (!carry)
1418
          /* If CARRY is 0, we don't have any carry value to worry about.  */
1419
          return get_insn_template (CODE_FOR_addsi3, insn);
1420
        /* %0 = C + %1 + %2 */
1421
        if (!rtx_equal_p (operands[0], operands[1]))
1422
          output_asm_insn ((operands[1] == const0_rtx
1423
                            ? "clrl %0"
1424
                            : "movl %1,%0"), operands);
1425
        return "adwc %2,%0";
1426
      }
1427
 
1428
    case SImode:
1429
      if (rtx_equal_p (operands[0], operands[1]))
1430
        {
1431
          if (operands[2] == const1_rtx)
1432
            return "incl %0";
1433
          if (operands[2] == constm1_rtx)
1434
            return "decl %0";
1435
          if (CONST_INT_P (operands[2])
1436
              && (unsigned HOST_WIDE_INT) (- INTVAL (operands[2])) < 64)
1437
            return "subl2 $%n2,%0";
1438
          if (CONST_INT_P (operands[2])
1439
              && (unsigned HOST_WIDE_INT) INTVAL (operands[2]) >= 64
1440
              && REG_P (operands[1])
1441
              && ((INTVAL (operands[2]) < 32767 && INTVAL (operands[2]) > -32768)
1442
                   || REGNO (operands[1]) > 11))
1443
            return "movab %c2(%1),%0";
1444
          if (REG_P (operands[0]) && symbolic_operand (operands[2], SImode))
1445
            return "movab %a2[%0],%0";
1446
          return "addl2 %2,%0";
1447
        }
1448
 
1449
      if (rtx_equal_p (operands[0], operands[2]))
1450
        {
1451
          if (REG_P (operands[0]) && symbolic_operand (operands[1], SImode))
1452
            return "movab %a1[%0],%0";
1453
          return "addl2 %1,%0";
1454
        }
1455
 
1456
      if (CONST_INT_P (operands[2])
1457
          && INTVAL (operands[2]) < 32767
1458
          && INTVAL (operands[2]) > -32768
1459
          && REG_P (operands[1])
1460
          && push_operand (operands[0], SImode))
1461
        return "pushab %c2(%1)";
1462
 
1463
      if (CONST_INT_P (operands[2])
1464
          && (unsigned HOST_WIDE_INT) (- INTVAL (operands[2])) < 64)
1465
        return "subl3 $%n2,%1,%0";
1466
 
1467
      if (CONST_INT_P (operands[2])
1468
          && (unsigned HOST_WIDE_INT) INTVAL (operands[2]) >= 64
1469
          && REG_P (operands[1])
1470
          && ((INTVAL (operands[2]) < 32767 && INTVAL (operands[2]) > -32768)
1471
               || REGNO (operands[1]) > 11))
1472
        return "movab %c2(%1),%0";
1473
 
1474
      /* Add this if using gcc on a VAX 3xxx:
1475
      if (REG_P (operands[1]) && REG_P (operands[2]))
1476
        return "movab (%1)[%2],%0";
1477
      */
1478
 
1479
      if (REG_P (operands[1]) && symbolic_operand (operands[2], SImode))
1480
        {
1481
          if (push_operand (operands[0], SImode))
1482
            return "pushab %a2[%1]";
1483
          return "movab %a2[%1],%0";
1484
        }
1485
 
1486
      if (REG_P (operands[2]) && symbolic_operand (operands[1], SImode))
1487
        {
1488
          if (push_operand (operands[0], SImode))
1489
            return "pushab %a1[%2]";
1490
          return "movab %a1[%2],%0";
1491
        }
1492
 
1493
      if (flag_pic && REG_P (operands[0])
1494
          && symbolic_operand (operands[2], SImode))
1495
        return "movab %a2,%0;addl2 %1,%0";
1496
 
1497
      if (flag_pic
1498
          && (symbolic_operand (operands[1], SImode)
1499
              || symbolic_operand (operands[2], SImode)))
1500
        debug_rtx (insn);
1501
 
1502
      return "addl3 %1,%2,%0";
1503
 
1504
    case HImode:
1505
      if (rtx_equal_p (operands[0], operands[1]))
1506
        {
1507
          if (operands[2] == const1_rtx)
1508
            return "incw %0";
1509
          if (operands[2] == constm1_rtx)
1510
            return "decw %0";
1511
          if (CONST_INT_P (operands[2])
1512
              && (unsigned HOST_WIDE_INT) (- INTVAL (operands[2])) < 64)
1513
            return "subw2 $%n2,%0";
1514
          return "addw2 %2,%0";
1515
        }
1516
      if (rtx_equal_p (operands[0], operands[2]))
1517
        return "addw2 %1,%0";
1518
      if (CONST_INT_P (operands[2])
1519
          && (unsigned HOST_WIDE_INT) (- INTVAL (operands[2])) < 64)
1520
        return "subw3 $%n2,%1,%0";
1521
      return "addw3 %1,%2,%0";
1522
 
1523
    case QImode:
1524
      if (rtx_equal_p (operands[0], operands[1]))
1525
        {
1526
          if (operands[2] == const1_rtx)
1527
            return "incb %0";
1528
          if (operands[2] == constm1_rtx)
1529
            return "decb %0";
1530
          if (CONST_INT_P (operands[2])
1531
              && (unsigned HOST_WIDE_INT) (- INTVAL (operands[2])) < 64)
1532
            return "subb2 $%n2,%0";
1533
          return "addb2 %2,%0";
1534
        }
1535
      if (rtx_equal_p (operands[0], operands[2]))
1536
        return "addb2 %1,%0";
1537
      if (CONST_INT_P (operands[2])
1538
          && (unsigned HOST_WIDE_INT) (- INTVAL (operands[2])) < 64)
1539
        return "subb3 $%n2,%1,%0";
1540
      return "addb3 %1,%2,%0";
1541
 
1542
    default:
1543
      gcc_unreachable ();
1544
    }
1545
}
1546
 
1547
const char *
1548
vax_output_int_subtract (rtx insn, rtx *operands, enum machine_mode mode)
1549
{
1550
  switch (mode)
1551
    {
1552
    case DImode:
1553
      {
1554
        rtx low[3];
1555
        const char *pattern;
1556
        int carry = 1;
1557
 
1558
        if (TARGET_QMATH && 0)
1559
          debug_rtx (insn);
1560
 
1561
        split_quadword_operands (insn, MINUS, operands, low, 3);
1562
 
1563
        if (TARGET_QMATH)
1564
          {
1565
            if (operands[1] == const0_rtx && low[1] == const0_rtx)
1566
              {
1567
                /* Negation is tricky.  It's basically complement and increment.
1568
                   Negate hi, then lo, and subtract the carry back.  */
1569
                if ((MEM_P (low[0]) && GET_CODE (XEXP (low[0], 0)) == POST_INC)
1570
                    || (MEM_P (operands[0])
1571
                        && GET_CODE (XEXP (operands[0], 0)) == POST_INC))
1572
                  fatal_insn ("illegal operand detected", insn);
1573
                output_asm_insn ("mnegl %2,%0", operands);
1574
                output_asm_insn ("mnegl %2,%0", low);
1575
                return "sbwc $0,%0";
1576
              }
1577
            gcc_assert (rtx_equal_p (operands[0], operands[1]));
1578
            gcc_assert (rtx_equal_p (low[0], low[1]));
1579
            if (low[2] == const1_rtx)
1580
              output_asm_insn ("decl %0", low);
1581
            else
1582
              output_asm_insn ("subl2 %2,%0", low);
1583
            return "sbwc %2,%0";
1584
          }
1585
 
1586
        /* Subtract low parts.  */
1587
        if (rtx_equal_p (operands[0], operands[1]))
1588
          {
1589
            if (low[2] == const0_rtx)
1590
              pattern = 0, carry = 0;
1591
            else if (low[2] == constm1_rtx)
1592
              pattern = "decl %0";
1593
            else
1594
              pattern = "subl2 %2,%0";
1595
          }
1596
        else
1597
          {
1598
            if (low[2] == constm1_rtx)
1599
              pattern = "decl %0";
1600
            else if (low[2] == const0_rtx)
1601
              pattern = get_insn_template (CODE_FOR_movsi, insn), carry = 0;
1602
            else
1603
              pattern = "subl3 %2,%1,%0";
1604
          }
1605
        if (pattern)
1606
          output_asm_insn (pattern, low);
1607
        if (carry)
1608
          {
1609
            if (!rtx_equal_p (operands[0], operands[1]))
1610
              return "movl %1,%0;sbwc %2,%0";
1611
            return "sbwc %2,%0";
1612
            /* %0 = %2 - %1 - C */
1613
          }
1614
        return get_insn_template (CODE_FOR_subsi3, insn);
1615
      }
1616
 
1617
    default:
1618
      gcc_unreachable ();
1619
  }
1620
}

/* True if X is an rtx for a constant that is a valid address.  */

bool
legitimate_constant_address_p (rtx x)
{
  if (GET_CODE (x) == LABEL_REF || GET_CODE (x) == SYMBOL_REF
          || CONST_INT_P (x) || GET_CODE (x) == HIGH)
    return true;
  if (GET_CODE (x) != CONST)
    return false;
#ifdef NO_EXTERNAL_INDIRECT_ADDRESS
  if (flag_pic
      && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
      && !SYMBOL_REF_LOCAL_P (XEXP (XEXP (x, 0), 0)))
    return false;
#endif
  return true;
}

/* The other macros defined here are used only in legitimate_address_p ().  */

/* Nonzero if X is a hard reg that can be used as an index
   or, if not strict, if it is a pseudo reg.  */
#define INDEX_REGISTER_P(X, STRICT) \
(REG_P (X) && (!(STRICT) || REGNO_OK_FOR_INDEX_P (REGNO (X))))

/* Nonzero if X is a hard reg that can be used as a base reg
   or, if not strict, if it is a pseudo reg.  */
#define BASE_REGISTER_P(X, STRICT) \
(REG_P (X) && (!(STRICT) || REGNO_OK_FOR_BASE_P (REGNO (X))))

#ifdef NO_EXTERNAL_INDIRECT_ADDRESS

/* Re-definition of CONSTANT_ADDRESS_P, which is true only when there
   are no SYMBOL_REFs for external symbols present.  */

static bool
indirectable_constant_address_p (rtx x, bool indirect)
{
  if (GET_CODE (x) == SYMBOL_REF)
    return !flag_pic || SYMBOL_REF_LOCAL_P (x) || !indirect;

  if (GET_CODE (x) == CONST)
    return !flag_pic
           || GET_CODE (XEXP (XEXP (x, 0), 0)) != SYMBOL_REF
           || SYMBOL_REF_LOCAL_P (XEXP (XEXP (x, 0), 0));

  return CONSTANT_ADDRESS_P (x);
}

#else /* not NO_EXTERNAL_INDIRECT_ADDRESS */

static bool
indirectable_constant_address_p (rtx x, bool indirect ATTRIBUTE_UNUSED)
{
  return CONSTANT_ADDRESS_P (x);
}

#endif /* not NO_EXTERNAL_INDIRECT_ADDRESS */

/* True if X is an address which can be indirected.  External symbols
   could be in a sharable image library, so we disallow those.  */

static bool
indirectable_address_p (rtx x, bool strict, bool indirect)
{
  if (indirectable_constant_address_p (x, indirect)
      || BASE_REGISTER_P (x, strict))
    return true;
  if (GET_CODE (x) != PLUS
      || !BASE_REGISTER_P (XEXP (x, 0), strict)
      || (flag_pic && !CONST_INT_P (XEXP (x, 1))))
    return false;
  return indirectable_constant_address_p (XEXP (x, 1), indirect);
}

/* Return true if x is a valid address not using indexing.
   (This much is the easy part.)  */
static bool
nonindexed_address_p (rtx x, bool strict)
{
  rtx xfoo0;
  if (REG_P (x))
    {
      if (! reload_in_progress
          || reg_equiv_mem (REGNO (x)) == 0
          || indirectable_address_p (reg_equiv_mem (REGNO (x)), strict, false))
        return true;
    }
  if (indirectable_constant_address_p (x, false))
    return true;
  if (indirectable_address_p (x, strict, false))
    return true;
  xfoo0 = XEXP (x, 0);
  if (MEM_P (x) && indirectable_address_p (xfoo0, strict, true))
    return true;
  if ((GET_CODE (x) == PRE_DEC || GET_CODE (x) == POST_INC)
      && BASE_REGISTER_P (xfoo0, strict))
    return true;
  return false;
}

/* True if PROD is either a register multiplied by the size of mode MODE
   (where that size is at most 8 bytes), or just a register if MODE is
   one byte.  */
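
/* For example, with MODE == SImode (4 bytes) a valid index term looks like
   (mult (reg) (const_int 4)); for QImode the scale factor is 1, so a bare
   register suffices.  */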

static bool
index_term_p (rtx prod, enum machine_mode mode, bool strict)
{
  rtx xfoo0, xfoo1;

  if (GET_MODE_SIZE (mode) == 1)
    return BASE_REGISTER_P (prod, strict);

  if (GET_CODE (prod) != MULT || GET_MODE_SIZE (mode) > 8)
    return false;

  xfoo0 = XEXP (prod, 0);
  xfoo1 = XEXP (prod, 1);

  if (CONST_INT_P (xfoo0)
      && INTVAL (xfoo0) == (int)GET_MODE_SIZE (mode)
      && INDEX_REGISTER_P (xfoo1, strict))
    return true;

  if (CONST_INT_P (xfoo1)
      && INTVAL (xfoo1) == (int)GET_MODE_SIZE (mode)
      && INDEX_REGISTER_P (xfoo0, strict))
    return true;

  return false;
}

/* Return true if X is the sum of a register
   and a valid index term for mode MODE.  */
static bool
reg_plus_index_p (rtx x, enum machine_mode mode, bool strict)
{
  rtx xfoo0, xfoo1;

  if (GET_CODE (x) != PLUS)
    return false;

  xfoo0 = XEXP (x, 0);
  xfoo1 = XEXP (x, 1);

  if (BASE_REGISTER_P (xfoo0, strict) && index_term_p (xfoo1, mode, strict))
    return true;

  if (BASE_REGISTER_P (xfoo1, strict) && index_term_p (xfoo0, mode, strict))
    return true;

  return false;
}

/* Return true if xfoo0 and xfoo1 constitute a valid indexed address.  */
static bool
indexable_address_p (rtx xfoo0, rtx xfoo1, enum machine_mode mode, bool strict)
{
  if (!CONSTANT_ADDRESS_P (xfoo0))
    return false;
  if (BASE_REGISTER_P (xfoo1, strict))
    return !flag_pic || mode == QImode;
  if (flag_pic && symbolic_operand (xfoo0, SImode))
    return false;
  return reg_plus_index_p (xfoo1, mode, strict);
}

/* legitimate_address_p returns true if it recognizes an RTL expression "x"
   that is a valid memory address for an instruction.
   The MODE argument is the machine mode for the MEM expression
   that wants to use this address.  */
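
/* A few illustrative forms accepted below (not exhaustive): a base register
   (reg), autoincrement (post_inc (reg)), register plus displacement
   (plus (reg) (const_int D)), and indexed addresses such as
   (plus (mult (reg) (const_int 4)) (reg)) for an SImode operand.  */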
bool
vax_legitimate_address_p (enum machine_mode mode, rtx x, bool strict)
{
  rtx xfoo0, xfoo1;

  if (nonindexed_address_p (x, strict))
    return true;

  if (GET_CODE (x) != PLUS)
    return false;

  /* Handle <address>[index] represented with index-sum outermost */

  xfoo0 = XEXP (x, 0);
  xfoo1 = XEXP (x, 1);

  if (index_term_p (xfoo0, mode, strict)
      && nonindexed_address_p (xfoo1, strict))
    return true;

  if (index_term_p (xfoo1, mode, strict)
      && nonindexed_address_p (xfoo0, strict))
    return true;

  /* Handle offset(reg)[index] with offset added outermost */

  if (indexable_address_p (xfoo0, xfoo1, mode, strict)
      || indexable_address_p (xfoo1, xfoo0, mode, strict))
    return true;

  return false;
}

/* Return true if x (a legitimate address expression) has an effect that
   depends on the machine mode it is used for.  On the VAX, the predecrement
   and postincrement addresses depend thus (the amount of decrement or
   increment being the length of the operand) and all indexed addresses
   depend thus (because the index scale factor is the length of the
   operand).  */
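
/* For example, (plus (reg) (mult (reg) (const_int 4))) only makes sense for
   a 4-byte (SImode) operand; the same address for a 2-byte operand would
   need a scale factor of 2, so indexed addresses are mode dependent.  */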

bool
vax_mode_dependent_address_p (rtx x)
{
  rtx xfoo0, xfoo1;

  /* Auto-increment cases are now dealt with generically in recog.c.  */
  if (GET_CODE (x) != PLUS)
    return false;

  xfoo0 = XEXP (x, 0);
  xfoo1 = XEXP (x, 1);

  if (CONST_INT_P (xfoo0) && REG_P (xfoo1))
    return false;
  if (CONST_INT_P (xfoo1) && REG_P (xfoo0))
    return false;
  if (!flag_pic && CONSTANT_ADDRESS_P (xfoo0) && REG_P (xfoo1))
    return false;
  if (!flag_pic && CONSTANT_ADDRESS_P (xfoo1) && REG_P (xfoo0))
    return false;

  return true;
}
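
/* If X is a DImode memory operand whose address the DImode add/subtract
   patterns cannot use directly, load the address into a new pseudo and
   rebuild the MEM around that register, re-applying any PIC offset that was
   split out of a CONST.  Otherwise return X unchanged.  */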

static rtx
fixup_mathdi_operand (rtx x, enum machine_mode mode)
{
  if (illegal_addsub_di_memory_operand (x, mode))
    {
      rtx addr = XEXP (x, 0);
      rtx temp = gen_reg_rtx (Pmode);
      rtx offset = 0;
#ifdef NO_EXTERNAL_INDIRECT_ADDRESS
      if (GET_CODE (addr) == CONST && flag_pic)
        {
          offset = XEXP (XEXP (addr, 0), 1);
          addr = XEXP (XEXP (addr, 0), 0);
        }
#endif
      emit_move_insn (temp, addr);
      if (offset)
        temp = gen_rtx_PLUS (Pmode, temp, offset);
      x = gen_rtx_MEM (DImode, temp);
    }
  return x;
}

void
vax_expand_addsub_di_operands (rtx * operands, enum rtx_code code)
{
  int hi_only = operand_subword (operands[2], 0, 0, DImode) == const0_rtx;
  rtx temp;

  rtx (*gen_old_insn)(rtx, rtx, rtx);
  rtx (*gen_si_insn)(rtx, rtx, rtx);
  rtx (*gen_insn)(rtx, rtx, rtx);

  if (code == PLUS)
    {
      gen_old_insn = gen_adddi3_old;
      gen_si_insn = gen_addsi3;
      gen_insn = gen_adcdi3;
    }
  else if (code == MINUS)
    {
      gen_old_insn = gen_subdi3_old;
      gen_si_insn = gen_subsi3;
      gen_insn = gen_sbcdi3;
    }
  else
    gcc_unreachable ();

  /* If this is addition (thus the operands are commutative) and one of the
     addends duplicates the destination, we want that addend to be the
     first addend.  */
  if (code == PLUS
      && rtx_equal_p (operands[0], operands[2])
      && !rtx_equal_p (operands[1], operands[2]))
    {
      temp = operands[2];
      operands[2] = operands[1];
      operands[1] = temp;
    }

  if (!TARGET_QMATH)
    {
      emit_insn ((*gen_old_insn) (operands[0], operands[1], operands[2]));
    }
  else if (hi_only)
    {
      if (!rtx_equal_p (operands[0], operands[1])
          && (REG_P (operands[0]) && MEM_P (operands[1])))
        {
          emit_move_insn (operands[0], operands[1]);
          operands[1] = operands[0];
        }

      operands[0] = fixup_mathdi_operand (operands[0], DImode);
      operands[1] = fixup_mathdi_operand (operands[1], DImode);
      operands[2] = fixup_mathdi_operand (operands[2], DImode);

      if (!rtx_equal_p (operands[0], operands[1]))
        emit_move_insn (operand_subword (operands[0], 0, 0, DImode),
                          operand_subword (operands[1], 0, 0, DImode));

      emit_insn ((*gen_si_insn) (operand_subword (operands[0], 1, 0, DImode),
                                 operand_subword (operands[1], 1, 0, DImode),
                                 operand_subword (operands[2], 1, 0, DImode)));
    }
  else
    {
      /* If we are adding the same value to itself, that's really a multiply
         by 2, which is just a left shift by 1.  */
      if (rtx_equal_p (operands[1], operands[2]))
        {
          gcc_assert (code != MINUS);
          emit_insn (gen_ashldi3 (operands[0], operands[1], const1_rtx));
          return;
        }

      operands[0] = fixup_mathdi_operand (operands[0], DImode);

      /* If an operand is the same as operand[0], use the operand[0] rtx,
         because fixup will return an equivalent rtx but not an equal one.  */

      if (rtx_equal_p (operands[0], operands[1]))
        operands[1] = operands[0];
      else
        operands[1] = fixup_mathdi_operand (operands[1], DImode);

      if (rtx_equal_p (operands[0], operands[2]))
        operands[2] = operands[0];
      else
        operands[2] = fixup_mathdi_operand (operands[2], DImode);

      /* If we are subtracting and the destination is not the same as the
         first operand [d = a - b], then because the carry ops take only
         two operands we would need a move prior to the subtract.  And if
         d == b, we would also need a temp; otherwise [d = a, d -= d] ends
         up with 0.  Instead we rewrite d = a - b into d = -b, d += a.
         Since -b can never overflow, even if b == d, no temp is needed.

         If we are doing addition, since the carry ops are two operand, if
         we aren't adding to ourselves, move the first addend to the
         destination first.  */
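
      /* Concretely, for d = a - b with a constant A and d distinct from a,
         the code below emits roughly:

             d = -b          via gen_sbcdi3 (d, 0, b)
             d = d + a       via gen_adcdi3 (d, d, a)

         while the plain cases first copy the non-duplicated operand into d
         and then use a single two-operand carry insn.  */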

      gcc_assert (operands[1] != const0_rtx || code == MINUS);
      if (!rtx_equal_p (operands[0], operands[1]) && operands[1] != const0_rtx)
        {
          if (code == MINUS && CONSTANT_P (operands[1]))
            {
              temp = gen_reg_rtx (DImode);
              emit_insn (gen_sbcdi3 (operands[0], const0_rtx, operands[2]));
              code = PLUS;
              gen_insn = gen_adcdi3;
              operands[2] = operands[1];
              operands[1] = operands[0];
            }
          else
            emit_move_insn (operands[0], operands[1]);
        }

      /* Subtracting a constant will have been rewritten to an addition of the
         negative of that constant before we get here.  */
      gcc_assert (!CONSTANT_P (operands[2]) || code == PLUS);
      emit_insn ((*gen_insn) (operands[0], operands[1], operands[2]));
    }
}
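
/* Return true if LO and HI can be treated as adjacent words of a wider
   value: consecutive registers (for SImode), a small nonnegative low-word
   constant paired with a zero high word, or non-volatile memory references
   whose addresses differ by exactly GET_MODE_SIZE (MODE).  */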

bool
adjacent_operands_p (rtx lo, rtx hi, enum machine_mode mode)
{
  HOST_WIDE_INT lo_offset;
  HOST_WIDE_INT hi_offset;

  if (GET_CODE (lo) != GET_CODE (hi))
    return false;

  if (REG_P (lo))
    return mode == SImode && REGNO (lo) + 1 == REGNO (hi);
  if (CONST_INT_P (lo))
    return INTVAL (hi) == 0 && 0 <= INTVAL (lo) && INTVAL (lo) < 64;
  if (CONST_INT_P (lo))
    return mode != SImode;

  if (!MEM_P (lo))
    return false;

  if (MEM_VOLATILE_P (lo) || MEM_VOLATILE_P (hi))
    return false;

  lo = XEXP (lo, 0);
  hi = XEXP (hi, 0);

  if (GET_CODE (lo) == POST_INC /* || GET_CODE (lo) == PRE_DEC */)
    return rtx_equal_p (lo, hi);

  switch (GET_CODE (lo))
    {
    case REG:
    case SYMBOL_REF:
      lo_offset = 0;
      break;
    case CONST:
      lo = XEXP (lo, 0);
      /* FALLTHROUGH */
    case PLUS:
      if (!CONST_INT_P (XEXP (lo, 1)))
        return false;
      lo_offset = INTVAL (XEXP (lo, 1));
      lo = XEXP (lo, 0);
      break;
    default:
      return false;
    }

  switch (GET_CODE (hi))
    {
    case REG:
    case SYMBOL_REF:
      hi_offset = 0;
      break;
    case CONST:
      hi = XEXP (hi, 0);
      /* FALLTHROUGH */
    case PLUS:
      if (!CONST_INT_P (XEXP (hi, 1)))
        return false;
      hi_offset = INTVAL (XEXP (hi, 1));
      hi = XEXP (hi, 0);
      break;
    default:
      return false;
    }

  if (GET_CODE (lo) == MULT || GET_CODE (lo) == PLUS)
    return false;

  return rtx_equal_p (lo, hi)
         && hi_offset - lo_offset == GET_MODE_SIZE (mode);
}

/* Output assembler code for a block containing the constant parts
   of a trampoline, leaving space for the variable parts.  */

/* On the VAX, the trampoline contains an entry mask and two instructions:
     .word NN
     movl $STATIC,r0   (store the function's static chain)
     jmp  *$FUNCTION   (jump to function code at address FUNCTION)  */
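
/* Rough byte layout of the template emitted below (little-endian, offsets
   in bytes); vax_trampoline_init fills in the variable fields at offsets
   0, 4 and 11:

      0:  .word 0         entry mask, copied from the target function
      2:  .word 0x8fd0    opcode bytes of the MOVL-immediate above
      4:  .long 0         immediate operand: the static chain value
      8:  .byte 0x50+N    register operand, N = STATIC_CHAIN_REGNUM
      9:  .word 0x9f17    opcode bytes of the JMP-absolute above
     11:  .long 0         the target's code address (entry point + 2,
                          skipping its own entry mask)  */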

static void
vax_asm_trampoline_template (FILE *f ATTRIBUTE_UNUSED)
{
  assemble_aligned_integer (2, const0_rtx);
  assemble_aligned_integer (2, GEN_INT (0x8fd0));
  assemble_aligned_integer (4, const0_rtx);
  assemble_aligned_integer (1, GEN_INT (0x50 + STATIC_CHAIN_REGNUM));
  assemble_aligned_integer (2, GEN_INT (0x9f17));
  assemble_aligned_integer (4, const0_rtx);
}

/* We copy the register-mask from the function's pure code
   to the start of the trampoline.  */

static void
vax_trampoline_init (rtx m_tramp, tree fndecl, rtx cxt)
{
  rtx fnaddr = XEXP (DECL_RTL (fndecl), 0);
  rtx mem;

  emit_block_move (m_tramp, assemble_trampoline_template (),
                   GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);

  mem = adjust_address (m_tramp, HImode, 0);
  emit_move_insn (mem, gen_const_mem (HImode, fnaddr));

  mem = adjust_address (m_tramp, SImode, 4);
  emit_move_insn (mem, cxt);
  mem = adjust_address (m_tramp, SImode, 11);
  emit_move_insn (mem, plus_constant (fnaddr, 2));
  emit_insn (gen_sync_istream ());
}

/* Value is the number of bytes of arguments automatically
   popped when returning from a subroutine call.
   FUNDECL is the declaration node of the function (as a tree),
   FUNTYPE is the data type of the function (as a tree),
   or for a library call it is an identifier node for the subroutine name.
   SIZE is the number of bytes of arguments passed on the stack.

   On the VAX, the RET insn pops a maximum of 255 args for any function.  */
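
/* For example, a call that pushes three longword arguments (12 bytes) has
   those 12 bytes popped on return; once SIZE exceeds 255 longwords
   (1020 bytes) this hook returns 0 and the caller removes the arguments
   itself.  */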

static int
vax_return_pops_args (tree fundecl ATTRIBUTE_UNUSED,
                      tree funtype ATTRIBUTE_UNUSED, int size)
{
  return size > 255 * 4 ? 0 : size;
}

/* Define where to put the arguments to a function.
   Value is zero to push the argument on the stack,
   or a hard register in which to store the argument.

   MODE is the argument's machine mode.
   TYPE is the data type of the argument (as a tree).
    This is null for libcalls where that information may
    not be available.
   CUM is a variable of type CUMULATIVE_ARGS which gives info about
    the preceding args and about the function being called.
   NAMED is nonzero if this argument is a named parameter
    (otherwise it is an extra parameter matching an ellipsis).  */

/* On the VAX all args are pushed.  */

static rtx
vax_function_arg (cumulative_args_t cum ATTRIBUTE_UNUSED,
                  enum machine_mode mode ATTRIBUTE_UNUSED,
                  const_tree type ATTRIBUTE_UNUSED,
                  bool named ATTRIBUTE_UNUSED)
{
  return NULL_RTX;
}

/* Update the data in CUM to advance over an argument of mode MODE and
   data type TYPE.  (TYPE is null for libcalls where that information
   may not be available.)  */
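
/* Each argument occupies a whole number of longwords on the stack, so the
   advance below is rounded up to a multiple of 4; a 6-byte argument, for
   example, advances CUM by 8.  */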

static void
vax_function_arg_advance (cumulative_args_t cum_v, enum machine_mode mode,
                          const_tree type, bool named ATTRIBUTE_UNUSED)
{
  CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);

  *cum += (mode != BLKmode
           ? (GET_MODE_SIZE (mode) + 3) & ~3
           : (int_size_in_bytes (type) + 3) & ~3);
}
