/* Subroutines used for code generation on the Tilera TILE-Gx.
   Copyright (C) 2011, 2012
   Free Software Foundation, Inc.
   Contributed by Walter Lee (walt@tilera.com)

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it
   under the terms of the GNU General Public License as published
   by the Free Software Foundation; either version 3, or (at your
   option) any later version.

   GCC is distributed in the hope that it will be useful, but WITHOUT
   ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
   or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public
   License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "regs.h"
#include "insn-config.h"
#include "output.h"
#include "insn-attr.h"
#include "recog.h"
#include "expr.h"
#include "langhooks.h"
#include "optabs.h"
#include "sched-int.h"
#include "tm_p.h"
#include "tm-constrs.h"
#include "target.h"
#include "target-def.h"
#include "integrate.h"
#include "dwarf2.h"
#include "timevar.h"
#include "gimple.h"
#include "cfgloop.h"
#include "tilegx-builtins.h"
#include "tilegx-multiply.h"
#include "diagnostic.h"

/* SYMBOL_REF for GOT.  */
static GTY(()) rtx g_got_symbol = NULL;

/* In case of a POST_INC or POST_DEC memory reference, we must report
   the mode of the memory reference from TARGET_PRINT_OPERAND to
   TARGET_PRINT_OPERAND_ADDRESS.  */
static enum machine_mode output_memory_reference_mode;

/* Report whether we're printing out the first address fragment of a
   POST_INC or POST_DEC memory reference, from TARGET_PRINT_OPERAND to
   TARGET_PRINT_OPERAND_ADDRESS.  */
static bool output_memory_autoinc_first;



/* Option handling  */

/* Implement TARGET_OPTION_OVERRIDE.  */
static void
tilegx_option_override (void)
{
  /* When modulo scheduling is enabled, we still rely on the regular
     scheduler for bundling.  */
  if (flag_modulo_sched)
    flag_resched_modulo_sched = 1;
}



/* Implement TARGET_SCALAR_MODE_SUPPORTED_P.  */
static bool
tilegx_scalar_mode_supported_p (enum machine_mode mode)
{
  switch (mode)
    {
    case QImode:
    case HImode:
    case SImode:
    case DImode:
    case TImode:
      return true;

    case SFmode:
    case DFmode:
      return true;

    default:
      return false;
    }
}


/* Implement TARGET_VECTOR_MODE_SUPPORTED_P.  */
static bool
tilegx_vector_mode_supported_p (enum machine_mode mode)
{
  return mode == V8QImode || mode == V4HImode || mode == V2SImode;
}


/* Implement TARGET_CANNOT_FORCE_CONST_MEM.  */
static bool
tilegx_cannot_force_const_mem (enum machine_mode mode ATTRIBUTE_UNUSED,
                               rtx x ATTRIBUTE_UNUSED)
{
  return true;
}


/* Implement TARGET_FUNCTION_OK_FOR_SIBCALL.  */
static bool
tilegx_function_ok_for_sibcall (tree decl, tree exp ATTRIBUTE_UNUSED)
{
  return decl != NULL;
}


/* Implement TARGET_PASS_BY_REFERENCE.  Variable sized types are
   passed by reference.  */
static bool
tilegx_pass_by_reference (cumulative_args_t cum ATTRIBUTE_UNUSED,
                          enum machine_mode mode ATTRIBUTE_UNUSED,
                          const_tree type, bool named ATTRIBUTE_UNUSED)
{
  return (type && TYPE_SIZE (type)
          && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST);
}


/* Implement TARGET_RETURN_IN_MEMORY.  */
static bool
tilegx_return_in_memory (const_tree type, const_tree fndecl ATTRIBUTE_UNUSED)
{
  return !IN_RANGE (int_size_in_bytes (type),
                    0, TILEGX_NUM_RETURN_REGS * UNITS_PER_WORD);
}


/* Implement TARGET_MODE_REP_EXTENDED.  */
static int
tilegx_mode_rep_extended (enum machine_mode mode, enum machine_mode mode_rep)
{
  /* SImode register values are sign-extended to DImode.  */
  if (mode == SImode && mode_rep == DImode)
    return SIGN_EXTEND;

  return UNKNOWN;
}


/* Implement TARGET_FUNCTION_ARG_BOUNDARY.  */
static unsigned int
tilegx_function_arg_boundary (enum machine_mode mode, const_tree type)
{
  unsigned int alignment;

  alignment = type ? TYPE_ALIGN (type) : GET_MODE_ALIGNMENT (mode);
  if (alignment < PARM_BOUNDARY)
    alignment = PARM_BOUNDARY;
  if (alignment > STACK_BOUNDARY)
    alignment = STACK_BOUNDARY;
  return alignment;
}


/* Implement TARGET_FUNCTION_ARG.  */
static rtx
tilegx_function_arg (cumulative_args_t cum_v,
                     enum machine_mode mode,
                     const_tree type, bool named ATTRIBUTE_UNUSED)
{
  CUMULATIVE_ARGS cum = *get_cumulative_args (cum_v);
  int byte_size = ((mode == BLKmode)
                   ? int_size_in_bytes (type) : GET_MODE_SIZE (mode));

  if (cum >= TILEGX_NUM_ARG_REGS)
    return NULL_RTX;

  /* The ABI does not allow parameters to be passed partially in
     registers and partially on the stack.  */
  if ((cum + (byte_size + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
      > TILEGX_NUM_ARG_REGS)
    return NULL_RTX;

  return gen_rtx_REG (mode, cum);
}
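
/* Worked example (added for illustration; not in the original
   source): with 8-byte words, a 9-byte BLKmode argument rounds up to
   two consecutive argument registers.  If only one argument register
   remains, the check above returns NULL_RTX and the whole argument
   is passed on the stack; the ABI never splits an argument between
   registers and the stack.  */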


/* Implement TARGET_FUNCTION_ARG_ADVANCE.  */
static void
tilegx_function_arg_advance (cumulative_args_t cum_v,
                             enum machine_mode mode,
                             const_tree type, bool named ATTRIBUTE_UNUSED)
{
  CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);

  int byte_size = ((mode == BLKmode)
                   ? int_size_in_bytes (type) : GET_MODE_SIZE (mode));
  int word_size = (byte_size + UNITS_PER_WORD - 1) / UNITS_PER_WORD;

  /* If the current argument does not fit in the pretend_args space,
     skip over it.  */
  if (*cum < TILEGX_NUM_ARG_REGS
      && *cum + word_size > TILEGX_NUM_ARG_REGS)
    *cum = TILEGX_NUM_ARG_REGS;

  *cum += word_size;
}


/* Implement TARGET_FUNCTION_VALUE.  */
static rtx
tilegx_function_value (const_tree valtype, const_tree fn_decl_or_type,
                       bool outgoing ATTRIBUTE_UNUSED)
{
  enum machine_mode mode;
  int unsigned_p;

  mode = TYPE_MODE (valtype);
  unsigned_p = TYPE_UNSIGNED (valtype);

  mode = promote_function_mode (valtype, mode, &unsigned_p,
                                fn_decl_or_type, 1);

  return gen_rtx_REG (mode, 0);
}


/* Implement TARGET_LIBCALL_VALUE.  */
static rtx
tilegx_libcall_value (enum machine_mode mode,
                      const_rtx fun ATTRIBUTE_UNUSED)
{
  return gen_rtx_REG (mode, 0);
}


/* Implement FUNCTION_VALUE_REGNO_P.  */
static bool
tilegx_function_value_regno_p (const unsigned int regno)
{
  return regno < TILEGX_NUM_RETURN_REGS;
}


/* Implement TARGET_BUILD_BUILTIN_VA_LIST.  */
static tree
tilegx_build_builtin_va_list (void)
{
  tree f_args, f_skip, record, type_decl;
  bool owp;

  record = lang_hooks.types.make_type (RECORD_TYPE);

  type_decl = build_decl (BUILTINS_LOCATION, TYPE_DECL,
                          get_identifier ("__va_list_tag"), record);

  f_args = build_decl (BUILTINS_LOCATION, FIELD_DECL,
                       get_identifier ("__args"), ptr_type_node);
  f_skip = build_decl (BUILTINS_LOCATION, FIELD_DECL,
                       get_identifier ("__skip"), ptr_type_node);

  DECL_FIELD_CONTEXT (f_args) = record;

  DECL_FIELD_CONTEXT (f_skip) = record;

  TREE_CHAIN (record) = type_decl;
  TYPE_NAME (record) = type_decl;
  TYPE_FIELDS (record) = f_args;
  TREE_CHAIN (f_args) = f_skip;

  /* We know this is being padded and we want it that way.  It is an
     internal type so hide the warnings from the user.  */
  owp = warn_padded;
  warn_padded = false;

  layout_type (record);

  warn_padded = owp;

  /* The correct type is an array type of one element.  */
  return record;
}
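
/* Illustrative sketch (added; not part of the original source): the
   record built above is equivalent to

     struct __va_list_tag
     {
       void *__args;
       void *__skip;
     };

   where __args points at the next argument slot and __skip marks the
   point at which STACK_POINTER_OFFSET bytes must be skipped over, as
   used by tilegx_va_start and tilegx_gimplify_va_arg_expr below.  */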


/* Implement TARGET_EXPAND_BUILTIN_VA_START.  */
static void
tilegx_va_start (tree valist, rtx nextarg ATTRIBUTE_UNUSED)
{
  tree f_args, f_skip;
  tree args, skip, t;

  f_args = TYPE_FIELDS (TREE_TYPE (valist));
  f_skip = TREE_CHAIN (f_args);

  args =
    build3 (COMPONENT_REF, TREE_TYPE (f_args), valist, f_args, NULL_TREE);
  skip =
    build3 (COMPONENT_REF, TREE_TYPE (f_skip), valist, f_skip, NULL_TREE);

  /* Find the __args area.  */
  t = make_tree (TREE_TYPE (args), virtual_incoming_args_rtx);
  t = fold_build_pointer_plus_hwi (t,
                                   UNITS_PER_WORD *
                                   (crtl->args.info - TILEGX_NUM_ARG_REGS));

  if (crtl->args.pretend_args_size > 0)
    t = fold_build_pointer_plus_hwi (t, -STACK_POINTER_OFFSET);

  t = build2 (MODIFY_EXPR, TREE_TYPE (args), args, t);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  /* Find the __skip area.  */
  t = make_tree (TREE_TYPE (skip), virtual_incoming_args_rtx);
  t = fold_build_pointer_plus_hwi (t, -STACK_POINTER_OFFSET);
  t = build2 (MODIFY_EXPR, TREE_TYPE (skip), skip, t);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
}


/* Implement TARGET_SETUP_INCOMING_VARARGS.  */
static void
tilegx_setup_incoming_varargs (cumulative_args_t cum,
                               enum machine_mode mode,
                               tree type, int *pretend_args, int no_rtl)
{
  CUMULATIVE_ARGS local_cum = *get_cumulative_args (cum);
  int first_reg;

  /* The caller has advanced CUM up to, but not beyond, the last named
     argument.  Advance a local copy of CUM past the last "real" named
     argument, to find out how many registers are left over.  */
  targetm.calls.function_arg_advance (pack_cumulative_args (&local_cum),
                                      mode, type, true);
  first_reg = local_cum;

  if (local_cum < TILEGX_NUM_ARG_REGS)
    {
      *pretend_args = UNITS_PER_WORD * (TILEGX_NUM_ARG_REGS - first_reg);

      if (!no_rtl)
        {
          alias_set_type set = get_varargs_alias_set ();
          rtx tmp =
            gen_rtx_MEM (BLKmode, plus_constant (virtual_incoming_args_rtx,
                                                 -STACK_POINTER_OFFSET -
                                                 UNITS_PER_WORD *
                                                 (TILEGX_NUM_ARG_REGS -
                                                  first_reg)));
          MEM_NOTRAP_P (tmp) = 1;
          set_mem_alias_set (tmp, set);
          move_block_from_reg (first_reg, tmp,
                               TILEGX_NUM_ARG_REGS - first_reg);
        }
    }
  else
    *pretend_args = 0;
}


/* Implement TARGET_GIMPLIFY_VA_ARG_EXPR.  Gimplify va_arg by updating
   the va_list structure VALIST as required to retrieve an argument of
   type TYPE, and returning that argument.

   ret = va_arg(VALIST, TYPE);

   generates code equivalent to:

    paddedsize = (sizeof(TYPE) + 7) & -8;
    if (  (VALIST.__args + paddedsize > VALIST.__skip)
        & (VALIST.__args <= VALIST.__skip))
      addr = VALIST.__skip + STACK_POINTER_OFFSET;
    else
      addr = VALIST.__args;
    VALIST.__args = addr + paddedsize;
    ret = *(TYPE *)addr;
 */
static tree
tilegx_gimplify_va_arg_expr (tree valist, tree type, gimple_seq *pre_p,
                             gimple_seq *post_p ATTRIBUTE_UNUSED)
{
  tree f_args, f_skip;
  tree args, skip;
  HOST_WIDE_INT size, rsize;
  tree addr, tmp;
  bool pass_by_reference_p;

  f_args = TYPE_FIELDS (va_list_type_node);
  f_skip = TREE_CHAIN (f_args);

  args =
    build3 (COMPONENT_REF, TREE_TYPE (f_args), valist, f_args, NULL_TREE);
  skip =
    build3 (COMPONENT_REF, TREE_TYPE (f_skip), valist, f_skip, NULL_TREE);

  addr = create_tmp_var (ptr_type_node, "va_arg");

  /* If an object is dynamically sized, a pointer to it is passed
     instead of the object itself.  */
  pass_by_reference_p = pass_by_reference (NULL, TYPE_MODE (type), type,
                                           false);

  if (pass_by_reference_p)
    type = build_pointer_type (type);

  size = int_size_in_bytes (type);
  rsize = ((size + UNITS_PER_WORD - 1) / UNITS_PER_WORD) * UNITS_PER_WORD;

  /* Assert alignment assumption.  */
  gcc_assert (STACK_BOUNDARY == PARM_BOUNDARY);

  /* Build conditional expression to calculate addr.  The expression
     will be gimplified later.  */
  tmp = fold_build_pointer_plus_hwi (unshare_expr (args), rsize);
  tmp = build2 (TRUTH_AND_EXPR, boolean_type_node,
                build2 (GT_EXPR, boolean_type_node, tmp, unshare_expr (skip)),
                build2 (LE_EXPR, boolean_type_node, unshare_expr (args),
                        unshare_expr (skip)));

  tmp = build3 (COND_EXPR, ptr_type_node, tmp,
                build2 (POINTER_PLUS_EXPR, ptr_type_node, unshare_expr (skip),
                        size_int (STACK_POINTER_OFFSET)),
                unshare_expr (args));

  gimplify_assign (addr, tmp, pre_p);

  /* Update VALIST.__args.  */
  tmp = fold_build_pointer_plus_hwi (addr, rsize);
  gimplify_assign (unshare_expr (args), tmp, pre_p);

  addr = fold_convert (build_pointer_type (type), addr);

  if (pass_by_reference_p)
    addr = build_va_arg_indirect_ref (addr);

  return build_va_arg_indirect_ref (addr);
}



/* Implement TARGET_RTX_COSTS.  */
static bool
tilegx_rtx_costs (rtx x, int code, int outer_code, int opno, int *total,
                  bool speed)
{
  switch (code)
    {
    case CONST_INT:
      /* If this is an 8-bit constant, return zero since it can be
         used nearly anywhere with no cost.  If it is a valid operand
         for an ADD or AND, likewise return 0 if we know it will be
         used in that context.  Otherwise, return 2 since it might be
         used there later.  All other constants take at least two
         insns.  */
      if (satisfies_constraint_I (x))
        {
          *total = 0;
          return true;
        }
      else if (outer_code == PLUS && add_operand (x, VOIDmode))
        {
          /* Slightly penalize large constants even though we can add
             them in one instruction, because it forces the use of
             2-wide bundling mode.  */
          *total = 1;
          return true;
        }
      else if (move_operand (x, SImode))
        {
          /* We can materialize in one move.  */
          *total = COSTS_N_INSNS (1);
          return true;
        }
      else
        {
          /* We can materialize in two moves.  */
          *total = COSTS_N_INSNS (2);
          return true;
        }

      return false;

    case CONST:
    case LABEL_REF:
    case SYMBOL_REF:
      *total = COSTS_N_INSNS (2);
      return true;

    case CONST_DOUBLE:
      *total = COSTS_N_INSNS (4);
      return true;

    case HIGH:
      *total = 0;
      return true;

    case MEM:
      /* If outer-code was a sign or zero extension, a cost of
         COSTS_N_INSNS (1) was already added in, so account for
         that.  */
      if (outer_code == ZERO_EXTEND || outer_code == SIGN_EXTEND)
        *total = COSTS_N_INSNS (1);
      else
        *total = COSTS_N_INSNS (2);
      return true;

    case PLUS:
      /* Convey that shl[123]add are efficient.  */
      if (GET_CODE (XEXP (x, 0)) == MULT
          && cint_248_operand (XEXP (XEXP (x, 0), 1), VOIDmode))
        {
          *total = (rtx_cost (XEXP (XEXP (x, 0), 0),
                              (enum rtx_code) outer_code, opno, speed)
                    + rtx_cost (XEXP (x, 1),
                                (enum rtx_code) outer_code, opno, speed)
                    + COSTS_N_INSNS (1));
          return true;
        }
      return false;

    case MULT:
      *total = COSTS_N_INSNS (2);
      return false;

    case DIV:
    case UDIV:
    case MOD:
    case UMOD:
      /* These are handled by software and are very expensive.  */
      *total = COSTS_N_INSNS (100);
      return false;

    case UNSPEC:
    case UNSPEC_VOLATILE:
      {
        int num = XINT (x, 1);

        if (num <= TILEGX_LAST_LATENCY_1_INSN)
          *total = COSTS_N_INSNS (1);
        else if (num <= TILEGX_LAST_LATENCY_2_INSN)
          *total = COSTS_N_INSNS (2);
        else if (num > TILEGX_LAST_LATENCY_INSN)
          {
            if (num == UNSPEC_NON_TEMPORAL)
              {
                /* These are basically loads.  */
                if (outer_code == ZERO_EXTEND || outer_code == SIGN_EXTEND)
                  *total = COSTS_N_INSNS (1);
                else
                  *total = COSTS_N_INSNS (2);
              }
            else
              {
                if (outer_code == PLUS)
                  *total = 0;
                else
                  *total = COSTS_N_INSNS (1);
              }
          }
        else
          {
            switch (num)
              {
              case UNSPEC_BLOCKAGE:
              case UNSPEC_NETWORK_BARRIER:
              case UNSPEC_ATOMIC:
                *total = 0;
                break;

              case UNSPEC_LNK_AND_LABEL:
              case UNSPEC_MF:
              case UNSPEC_MOV_PCREL_STEP3:
              case UNSPEC_NETWORK_RECEIVE:
              case UNSPEC_NETWORK_SEND:
              case UNSPEC_SPR_MOVE:
              case UNSPEC_TLS_GD_ADD:
                *total = COSTS_N_INSNS (1);
                break;

              case UNSPEC_TLS_IE_LOAD:
              case UNSPEC_XCHG:
                *total = COSTS_N_INSNS (2);
                break;

              case UNSPEC_SP_SET:
                *total = COSTS_N_INSNS (3);
                break;

              case UNSPEC_SP_TEST:
                *total = COSTS_N_INSNS (4);
                break;

              case UNSPEC_CMPXCHG:
              case UNSPEC_INSN_CMPEXCH:
              case UNSPEC_LATENCY_L2:
                *total = COSTS_N_INSNS (11);
                break;

              case UNSPEC_TLS_GD_CALL:
                *total = COSTS_N_INSNS (30);
                break;

              case UNSPEC_LATENCY_MISS:
                *total = COSTS_N_INSNS (80);
                break;

              default:
                *total = COSTS_N_INSNS (1);
              }
          }
        return true;
      }

    default:
      return false;
    }
}



/* Rtl lowering.  */

/* Create a temporary register to hold a partial result, to enable
   CSE.  */
static rtx
create_temp_reg_if_possible (enum machine_mode mode, rtx default_reg)
{
  return can_create_pseudo_p () ? gen_reg_rtx (mode) : default_reg;
}
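
/* Note (added; not in the original source): can_create_pseudo_p ()
   is false during and after register allocation, so at those times
   the function above falls back to reusing DEFAULT_REG instead of
   allocating a fresh pseudo.  */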


/* Functions to save and restore machine-specific function data.  */
static struct machine_function *
tilegx_init_machine_status (void)
{
  return ggc_alloc_cleared_machine_function ();
}


/* Do anything needed before RTL is emitted for each function.  */
void
tilegx_init_expanders (void)
{
  /* Arrange to initialize and mark the machine per-function
     status.  */
  init_machine_status = tilegx_init_machine_status;

  if (cfun && cfun->machine && flag_pic)
    {
      static int label_num = 0;

      char text_label_name[32];

      struct machine_function *machine = cfun->machine;

      ASM_GENERATE_INTERNAL_LABEL (text_label_name, "L_PICLNK", label_num++);

      machine->text_label_symbol =
        gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (text_label_name));

      machine->text_label_rtx =
        gen_rtx_REG (Pmode, TILEGX_PIC_TEXT_LABEL_REGNUM);

      machine->got_rtx = gen_rtx_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);

      machine->calls_tls_get_addr = false;
    }
}


/* Implement TARGET_SHIFT_TRUNCATION_MASK.  DImode shifts use the
   mode-matching insns and therefore guarantee that the shift count is
   modulo 64.  SImode shifts sometimes use the 64-bit version and so
   do not provide such a guarantee.  */
static unsigned HOST_WIDE_INT
tilegx_shift_truncation_mask (enum machine_mode mode)
{
  return mode == DImode ? 63 : 0;
}


/* Implement TARGET_INIT_LIBFUNCS.  */
static void
tilegx_init_libfuncs (void)
{
  /* We need to explicitly generate these libfuncs to support
     conversion of divide by constant to multiply (the divide stubs in
     tilegx.md exist also for this reason).  Normally we'd expect gcc
     to lazily generate them when they are needed, but for some reason
     it's set up to only generate them if the mode is the word
     mode.  */
  set_optab_libfunc (sdiv_optab, SImode, "__divsi3");
  set_optab_libfunc (udiv_optab, SImode, "__udivsi3");
  set_optab_libfunc (smod_optab, SImode, "__modsi3");
  set_optab_libfunc (umod_optab, SImode, "__umodsi3");
}


/* Return true if X contains a thread-local symbol.  */
static bool
tilegx_tls_referenced_p (rtx x)
{
  if (GET_CODE (x) == CONST && GET_CODE (XEXP (x, 0)) == PLUS)
    x = XEXP (XEXP (x, 0), 0);

  if (GET_CODE (x) == SYMBOL_REF && SYMBOL_REF_TLS_MODEL (x))
    return true;

  /* That's all we handle in tilegx_legitimize_tls_address for
     now.  */
  return false;
}


/* Return true if X requires a scratch register.  It is given that
   flag_pic is on and that X satisfies CONSTANT_P.  */
static int
tilegx_pic_address_needs_scratch (rtx x)
{
  if (GET_CODE (x) == CONST
      && GET_CODE (XEXP (x, 0)) == PLUS
      && (GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
          || GET_CODE (XEXP (XEXP (x, 0), 0)) == LABEL_REF)
      && (CONST_INT_P (XEXP (XEXP (x, 0), 1))))
    return true;

  return false;
}


/* Implement TARGET_LEGITIMATE_CONSTANT_P.  This is all constants for
   which we are willing to load the value into a register via a move
   pattern.  TLS cannot be treated as a constant because it can
   include a function call.  */
static bool
tilegx_legitimate_constant_p (enum machine_mode mode ATTRIBUTE_UNUSED, rtx x)
{
  switch (GET_CODE (x))
    {
    case CONST:
    case SYMBOL_REF:
      return !tilegx_tls_referenced_p (x);

    default:
      return true;
    }
}


/* Return true if the constant value X is a legitimate general operand
   when generating PIC code.  It is given that flag_pic is on and that
   X satisfies CONSTANT_P.  */
bool
tilegx_legitimate_pic_operand_p (rtx x)
{
  if (tilegx_pic_address_needs_scratch (x))
    return false;

  if (tilegx_tls_referenced_p (x))
    return false;

  return true;
}


/* Return true if the rtx X can be used as an address operand.  */
static bool
tilegx_legitimate_address_p (enum machine_mode ARG_UNUSED (mode), rtx x,
                             bool strict)
{
  if (GET_CODE (x) == SUBREG)
    x = SUBREG_REG (x);

  switch (GET_CODE (x))
    {
    case POST_INC:
    case POST_DEC:
      if (GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD)
        return false;

      x = XEXP (x, 0);
      break;

    case POST_MODIFY:
      if (GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD)
        return false;

      if (GET_CODE (XEXP (x, 1)) != PLUS)
        return false;

      if (!rtx_equal_p (XEXP (x, 0), XEXP (XEXP (x, 1), 0)))
        return false;

      if (!satisfies_constraint_I (XEXP (XEXP (x, 1), 1)))
        return false;

      x = XEXP (x, 0);
      break;

    case REG:
      break;

    default:
      return false;
    }

  /* Check if x is a valid reg.  */
  if (!REG_P (x))
    return false;

  if (strict)
    return REGNO_OK_FOR_BASE_P (REGNO (x));
  else
    return true;
}
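
/* Illustrative examples (added; not in the original source), for a
   DImode access through a hypothetical base register rN:

     (reg rN)                                     accepted
     (post_inc (reg rN))                          accepted
     (post_modify (reg rN)
                  (plus (reg rN) (const_int 8)))  accepted, since the
                                                  offset satisfies
                                                  constraint 'I'
     (plus (reg rN) (const_int 8))                rejected; there is
                                                  no base+offset
                                                  addressing mode  */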


/* Return the rtx containing SYMBOL_REF to the text label.  */
static rtx
tilegx_text_label_symbol (void)
{
  return cfun->machine->text_label_symbol;
}


/* Return the register storing the value of the text label.  */
static rtx
tilegx_text_label_rtx (void)
{
  return cfun->machine->text_label_rtx;
}


/* Return the register storing the value of the global offset
   table.  */
static rtx
tilegx_got_rtx (void)
{
  return cfun->machine->got_rtx;
}


/* Return the SYMBOL_REF for _GLOBAL_OFFSET_TABLE_.  */
static rtx
tilegx_got_symbol (void)
{
  if (g_got_symbol == NULL)
    g_got_symbol = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");

  return g_got_symbol;
}


/* Return a reference to the GOT to be used by TLS references.  */
static rtx
tilegx_tls_got (void)
{
  rtx temp;
  if (flag_pic)
    {
      crtl->uses_pic_offset_table = 1;
      return tilegx_got_rtx ();
    }

  temp = gen_reg_rtx (Pmode);
  emit_move_insn (temp, tilegx_got_symbol ());

  return temp;
}


/* ADDR contains a thread-local SYMBOL_REF.  Generate code to compute
   this (thread-local) address.  */
static rtx
tilegx_legitimize_tls_address (rtx addr)
{
  rtx ret;

  gcc_assert (can_create_pseudo_p ());

  if (GET_CODE (addr) == SYMBOL_REF)
    switch (SYMBOL_REF_TLS_MODEL (addr))
      {
      case TLS_MODEL_GLOBAL_DYNAMIC:
      case TLS_MODEL_LOCAL_DYNAMIC:
        {
          rtx r0, temp, temp2, temp3, got, last;

          ret = gen_reg_rtx (Pmode);
          r0 = gen_rtx_REG (Pmode, 0);
          temp = gen_reg_rtx (Pmode);
          temp2 = gen_reg_rtx (Pmode);
          temp3 = gen_reg_rtx (Pmode);

          got = tilegx_tls_got ();
          if (TARGET_32BIT)
            {
              emit_insn (gen_mov_tls_gd_step1_32bit (temp, addr));
              emit_insn (gen_mov_tls_gd_step2_32bit (temp2, temp, addr));
              emit_insn (gen_tls_add_32bit (temp2, got, temp2, addr));
            }
          else
            {
              emit_insn (gen_mov_tls_gd_step1 (temp, addr));
              emit_insn (gen_mov_tls_gd_step2 (temp2, temp, addr));
              emit_insn (gen_tls_add (temp2, got, temp2, addr));
            }

          emit_move_insn (r0, temp2);

          if (TARGET_32BIT)
            {
              emit_insn (gen_tls_gd_call_32bit (addr));
            }
          else
            {
              emit_insn (gen_tls_gd_call (addr));
            }

          emit_move_insn (temp3, r0);

          if (TARGET_32BIT)
            last = emit_insn (gen_tls_gd_add_32bit (ret, temp3, addr));
          else
            last = emit_insn (gen_tls_gd_add (ret, temp3, addr));

          set_unique_reg_note (last, REG_EQUAL, copy_rtx (addr));
          break;
        }
      case TLS_MODEL_INITIAL_EXEC:
        {
          rtx temp, temp2, temp3, got, last;

          ret = gen_reg_rtx (Pmode);
          temp = gen_reg_rtx (Pmode);
          temp2 = gen_reg_rtx (Pmode);
          temp3 = gen_reg_rtx (Pmode);

          got = tilegx_tls_got ();
          if (TARGET_32BIT)
            {
              emit_insn (gen_mov_tls_ie_step1_32bit (temp, addr));
              emit_insn (gen_mov_tls_ie_step2_32bit (temp2, temp, addr));
              emit_insn (gen_tls_add_32bit (temp2, got, temp2, addr));
              emit_insn (gen_tls_ie_load_32bit (temp3, temp2, addr));
            }
          else
            {
              emit_insn (gen_mov_tls_ie_step1 (temp, addr));
              emit_insn (gen_mov_tls_ie_step2 (temp2, temp, addr));
              emit_insn (gen_tls_add (temp2, got, temp2, addr));
              emit_insn (gen_tls_ie_load (temp3, temp2, addr));
            }

          last =
            emit_move_insn (ret,
                            gen_rtx_PLUS (Pmode,
                                          gen_rtx_REG (Pmode,
                                                       THREAD_POINTER_REGNUM),
                                          temp3));
          set_unique_reg_note (last, REG_EQUAL, copy_rtx (addr));
          break;
        }
      case TLS_MODEL_LOCAL_EXEC:
        {
          rtx temp, temp2, last;

          ret = gen_reg_rtx (Pmode);
          temp = gen_reg_rtx (Pmode);
          temp2 = gen_reg_rtx (Pmode);

          if (TARGET_32BIT)
            {
              emit_insn (gen_mov_tls_le_step1_32bit (temp, addr));
              emit_insn (gen_mov_tls_le_step2_32bit (temp2, temp, addr));
            }
          else
            {
              emit_insn (gen_mov_tls_le_step1 (temp, addr));
              emit_insn (gen_mov_tls_le_step2 (temp2, temp, addr));
            }

          last =
            emit_move_insn (ret,
                            gen_rtx_PLUS (Pmode,
                                          gen_rtx_REG (Pmode,
                                                       THREAD_POINTER_REGNUM),
                                          temp2));
          set_unique_reg_note (last, REG_EQUAL, copy_rtx (addr));
          break;
        }
      default:
        gcc_unreachable ();
      }
  else if (GET_CODE (addr) == CONST)
    {
      rtx base, offset;

      gcc_assert (GET_CODE (XEXP (addr, 0)) == PLUS);

      base = tilegx_legitimize_tls_address (XEXP (XEXP (addr, 0), 0));
      offset = XEXP (XEXP (addr, 0), 1);

      base = force_operand (base, NULL_RTX);
      ret = force_reg (Pmode, gen_rtx_PLUS (Pmode, base, offset));
    }
  else
    gcc_unreachable ();

  return ret;
}


/* Set RESULT to a register pointing to ADDR, a symbolic address, by
   computing its address relative to tilegx_text_label_symbol.  */
static void
compute_pcrel_address (rtx result, rtx addr)
{
  rtx text_label_symbol = tilegx_text_label_symbol ();
  rtx text_label_rtx = tilegx_text_label_rtx ();
  rtx temp, temp2;

  temp = create_temp_reg_if_possible (Pmode, result);
  temp2 = create_temp_reg_if_possible (Pmode, result);

  if (TARGET_32BIT)
    {
      emit_insn (gen_mov_pcrel_step1_32bit (temp, addr, text_label_symbol));
      emit_insn (gen_mov_pcrel_step2_32bit (temp2, temp, addr,
                                            text_label_symbol));
      emit_insn (gen_mov_pcrel_step3_32bit (result, temp2,
                                            text_label_rtx,
                                            addr, text_label_symbol));
    }
  else
    {
      emit_insn (gen_mov_pcrel_step1 (temp, addr, text_label_symbol));
      emit_insn (gen_mov_pcrel_step2 (temp2, temp, addr, text_label_symbol));
      emit_insn (gen_mov_pcrel_step3 (result, temp2,
                                      text_label_rtx,
                                      addr, text_label_symbol));
    }
}


/* Legitimize PIC addresses.  If the address is already
   position-independent, we return ORIG.  Newly generated
   position-independent addresses go into a reg.  This is REG if
   nonzero, otherwise we allocate register(s) as necessary.  */
static rtx
tilegx_legitimize_pic_address (rtx orig,
                               enum machine_mode mode ATTRIBUTE_UNUSED,
                               rtx reg)
{
  if (GET_CODE (orig) == SYMBOL_REF)
    {
      rtx address, pic_ref;

      if (reg == 0)
        {
          gcc_assert (can_create_pseudo_p ());
          reg = gen_reg_rtx (Pmode);
        }

      if (SYMBOL_REF_LOCAL_P (orig))
        {
          /* If not during reload, allocate another temp reg here for
             loading in the address, so that these instructions can be
             optimized properly.  */
          rtx temp_reg = create_temp_reg_if_possible (Pmode, reg);
          compute_pcrel_address (temp_reg, orig);

          /* Note: this is conservative.  We use the text_label but we
             don't use the pic_offset_table.  However, in some cases
             we may need the pic_offset_table (see
             tilegx_fixup_pcrel_references).  */
          crtl->uses_pic_offset_table = 1;

          address = temp_reg;

          emit_move_insn (reg, address);
          return reg;
        }
      else
        {
          /* If not during reload, allocate another temp reg here for
             loading in the address, so that these instructions can be
             optimized properly.  */
          rtx temp_reg = create_temp_reg_if_possible (Pmode, reg);

          gcc_assert (flag_pic);
          if (flag_pic == 1)
            {
              if (TARGET_32BIT)
                {
                  emit_insn (gen_add_got16_32bit (temp_reg,
                                                  tilegx_got_rtx (),
                                                  orig));
                }
              else
                {
                  emit_insn (gen_add_got16 (temp_reg,
                                            tilegx_got_rtx (), orig));
                }
            }
          else
            {
              rtx temp_reg2 = create_temp_reg_if_possible (Pmode, reg);
              rtx temp_reg3 = create_temp_reg_if_possible (Pmode, reg);
              if (TARGET_32BIT)
                {
                  emit_insn (gen_mov_got32_step1_32bit (temp_reg3, orig));
                  emit_insn (gen_mov_got32_step2_32bit
                             (temp_reg2, temp_reg3, orig));
                }
              else
                {
                  emit_insn (gen_mov_got32_step1 (temp_reg3, orig));
                  emit_insn (gen_mov_got32_step2 (temp_reg2, temp_reg3,
                                                  orig));
                }
              emit_move_insn (temp_reg,
                              gen_rtx_PLUS (Pmode,
                                            tilegx_got_rtx (), temp_reg2));
            }

          address = temp_reg;

          pic_ref = gen_const_mem (Pmode, address);
          crtl->uses_pic_offset_table = 1;
          emit_move_insn (reg, pic_ref);
          /* The following would put a REG_EQUAL note on this insn, so
             that it could be optimized by loop.  But it causes the
             label to be optimized away.  */
          /* set_unique_reg_note (insn, REG_EQUAL, orig); */
          return reg;
        }
    }
  else if (GET_CODE (orig) == CONST)
    {
      rtx base, offset;

      if (GET_CODE (XEXP (orig, 0)) == PLUS
          && XEXP (XEXP (orig, 0), 0) == tilegx_got_rtx ())
        return orig;

      if (reg == 0)
        {
          gcc_assert (can_create_pseudo_p ());
          reg = gen_reg_rtx (Pmode);
        }

      gcc_assert (GET_CODE (XEXP (orig, 0)) == PLUS);
      base = tilegx_legitimize_pic_address (XEXP (XEXP (orig, 0), 0),
                                            Pmode, reg);
      offset = tilegx_legitimize_pic_address (XEXP (XEXP (orig, 0), 1), Pmode,
                                              base == reg ? 0 : reg);

      if (CONST_INT_P (offset))
        {
          if (can_create_pseudo_p ())
            offset = force_reg (Pmode, offset);
          else
            /* If we reach here, then something is seriously wrong.  */
            gcc_unreachable ();
        }

      if (can_create_pseudo_p ())
        return force_reg (Pmode, gen_rtx_PLUS (Pmode, base, offset));
      else
        gcc_unreachable ();
    }
  else if (GET_CODE (orig) == LABEL_REF)
    {
      rtx address;
      rtx temp_reg;

      if (reg == 0)
        {
          gcc_assert (can_create_pseudo_p ());
          reg = gen_reg_rtx (Pmode);
        }

      /* If not during reload, allocate another temp reg here for
         loading in the address, so that these instructions can be
         optimized properly.  */
      temp_reg = create_temp_reg_if_possible (Pmode, reg);
      compute_pcrel_address (temp_reg, orig);

      /* Note: this is conservative.  We use the text_label but we
         don't use the pic_offset_table.  */
      crtl->uses_pic_offset_table = 1;

      address = temp_reg;

      emit_move_insn (reg, address);

      return reg;
    }

  return orig;
}


/* Implement TARGET_LEGITIMIZE_ADDRESS.  */
static rtx
tilegx_legitimize_address (rtx x, rtx oldx ATTRIBUTE_UNUSED,
                           enum machine_mode mode)
{
  if (GET_MODE_SIZE (mode) <= UNITS_PER_WORD
      && symbolic_operand (x, Pmode) && tilegx_tls_referenced_p (x))
    {
      return tilegx_legitimize_tls_address (x);
    }
  else if (flag_pic)
    {
      return tilegx_legitimize_pic_address (x, mode, 0);
    }
  else
    return x;
}


/* Implement TARGET_DELEGITIMIZE_ADDRESS.  */
static rtx
tilegx_delegitimize_address (rtx x)
{
  x = delegitimize_mem_from_attrs (x);

  if (GET_CODE (x) == CONST && GET_CODE (XEXP (x, 0)) == UNSPEC)
    {
      switch (XINT (XEXP (x, 0), 1))
        {
          case UNSPEC_HW0:
          case UNSPEC_HW1:
          case UNSPEC_HW2:
          case UNSPEC_HW3:
          case UNSPEC_HW0_LAST:
          case UNSPEC_HW1_LAST:
          case UNSPEC_HW2_LAST:
          case UNSPEC_HW0_PCREL:
          case UNSPEC_HW1_LAST_PCREL:
          case UNSPEC_HW0_GOT:
          case UNSPEC_HW0_LAST_GOT:
          case UNSPEC_HW1_LAST_GOT:
          case UNSPEC_HW0_TLS_GD:
          case UNSPEC_HW1_LAST_TLS_GD:
          case UNSPEC_HW0_TLS_IE:
          case UNSPEC_HW1_LAST_TLS_IE:
          case UNSPEC_HW0_TLS_LE:
          case UNSPEC_HW1_LAST_TLS_LE:
            x = XVECEXP (XEXP (x, 0), 0, 0);
            break;
        }
    }

  return x;
}


/* Emit code to load the PIC register.  */
static void
load_pic_register (bool delay_pic_helper ATTRIBUTE_UNUSED)
{
  int orig_flag_pic = flag_pic;

  rtx got_symbol = tilegx_got_symbol ();
  rtx text_label_symbol = tilegx_text_label_symbol ();
  rtx text_label_rtx = tilegx_text_label_rtx ();
  flag_pic = 0;

  if (TARGET_32BIT)
    {
      emit_insn (gen_insn_lnk_and_label_32bit (text_label_rtx,
                                               text_label_symbol));
    }
  else
    {
      emit_insn (gen_insn_lnk_and_label (text_label_rtx, text_label_symbol));
    }

  compute_pcrel_address (tilegx_got_rtx (), got_symbol);

  flag_pic = orig_flag_pic;

  /* Need to emit this whether or not we obey regdecls, since
     setjmp/longjmp can cause life info to screw up.  ??? In the case
     where we don't obey regdecls, this is not sufficient since we may
     not fall out the bottom.  */
  emit_use (tilegx_got_rtx ());
}


/* Return the SIMD variant of the constant NUM of mode MODE, by
   replicating it to fill an integer of mode DImode.  NUM is first
   truncated to fit in MODE.  */
rtx
tilegx_simd_int (rtx num, enum machine_mode mode)
{
  HOST_WIDE_INT n = 0;

  gcc_assert (CONST_INT_P (num));

  n = INTVAL (num);

  switch (mode)
    {
    case QImode:
      n = 0x0101010101010101LL * (n & 0x000000FF);
      break;
    case HImode:
      n = 0x0001000100010001LL * (n & 0x0000FFFF);
      break;
    case SImode:
      n = 0x0000000100000001LL * (n & 0xFFFFFFFF);
      break;
    case DImode:
      break;
    default:
      gcc_unreachable ();
    }

  return GEN_INT (n);
}
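
/* Worked example (added; not in the original source):
   tilegx_simd_int (GEN_INT (0xA5), QImode) returns
   0xA5A5A5A5A5A5A5A5, and tilegx_simd_int (GEN_INT (0x12345), HImode)
   first truncates to 0x2345 and returns 0x2345234523452345.  */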


/* Returns true iff VAL can be moved into a register in one
   instruction.  If it can, it also emits the code to move the
   constant into DEST_REG.

   If THREE_WIDE_ONLY is true, this insists on an instruction that
   works in a bundle containing three instructions.  */
static bool
expand_set_cint64_one_inst (rtx dest_reg,
                            HOST_WIDE_INT val, bool three_wide_only)
{
  if (val == trunc_int_for_mode (val, QImode))
    {
      /* Success! */
      emit_move_insn (dest_reg, GEN_INT (val));
      return true;
    }
  else if (!three_wide_only)
    {
      rtx imm_op = GEN_INT (val);

      if (satisfies_constraint_J (imm_op)
          || satisfies_constraint_K (imm_op)
          || satisfies_constraint_N (imm_op)
          || satisfies_constraint_P (imm_op))
        {
          emit_move_insn (dest_reg, imm_op);
          return true;
        }
    }

  return false;
}
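
/* Note (added; not in the original source): the QImode truncation
   test above accepts exactly -128..127, the movei range, which works
   even in a three-wide bundle; the J, K, N and P constraints cover
   immediates that still fit in a single instruction but only in a
   narrower bundle.  */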


/* Implement DImode rotatert.  */
static HOST_WIDE_INT
rotate_right (HOST_WIDE_INT n, int count)
{
  unsigned HOST_WIDE_INT x = n & 0xFFFFFFFFFFFFFFFFULL;
  if (count == 0)
    return x;
  return ((x >> count) | (x << (64 - count))) & 0xFFFFFFFFFFFFFFFFULL;
}
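
/* Worked example (added; not in the original source):
   rotate_right (0xF0, 4) == 0xF and rotate_right (0xF, 4) ==
   0xF000000000000000.  The COUNT == 0 special case above avoids
   shifting a 64-bit value left by 64 bits, which is undefined.  */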
1380
 
1381
 
1382
/* Return true iff n contains exactly one contiguous sequence of 1
1383
   bits, possibly wrapping around from high bits to low bits.  */
1384
bool
1385
tilegx_bitfield_operand_p (HOST_WIDE_INT n, int *first_bit, int *last_bit)
1386
{
1387
  int i;
1388
 
1389
  if (n == 0)
1390
    return false;
1391
 
1392
  for (i = 0; i < 64; i++)
1393
    {
1394
      unsigned HOST_WIDE_INT x = rotate_right (n, i);
1395
      if (!(x & 1))
1396
        continue;
1397
 
1398
      /* See if x is a power of two minus one, i.e. only consecutive 1
1399
         bits starting from bit 0.  */
1400
      if ((x & (x + 1)) == 0)
1401
        {
1402
          if (first_bit != NULL)
1403
            *first_bit = i;
1404
          if (last_bit != NULL)
1405
            *last_bit = (i + exact_log2 (x ^ (x >> 1))) & 63;
1406
 
1407
          return true;
1408
        }
1409
    }
1410
 
1411
  return false;
1412
}
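
/* Worked examples (added; not in the original source):
   0x00000000000000F0 succeeds with *first_bit = 4 and *last_bit = 7;
   0x8000000000000001 succeeds as a wrapping field with
   *first_bit = 63 and *last_bit = 0; 0x0000000000000101 fails
   because it contains two separate runs of 1 bits.  */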


/* Create code to move the CONST_INT value in SRC_VAL to DEST_REG.  */
static void
expand_set_cint64 (rtx dest_reg, rtx src_val)
{
  HOST_WIDE_INT val;
  int leading_zeroes, trailing_zeroes;
  int three_wide_only;
  int shift, ins_shift, zero_cluster_shift;
  rtx temp, subreg;

  gcc_assert (CONST_INT_P (src_val));
  val = trunc_int_for_mode (INTVAL (src_val), GET_MODE (dest_reg));

  /* See if we can generate the constant in one instruction.  */
  if (expand_set_cint64_one_inst (dest_reg, val, false))
    return;

  /* Force the destination to DImode so we can use DImode instructions
     to create it.  This both allows instructions like rotl, and
     certain efficient 3-wide instructions.  */
  subreg = simplify_gen_subreg (DImode, dest_reg, GET_MODE (dest_reg), 0);
  gcc_assert (subreg != NULL);
  dest_reg = subreg;

  temp = create_temp_reg_if_possible (DImode, dest_reg);

  leading_zeroes = 63 - floor_log2 (val & 0xFFFFFFFFFFFFFFFFULL);
  trailing_zeroes = exact_log2 (val & -val);

  /* First try all three-wide instructions that generate a constant
     (i.e. movei) followed by various shifts and rotates.  If none of
     those work, try various two-wide ways of generating a constant
     followed by various shifts and rotates.  */
  for (three_wide_only = 1; three_wide_only >= 0; three_wide_only--)
    {
      int count;

      if (expand_set_cint64_one_inst (temp, val >> trailing_zeroes,
                                      three_wide_only))
        {
          /* 0xFFFFFFFFFFFFA500 becomes:
             movei temp, 0xFFFFFFFFFFFFFFA5
             shli dest, temp, 8  */
          emit_move_insn (dest_reg,
                          gen_rtx_ASHIFT (DImode, temp,
                                          GEN_INT (trailing_zeroes)));
          return;
        }

      if (expand_set_cint64_one_inst (temp, val << leading_zeroes,
                                      three_wide_only))
        {
          /* 0x7FFFFFFFFFFFFFFF becomes:
             movei temp, -2
             shrui dest, temp, 1  */
          emit_move_insn (dest_reg,
                          gen_rtx_LSHIFTRT (DImode, temp,
                                            GEN_INT (leading_zeroes)));
          return;
        }

      /* Try rotating a one-instruction immediate.  */
      for (count = 1; count < 64; count++)
        {
          HOST_WIDE_INT r = rotate_right (val, count);
          if (expand_set_cint64_one_inst (temp, r, three_wide_only))
            {
              /* 0xFFFFFFFFFFA5FFFF becomes:
                 movei temp, 0xFFFFFFFFFFFFFFA5
                 rotli dest, temp, 16  */
              emit_move_insn (dest_reg,
                              gen_rtx_ROTATE (DImode, temp, GEN_INT (count)));
              return;
            }
        }
    }

  /* There are two cases here to produce a large constant.
     In the most general case, we do this:

     moveli x, hw3(NUM)
     shl16insli x, x, hw2(NUM)
     shl16insli x, x, hw1(NUM)
     shl16insli x, x, hw0(NUM)

     However, we can sometimes do better.  shl16insli is a poor way to
     insert 16 zero bits, because simply shifting left by 16 has more
     bundling freedom.  So if we see any contiguous aligned sequence
     of 16 or more zero bits (below the highest set bit), it is always
     more efficient to materialize the bits above the zero bits, then
     left shift to put in the zeroes, then insert whatever bits
     remain.  For example, we might end up with:

     movei x, NUM >> (37 + 16)
     shli x, x, 37
     shl16insli x, x, hw0(NUM)      */

  zero_cluster_shift = -1;

  for (shift = 0; shift < 48 - leading_zeroes; shift += 16)
    {
      HOST_WIDE_INT x = val >> shift;

      /* Find the least significant group of 16 aligned zero bits.  */
      if ((x & 0xFFFF) == 0x0000)
        {
          /* Grab any following zero bits as well.  */
          zero_cluster_shift = exact_log2 (x & -x);
          shift += zero_cluster_shift;
          break;
        }
    }

  if (zero_cluster_shift >= 0)
    {
      unsigned HOST_WIDE_INT leftover;

      /* Recursively create the constant above the lowest 16 zero
         bits.  */
      expand_set_cint64 (temp, GEN_INT (val >> shift));

      /* See if we can easily insert the remaining bits, or if we need
         to fall through to the more general case.  */
      leftover = val - ((val >> shift) << shift);
      if (leftover == 0)
        {
          /* A simple left shift is enough.  */
          emit_move_insn (dest_reg,
                          gen_rtx_ASHIFT (DImode, temp, GEN_INT (shift)));
          return;
        }
      else if (leftover <= 32767)
        {
          /* Left shift into position then add in the leftover.  */
          rtx temp2 = create_temp_reg_if_possible (DImode, temp);
          emit_move_insn (temp2,
                          gen_rtx_ASHIFT (DImode, temp, GEN_INT (shift)));
          emit_move_insn (dest_reg,
                          gen_rtx_PLUS (DImode, temp2, GEN_INT (leftover)));
          return;
        }
      else
        {
          /* Shift in the batch of >= 16 zeroes we detected earlier.
             After this, shift will be aligned mod 16 so the final
             loop can use shl16insli.  */
          rtx temp2 = create_temp_reg_if_possible (DImode, temp);
          rtx shift_count_rtx = GEN_INT (zero_cluster_shift);

          emit_move_insn (temp2,
                          gen_rtx_ASHIFT (DImode, temp, shift_count_rtx));

          shift -= zero_cluster_shift;
          temp = temp2;
        }
    }
  else
    {
      /* Set as many high 16-bit blocks as we can with a single
         instruction.  We'll insert the remaining 16-bit blocks
         below.  */
      for (shift = 16;; shift += 16)
        {
          gcc_assert (shift < 64);
          if (expand_set_cint64_one_inst (temp, val >> shift, false))
            break;
        }
    }

  /* At this point, temp == val >> shift, shift % 16 == 0, and we
     still need to insert any bits of 'val' below 'shift'.  Those bits
     are guaranteed to not have 16 contiguous zeroes.  */

  gcc_assert ((shift & 15) == 0);

  for (ins_shift = shift - 16; ins_shift >= 0; ins_shift -= 16)
    {
      rtx result;
      HOST_WIDE_INT bits = (val >> ins_shift) & 0xFFFF;
      gcc_assert (bits != 0);

      /* On the last iteration we need to store into dest_reg.  */
      if (ins_shift == 0)
        result = dest_reg;
      else
        result = create_temp_reg_if_possible (DImode, dest_reg);

      emit_insn (gen_insn_shl16insli (result, temp, GEN_INT (bits)));

      temp = result;
    }
}


/* Load OP1, a 64-bit constant, into OP0, a register.  We know it
   can't be done in one insn when we get here; the move expander
   guarantees this.  */
void
tilegx_expand_set_const64 (rtx op0, rtx op1)
{
  if (CONST_INT_P (op1))
    {
      /* TODO: I don't know if we want to split large constants
         now, or wait until later (with a define_split).

         Does splitting early help CSE?  Does it harm other
         optimizations that might fold loads? */
      expand_set_cint64 (op0, op1);
    }
  else
    {
      rtx temp = create_temp_reg_if_possible (Pmode, op0);

      if (TARGET_32BIT)
        {
          /* Generate the 2-insn sequence to materialize a symbolic
             address.  */
          emit_insn (gen_mov_address_32bit_step1 (temp, op1));
          emit_insn (gen_mov_address_32bit_step2 (op0, temp, op1));
        }
      else
        {
          /* Generate the 3-insn sequence to materialize a symbolic
             address.  Note that this assumes that virtual addresses
             fit in 48 signed bits, which is currently true.  */
          rtx temp2 = create_temp_reg_if_possible (Pmode, op0);
          emit_insn (gen_mov_address_step1 (temp, op1));
          emit_insn (gen_mov_address_step2 (temp2, temp, op1));
          emit_insn (gen_mov_address_step3 (op0, temp2, op1));
        }
    }
}


/* Expand a move instruction.  Return true if all work is done.  */
bool
tilegx_expand_mov (enum machine_mode mode, rtx *operands)
{
  /* Handle sets of MEM first.  */
  if (MEM_P (operands[0]))
    {
      if (can_create_pseudo_p ())
        operands[0] = validize_mem (operands[0]);

      if (reg_or_0_operand (operands[1], mode))
        return false;

      if (!reload_in_progress)
        operands[1] = force_reg (mode, operands[1]);
    }

  /* Fix up TLS cases.  */
  if (CONSTANT_P (operands[1]) && tilegx_tls_referenced_p (operands[1]))
    {
      operands[1] = tilegx_legitimize_tls_address (operands[1]);
      return false;
    }

  /* Fix up PIC cases.  */
  if (flag_pic && CONSTANT_P (operands[1]))
    {
      if (tilegx_pic_address_needs_scratch (operands[1]))
        operands[1] = tilegx_legitimize_pic_address (operands[1], mode, 0);

      if (symbolic_operand (operands[1], mode))
        {
          operands[1] = tilegx_legitimize_pic_address (operands[1],
                                                       mode,
                                                       (reload_in_progress ?
                                                        operands[0] :
                                                        NULL_RTX));
          return false;
        }
    }

  /* Accept non-constants and valid constants unmodified.  */
  if (!CONSTANT_P (operands[1]) || move_operand (operands[1], mode))
    return false;

  /* Split large integers.  */
  tilegx_expand_set_const64 (operands[0], operands[1]);
  return true;
}
1698
 
1699
 
1700
/* Expand unaligned loads.  */
1701
void
1702
tilegx_expand_unaligned_load (rtx dest_reg, rtx mem, HOST_WIDE_INT bitsize,
1703
                              HOST_WIDE_INT bit_offset, bool sign)
1704
{
1705
  enum machine_mode mode;
1706
  rtx addr_lo, addr_hi;
1707
  rtx mem_lo, mem_hi, hi;
1708
  rtx mema, wide_result;
1709
  int last_byte_offset;
1710
  HOST_WIDE_INT byte_offset = bit_offset / BITS_PER_UNIT;
1711
 
1712
  mode = GET_MODE (dest_reg);
1713
 
1714
  hi = gen_reg_rtx (mode);
1715
 
1716
  if (bitsize == 2 * BITS_PER_UNIT && (bit_offset % BITS_PER_UNIT) == 0)
1717
    {
1718
      /* When just loading a two byte value, we can load the two bytes
1719
         individually and combine them efficiently.  */
1720
 
1721
      mem_lo = adjust_address (mem, QImode, byte_offset);
1722
      mem_hi = adjust_address (mem, QImode, byte_offset + 1);
1723
 
1724
      if (sign)
1725
        {
1726
          /* Do a signed load of the second byte and use bfins to set
1727
             the high bits of the result.  */
1728
          emit_insn (gen_zero_extendqidi2 (gen_lowpart (DImode, dest_reg),
1729
                                           mem_lo));
1730
          emit_insn (gen_extendqidi2 (gen_lowpart (DImode, hi), mem_hi));
1731
          emit_insn (gen_insv (gen_lowpart (DImode, dest_reg),
1732
                               GEN_INT (64 - 8), GEN_INT (8),
1733
                               gen_lowpart (DImode, hi)));
1734
        }
1735
      else
1736
        {
1737
          /* Do two unsigned loads and use v1int_l to interleave
1738
             them.  */
1739
          rtx lo = gen_reg_rtx (mode);
1740
          emit_insn (gen_zero_extendqidi2 (gen_lowpart (DImode, lo),
1741
                                           mem_lo));
1742
          emit_insn (gen_zero_extendqidi2 (gen_lowpart (DImode, hi),
1743
                                           mem_hi));
1744
          emit_insn (gen_insn_v1int_l (gen_lowpart (DImode, dest_reg),
1745
                                       gen_lowpart (DImode, hi),
1746
                                       gen_lowpart (DImode, lo)));
1747
        }
1748
 
1749
      return;
1750
    }
1751
 
1752
  mema = XEXP (mem, 0);
1753
 
1754
  /* AND addresses cannot be in any alias set, since they may
1755
     implicitly alias surrounding code.  Ideally we'd have some alias
1756
     set that covered all types except those with alignment 8 or
1757
     higher.  */
1758
  addr_lo = force_reg (Pmode, plus_constant (mema, byte_offset));
1759
  mem_lo = change_address (mem, mode,
1760
                           gen_rtx_AND (GET_MODE (mema), addr_lo,
1761
                                        GEN_INT (-8)));
1762
  set_mem_alias_set (mem_lo, 0);
1763
 
1764
  /* Load the high word at an address that will not fault if the low
1765
     address is aligned and at the very end of a page.  */
1766
  last_byte_offset = (bit_offset + bitsize - 1) / BITS_PER_UNIT;
1767
  addr_hi = force_reg (Pmode, plus_constant (mema, last_byte_offset));
1768
  mem_hi = change_address (mem, mode,
1769
                           gen_rtx_AND (GET_MODE (mema), addr_hi,
1770
                                        GEN_INT (-8)));
1771
  set_mem_alias_set (mem_hi, 0);
1772
 
1773
  if (bitsize == 64)
1774
    {
1775
      addr_lo = make_safe_from (addr_lo, dest_reg);
1776
      wide_result = dest_reg;
1777
    }
1778
  else
1779
    {
1780
      wide_result = gen_reg_rtx (mode);
1781
    }
1782
 
1783
  /* Load hi first in case dest_reg is used in mema.  */
1784
  emit_move_insn (hi, mem_hi);
1785
  emit_move_insn (wide_result, mem_lo);
1786
 
1787
  emit_insn (gen_insn_dblalign (gen_lowpart (DImode, wide_result),
1788
                                gen_lowpart (DImode, wide_result),
1789
                                gen_lowpart (DImode, hi), addr_lo));
1790
 
1791
  if (bitsize != 64)
1792
    {
1793
      rtx extracted =
1794
        extract_bit_field (gen_lowpart (DImode, wide_result),
1795
                           bitsize, bit_offset % BITS_PER_UNIT,
1796
                           !sign, false, gen_lowpart (DImode, dest_reg),
1797
                           DImode, DImode);
1798
 
1799
      if (extracted != dest_reg)
1800
        emit_move_insn (dest_reg, gen_lowpart (DImode, extracted));
1801
    }
1802
}
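 
/* Illustrative sketch (not part of GCC): the aligned-pair scheme used
   above, in host C for the little-endian 64-bit case.  Two aligned
   loads cannot fault when the data is in bounds, and a simulated
   dblalign funnels the wanted bytes out.  Names here are hypothetical,
   and the pointer punning is sketch-only.  */
#include <stdint.h>

static uint64_t
sim_dblalign (uint64_t lo, uint64_t hi, uintptr_t addr)
{
  unsigned k = addr & 7;        /* misalignment in bytes */
  if (k == 0)
    return lo;                  /* already aligned; hi == lo here */
  return (lo >> (8 * k)) | (hi << (64 - 8 * k));
}

static uint64_t
load_unaligned_u64 (const uint8_t *p)
{
  uintptr_t a = (uintptr_t) p;
  const uint64_t *lo_word = (const uint64_t *) (a & ~(uintptr_t) 7);
  const uint64_t *hi_word = (const uint64_t *) ((a + 7) & ~(uintptr_t) 7);
  return sim_dblalign (*lo_word, *hi_word, a);
}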
1803
 
1804
 
1805
/* Expand unaligned stores.  */
1806
static void
1807
tilegx_expand_unaligned_store (rtx mem, rtx src, HOST_WIDE_INT bitsize,
1808
                               HOST_WIDE_INT bit_offset)
1809
{
1810
  HOST_WIDE_INT byte_offset = bit_offset / BITS_PER_UNIT;
1811
  HOST_WIDE_INT bytesize = bitsize / BITS_PER_UNIT;
1812
  HOST_WIDE_INT shift_amt;
1813
  HOST_WIDE_INT i;
1814
  rtx mem_addr;
1815
  rtx store_val;
1816
 
1817
  for (i = 0, shift_amt = 0; i < bytesize; i++, shift_amt += BITS_PER_UNIT)
1818
    {
1819
      mem_addr = adjust_address (mem, QImode, byte_offset + i);
1820
 
1821
      if (shift_amt)
1822
        {
1823
          store_val = expand_simple_binop (DImode, LSHIFTRT,
1824
                                           gen_lowpart (DImode, src),
1825
                                           GEN_INT (shift_amt), NULL, 1,
1826
                                           OPTAB_LIB_WIDEN);
1827
          store_val = gen_lowpart (QImode, store_val);
1828
        }
1829
      else
1830
        {
1831
          store_val = gen_lowpart (QImode, src);
1832
        }
1833
 
1834
      emit_move_insn (mem_addr, store_val);
1835
    }
1836
}
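 
/* Minimal sketch (not part of GCC): the store side is simply one byte
   store per byte of SRC, shifting the value right as the loop above
   does.  Host-C equivalent for a full 64-bit store:  */
#include <stdint.h>

static void
store_unaligned_u64 (uint8_t *p, uint64_t v)
{
  for (int i = 0; i < 8; i++, v >>= 8)
    p[i] = (uint8_t) v;         /* st1 of the current low byte */
}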
1837
 
1838
 
1839
/* Implement the movmisalign patterns.  One of the operands is a
1840
   memory that is not naturally aligned.  Emit instructions to load
1841
   it.  */
1842
void
1843
tilegx_expand_movmisalign (enum machine_mode mode, rtx *operands)
1844
{
1845
  if (MEM_P (operands[1]))
1846
    {
1847
      rtx tmp;
1848
 
1849
      if (register_operand (operands[0], mode))
1850
        tmp = operands[0];
1851
      else
1852
        tmp = gen_reg_rtx (mode);
1853
 
1854
      tilegx_expand_unaligned_load (tmp, operands[1], GET_MODE_BITSIZE (mode),
1855
                                    0, true);
1856
 
1857
      if (tmp != operands[0])
1858
        emit_move_insn (operands[0], tmp);
1859
    }
1860
  else if (MEM_P (operands[0]))
1861
    {
1862
      if (!reg_or_0_operand (operands[1], mode))
1863
        operands[1] = force_reg (mode, operands[1]);
1864
 
1865
      tilegx_expand_unaligned_store (operands[0], operands[1],
1866
                                     GET_MODE_BITSIZE (mode), 0);
1867
    }
1868
  else
1869
    gcc_unreachable ();
1870
 
1871
}
1872
 
1873
 
1874
/* Implement the allocate_stack pattern (alloca).  */
1875
void
1876
tilegx_allocate_stack (rtx op0, rtx op1)
1877
{
1878
  /* Technically the correct way to initialize chain_loc is with
   * gen_frame_mem() instead of gen_rtx_MEM(), but gen_frame_mem()
   * sets the alias_set to that of a frame reference.  Some of our
   * tests rely on an unsafe assumption about when the chaining
   * update is done, so we need to be conservative about reordering
   * the chaining instructions.
   */
1885
  rtx fp_addr = gen_reg_rtx (Pmode);
1886
  rtx fp_value = gen_reg_rtx (Pmode);
1887
  rtx fp_loc;
1888
 
1889
  emit_move_insn (fp_addr, gen_rtx_PLUS (Pmode, stack_pointer_rtx,
1890
                                         GEN_INT (UNITS_PER_WORD)));
1891
 
1892
  fp_loc = gen_frame_mem (Pmode, fp_addr);
1893
 
1894
  emit_move_insn (fp_value, fp_loc);
1895
 
1896
  op1 = force_reg (Pmode, op1);
1897
 
1898
  emit_move_insn (stack_pointer_rtx,
1899
                  gen_rtx_MINUS (Pmode, stack_pointer_rtx, op1));
1900
 
1901
  emit_move_insn (fp_addr, gen_rtx_PLUS (Pmode, stack_pointer_rtx,
1902
                                         GEN_INT (UNITS_PER_WORD)));
1903
 
1904
  fp_loc = gen_frame_mem (Pmode, fp_addr);
1905
 
1906
  emit_move_insn (fp_loc, fp_value);
1907
 
1908
  emit_move_insn (op0, virtual_stack_dynamic_rtx);
1909
}
1910
 
1911
 
1912
 
1913
/* Multiplies */
1914
 
1915
 
1916
/* Returns the insn_code in ENTRY.  */
1917
static enum insn_code
1918
tilegx_multiply_get_opcode (const struct tilegx_multiply_insn_seq_entry
1919
                            *entry)
1920
{
1921
  return tilegx_multiply_insn_seq_decode_opcode[entry->compressed_opcode];
1922
}
1923
 
1924
 
1925
/* Returns the length of the 'op' array.  */
1926
static int
1927
tilegx_multiply_get_num_ops (const struct tilegx_multiply_insn_seq *seq)
1928
{
1929
  /* The array either uses all of its allocated slots or is terminated
1930
     by a bogus opcode. Either way, the array size is the index of the
1931
     last valid opcode plus one.  */
1932
  int i;
1933
  for (i = tilegx_multiply_insn_seq_MAX_OPERATIONS - 1; i >= 0; i--)
1934
    if (tilegx_multiply_get_opcode (&seq->op[i]) != CODE_FOR_nothing)
1935
      return i + 1;
1936
 
1937
  /* An empty array is not allowed.  */
1938
  gcc_unreachable ();
1939
}
1940
 
1941
 
1942
/* We precompute a number of expression trees for multiplying by
1943
   constants.  This generates code for such an expression tree by
1944
   walking through the nodes in the tree (which are conveniently
1945
   pre-linearized) and emitting an instruction for each one.  */
1946
static void
1947
tilegx_expand_constant_multiply_given_sequence (rtx result, rtx src,
1948
                                                const struct
1949
                                                tilegx_multiply_insn_seq *seq)
1950
{
1951
  int i;
1952
  int num_ops;
1953
 
1954
  /* Keep track of the subexpressions computed so far, so later
1955
     instructions can refer to them.  We seed the array with zero and
1956
     the value being multiplied.  */
1957
  int num_subexprs = 2;
1958
  rtx subexprs[tilegx_multiply_insn_seq_MAX_OPERATIONS + 2];
1959
  subexprs[0] = const0_rtx;
1960
  subexprs[1] = src;
1961
 
1962
  /* Determine how many instructions we are going to generate.  */
1963
  num_ops = tilegx_multiply_get_num_ops (seq);
1964
  gcc_assert (num_ops > 0
1965
              && num_ops <= tilegx_multiply_insn_seq_MAX_OPERATIONS);
1966
 
1967
  for (i = 0; i < num_ops; i++)
1968
    {
1969
      const struct tilegx_multiply_insn_seq_entry *entry = &seq->op[i];
1970
 
1971
      /* Figure out where to store the output of this instruction.  */
1972
      const bool is_last_op = (i + 1 == num_ops);
1973
      rtx out = is_last_op ? result : gen_reg_rtx (DImode);
1974
 
1975
      enum insn_code opcode = tilegx_multiply_get_opcode (entry);
1976
      if (opcode == CODE_FOR_ashldi3)
1977
        {
1978
          /* Handle shift by immediate. This is a special case because
1979
             the meaning of the second operand is a constant shift
1980
             count rather than an operand index.  */
1981
 
1982
          /* Make sure the shift count is in range. Zero should not
1983
             happen.  */
1984
          const int shift_count = entry->rhs;
1985
          gcc_assert (shift_count > 0 && shift_count < 64);
1986
 
1987
          /* Emit the actual instruction.  */
1988
          emit_insn (GEN_FCN (opcode)
1989
                     (out, subexprs[entry->lhs],
1990
                      gen_rtx_CONST_INT (DImode, shift_count)));
1991
        }
1992
      else
1993
        {
1994
          /* Handle a normal two-operand instruction, such as add or
1995
             shl1add.  */
1996
 
1997
          /* Make sure we are referring to a previously computed
1998
             subexpression.  */
1999
          gcc_assert (entry->rhs < num_subexprs);
2000
 
2001
          /* Emit the actual instruction.  */
2002
          emit_insn (GEN_FCN (opcode)
2003
                     (out, subexprs[entry->lhs], subexprs[entry->rhs]));
2004
        }
2005
 
2006
      /* Record this subexpression for use by later expressions.  */
2007
      subexprs[num_subexprs++] = out;
2008
    }
2009
}
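 
/* Illustrative sketch (not part of GCC): what one precomputed sequence
   looks like when executed.  Multiplying by 10 takes two operations:
   shl2add computes 4*x + x, then a shift by one doubles it.  */
#include <stdint.h>

static uint64_t
mul_by_10 (uint64_t x)
{
  uint64_t t = (x << 2) + x;    /* shl2add t, x, x   -> 5*x  */
  return t << 1;                /* shl     r, t, 1   -> 10*x */
}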
2010
 
2011
 
2012
/* bsearch helper function.  */
2013
static int
2014
tilegx_compare_multipliers (const void *key, const void *t)
2015
{
2016
  long long delta =
2017
    (*(const long long *) key
2018
     - ((const struct tilegx_multiply_insn_seq *) t)->multiplier);
2019
  return (delta < 0) ? -1 : (delta > 0);
2020
}
2021
 
2022
 
2023
/* Returns the tilegx_multiply_insn_seq for MULTIPLIER, or NULL if
   none exists.  */
2025
static const struct tilegx_multiply_insn_seq *
2026
tilegx_find_multiply_insn_seq_for_constant (long long multiplier)
2027
{
2028
  return ((const struct tilegx_multiply_insn_seq *)
2029
          bsearch (&multiplier, tilegx_multiply_insn_seq_table,
2030
                   tilegx_multiply_insn_seq_table_size,
2031
                   sizeof tilegx_multiply_insn_seq_table[0],
2032
                   tilegx_compare_multipliers));
2033
}
2034
 
2035
 
2036
/* Try to expand a constant multiply in DImode by looking it up in a
2037
   precompiled table.  OP0 is the result operand, OP1 is the source
2038
   operand, and MULTIPLIER is the value of the constant.  Return true
2039
   if it succeeds.  */
2040
static bool
2041
tilegx_expand_const_muldi (rtx op0, rtx op1, long long multiplier)
2042
{
2043
  /* See if we have precomputed an efficient way to multiply by this
2044
     constant.  */
2045
  const struct tilegx_multiply_insn_seq *seq =
2046
    tilegx_find_multiply_insn_seq_for_constant (multiplier);
2047
  if (seq != NULL)
2048
    {
2049
      tilegx_expand_constant_multiply_given_sequence (op0, op1, seq);
2050
      return true;
2051
    }
2052
  else
2053
    return false;
2054
}
2055
 
2056
/* Expand the muldi pattern.  */
2057
bool
2058
tilegx_expand_muldi (rtx op0, rtx op1, rtx op2)
2059
{
2060
  if (CONST_INT_P (op2))
2061
    {
2062
      HOST_WIDE_INT n = trunc_int_for_mode (INTVAL (op2), DImode);
2063
      return tilegx_expand_const_muldi (op0, op1, n);
2064
    }
2065
  return false;
2066
}
2067
 
2068
 
2069
/* Expand a high multiply pattern in DImode.  RESULT, OP1, OP2 are
   the operands; SIGN is true for a signed multiply, false for an
   unsigned multiply.  */
2072
static void
2073
tilegx_expand_high_multiply (rtx result, rtx op1, rtx op2, bool sign)
2074
{
2075
  rtx tmp0 = gen_reg_rtx (DImode);
2076
  rtx tmp1 = gen_reg_rtx (DImode);
2077
  rtx tmp2 = gen_reg_rtx (DImode);
2078
  rtx tmp3 = gen_reg_rtx (DImode);
2079
  rtx tmp4 = gen_reg_rtx (DImode);
2080
  rtx tmp5 = gen_reg_rtx (DImode);
2081
  rtx tmp6 = gen_reg_rtx (DImode);
2082
  rtx tmp7 = gen_reg_rtx (DImode);
2083
  rtx tmp8 = gen_reg_rtx (DImode);
2084
  rtx tmp9 = gen_reg_rtx (DImode);
2085
  rtx tmp10 = gen_reg_rtx (DImode);
2086
  rtx tmp11 = gen_reg_rtx (DImode);
2087
  rtx tmp12 = gen_reg_rtx (DImode);
2088
  rtx tmp13 = gen_reg_rtx (DImode);
2089
  rtx result_lo = gen_reg_rtx (DImode);
2090
 
2091
  if (sign)
2092
    {
2093
      emit_insn (gen_insn_mul_hs_lu (tmp0, op1, op2));
2094
      emit_insn (gen_insn_mul_hs_lu (tmp1, op2, op1));
2095
      emit_insn (gen_insn_mul_lu_lu (tmp2, op1, op2));
2096
      emit_insn (gen_insn_mul_hs_hs (tmp3, op1, op2));
2097
    }
2098
  else
2099
    {
2100
      emit_insn (gen_insn_mul_hu_lu (tmp0, op1, op2));
2101
      emit_insn (gen_insn_mul_hu_lu (tmp1, op2, op1));
2102
      emit_insn (gen_insn_mul_lu_lu (tmp2, op1, op2));
2103
      emit_insn (gen_insn_mul_hu_hu (tmp3, op1, op2));
2104
    }
2105
 
2106
  emit_move_insn (tmp4, (gen_rtx_ASHIFT (DImode, tmp0, GEN_INT (32))));
2107
 
2108
  emit_move_insn (tmp5, (gen_rtx_ASHIFT (DImode, tmp1, GEN_INT (32))));
2109
 
2110
  emit_move_insn (tmp6, (gen_rtx_PLUS (DImode, tmp4, tmp5)));
2111
  emit_move_insn (result_lo, (gen_rtx_PLUS (DImode, tmp2, tmp6)));
2112
 
2113
  emit_move_insn (tmp7, gen_rtx_LTU (DImode, tmp6, tmp4));
2114
  emit_move_insn (tmp8, gen_rtx_LTU (DImode, result_lo, tmp2));
2115
 
2116
  if (sign)
2117
    {
2118
      emit_move_insn (tmp9, (gen_rtx_ASHIFTRT (DImode, tmp0, GEN_INT (32))));
2119
      emit_move_insn (tmp10, (gen_rtx_ASHIFTRT (DImode, tmp1, GEN_INT (32))));
2120
    }
2121
  else
2122
    {
2123
      emit_move_insn (tmp9, (gen_rtx_LSHIFTRT (DImode, tmp0, GEN_INT (32))));
2124
      emit_move_insn (tmp10, (gen_rtx_LSHIFTRT (DImode, tmp1, GEN_INT (32))));
2125
    }
2126
 
2127
  emit_move_insn (tmp11, (gen_rtx_PLUS (DImode, tmp3, tmp7)));
2128
  emit_move_insn (tmp12, (gen_rtx_PLUS (DImode, tmp8, tmp9)));
2129
  emit_move_insn (tmp13, (gen_rtx_PLUS (DImode, tmp11, tmp12)));
2130
  emit_move_insn (result, (gen_rtx_PLUS (DImode, tmp13, tmp10)));
2131
}
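 
/* Illustrative sketch (not part of GCC): the partial-product scheme
   the expander above emits, written out for the unsigned case in host
   C.  The full 128-bit product is t3<<64 + (t0 + t1)<<32 + t2; the
   high word collects t3, the high halves of t0 and t1, and the two
   carries out of the low-word accumulation.  */
#include <stdint.h>

static uint64_t
umulh_sketch (uint64_t a, uint64_t b)
{
  uint64_t a_lo = (uint32_t) a, a_hi = a >> 32;
  uint64_t b_lo = (uint32_t) b, b_hi = b >> 32;

  uint64_t t0 = a_hi * b_lo;    /* mul_hu_lu */
  uint64_t t1 = b_hi * a_lo;    /* mul_hu_lu */
  uint64_t t2 = a_lo * b_lo;    /* mul_lu_lu */
  uint64_t t3 = a_hi * b_hi;    /* mul_hu_hu */

  uint64_t mid = (t0 << 32) + (t1 << 32);
  uint64_t lo  = t2 + mid;
  uint64_t c0  = mid < (t0 << 32);   /* carry out of the mid sum */
  uint64_t c1  = lo < t2;            /* carry out of the low sum */

  return t3 + c0 + c1 + (t0 >> 32) + (t1 >> 32);
}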
2132
 
2133
 
2134
/* Implement smuldi3_highpart.  */
2135
void
2136
tilegx_expand_smuldi3_highpart (rtx op0, rtx op1, rtx op2)
2137
{
2138
  tilegx_expand_high_multiply (op0, op1, op2, true);
2139
}
2140
 
2141
 
2142
/* Implement umuldi3_highpart.  */
2143
void
2144
tilegx_expand_umuldi3_highpart (rtx op0, rtx op1, rtx op2)
2145
{
2146
  tilegx_expand_high_multiply (op0, op1, op2, false);
2147
}
2148
 
2149
 
2150
 
2151
/* Compare and branches  */
2152
 
2153
/* Produce the rtx yielding a bool for a floating point
2154
   comparison.  */
2155
static bool
2156
tilegx_emit_fp_setcc (rtx res, enum rtx_code code, enum machine_mode mode,
2157
                      rtx op0, rtx op1)
2158
{
2159
  /* TODO: Certain compares against constants can be done using
     entirely integer operations.  But you have to get the special
     cases right, e.g. NaN, +0 == -0, etc.  */
2162
 
2163
  rtx flags;
2164
  int flag_index;
2165
  rtx a = force_reg (DImode, gen_lowpart (DImode, op0));
2166
  rtx b = force_reg (DImode, gen_lowpart (DImode, op1));
2167
 
2168
  flags = gen_reg_rtx (DImode);
2169
 
2170
  if (mode == SFmode)
2171
    {
2172
      emit_insn (gen_insn_fsingle_add1 (flags, a, b));
2173
    }
2174
  else
2175
    {
2176
      gcc_assert (mode == DFmode);
2177
      emit_insn (gen_insn_fdouble_add_flags (flags, a, b));
2178
    }
2179
 
2180
  switch (code)
2181
    {
2182
    case EQ: flag_index = 30; break;
2183
    case NE: flag_index = 31; break;
2184
    case LE: flag_index = 27; break;
2185
    case LT: flag_index = 26; break;
2186
    case GE: flag_index = 29; break;
2187
    case GT: flag_index = 28; break;
2188
    default: gcc_unreachable ();
2189
    }
2190
 
2191
  gcc_assert (GET_MODE (res) == DImode);
2192
  emit_move_insn (res, gen_rtx_ZERO_EXTRACT (DImode, flags, GEN_INT (1),
2193
                                             GEN_INT (flag_index)));
2194
  return true;
2195
}
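 
/* Minimal sketch (not part of GCC): the flag word produced by
   fdouble_add_flags/fsingle_add1 packs every comparison outcome, and
   the setcc above is just a one-bit field extract at the index chosen
   by the switch (e.g. bit 30 for EQ, bit 26 for LT).  */
#include <stdint.h>

static uint64_t
fp_setcc_bit (uint64_t flags, int flag_index)
{
  return (flags >> flag_index) & 1;  /* the ZERO_EXTRACT above */
}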
2196
 
2197
 
2198
/* Certain simplifications can be done to make invalid setcc
   operations valid.  Emit the final comparison and return true on
   success.  */
2201
static bool
2202
tilegx_emit_setcc_internal (rtx res, enum rtx_code code, rtx op0, rtx op1,
2203
                            enum machine_mode cmp_mode)
2204
{
2205
  rtx tmp;
2206
  bool swap = false;
2207
 
2208
  if (cmp_mode == SFmode || cmp_mode == DFmode)
2209
    return tilegx_emit_fp_setcc (res, code, cmp_mode, op0, op1);
2210
 
2211
  /* The general case: fold the comparison code to the types of
     compares that we have, swapping the operands as necessary.  */
2213
 
2214
  switch (code)
2215
    {
2216
    case EQ:
2217
    case NE:
2218
    case LE:
2219
    case LT:
2220
    case LEU:
2221
    case LTU:
2222
      /* We have these compares.  */
2223
      break;
2224
 
2225
    case GE:
2226
    case GT:
2227
    case GEU:
2228
    case GTU:
2229
      /* We do not have these compares, so we reverse the
2230
         operands.  */
2231
      swap = true;
2232
      break;
2233
 
2234
    default:
2235
      /* We should not have called this with any other code.  */
2236
      gcc_unreachable ();
2237
    }
2238
 
2239
  if (swap)
2240
    {
2241
      code = swap_condition (code);
2242
      tmp = op0, op0 = op1, op1 = tmp;
2243
    }
2244
 
2245
  if (!reg_or_0_operand (op0, cmp_mode))
2246
    op0 = force_reg (cmp_mode, op0);
2247
 
2248
  if (!CONST_INT_P (op1) && !register_operand (op1, cmp_mode))
2249
    op1 = force_reg (cmp_mode, op1);
2250
 
2251
  /* Return the setcc comparison.  */
2252
  emit_insn (gen_rtx_SET (VOIDmode, res,
2253
                          gen_rtx_fmt_ee (code, DImode, op0, op1)));
2254
 
2255
  return true;
2256
}
2257
 
2258
 
2259
/* Implement cstore patterns.  */
2260
bool
2261
tilegx_emit_setcc (rtx operands[], enum machine_mode cmp_mode)
2262
{
2263
  return
2264
    tilegx_emit_setcc_internal (operands[0], GET_CODE (operands[1]),
2265
                                operands[2], operands[3], cmp_mode);
2266
}
2267
 
2268
 
2269
/* Return whether CODE is a signed comparison.  */
2270
static bool
2271
signed_compare_p (enum rtx_code code)
2272
{
2273
  return (code == EQ || code == NE || code == LT || code == LE
2274
          || code == GT || code == GE);
2275
}
2276
 
2277
 
2278
/* Generate the comparison for a DImode conditional branch.  */
2279
static rtx
2280
tilegx_emit_cc_test (enum rtx_code code, rtx op0, rtx op1,
2281
                     enum machine_mode cmp_mode, bool eq_ne_only)
2282
{
2283
  enum rtx_code branch_code;
2284
  rtx temp;
2285
 
2286
  if (cmp_mode == SFmode || cmp_mode == DFmode)
2287
    {
2288
      /* Compute a boolean saying whether the comparison is true.  */
2289
      temp = gen_reg_rtx (DImode);
2290
      tilegx_emit_setcc_internal (temp, code, op0, op1, cmp_mode);
2291
 
2292
      /* Test that flag.  */
2293
      return gen_rtx_fmt_ee (NE, VOIDmode, temp, const0_rtx);
2294
    }
2295
 
2296
  /* Check for a compare against zero using a comparison we can do
2297
     directly.  */
2298
  if (op1 == const0_rtx
2299
      && (code == EQ || code == NE
2300
          || (!eq_ne_only && signed_compare_p (code))))
2301
    {
2302
      op0 = force_reg (cmp_mode, op0);
2303
      return gen_rtx_fmt_ee (code, VOIDmode, op0, const0_rtx);
2304
    }
2305
 
2306
  /* The general case: fold the comparison code to the types of
2307
     compares that we have, choosing the branch as necessary.  */
2308
  switch (code)
2309
    {
2310
    case EQ:
2311
    case LE:
2312
    case LT:
2313
    case LEU:
2314
    case LTU:
2315
      /* We have these compares.  */
2316
      branch_code = NE;
2317
      break;
2318
 
2319
    case NE:
2320
    case GE:
2321
    case GT:
2322
    case GEU:
2323
    case GTU:
2324
      /* These must be reversed (except NE, but let's
2325
         canonicalize).  */
2326
      code = reverse_condition (code);
2327
      branch_code = EQ;
2328
      break;
2329
 
2330
    default:
2331
      gcc_unreachable ();
2332
    }
2333
 
2334
  if (CONST_INT_P (op1) && (!satisfies_constraint_I (op1) || code == LEU))
2335
    {
2336
      HOST_WIDE_INT n = INTVAL (op1);
2337
 
2338
      switch (code)
2339
        {
2340
        case EQ:
2341
          /* Subtract off the value we want to compare against and see
2342
             if we get zero.  This is cheaper than creating a constant
2343
             in a register. Except that subtracting -128 is more
2344
             expensive than seqi to -128, so we leave that alone.  */
2345
          /* ??? Don't do this when comparing against symbols,
2346
             otherwise we'll reduce (&x == 0x1234) to (&x-0x1234 ==
2347
             0), which will be declared false out of hand (at least
2348
             for non-weak).  */
2349
          if (n != -128
2350
              && add_operand (GEN_INT (-n), DImode)
2351
              && !(symbolic_operand (op0, VOIDmode)
2352
                   || (REG_P (op0) && REG_POINTER (op0))))
2353
            {
2354
              /* TODO: Use a SIMD add immediate to hit zero for tiled
2355
                 constants in a single instruction.  */
2356
              if (GET_MODE (op0) != DImode)
2357
                {
2358
                  /* Convert to DImode so we can use addli.  Note that
2359
                     this will not actually generate any code because
2360
                     sign extension from SI -> DI is a no-op.  I don't
2361
                     know if it's safe just to make a paradoxical
2362
                     subreg here though.  */
2363
                  rtx temp2 = gen_reg_rtx (DImode);
2364
                  emit_insn (gen_extendsidi2 (temp2, op0));
2365
                  op0 = temp2;
2366
                }
2367
              else
2368
                {
2369
                  op0 = force_reg (DImode, op0);
2370
                }
2371
              temp = gen_reg_rtx (DImode);
2372
              emit_move_insn (temp, gen_rtx_PLUS (DImode, op0, GEN_INT (-n)));
2373
              return gen_rtx_fmt_ee (reverse_condition (branch_code),
2374
                                     VOIDmode, temp, const0_rtx);
2375
            }
2376
          break;
2377
 
2378
        case LEU:
2379
          if (n == -1)
2380
            break;
2381
          /* FALLTHRU */
2382
 
2383
        case LTU:
2384
          /* Change ((unsigned)x < 0x1000) into !((int)x >> 12), etc.
2385
             We use arithmetic shift right because it's a 3-wide op,
2386
             while logical shift right is not.  */
2387
          {
2388
            int first = exact_log2 (code == LTU ? n : n + 1);
2389
            if (first != -1)
2390
              {
2391
                op0 = force_reg (cmp_mode, op0);
2392
                temp = gen_reg_rtx (cmp_mode);
2393
                emit_move_insn (temp,
2394
                                gen_rtx_ASHIFTRT (cmp_mode, op0,
2395
                                                  GEN_INT (first)));
2396
                return gen_rtx_fmt_ee (reverse_condition (branch_code),
2397
                                       VOIDmode, temp, const0_rtx);
2398
              }
2399
          }
2400
          break;
2401
 
2402
        default:
2403
          break;
2404
        }
2405
    }
2406
 
2407
  /* Compute a flag saying whether we should branch.  */
2408
  temp = gen_reg_rtx (DImode);
2409
  tilegx_emit_setcc_internal (temp, code, op0, op1, cmp_mode);
2410
 
2411
  /* Return the branch comparison.  */
2412
  return gen_rtx_fmt_ee (branch_code, VOIDmode, temp, const0_rtx);
2413
}
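 
/* Illustrative sketch (not part of GCC): the LTU transformation above.
   For a power of two 2^k, (unsigned) x < 2^k holds exactly when the
   arithmetic shift x >> k is zero, so the branch can test a shifted
   value against zero.  */
#include <stdint.h>

static int
ltu_pow2 (uint64_t x, int k)         /* x < ((uint64_t) 1 << k) ?  */
{
  return ((int64_t) x >> k) == 0;    /* one shift, then beqz/bnez */
}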
2414
 
2415
 
2416
/* Generate the comparison for a conditional branch.  */
2417
void
2418
tilegx_emit_conditional_branch (rtx operands[], enum machine_mode cmp_mode)
2419
{
2420
  rtx cmp_rtx =
2421
    tilegx_emit_cc_test (GET_CODE (operands[0]), operands[1], operands[2],
2422
                         cmp_mode, false);
2423
  rtx branch_rtx = gen_rtx_SET (VOIDmode, pc_rtx,
2424
                                gen_rtx_IF_THEN_ELSE (VOIDmode, cmp_rtx,
2425
                                                      gen_rtx_LABEL_REF
2426
                                                      (VOIDmode,
2427
                                                       operands[3]),
2428
                                                      pc_rtx));
2429
  emit_jump_insn (branch_rtx);
2430
}
2431
 
2432
 
2433
/* Implement the mov<mode>cc pattern.  */
2434
rtx
2435
tilegx_emit_conditional_move (rtx cmp)
2436
{
2437
  return
2438
    tilegx_emit_cc_test (GET_CODE (cmp), XEXP (cmp, 0), XEXP (cmp, 1),
2439
                         GET_MODE (XEXP (cmp, 0)), true);
2440
}
2441
 
2442
 
2443
/* Return true if INSN is annotated with a REG_BR_PROB note that
2444
   indicates it's a branch that's predicted taken.  */
2445
static bool
2446
cbranch_predicted_p (rtx insn)
2447
{
2448
  rtx x = find_reg_note (insn, REG_BR_PROB, 0);
2449
 
2450
  if (x)
2451
    {
2452
      int pred_val = INTVAL (XEXP (x, 0));
2453
 
2454
      return pred_val >= REG_BR_PROB_BASE / 2;
2455
    }
2456
 
2457
  return false;
2458
}
2459
 
2460
 
2461
/* Output assembly code for a specific branch instruction, appending
2462
   the branch prediction flag to the opcode if appropriate.  */
2463
static const char *
2464
tilegx_output_simple_cbranch_with_opcode (rtx insn, const char *opcode,
2465
                                          int regop, bool reverse_predicted)
2466
{
2467
  static char buf[64];
2468
  sprintf (buf, "%s%s\t%%r%d, %%l0", opcode,
2469
           (cbranch_predicted_p (insn) ^ reverse_predicted) ? "t" : "",
2470
           regop);
2471
  return buf;
2472
}
2473
 
2474
 
2475
/* Output assembly code for a specific branch instruction, falling
   back to a reversed branch around a direct jump when the target is
   out of range.  */
2477
const char *
2478
tilegx_output_cbranch_with_opcode (rtx insn, rtx *operands,
2479
                                   const char *opcode,
2480
                                   const char *rev_opcode, int regop)
2481
{
2482
  const char *branch_if_false;
2483
  rtx taken, not_taken;
2484
  bool is_simple_branch;
2485
 
2486
  gcc_assert (LABEL_P (operands[0]));
2487
 
2488
  is_simple_branch = true;
2489
  if (INSN_ADDRESSES_SET_P ())
2490
    {
2491
      int from_addr = INSN_ADDRESSES (INSN_UID (insn));
2492
      int to_addr = INSN_ADDRESSES (INSN_UID (operands[0]));
2493
      int delta = to_addr - from_addr;
2494
      is_simple_branch = IN_RANGE (delta, -524288, 524280);
2495
    }
2496
 
2497
  if (is_simple_branch)
2498
    {
2499
      /* Just a simple conditional branch.  */
2500
      return
2501
        tilegx_output_simple_cbranch_with_opcode (insn, opcode, regop, false);
2502
    }
2503
 
2504
  /* Generate a reversed branch around a direct jump.  This fallback
2505
     does not use branch-likely instructions.  */
2506
  not_taken = gen_label_rtx ();
2507
  taken = operands[0];
2508
 
2509
  /* Generate the reversed branch to NOT_TAKEN.  */
2510
  operands[0] = not_taken;
2511
  branch_if_false =
2512
    tilegx_output_simple_cbranch_with_opcode (insn, rev_opcode, regop, true);
2513
  output_asm_insn (branch_if_false, operands);
2514
 
2515
  output_asm_insn ("j\t%l0", &taken);
2516
 
2517
  /* Output NOT_TAKEN.  */
2518
  targetm.asm_out.internal_label (asm_out_file, "L",
2519
                                  CODE_LABEL_NUMBER (not_taken));
2520
  return "";
2521
}
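 
/* Minimal sketch (not part of GCC): for an out-of-range conditional
   branch the fallback above wraps the reversed short branch around a
   full-range direct jump, e.g. for "beqz r2, target":

       bnez  r2, .Lnot_taken    # reversed test, prediction flag flipped
       j     target             # unconditional jump reaches anywhere
   .Lnot_taken:                 # fall-through label
*/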
2522
 
2523
 
2524
/* Output assembly code for a conditional branch instruction.  */
2525
const char *
2526
tilegx_output_cbranch (rtx insn, rtx *operands, bool reversed)
2527
{
2528
  enum rtx_code code = GET_CODE (operands[1]);
2529
  const char *opcode;
2530
  const char *rev_opcode;
2531
 
2532
  if (reversed)
2533
    code = reverse_condition (code);
2534
 
2535
  switch (code)
2536
    {
2537
    case NE:
2538
      opcode = "bnez";
2539
      rev_opcode = "beqz";
2540
      break;
2541
    case EQ:
2542
      opcode = "beqz";
2543
      rev_opcode = "bnez";
2544
      break;
2545
    case GE:
2546
      opcode = "bgez";
2547
      rev_opcode = "bltz";
2548
      break;
2549
    case GT:
2550
      opcode = "bgtz";
2551
      rev_opcode = "blez";
2552
      break;
2553
    case LE:
2554
      opcode = "blez";
2555
      rev_opcode = "bgtz";
2556
      break;
2557
    case LT:
2558
      opcode = "bltz";
2559
      rev_opcode = "bgez";
2560
      break;
2561
    default:
2562
      gcc_unreachable ();
2563
    }
2564
 
2565
  return tilegx_output_cbranch_with_opcode (insn, operands, opcode,
2566
                                            rev_opcode, 2);
2567
}
2568
 
2569
 
2570
/* Implement the tablejump pattern.  */
2571
void
2572
tilegx_expand_tablejump (rtx op0, rtx op1)
2573
{
2574
  if (flag_pic)
2575
    {
2576
      rtx temp = gen_reg_rtx (Pmode);
2577
      rtx temp2 = gen_reg_rtx (Pmode);
2578
 
2579
      compute_pcrel_address (temp, gen_rtx_LABEL_REF (Pmode, op1));
2580
      emit_move_insn (temp2,
2581
                      gen_rtx_PLUS (Pmode,
2582
                                    convert_to_mode (Pmode, op0, false),
2583
                                    temp));
2584
      op0 = temp2;
2585
    }
2586
 
2587
  emit_jump_insn (gen_tablejump_aux (op0, op1));
2588
}
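 
/* Minimal sketch (not part of GCC): under -fPIC the case table holds
   label-relative entries, so the code above forms the branch target as
   the table label's address plus the fetched entry.  Host-C analogue
   with hypothetical names and entry width:  */
#include <stdint.h>

static void *
pic_table_target (const int32_t *table, long index)
{
  return (char *) table + table[index];   /* label base + offset entry */
}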
2589
 
2590
 
2591
/* Emit barrier before an atomic, as needed for the memory MODEL.  */
2592
void
2593
tilegx_pre_atomic_barrier (enum memmodel model)
2594
{
2595
  switch (model)
2596
    {
2597
    case MEMMODEL_RELAXED:
2598
    case MEMMODEL_CONSUME:
2599
    case MEMMODEL_ACQUIRE:
2600
      break;
2601
    case MEMMODEL_RELEASE:
2602
    case MEMMODEL_ACQ_REL:
2603
    case MEMMODEL_SEQ_CST:
2604
      emit_insn (gen_memory_barrier ());
2605
      break;
2606
    default:
2607
      gcc_unreachable ();
2608
    }
2609
}
2610
 
2611
 
2612
/* Emit barrier after an atomic, as needed for the memory MODEL.  */
2613
void
2614
tilegx_post_atomic_barrier (enum memmodel model)
2615
{
2616
  switch (model)
2617
    {
2618
    case MEMMODEL_RELAXED:
2619
    case MEMMODEL_CONSUME:
2620
    case MEMMODEL_RELEASE:
2621
      break;
2622
    case MEMMODEL_ACQUIRE:
2623
    case MEMMODEL_ACQ_REL:
2624
    case MEMMODEL_SEQ_CST:
2625
      emit_insn (gen_memory_barrier ());
2626
      break;
2627
    default:
2628
      gcc_unreachable ();
2629
    }
2630
}
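 
/* Illustrative analogue (not part of GCC): the same split expressed
   with C11 atomics.  A release-or-stronger model fences before the
   atomic operation, an acquire-or-stronger model fences after it;
   helper names here are hypothetical.  */
#include <stdatomic.h>

static void
pre_barrier (memory_order model)
{
  if (model == memory_order_release || model == memory_order_acq_rel
      || model == memory_order_seq_cst)
    atomic_thread_fence (memory_order_seq_cst);   /* the "mf" above */
}

static void
post_barrier (memory_order model)
{
  if (model == memory_order_acquire || model == memory_order_acq_rel
      || model == memory_order_seq_cst)
    atomic_thread_fence (memory_order_seq_cst);   /* the "mf" above */
}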
2631
 
2632
 
2633
 
2634
/* Expand a builtin vector binary op, by calling gen function GEN
   with operands in the proper modes.  DEST is converted to
   DEST_MODE, and SRC0 and SRC1 (if DO_SRC1 is true) are converted
   to SRC_MODE.  */
2637
void
2638
tilegx_expand_builtin_vector_binop (rtx (*gen) (rtx, rtx, rtx),
2639
                                    enum machine_mode dest_mode,
2640
                                    rtx dest,
2641
                                    enum machine_mode src_mode,
2642
                                    rtx src0, rtx src1, bool do_src1)
2643
{
2644
  dest = gen_lowpart (dest_mode, dest);
2645
 
2646
  if (src0 == const0_rtx)
2647
    src0 = CONST0_RTX (src_mode);
2648
  else
2649
    src0 = gen_lowpart (src_mode, src0);
2650
 
2651
  if (do_src1)
2652
    {
2653
      if (src1 == const0_rtx)
2654
        src1 = CONST0_RTX (src_mode);
2655
      else
2656
        src1 = gen_lowpart (src_mode, src1);
2657
    }
2658
 
2659
  emit_insn ((*gen) (dest, src0, src1));
2660
}
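 
/* Minimal sketch (not part of GCC): the helper above only bridges
   modes.  A typical caller pattern (hypothetical operands) would look
   like:

     tilegx_expand_builtin_vector_binop (gen_insn_v1add, V8QImode,
                                         target, V8QImode, op0, op1,
                                         true);

   i.e. DImode builtin operands are reinterpreted as vectors via
   gen_lowpart before the vector pattern is emitted.  */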
2661
 
2662
 
2663
 
2664
/* Intrinsics  */
2665
 
2666
 
2667
struct tile_builtin_info
2668
{
2669
  enum insn_code icode;
2670
  tree fndecl;
2671
};
2672
 
2673
static struct tile_builtin_info tilegx_builtin_info[TILEGX_BUILTIN_max] = {
2674
  { CODE_FOR_adddi3,                    NULL }, /* add */
2675
  { CODE_FOR_addsi3,                    NULL }, /* addx */
2676
  { CODE_FOR_ssaddsi3,                  NULL }, /* addxsc */
2677
  { CODE_FOR_anddi3,                    NULL }, /* and */
2678
  { CODE_FOR_insn_bfexts,               NULL }, /* bfexts */
2679
  { CODE_FOR_insn_bfextu,               NULL }, /* bfextu */
2680
  { CODE_FOR_insn_bfins,                NULL }, /* bfins */
2681
  { CODE_FOR_clzdi2,                    NULL }, /* clz */
2682
  { CODE_FOR_insn_cmoveqz,              NULL }, /* cmoveqz */
2683
  { CODE_FOR_insn_cmovnez,              NULL }, /* cmovnez */
2684
  { CODE_FOR_insn_cmpeq_didi,           NULL }, /* cmpeq */
2685
  { CODE_FOR_insn_cmpexch,              NULL }, /* cmpexch */
2686
  { CODE_FOR_insn_cmpexch4,             NULL }, /* cmpexch4 */
2687
  { CODE_FOR_insn_cmples_didi,          NULL }, /* cmples */
2688
  { CODE_FOR_insn_cmpleu_didi,          NULL }, /* cmpleu */
2689
  { CODE_FOR_insn_cmplts_didi,          NULL }, /* cmplts */
2690
  { CODE_FOR_insn_cmpltu_didi,          NULL }, /* cmpltu */
2691
  { CODE_FOR_insn_cmpne_didi,           NULL }, /* cmpne */
2692
  { CODE_FOR_insn_cmul,                 NULL }, /* cmul */
2693
  { CODE_FOR_insn_cmula,                NULL }, /* cmula */
2694
  { CODE_FOR_insn_cmulaf,               NULL }, /* cmulaf */
2695
  { CODE_FOR_insn_cmulf,                NULL }, /* cmulf */
2696
  { CODE_FOR_insn_cmulfr,               NULL }, /* cmulfr */
2697
  { CODE_FOR_insn_cmulh,                NULL }, /* cmulh */
2698
  { CODE_FOR_insn_cmulhr,               NULL }, /* cmulhr */
2699
  { CODE_FOR_insn_crc32_32,             NULL }, /* crc32_32 */
2700
  { CODE_FOR_insn_crc32_8,              NULL }, /* crc32_8 */
2701
  { CODE_FOR_ctzdi2,                    NULL }, /* ctz */
2702
  { CODE_FOR_insn_dblalign,             NULL }, /* dblalign */
2703
  { CODE_FOR_insn_dblalign2,            NULL }, /* dblalign2 */
2704
  { CODE_FOR_insn_dblalign4,            NULL }, /* dblalign4 */
2705
  { CODE_FOR_insn_dblalign6,            NULL }, /* dblalign6 */
2706
  { CODE_FOR_insn_drain,                NULL }, /* drain */
2707
  { CODE_FOR_insn_dtlbpr,               NULL }, /* dtlbpr */
2708
  { CODE_FOR_insn_exch,                 NULL }, /* exch */
2709
  { CODE_FOR_insn_exch4,                NULL }, /* exch4 */
2710
  { CODE_FOR_insn_fdouble_add_flags,    NULL }, /* fdouble_add_flags */
2711
  { CODE_FOR_insn_fdouble_addsub,       NULL }, /* fdouble_addsub */
2712
  { CODE_FOR_insn_fdouble_mul_flags,    NULL }, /* fdouble_mul_flags */
2713
  { CODE_FOR_insn_fdouble_pack1,        NULL }, /* fdouble_pack1 */
2714
  { CODE_FOR_insn_fdouble_pack2,        NULL }, /* fdouble_pack2 */
2715
  { CODE_FOR_insn_fdouble_sub_flags,    NULL }, /* fdouble_sub_flags */
2716
  { CODE_FOR_insn_fdouble_unpack_max,   NULL }, /* fdouble_unpack_max */
2717
  { CODE_FOR_insn_fdouble_unpack_min,   NULL }, /* fdouble_unpack_min */
2718
  { CODE_FOR_insn_fetchadd,             NULL }, /* fetchadd */
2719
  { CODE_FOR_insn_fetchadd4,            NULL }, /* fetchadd4 */
2720
  { CODE_FOR_insn_fetchaddgez,          NULL }, /* fetchaddgez */
2721
  { CODE_FOR_insn_fetchaddgez4,         NULL }, /* fetchaddgez4 */
2722
  { CODE_FOR_insn_fetchand,             NULL }, /* fetchand */
2723
  { CODE_FOR_insn_fetchand4,            NULL }, /* fetchand4 */
2724
  { CODE_FOR_insn_fetchor,              NULL }, /* fetchor */
2725
  { CODE_FOR_insn_fetchor4,             NULL }, /* fetchor4 */
2726
  { CODE_FOR_insn_finv,                 NULL }, /* finv */
2727
  { CODE_FOR_insn_flush,                NULL }, /* flush */
2728
  { CODE_FOR_insn_flushwb,              NULL }, /* flushwb */
2729
  { CODE_FOR_insn_fnop,                 NULL }, /* fnop */
2730
  { CODE_FOR_insn_fsingle_add1,         NULL }, /* fsingle_add1 */
2731
  { CODE_FOR_insn_fsingle_addsub2,      NULL }, /* fsingle_addsub2 */
2732
  { CODE_FOR_insn_fsingle_mul1,         NULL }, /* fsingle_mul1 */
2733
  { CODE_FOR_insn_fsingle_mul2,         NULL }, /* fsingle_mul2 */
2734
  { CODE_FOR_insn_fsingle_pack1,        NULL }, /* fsingle_pack1 */
2735
  { CODE_FOR_insn_fsingle_pack2,        NULL }, /* fsingle_pack2 */
2736
  { CODE_FOR_insn_fsingle_sub1,         NULL }, /* fsingle_sub1 */
2737
  { CODE_FOR_insn_icoh,                 NULL }, /* icoh */
2738
  { CODE_FOR_insn_ill,                  NULL }, /* ill */
2739
  { CODE_FOR_insn_info,                 NULL }, /* info */
2740
  { CODE_FOR_insn_infol,                NULL }, /* infol */
2741
  { CODE_FOR_insn_inv,                  NULL }, /* inv */
2742
  { CODE_FOR_insn_ld,                   NULL }, /* ld */
2743
  { CODE_FOR_insn_ld1s,                 NULL }, /* ld1s */
2744
  { CODE_FOR_insn_ld1u,                 NULL }, /* ld1u */
2745
  { CODE_FOR_insn_ld2s,                 NULL }, /* ld2s */
2746
  { CODE_FOR_insn_ld2u,                 NULL }, /* ld2u */
2747
  { CODE_FOR_insn_ld4s,                 NULL }, /* ld4s */
2748
  { CODE_FOR_insn_ld4u,                 NULL }, /* ld4u */
2749
  { CODE_FOR_insn_ldna,                 NULL }, /* ldna */
2750
  { CODE_FOR_insn_ldnt,                 NULL }, /* ldnt */
2751
  { CODE_FOR_insn_ldnt1s,               NULL }, /* ldnt1s */
2752
  { CODE_FOR_insn_ldnt1u,               NULL }, /* ldnt1u */
2753
  { CODE_FOR_insn_ldnt2s,               NULL }, /* ldnt2s */
2754
  { CODE_FOR_insn_ldnt2u,               NULL }, /* ldnt2u */
2755
  { CODE_FOR_insn_ldnt4s,               NULL }, /* ldnt4s */
2756
  { CODE_FOR_insn_ldnt4u,               NULL }, /* ldnt4u */
2757
  { CODE_FOR_insn_ld_L2,                NULL }, /* ld_L2 */
2758
  { CODE_FOR_insn_ld1s_L2,              NULL }, /* ld1s_L2 */
2759
  { CODE_FOR_insn_ld1u_L2,              NULL }, /* ld1u_L2 */
2760
  { CODE_FOR_insn_ld2s_L2,              NULL }, /* ld2s_L2 */
2761
  { CODE_FOR_insn_ld2u_L2,              NULL }, /* ld2u_L2 */
2762
  { CODE_FOR_insn_ld4s_L2,              NULL }, /* ld4s_L2 */
2763
  { CODE_FOR_insn_ld4u_L2,              NULL }, /* ld4u_L2 */
2764
  { CODE_FOR_insn_ldna_L2,              NULL }, /* ldna_L2 */
2765
  { CODE_FOR_insn_ldnt_L2,              NULL }, /* ldnt_L2 */
2766
  { CODE_FOR_insn_ldnt1s_L2,            NULL }, /* ldnt1s_L2 */
2767
  { CODE_FOR_insn_ldnt1u_L2,            NULL }, /* ldnt1u_L2 */
2768
  { CODE_FOR_insn_ldnt2s_L2,            NULL }, /* ldnt2s_L2 */
2769
  { CODE_FOR_insn_ldnt2u_L2,            NULL }, /* ldnt2u_L2 */
2770
  { CODE_FOR_insn_ldnt4s_L2,            NULL }, /* ldnt4s_L2 */
2771
  { CODE_FOR_insn_ldnt4u_L2,            NULL }, /* ldnt4u_L2 */
2772
  { CODE_FOR_insn_ld_miss,              NULL }, /* ld_miss */
2773
  { CODE_FOR_insn_ld1s_miss,            NULL }, /* ld1s_miss */
2774
  { CODE_FOR_insn_ld1u_miss,            NULL }, /* ld1u_miss */
2775
  { CODE_FOR_insn_ld2s_miss,            NULL }, /* ld2s_miss */
2776
  { CODE_FOR_insn_ld2u_miss,            NULL }, /* ld2u_miss */
2777
  { CODE_FOR_insn_ld4s_miss,            NULL }, /* ld4s_miss */
2778
  { CODE_FOR_insn_ld4u_miss,            NULL }, /* ld4u_miss */
2779
  { CODE_FOR_insn_ldna_miss,            NULL }, /* ldna_miss */
2780
  { CODE_FOR_insn_ldnt_miss,            NULL }, /* ldnt_miss */
2781
  { CODE_FOR_insn_ldnt1s_miss,          NULL }, /* ldnt1s_miss */
2782
  { CODE_FOR_insn_ldnt1u_miss,          NULL }, /* ldnt1u_miss */
2783
  { CODE_FOR_insn_ldnt2s_miss,          NULL }, /* ldnt2s_miss */
2784
  { CODE_FOR_insn_ldnt2u_miss,          NULL }, /* ldnt2u_miss */
2785
  { CODE_FOR_insn_ldnt4s_miss,          NULL }, /* ldnt4s_miss */
2786
  { CODE_FOR_insn_ldnt4u_miss,          NULL }, /* ldnt4u_miss */
2787
  { CODE_FOR_insn_lnk,                  NULL }, /* lnk */
2788
  { CODE_FOR_memory_barrier,            NULL }, /* mf */
2789
  { CODE_FOR_insn_mfspr,                NULL }, /* mfspr */
2790
  { CODE_FOR_insn_mm,                   NULL }, /* mm */
2791
  { CODE_FOR_insn_mnz,                  NULL }, /* mnz */
2792
  { CODE_FOR_movdi,                     NULL }, /* move */
2793
  { CODE_FOR_insn_mtspr,                NULL }, /* mtspr */
2794
  { CODE_FOR_insn_mul_hs_hs,            NULL }, /* mul_hs_hs */
2795
  { CODE_FOR_insn_mul_hs_hu,            NULL }, /* mul_hs_hu */
2796
  { CODE_FOR_insn_mul_hs_ls,            NULL }, /* mul_hs_ls */
2797
  { CODE_FOR_insn_mul_hs_lu,            NULL }, /* mul_hs_lu */
2798
  { CODE_FOR_insn_mul_hu_hu,            NULL }, /* mul_hu_hu */
2799
  { CODE_FOR_insn_mul_hu_ls,            NULL }, /* mul_hu_ls */
2800
  { CODE_FOR_insn_mul_hu_lu,            NULL }, /* mul_hu_lu */
2801
  { CODE_FOR_insn_mul_ls_ls,            NULL }, /* mul_ls_ls */
2802
  { CODE_FOR_insn_mul_ls_lu,            NULL }, /* mul_ls_lu */
2803
  { CODE_FOR_insn_mul_lu_lu,            NULL }, /* mul_lu_lu */
2804
  { CODE_FOR_insn_mula_hs_hs,           NULL }, /* mula_hs_hs */
2805
  { CODE_FOR_insn_mula_hs_hu,           NULL }, /* mula_hs_hu */
2806
  { CODE_FOR_insn_mula_hs_ls,           NULL }, /* mula_hs_ls */
2807
  { CODE_FOR_insn_mula_hs_lu,           NULL }, /* mula_hs_lu */
2808
  { CODE_FOR_insn_mula_hu_hu,           NULL }, /* mula_hu_hu */
2809
  { CODE_FOR_insn_mula_hu_ls,           NULL }, /* mula_hu_ls */
2810
  { CODE_FOR_insn_mula_hu_lu,           NULL }, /* mula_hu_lu */
2811
  { CODE_FOR_insn_mula_ls_ls,           NULL }, /* mula_ls_ls */
2812
  { CODE_FOR_insn_mula_ls_lu,           NULL }, /* mula_ls_lu */
2813
  { CODE_FOR_insn_mula_lu_lu,           NULL }, /* mula_lu_lu */
2814
  { CODE_FOR_insn_mulax,                NULL }, /* mulax */
2815
  { CODE_FOR_mulsi3,                    NULL }, /* mulx */
2816
  { CODE_FOR_insn_mz,                   NULL }, /* mz */
2817
  { CODE_FOR_insn_nap,                  NULL }, /* nap */
2818
  { CODE_FOR_nop,                       NULL }, /* nop */
2819
  { CODE_FOR_insn_nor_di,               NULL }, /* nor */
2820
  { CODE_FOR_iordi3,                    NULL }, /* or */
2821
  { CODE_FOR_popcountdi2,               NULL }, /* pcnt */
2822
  { CODE_FOR_insn_prefetch_l1,          NULL }, /* prefetch_l1 */
2823
  { CODE_FOR_insn_prefetch_l1_fault,    NULL }, /* prefetch_l1_fault */
2824
  { CODE_FOR_insn_prefetch_l2,          NULL }, /* prefetch_l2 */
2825
  { CODE_FOR_insn_prefetch_l2_fault,    NULL }, /* prefetch_l2_fault */
2826
  { CODE_FOR_insn_prefetch_l3,          NULL }, /* prefetch_l3 */
2827
  { CODE_FOR_insn_prefetch_l3_fault,    NULL }, /* prefetch_l3_fault */
2828
  { CODE_FOR_insn_revbits,              NULL }, /* revbits */
2829
  { CODE_FOR_bswapdi2,                  NULL }, /* revbytes */
2830
  { CODE_FOR_rotldi3,                   NULL }, /* rotl */
2831
  { CODE_FOR_ashldi3,                   NULL }, /* shl */
2832
  { CODE_FOR_insn_shl16insli,           NULL }, /* shl16insli */
2833
  { CODE_FOR_insn_shl1add,              NULL }, /* shl1add */
2834
  { CODE_FOR_insn_shl1addx,             NULL }, /* shl1addx */
2835
  { CODE_FOR_insn_shl2add,              NULL }, /* shl2add */
2836
  { CODE_FOR_insn_shl2addx,             NULL }, /* shl2addx */
2837
  { CODE_FOR_insn_shl3add,              NULL }, /* shl3add */
2838
  { CODE_FOR_insn_shl3addx,             NULL }, /* shl3addx */
2839
  { CODE_FOR_ashlsi3,                   NULL }, /* shlx */
2840
  { CODE_FOR_ashrdi3,                   NULL }, /* shrs */
2841
  { CODE_FOR_lshrdi3,                   NULL }, /* shru */
2842
  { CODE_FOR_lshrsi3,                   NULL }, /* shrux */
2843
  { CODE_FOR_insn_shufflebytes,         NULL }, /* shufflebytes */
2844
  { CODE_FOR_insn_st,                   NULL }, /* st */
2845
  { CODE_FOR_insn_st1,                  NULL }, /* st1 */
2846
  { CODE_FOR_insn_st2,                  NULL }, /* st2 */
2847
  { CODE_FOR_insn_st4,                  NULL }, /* st4 */
2848
  { CODE_FOR_insn_stnt,                 NULL }, /* stnt */
2849
  { CODE_FOR_insn_stnt1,                NULL }, /* stnt1 */
2850
  { CODE_FOR_insn_stnt2,                NULL }, /* stnt2 */
2851
  { CODE_FOR_insn_stnt4,                NULL }, /* stnt4 */
2852
  { CODE_FOR_subdi3,                    NULL }, /* sub */
2853
  { CODE_FOR_subsi3,                    NULL }, /* subx */
2854
  { CODE_FOR_sssubsi3,                  NULL }, /* subxsc */
2855
  { CODE_FOR_insn_tblidxb0,             NULL }, /* tblidxb0 */
2856
  { CODE_FOR_insn_tblidxb1,             NULL }, /* tblidxb1 */
2857
  { CODE_FOR_insn_tblidxb2,             NULL }, /* tblidxb2 */
2858
  { CODE_FOR_insn_tblidxb3,             NULL }, /* tblidxb3 */
2859
  { CODE_FOR_insn_v1add,                NULL }, /* v1add */
2860
  { CODE_FOR_insn_v1addi,               NULL }, /* v1addi */
2861
  { CODE_FOR_insn_v1adduc,              NULL }, /* v1adduc */
2862
  { CODE_FOR_insn_v1adiffu,             NULL }, /* v1adiffu */
2863
  { CODE_FOR_insn_v1avgu,               NULL }, /* v1avgu */
2864
  { CODE_FOR_insn_v1cmpeq,              NULL }, /* v1cmpeq */
2865
  { CODE_FOR_insn_v1cmpeqi,             NULL }, /* v1cmpeqi */
2866
  { CODE_FOR_insn_v1cmples,             NULL }, /* v1cmples */
2867
  { CODE_FOR_insn_v1cmpleu,             NULL }, /* v1cmpleu */
2868
  { CODE_FOR_insn_v1cmplts,             NULL }, /* v1cmplts */
2869
  { CODE_FOR_insn_v1cmpltsi,            NULL }, /* v1cmpltsi */
2870
  { CODE_FOR_insn_v1cmpltu,             NULL }, /* v1cmpltu */
2871
  { CODE_FOR_insn_v1cmpltui,            NULL }, /* v1cmpltui */
2872
  { CODE_FOR_insn_v1cmpne,              NULL }, /* v1cmpne */
2873
  { CODE_FOR_insn_v1ddotpu,             NULL }, /* v1ddotpu */
2874
  { CODE_FOR_insn_v1ddotpua,            NULL }, /* v1ddotpua */
2875
  { CODE_FOR_insn_v1ddotpus,            NULL }, /* v1ddotpus */
2876
  { CODE_FOR_insn_v1ddotpusa,           NULL }, /* v1ddotpusa */
2877
  { CODE_FOR_insn_v1dotp,               NULL }, /* v1dotp */
2878
  { CODE_FOR_insn_v1dotpa,              NULL }, /* v1dotpa */
2879
  { CODE_FOR_insn_v1dotpu,              NULL }, /* v1dotpu */
2880
  { CODE_FOR_insn_v1dotpua,             NULL }, /* v1dotpua */
2881
  { CODE_FOR_insn_v1dotpus,             NULL }, /* v1dotpus */
2882
  { CODE_FOR_insn_v1dotpusa,            NULL }, /* v1dotpusa */
2883
  { CODE_FOR_insn_v1int_h,              NULL }, /* v1int_h */
2884
  { CODE_FOR_insn_v1int_l,              NULL }, /* v1int_l */
2885
  { CODE_FOR_insn_v1maxu,               NULL }, /* v1maxu */
2886
  { CODE_FOR_insn_v1maxui,              NULL }, /* v1maxui */
2887
  { CODE_FOR_insn_v1minu,               NULL }, /* v1minu */
2888
  { CODE_FOR_insn_v1minui,              NULL }, /* v1minui */
2889
  { CODE_FOR_insn_v1mnz,                NULL }, /* v1mnz */
2890
  { CODE_FOR_insn_v1multu,              NULL }, /* v1multu */
2891
  { CODE_FOR_insn_v1mulu,               NULL }, /* v1mulu */
2892
  { CODE_FOR_insn_v1mulus,              NULL }, /* v1mulus */
2893
  { CODE_FOR_insn_v1mz,                 NULL }, /* v1mz */
2894
  { CODE_FOR_insn_v1sadau,              NULL }, /* v1sadau */
2895
  { CODE_FOR_insn_v1sadu,               NULL }, /* v1sadu */
2896
  { CODE_FOR_insn_v1shl,                NULL }, /* v1shl */
2897
  { CODE_FOR_insn_v1shl,                NULL }, /* v1shli */
2898
  { CODE_FOR_insn_v1shrs,               NULL }, /* v1shrs */
2899
  { CODE_FOR_insn_v1shrs,               NULL }, /* v1shrsi */
2900
  { CODE_FOR_insn_v1shru,               NULL }, /* v1shru */
2901
  { CODE_FOR_insn_v1shru,               NULL }, /* v1shrui */
2902
  { CODE_FOR_insn_v1sub,                NULL }, /* v1sub */
2903
  { CODE_FOR_insn_v1subuc,              NULL }, /* v1subuc */
2904
  { CODE_FOR_insn_v2add,                NULL }, /* v2add */
2905
  { CODE_FOR_insn_v2addi,               NULL }, /* v2addi */
2906
  { CODE_FOR_insn_v2addsc,              NULL }, /* v2addsc */
2907
  { CODE_FOR_insn_v2adiffs,             NULL }, /* v2adiffs */
2908
  { CODE_FOR_insn_v2avgs,               NULL }, /* v2avgs */
2909
  { CODE_FOR_insn_v2cmpeq,              NULL }, /* v2cmpeq */
2910
  { CODE_FOR_insn_v2cmpeqi,             NULL }, /* v2cmpeqi */
2911
  { CODE_FOR_insn_v2cmples,             NULL }, /* v2cmples */
2912
  { CODE_FOR_insn_v2cmpleu,             NULL }, /* v2cmpleu */
2913
  { CODE_FOR_insn_v2cmplts,             NULL }, /* v2cmplts */
2914
  { CODE_FOR_insn_v2cmpltsi,            NULL }, /* v2cmpltsi */
2915
  { CODE_FOR_insn_v2cmpltu,             NULL }, /* v2cmpltu */
2916
  { CODE_FOR_insn_v2cmpltui,            NULL }, /* v2cmpltui */
2917
  { CODE_FOR_insn_v2cmpne,              NULL }, /* v2cmpne */
2918
  { CODE_FOR_insn_v2dotp,               NULL }, /* v2dotp */
2919
  { CODE_FOR_insn_v2dotpa,              NULL }, /* v2dotpa */
2920
  { CODE_FOR_insn_v2int_h,              NULL }, /* v2int_h */
2921
  { CODE_FOR_insn_v2int_l,              NULL }, /* v2int_l */
2922
  { CODE_FOR_insn_v2maxs,               NULL }, /* v2maxs */
2923
  { CODE_FOR_insn_v2maxsi,              NULL }, /* v2maxsi */
2924
  { CODE_FOR_insn_v2mins,               NULL }, /* v2mins */
2925
  { CODE_FOR_insn_v2minsi,              NULL }, /* v2minsi */
2926
  { CODE_FOR_insn_v2mnz,                NULL }, /* v2mnz */
2927
  { CODE_FOR_insn_v2mulfsc,             NULL }, /* v2mulfsc */
2928
  { CODE_FOR_insn_v2muls,               NULL }, /* v2muls */
2929
  { CODE_FOR_insn_v2mults,              NULL }, /* v2mults */
2930
  { CODE_FOR_insn_v2mz,                 NULL }, /* v2mz */
2931
  { CODE_FOR_insn_v2packh,              NULL }, /* v2packh */
2932
  { CODE_FOR_insn_v2packl,              NULL }, /* v2packl */
2933
  { CODE_FOR_insn_v2packuc,             NULL }, /* v2packuc */
2934
  { CODE_FOR_insn_v2sadas,              NULL }, /* v2sadas */
2935
  { CODE_FOR_insn_v2sadau,              NULL }, /* v2sadau */
2936
  { CODE_FOR_insn_v2sads,               NULL }, /* v2sads */
2937
  { CODE_FOR_insn_v2sadu,               NULL }, /* v2sadu */
2938
  { CODE_FOR_insn_v2shl,                NULL }, /* v2shl */
2939
  { CODE_FOR_insn_v2shl,                NULL }, /* v2shli */
2940
  { CODE_FOR_insn_v2shlsc,              NULL }, /* v2shlsc */
2941
  { CODE_FOR_insn_v2shrs,               NULL }, /* v2shrs */
2942
  { CODE_FOR_insn_v2shrs,               NULL }, /* v2shrsi */
2943
  { CODE_FOR_insn_v2shru,               NULL }, /* v2shru */
2944
  { CODE_FOR_insn_v2shru,               NULL }, /* v2shrui */
2945
  { CODE_FOR_insn_v2sub,                NULL }, /* v2sub */
2946
  { CODE_FOR_insn_v2subsc,              NULL }, /* v2subsc */
2947
  { CODE_FOR_insn_v4add,                NULL }, /* v4add */
2948
  { CODE_FOR_insn_v4addsc,              NULL }, /* v4addsc */
2949
  { CODE_FOR_insn_v4int_h,              NULL }, /* v4int_h */
2950
  { CODE_FOR_insn_v4int_l,              NULL }, /* v4int_l */
2951
  { CODE_FOR_insn_v4packsc,             NULL }, /* v4packsc */
2952
  { CODE_FOR_insn_v4shl,                NULL }, /* v4shl */
2953
  { CODE_FOR_insn_v4shlsc,              NULL }, /* v4shlsc */
2954
  { CODE_FOR_insn_v4shrs,               NULL }, /* v4shrs */
2955
  { CODE_FOR_insn_v4shru,               NULL }, /* v4shru */
2956
  { CODE_FOR_insn_v4sub,                NULL }, /* v4sub */
2957
  { CODE_FOR_insn_v4subsc,              NULL }, /* v4subsc */
2958
  { CODE_FOR_insn_wh64,                 NULL }, /* wh64 */
2959
  { CODE_FOR_xordi3,                    NULL }, /* xor */
2960
  { CODE_FOR_tilegx_network_barrier,    NULL }, /* network_barrier */
2961
  { CODE_FOR_tilegx_idn0_receive,       NULL }, /* idn0_receive */
2962
  { CODE_FOR_tilegx_idn1_receive,       NULL }, /* idn1_receive */
2963
  { CODE_FOR_tilegx_idn_send,           NULL }, /* idn_send */
2964
  { CODE_FOR_tilegx_udn0_receive,       NULL }, /* udn0_receive */
2965
  { CODE_FOR_tilegx_udn1_receive,       NULL }, /* udn1_receive */
2966
  { CODE_FOR_tilegx_udn2_receive,       NULL }, /* udn2_receive */
2967
  { CODE_FOR_tilegx_udn3_receive,       NULL }, /* udn3_receive */
2968
  { CODE_FOR_tilegx_udn_send,           NULL }, /* udn_send */
2969
};
2970
 
2971
 
2972
struct tilegx_builtin_def
2973
{
2974
  const char *name;
2975
  enum tilegx_builtin code;
2976
  bool is_const;
2977
  /* The first character is the return type.  Subsequent characters
2978
     are the argument types. See char_to_type.  */
2979
  const char *type;
2980
};
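 
/* Judging by the entries below: 'l' is a 64-bit value, 'i' a 32-bit
   value, 'p' a pointer, 'k' a const pointer, and 'v' void -- e.g.
   "lll" is a 64-bit builtin of two 64-bit arguments, and "ipi" the
   4-byte variant taking a pointer.  The precise mapping is defined
   by char_to_type.  */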
2981
 
2982
 
2983
static const struct tilegx_builtin_def tilegx_builtins[] = {
  { "__insn_add",                TILEGX_INSN_ADD,                true,  "lll"  },
  { "__insn_addi",               TILEGX_INSN_ADD,                true,  "lll"  },
  { "__insn_addli",              TILEGX_INSN_ADD,                true,  "lll"  },
  { "__insn_addx",               TILEGX_INSN_ADDX,               true,  "iii"  },
  { "__insn_addxi",              TILEGX_INSN_ADDX,               true,  "iii"  },
  { "__insn_addxli",             TILEGX_INSN_ADDX,               true,  "iii"  },
  { "__insn_addxsc",             TILEGX_INSN_ADDXSC,             true,  "iii"  },
  { "__insn_and",                TILEGX_INSN_AND,                true,  "lll"  },
  { "__insn_andi",               TILEGX_INSN_AND,                true,  "lll"  },
  { "__insn_bfexts",             TILEGX_INSN_BFEXTS,             true,  "llll" },
  { "__insn_bfextu",             TILEGX_INSN_BFEXTU,             true,  "llll" },
  { "__insn_bfins",              TILEGX_INSN_BFINS,              true,  "lllll"},
  { "__insn_clz",                TILEGX_INSN_CLZ,                true,  "ll"   },
  { "__insn_cmoveqz",            TILEGX_INSN_CMOVEQZ,            true,  "llll" },
  { "__insn_cmovnez",            TILEGX_INSN_CMOVNEZ,            true,  "llll" },
  { "__insn_cmpeq",              TILEGX_INSN_CMPEQ,              true,  "lll"  },
  { "__insn_cmpeqi",             TILEGX_INSN_CMPEQ,              true,  "lll"  },
  { "__insn_cmpexch",            TILEGX_INSN_CMPEXCH,            false, "lpl"  },
  { "__insn_cmpexch4",           TILEGX_INSN_CMPEXCH4,           false, "ipi"  },
  { "__insn_cmples",             TILEGX_INSN_CMPLES,             true,  "lll"  },
  { "__insn_cmpleu",             TILEGX_INSN_CMPLEU,             true,  "lll"  },
  { "__insn_cmplts",             TILEGX_INSN_CMPLTS,             true,  "lll"  },
  { "__insn_cmpltsi",            TILEGX_INSN_CMPLTS,             true,  "lll"  },
  { "__insn_cmpltu",             TILEGX_INSN_CMPLTU,             true,  "lll"  },
  { "__insn_cmpltui",            TILEGX_INSN_CMPLTU,             true,  "lll"  },
  { "__insn_cmpne",              TILEGX_INSN_CMPNE,              true,  "lll"  },
  { "__insn_cmul",               TILEGX_INSN_CMUL,               true,  "lll"  },
  { "__insn_cmula",              TILEGX_INSN_CMULA,              true,  "llll" },
  { "__insn_cmulaf",             TILEGX_INSN_CMULAF,             true,  "llll" },
  { "__insn_cmulf",              TILEGX_INSN_CMULF,              true,  "lll"  },
  { "__insn_cmulfr",             TILEGX_INSN_CMULFR,             true,  "lll"  },
  { "__insn_cmulh",              TILEGX_INSN_CMULH,              true,  "lll"  },
  { "__insn_cmulhr",             TILEGX_INSN_CMULHR,             true,  "lll"  },
  { "__insn_crc32_32",           TILEGX_INSN_CRC32_32,           true,  "lll"  },
  { "__insn_crc32_8",            TILEGX_INSN_CRC32_8,            true,  "lll"  },
  { "__insn_ctz",                TILEGX_INSN_CTZ,                true,  "ll"   },
  { "__insn_dblalign",           TILEGX_INSN_DBLALIGN,           true,  "lllk" },
  { "__insn_dblalign2",          TILEGX_INSN_DBLALIGN2,          true,  "lll"  },
  { "__insn_dblalign4",          TILEGX_INSN_DBLALIGN4,          true,  "lll"  },
  { "__insn_dblalign6",          TILEGX_INSN_DBLALIGN6,          true,  "lll"  },
  { "__insn_drain",              TILEGX_INSN_DRAIN,              false, "v"    },
  { "__insn_dtlbpr",             TILEGX_INSN_DTLBPR,             false, "vl"   },
  { "__insn_exch",               TILEGX_INSN_EXCH,               false, "lpl"  },
  { "__insn_exch4",              TILEGX_INSN_EXCH4,              false, "ipi"  },
  { "__insn_fdouble_add_flags",  TILEGX_INSN_FDOUBLE_ADD_FLAGS,  true,  "lll"  },
  { "__insn_fdouble_addsub",     TILEGX_INSN_FDOUBLE_ADDSUB,     true,  "llll" },
  { "__insn_fdouble_mul_flags",  TILEGX_INSN_FDOUBLE_MUL_FLAGS,  true,  "lll"  },
  { "__insn_fdouble_pack1",      TILEGX_INSN_FDOUBLE_PACK1,      true,  "lll"  },
  { "__insn_fdouble_pack2",      TILEGX_INSN_FDOUBLE_PACK2,      true,  "llll" },
  { "__insn_fdouble_sub_flags",  TILEGX_INSN_FDOUBLE_SUB_FLAGS,  true,  "lll"  },
  { "__insn_fdouble_unpack_max", TILEGX_INSN_FDOUBLE_UNPACK_MAX, true,  "lll"  },
  { "__insn_fdouble_unpack_min", TILEGX_INSN_FDOUBLE_UNPACK_MIN, true,  "lll"  },
  { "__insn_fetchadd",           TILEGX_INSN_FETCHADD,           false, "lpl"  },
  { "__insn_fetchadd4",          TILEGX_INSN_FETCHADD4,          false, "ipi"  },
  { "__insn_fetchaddgez",        TILEGX_INSN_FETCHADDGEZ,        false, "lpl"  },
  { "__insn_fetchaddgez4",       TILEGX_INSN_FETCHADDGEZ4,       false, "ipi"  },
  { "__insn_fetchand",           TILEGX_INSN_FETCHAND,           false, "lpl"  },
  { "__insn_fetchand4",          TILEGX_INSN_FETCHAND4,          false, "ipi"  },
  { "__insn_fetchor",            TILEGX_INSN_FETCHOR,            false, "lpl"  },
  { "__insn_fetchor4",           TILEGX_INSN_FETCHOR4,           false, "ipi"  },
  { "__insn_finv",               TILEGX_INSN_FINV,               false, "vk"   },
  { "__insn_flush",              TILEGX_INSN_FLUSH,              false, "vk"   },
  { "__insn_flushwb",            TILEGX_INSN_FLUSHWB,            false, "v"    },
  { "__insn_fnop",               TILEGX_INSN_FNOP,               false, "v"    },
  { "__insn_fsingle_add1",       TILEGX_INSN_FSINGLE_ADD1,       true,  "lll"  },
  { "__insn_fsingle_addsub2",    TILEGX_INSN_FSINGLE_ADDSUB2,    true,  "llll" },
  { "__insn_fsingle_mul1",       TILEGX_INSN_FSINGLE_MUL1,       true,  "lll"  },
  { "__insn_fsingle_mul2",       TILEGX_INSN_FSINGLE_MUL2,       true,  "lll"  },
  { "__insn_fsingle_pack1",      TILEGX_INSN_FSINGLE_PACK1,      true,  "ll"   },
  { "__insn_fsingle_pack2",      TILEGX_INSN_FSINGLE_PACK2,      true,  "lll"  },
  { "__insn_fsingle_sub1",       TILEGX_INSN_FSINGLE_SUB1,       true,  "lll"  },
  { "__insn_icoh",               TILEGX_INSN_ICOH,               false, "vk"   },
  { "__insn_ill",                TILEGX_INSN_ILL,                false, "v"    },
  { "__insn_info",               TILEGX_INSN_INFO,               false, "vl"   },
  { "__insn_infol",              TILEGX_INSN_INFOL,              false, "vl"   },
  { "__insn_inv",                TILEGX_INSN_INV,                false, "vp"   },
  { "__insn_ld",                 TILEGX_INSN_LD,                 false, "lk"   },
  { "__insn_ld1s",               TILEGX_INSN_LD1S,               false, "lk"   },
  { "__insn_ld1u",               TILEGX_INSN_LD1U,               false, "lk"   },
  { "__insn_ld2s",               TILEGX_INSN_LD2S,               false, "lk"   },
  { "__insn_ld2u",               TILEGX_INSN_LD2U,               false, "lk"   },
  { "__insn_ld4s",               TILEGX_INSN_LD4S,               false, "lk"   },
  { "__insn_ld4u",               TILEGX_INSN_LD4U,               false, "lk"   },
  { "__insn_ldna",               TILEGX_INSN_LDNA,               false, "lk"   },
  { "__insn_ldnt",               TILEGX_INSN_LDNT,               false, "lk"   },
  { "__insn_ldnt1s",             TILEGX_INSN_LDNT1S,             false, "lk"   },
  { "__insn_ldnt1u",             TILEGX_INSN_LDNT1U,             false, "lk"   },
  { "__insn_ldnt2s",             TILEGX_INSN_LDNT2S,             false, "lk"   },
  { "__insn_ldnt2u",             TILEGX_INSN_LDNT2U,             false, "lk"   },
  { "__insn_ldnt4s",             TILEGX_INSN_LDNT4S,             false, "lk"   },
  { "__insn_ldnt4u",             TILEGX_INSN_LDNT4U,             false, "lk"   },
  { "__insn_ld_L2",              TILEGX_INSN_LD_L2,              false, "lk"   },
  { "__insn_ld1s_L2",            TILEGX_INSN_LD1S_L2,            false, "lk"   },
  { "__insn_ld1u_L2",            TILEGX_INSN_LD1U_L2,            false, "lk"   },
  { "__insn_ld2s_L2",            TILEGX_INSN_LD2S_L2,            false, "lk"   },
  { "__insn_ld2u_L2",            TILEGX_INSN_LD2U_L2,            false, "lk"   },
  { "__insn_ld4s_L2",            TILEGX_INSN_LD4S_L2,            false, "lk"   },
  { "__insn_ld4u_L2",            TILEGX_INSN_LD4U_L2,            false, "lk"   },
  { "__insn_ldna_L2",            TILEGX_INSN_LDNA_L2,            false, "lk"   },
  { "__insn_ldnt_L2",            TILEGX_INSN_LDNT_L2,            false, "lk"   },
  { "__insn_ldnt1s_L2",          TILEGX_INSN_LDNT1S_L2,          false, "lk"   },
  { "__insn_ldnt1u_L2",          TILEGX_INSN_LDNT1U_L2,          false, "lk"   },
  { "__insn_ldnt2s_L2",          TILEGX_INSN_LDNT2S_L2,          false, "lk"   },
  { "__insn_ldnt2u_L2",          TILEGX_INSN_LDNT2U_L2,          false, "lk"   },
  { "__insn_ldnt4s_L2",          TILEGX_INSN_LDNT4S_L2,          false, "lk"   },
  { "__insn_ldnt4u_L2",          TILEGX_INSN_LDNT4U_L2,          false, "lk"   },
  { "__insn_ld_miss",            TILEGX_INSN_LD_MISS,            false, "lk"   },
  { "__insn_ld1s_miss",          TILEGX_INSN_LD1S_MISS,          false, "lk"   },
  { "__insn_ld1u_miss",          TILEGX_INSN_LD1U_MISS,          false, "lk"   },
  { "__insn_ld2s_miss",          TILEGX_INSN_LD2S_MISS,          false, "lk"   },
  { "__insn_ld2u_miss",          TILEGX_INSN_LD2U_MISS,          false, "lk"   },
  { "__insn_ld4s_miss",          TILEGX_INSN_LD4S_MISS,          false, "lk"   },
  { "__insn_ld4u_miss",          TILEGX_INSN_LD4U_MISS,          false, "lk"   },
  { "__insn_ldna_miss",          TILEGX_INSN_LDNA_MISS,          false, "lk"   },
  { "__insn_ldnt_miss",          TILEGX_INSN_LDNT_MISS,          false, "lk"   },
  { "__insn_ldnt1s_miss",        TILEGX_INSN_LDNT1S_MISS,        false, "lk"   },
  { "__insn_ldnt1u_miss",        TILEGX_INSN_LDNT1U_MISS,        false, "lk"   },
  { "__insn_ldnt2s_miss",        TILEGX_INSN_LDNT2S_MISS,        false, "lk"   },
  { "__insn_ldnt2u_miss",        TILEGX_INSN_LDNT2U_MISS,        false, "lk"   },
  { "__insn_ldnt4s_miss",        TILEGX_INSN_LDNT4S_MISS,        false, "lk"   },
  { "__insn_ldnt4u_miss",        TILEGX_INSN_LDNT4U_MISS,        false, "lk"   },
  { "__insn_lnk",                TILEGX_INSN_LNK,                true,  "l"    },
  { "__insn_mf",                 TILEGX_INSN_MF,                 false, "v"    },
  { "__insn_mfspr",              TILEGX_INSN_MFSPR,              false, "ll"   },
  { "__insn_mm",                 TILEGX_INSN_MM,                 true,  "lllll"},
  { "__insn_mnz",                TILEGX_INSN_MNZ,                true,  "lll"  },
  { "__insn_move",               TILEGX_INSN_MOVE,               true,  "ll"   },
  { "__insn_movei",              TILEGX_INSN_MOVE,               true,  "ll"   },
  { "__insn_moveli",             TILEGX_INSN_MOVE,               true,  "ll"   },
  { "__insn_mtspr",              TILEGX_INSN_MTSPR,              false, "vll"  },
  { "__insn_mul_hs_hs",          TILEGX_INSN_MUL_HS_HS,          true,  "lll"  },
  { "__insn_mul_hs_hu",          TILEGX_INSN_MUL_HS_HU,          true,  "lll"  },
  { "__insn_mul_hs_ls",          TILEGX_INSN_MUL_HS_LS,          true,  "lll"  },
  { "__insn_mul_hs_lu",          TILEGX_INSN_MUL_HS_LU,          true,  "lll"  },
  { "__insn_mul_hu_hu",          TILEGX_INSN_MUL_HU_HU,          true,  "lll"  },
  { "__insn_mul_hu_ls",          TILEGX_INSN_MUL_HU_LS,          true,  "lll"  },
  { "__insn_mul_hu_lu",          TILEGX_INSN_MUL_HU_LU,          true,  "lll"  },
  { "__insn_mul_ls_ls",          TILEGX_INSN_MUL_LS_LS,          true,  "lll"  },
  { "__insn_mul_ls_lu",          TILEGX_INSN_MUL_LS_LU,          true,  "lll"  },
  { "__insn_mul_lu_lu",          TILEGX_INSN_MUL_LU_LU,          true,  "lll"  },
  { "__insn_mula_hs_hs",         TILEGX_INSN_MULA_HS_HS,         true,  "llll" },
  { "__insn_mula_hs_hu",         TILEGX_INSN_MULA_HS_HU,         true,  "llll" },
  { "__insn_mula_hs_ls",         TILEGX_INSN_MULA_HS_LS,         true,  "llll" },
  { "__insn_mula_hs_lu",         TILEGX_INSN_MULA_HS_LU,         true,  "llll" },
  { "__insn_mula_hu_hu",         TILEGX_INSN_MULA_HU_HU,         true,  "llll" },
  { "__insn_mula_hu_ls",         TILEGX_INSN_MULA_HU_LS,         true,  "llll" },
  { "__insn_mula_hu_lu",         TILEGX_INSN_MULA_HU_LU,         true,  "llll" },
  { "__insn_mula_ls_ls",         TILEGX_INSN_MULA_LS_LS,         true,  "llll" },
  { "__insn_mula_ls_lu",         TILEGX_INSN_MULA_LS_LU,         true,  "llll" },
  { "__insn_mula_lu_lu",         TILEGX_INSN_MULA_LU_LU,         true,  "llll" },
  { "__insn_mulax",              TILEGX_INSN_MULAX,              true,  "iiii" },
  { "__insn_mulx",               TILEGX_INSN_MULX,               true,  "iii"  },
  { "__insn_mz",                 TILEGX_INSN_MZ,                 true,  "lll"  },
  { "__insn_nap",                TILEGX_INSN_NAP,                false, "v"    },
  { "__insn_nop",                TILEGX_INSN_NOP,                true,  "v"    },
  { "__insn_nor",                TILEGX_INSN_NOR,                true,  "lll"  },
  { "__insn_or",                 TILEGX_INSN_OR,                 true,  "lll"  },
  { "__insn_ori",                TILEGX_INSN_OR,                 true,  "lll"  },
  { "__insn_pcnt",               TILEGX_INSN_PCNT,               true,  "ll"   },
  { "__insn_prefetch",           TILEGX_INSN_PREFETCH_L1,        false, "vk"   },
  { "__insn_prefetch_l1",        TILEGX_INSN_PREFETCH_L1,        false, "vk"   },
  { "__insn_prefetch_l1_fault",  TILEGX_INSN_PREFETCH_L1_FAULT,  false, "vk"   },
  { "__insn_prefetch_l2",        TILEGX_INSN_PREFETCH_L2,        false, "vk"   },
  { "__insn_prefetch_l2_fault",  TILEGX_INSN_PREFETCH_L2_FAULT,  false, "vk"   },
  { "__insn_prefetch_l3",        TILEGX_INSN_PREFETCH_L3,        false, "vk"   },
  { "__insn_prefetch_l3_fault",  TILEGX_INSN_PREFETCH_L3_FAULT,  false, "vk"   },
  { "__insn_revbits",            TILEGX_INSN_REVBITS,            true,  "ll"   },
  { "__insn_revbytes",           TILEGX_INSN_REVBYTES,           true,  "ll"   },
  { "__insn_rotl",               TILEGX_INSN_ROTL,               true,  "lli"  },
  { "__insn_rotli",              TILEGX_INSN_ROTL,               true,  "lli"  },
  { "__insn_shl",                TILEGX_INSN_SHL,                true,  "lli"  },
  { "__insn_shl16insli",         TILEGX_INSN_SHL16INSLI,         true,  "lll"  },
  { "__insn_shl1add",            TILEGX_INSN_SHL1ADD,            true,  "lll"  },
  { "__insn_shl1addx",           TILEGX_INSN_SHL1ADDX,           true,  "iii"  },
  { "__insn_shl2add",            TILEGX_INSN_SHL2ADD,            true,  "lll"  },
  { "__insn_shl2addx",           TILEGX_INSN_SHL2ADDX,           true,  "iii"  },
  { "__insn_shl3add",            TILEGX_INSN_SHL3ADD,            true,  "lll"  },
  { "__insn_shl3addx",           TILEGX_INSN_SHL3ADDX,           true,  "iii"  },
  { "__insn_shli",               TILEGX_INSN_SHL,                true,  "lli"  },
  { "__insn_shlx",               TILEGX_INSN_SHLX,               true,  "iii"  },
  { "__insn_shlxi",              TILEGX_INSN_SHLX,               true,  "iii"  },
  { "__insn_shrs",               TILEGX_INSN_SHRS,               true,  "lli"  },
  { "__insn_shrsi",              TILEGX_INSN_SHRS,               true,  "lli"  },
  { "__insn_shru",               TILEGX_INSN_SHRU,               true,  "lli"  },
  { "__insn_shrui",              TILEGX_INSN_SHRU,               true,  "lli"  },
  { "__insn_shrux",              TILEGX_INSN_SHRUX,              true,  "iii"  },
  { "__insn_shruxi",             TILEGX_INSN_SHRUX,              true,  "iii"  },
  { "__insn_shufflebytes",       TILEGX_INSN_SHUFFLEBYTES,       true,  "llll" },
  { "__insn_st",                 TILEGX_INSN_ST,                 false, "vpl"  },
  { "__insn_st1",                TILEGX_INSN_ST1,                false, "vpl"  },
  { "__insn_st2",                TILEGX_INSN_ST2,                false, "vpl"  },
  { "__insn_st4",                TILEGX_INSN_ST4,                false, "vpl"  },
  { "__insn_stnt",               TILEGX_INSN_STNT,               false, "vpl"  },
  { "__insn_stnt1",              TILEGX_INSN_STNT1,              false, "vpl"  },
  { "__insn_stnt2",              TILEGX_INSN_STNT2,              false, "vpl"  },
  { "__insn_stnt4",              TILEGX_INSN_STNT4,              false, "vpl"  },
  { "__insn_sub",                TILEGX_INSN_SUB,                true,  "lll"  },
  { "__insn_subx",               TILEGX_INSN_SUBX,               true,  "iii"  },
  { "__insn_subxsc",             TILEGX_INSN_SUBXSC,             true,  "iii"  },
  { "__insn_tblidxb0",           TILEGX_INSN_TBLIDXB0,           true,  "lll"  },
  { "__insn_tblidxb1",           TILEGX_INSN_TBLIDXB1,           true,  "lll"  },
  { "__insn_tblidxb2",           TILEGX_INSN_TBLIDXB2,           true,  "lll"  },
  { "__insn_tblidxb3",           TILEGX_INSN_TBLIDXB3,           true,  "lll"  },
  { "__insn_v1add",              TILEGX_INSN_V1ADD,              true,  "lll"  },
  { "__insn_v1addi",             TILEGX_INSN_V1ADDI,             true,  "lll"  },
  { "__insn_v1adduc",            TILEGX_INSN_V1ADDUC,            true,  "lll"  },
  { "__insn_v1adiffu",           TILEGX_INSN_V1ADIFFU,           true,  "lll"  },
  { "__insn_v1avgu",             TILEGX_INSN_V1AVGU,             true,  "lll"  },
  { "__insn_v1cmpeq",            TILEGX_INSN_V1CMPEQ,            true,  "lll"  },
  { "__insn_v1cmpeqi",           TILEGX_INSN_V1CMPEQI,           true,  "lll"  },
  { "__insn_v1cmples",           TILEGX_INSN_V1CMPLES,           true,  "lll"  },
  { "__insn_v1cmpleu",           TILEGX_INSN_V1CMPLEU,           true,  "lll"  },
  { "__insn_v1cmplts",           TILEGX_INSN_V1CMPLTS,           true,  "lll"  },
  { "__insn_v1cmpltsi",          TILEGX_INSN_V1CMPLTSI,          true,  "lll"  },
  { "__insn_v1cmpltu",           TILEGX_INSN_V1CMPLTU,           true,  "lll"  },
  { "__insn_v1cmpltui",          TILEGX_INSN_V1CMPLTUI,          true,  "lll"  },
  { "__insn_v1cmpne",            TILEGX_INSN_V1CMPNE,            true,  "lll"  },
  { "__insn_v1ddotpu",           TILEGX_INSN_V1DDOTPU,           true,  "lll"  },
  { "__insn_v1ddotpua",          TILEGX_INSN_V1DDOTPUA,          true,  "llll" },
  { "__insn_v1ddotpus",          TILEGX_INSN_V1DDOTPUS,          true,  "lll"  },
  { "__insn_v1ddotpusa",         TILEGX_INSN_V1DDOTPUSA,         true,  "llll" },
  { "__insn_v1dotp",             TILEGX_INSN_V1DOTP,             true,  "lll"  },
  { "__insn_v1dotpa",            TILEGX_INSN_V1DOTPA,            true,  "llll" },
  { "__insn_v1dotpu",            TILEGX_INSN_V1DOTPU,            true,  "lll"  },
  { "__insn_v1dotpua",           TILEGX_INSN_V1DOTPUA,           true,  "llll" },
  { "__insn_v1dotpus",           TILEGX_INSN_V1DOTPUS,           true,  "lll"  },
  { "__insn_v1dotpusa",          TILEGX_INSN_V1DOTPUSA,          true,  "llll" },
  { "__insn_v1int_h",            TILEGX_INSN_V1INT_H,            true,  "lll"  },
  { "__insn_v1int_l",            TILEGX_INSN_V1INT_L,            true,  "lll"  },
  { "__insn_v1maxu",             TILEGX_INSN_V1MAXU,             true,  "lll"  },
  { "__insn_v1maxui",            TILEGX_INSN_V1MAXUI,            true,  "lll"  },
  { "__insn_v1minu",             TILEGX_INSN_V1MINU,             true,  "lll"  },
  { "__insn_v1minui",            TILEGX_INSN_V1MINUI,            true,  "lll"  },
  { "__insn_v1mnz",              TILEGX_INSN_V1MNZ,              true,  "lll"  },
  { "__insn_v1multu",            TILEGX_INSN_V1MULTU,            true,  "lll"  },
  { "__insn_v1mulu",             TILEGX_INSN_V1MULU,             true,  "lll"  },
  { "__insn_v1mulus",            TILEGX_INSN_V1MULUS,            true,  "lll"  },
  { "__insn_v1mz",               TILEGX_INSN_V1MZ,               true,  "lll"  },
  { "__insn_v1sadau",            TILEGX_INSN_V1SADAU,            true,  "llll" },
  { "__insn_v1sadu",             TILEGX_INSN_V1SADU,             true,  "lll"  },
  { "__insn_v1shl",              TILEGX_INSN_V1SHL,              true,  "lll"  },
  { "__insn_v1shli",             TILEGX_INSN_V1SHLI,             true,  "lll"  },
  { "__insn_v1shrs",             TILEGX_INSN_V1SHRS,             true,  "lll"  },
  { "__insn_v1shrsi",            TILEGX_INSN_V1SHRSI,            true,  "lll"  },
  { "__insn_v1shru",             TILEGX_INSN_V1SHRU,             true,  "lll"  },
  { "__insn_v1shrui",            TILEGX_INSN_V1SHRUI,            true,  "lll"  },
  { "__insn_v1sub",              TILEGX_INSN_V1SUB,              true,  "lll"  },
  { "__insn_v1subuc",            TILEGX_INSN_V1SUBUC,            true,  "lll"  },
  { "__insn_v2add",              TILEGX_INSN_V2ADD,              true,  "lll"  },
  { "__insn_v2addi",             TILEGX_INSN_V2ADDI,             true,  "lll"  },
  { "__insn_v2addsc",            TILEGX_INSN_V2ADDSC,            true,  "lll"  },
  { "__insn_v2adiffs",           TILEGX_INSN_V2ADIFFS,           true,  "lll"  },
  { "__insn_v2avgs",             TILEGX_INSN_V2AVGS,             true,  "lll"  },
  { "__insn_v2cmpeq",            TILEGX_INSN_V2CMPEQ,            true,  "lll"  },
  { "__insn_v2cmpeqi",           TILEGX_INSN_V2CMPEQI,           true,  "lll"  },
  { "__insn_v2cmples",           TILEGX_INSN_V2CMPLES,           true,  "lll"  },
  { "__insn_v2cmpleu",           TILEGX_INSN_V2CMPLEU,           true,  "lll"  },
  { "__insn_v2cmplts",           TILEGX_INSN_V2CMPLTS,           true,  "lll"  },
  { "__insn_v2cmpltsi",          TILEGX_INSN_V2CMPLTSI,          true,  "lll"  },
  { "__insn_v2cmpltu",           TILEGX_INSN_V2CMPLTU,           true,  "lll"  },
  { "__insn_v2cmpltui",          TILEGX_INSN_V2CMPLTUI,          true,  "lll"  },
  { "__insn_v2cmpne",            TILEGX_INSN_V2CMPNE,            true,  "lll"  },
  { "__insn_v2dotp",             TILEGX_INSN_V2DOTP,             true,  "lll"  },
  { "__insn_v2dotpa",            TILEGX_INSN_V2DOTPA,            true,  "llll" },
  { "__insn_v2int_h",            TILEGX_INSN_V2INT_H,            true,  "lll"  },
  { "__insn_v2int_l",            TILEGX_INSN_V2INT_L,            true,  "lll"  },
  { "__insn_v2maxs",             TILEGX_INSN_V2MAXS,             true,  "lll"  },
  { "__insn_v2maxsi",            TILEGX_INSN_V2MAXSI,            true,  "lll"  },
  { "__insn_v2mins",             TILEGX_INSN_V2MINS,             true,  "lll"  },
  { "__insn_v2minsi",            TILEGX_INSN_V2MINSI,            true,  "lll"  },
  { "__insn_v2mnz",              TILEGX_INSN_V2MNZ,              true,  "lll"  },
  { "__insn_v2mulfsc",           TILEGX_INSN_V2MULFSC,           true,  "lll"  },
  { "__insn_v2muls",             TILEGX_INSN_V2MULS,             true,  "lll"  },
  { "__insn_v2mults",            TILEGX_INSN_V2MULTS,            true,  "lll"  },
  { "__insn_v2mz",               TILEGX_INSN_V2MZ,               true,  "lll"  },
  { "__insn_v2packh",            TILEGX_INSN_V2PACKH,            true,  "lll"  },
  { "__insn_v2packl",            TILEGX_INSN_V2PACKL,            true,  "lll"  },
  { "__insn_v2packuc",           TILEGX_INSN_V2PACKUC,           true,  "lll"  },
  { "__insn_v2sadas",            TILEGX_INSN_V2SADAS,            true,  "llll" },
  { "__insn_v2sadau",            TILEGX_INSN_V2SADAU,            true,  "llll" },
  { "__insn_v2sads",             TILEGX_INSN_V2SADS,             true,  "lll"  },
  { "__insn_v2sadu",             TILEGX_INSN_V2SADU,             true,  "lll"  },
  { "__insn_v2shl",              TILEGX_INSN_V2SHL,              true,  "lll"  },
  { "__insn_v2shli",             TILEGX_INSN_V2SHLI,             true,  "lll"  },
  { "__insn_v2shlsc",            TILEGX_INSN_V2SHLSC,            true,  "lll"  },
  { "__insn_v2shrs",             TILEGX_INSN_V2SHRS,             true,  "lll"  },
  { "__insn_v2shrsi",            TILEGX_INSN_V2SHRSI,            true,  "lll"  },
  { "__insn_v2shru",             TILEGX_INSN_V2SHRU,             true,  "lll"  },
  { "__insn_v2shrui",            TILEGX_INSN_V2SHRUI,            true,  "lll"  },
  { "__insn_v2sub",              TILEGX_INSN_V2SUB,              true,  "lll"  },
  { "__insn_v2subsc",            TILEGX_INSN_V2SUBSC,            true,  "lll"  },
  { "__insn_v4add",              TILEGX_INSN_V4ADD,              true,  "lll"  },
  { "__insn_v4addsc",            TILEGX_INSN_V4ADDSC,            true,  "lll"  },
  { "__insn_v4int_h",            TILEGX_INSN_V4INT_H,            true,  "lll"  },
  { "__insn_v4int_l",            TILEGX_INSN_V4INT_L,            true,  "lll"  },
  { "__insn_v4packsc",           TILEGX_INSN_V4PACKSC,           true,  "lll"  },
  { "__insn_v4shl",              TILEGX_INSN_V4SHL,              true,  "lll"  },
  { "__insn_v4shlsc",            TILEGX_INSN_V4SHLSC,            true,  "lll"  },
  { "__insn_v4shrs",             TILEGX_INSN_V4SHRS,             true,  "lll"  },
  { "__insn_v4shru",             TILEGX_INSN_V4SHRU,             true,  "lll"  },
  { "__insn_v4sub",              TILEGX_INSN_V4SUB,              true,  "lll"  },
  { "__insn_v4subsc",            TILEGX_INSN_V4SUBSC,            true,  "lll"  },
  { "__insn_wh64",               TILEGX_INSN_WH64,               false, "vp"   },
  { "__insn_xor",                TILEGX_INSN_XOR,                true,  "lll"  },
  { "__insn_xori",               TILEGX_INSN_XOR,                true,  "lll"  },
  { "__tile_network_barrier",    TILEGX_NETWORK_BARRIER,         false, "v"    },
  { "__tile_idn0_receive",       TILEGX_IDN0_RECEIVE,            false, "l"    },
  { "__tile_idn1_receive",       TILEGX_IDN1_RECEIVE,            false, "l"    },
  { "__tile_idn_send",           TILEGX_IDN_SEND,                false, "vl"   },
  { "__tile_udn0_receive",       TILEGX_UDN0_RECEIVE,            false, "l"    },
  { "__tile_udn1_receive",       TILEGX_UDN1_RECEIVE,            false, "l"    },
  { "__tile_udn2_receive",       TILEGX_UDN2_RECEIVE,            false, "l"    },
  { "__tile_udn3_receive",       TILEGX_UDN3_RECEIVE,            false, "l"    },
  { "__tile_udn_send",           TILEGX_UDN_SEND,                false, "vl"   },
};


/* Convert a character in a builtin type string to a tree type.  */
static tree
char_to_type (char c)
{
  static tree volatile_ptr_type_node = NULL;
  static tree volatile_const_ptr_type_node = NULL;

  if (volatile_ptr_type_node == NULL)
    {
      volatile_ptr_type_node =
        build_pointer_type (build_qualified_type (void_type_node,
                                                  TYPE_QUAL_VOLATILE));
      volatile_const_ptr_type_node =
        build_pointer_type (build_qualified_type (void_type_node,
                                                  TYPE_QUAL_CONST
                                                  | TYPE_QUAL_VOLATILE));
    }

  switch (c)
    {
    case 'v':
      return void_type_node;
    case 'i':
      return unsigned_type_node;
    case 'l':
      return long_long_unsigned_type_node;
    case 'p':
      return volatile_ptr_type_node;
    case 'k':
      return volatile_const_ptr_type_node;
    default:
      gcc_unreachable ();
    }
}
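
/* For instance (illustrative only, not from the original source):
   the type string "vpl" used by __insn_st above decodes through this
   mapping to

     void __insn_st (volatile void *, unsigned long long);  */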
 

/* Implement TARGET_INIT_BUILTINS.  */
static void
tilegx_init_builtins (void)
{
  size_t i;

  for (i = 0; i < ARRAY_SIZE (tilegx_builtins); i++)
    {
      const struct tilegx_builtin_def *p = &tilegx_builtins[i];
      tree ftype, ret_type, arg_type_list = void_list_node;
      tree decl;
      int j;

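      /* The loop below builds the argument list back to front.  As a
         worked example (illustrative only): for __insn_exch's type
         string "lpl", j visits index 2 then index 1, consing 'l' and
         then 'p' onto void_list_node, so the list reads
         (volatile void *, unsigned long long) in source order;
         type[0] supplies the return type.  */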
      for (j = strlen (p->type) - 1; j > 0; j--)
        {
          arg_type_list =
            tree_cons (NULL_TREE, char_to_type (p->type[j]), arg_type_list);
        }

      ret_type = char_to_type (p->type[0]);

      ftype = build_function_type (ret_type, arg_type_list);

      decl = add_builtin_function (p->name, ftype, p->code, BUILT_IN_MD,
                                   NULL, NULL);

      if (p->is_const)
        TREE_READONLY (decl) = 1;
      TREE_NOTHROW (decl) = 1;

      if (tilegx_builtin_info[p->code].fndecl == NULL)
        tilegx_builtin_info[p->code].fndecl = decl;
    }
}


/* Implement TARGET_EXPAND_BUILTIN.  */
static rtx
tilegx_expand_builtin (tree exp,
                       rtx target,
                       rtx subtarget ATTRIBUTE_UNUSED,
                       enum machine_mode mode ATTRIBUTE_UNUSED,
                       int ignore ATTRIBUTE_UNUSED)
{
#define MAX_BUILTIN_ARGS 4

  tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
  unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
  tree arg;
  call_expr_arg_iterator iter;
  enum insn_code icode;
  rtx op[MAX_BUILTIN_ARGS + 1], pat;
  int opnum;
  bool nonvoid;
  insn_gen_fn fn;

  if (fcode >= TILEGX_BUILTIN_max)
    internal_error ("bad builtin fcode");
  icode = tilegx_builtin_info[fcode].icode;
  if (icode == 0)
    internal_error ("bad builtin icode");

  nonvoid = TREE_TYPE (TREE_TYPE (fndecl)) != void_type_node;

  opnum = nonvoid;
  FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
  {
    const struct insn_operand_data *insn_op;

    if (arg == error_mark_node)
      return NULL_RTX;
    if (opnum > MAX_BUILTIN_ARGS)
      return NULL_RTX;

    insn_op = &insn_data[icode].operand[opnum];

    op[opnum] = expand_expr (arg, NULL_RTX, insn_op->mode, EXPAND_NORMAL);

    if (!(*insn_op->predicate) (op[opnum], insn_op->mode))
      {
        enum machine_mode opmode = insn_op->mode;

        /* pointer_operand and pmode_register_operand operands do
           not specify a mode, so use the operand's mode instead
           (which should always be right by the time we get here,
           except for constants, which are VOIDmode).  */
        if (opmode == VOIDmode)
          {
            enum machine_mode m = GET_MODE (op[opnum]);
            gcc_assert (m == Pmode || m == VOIDmode);
            opmode = Pmode;
          }

        op[opnum] = copy_to_mode_reg (opmode, op[opnum]);
      }

    if (!(*insn_op->predicate) (op[opnum], insn_op->mode))
      {
        /* We still failed to meet the predicate even after moving
           into a register.  Assume we needed an immediate.  */
        error_at (EXPR_LOCATION (exp),
                  "operand must be an immediate of the right size");
        return const0_rtx;
      }

    opnum++;
  }

  if (nonvoid)
    {
      enum machine_mode tmode = insn_data[icode].operand[0].mode;
      if (!target
          || GET_MODE (target) != tmode
          || !(*insn_data[icode].operand[0].predicate) (target, tmode))
        {
          if (tmode == VOIDmode)
            {
              /* Get the mode from the return type.  */
              tmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (fndecl)));
            }
          target = gen_reg_rtx (tmode);
        }
      op[0] = target;
    }

  fn = GEN_FCN (icode);
  switch (opnum)
    {
    case 0:
      pat = fn (NULL_RTX);
      break;
    case 1:
      pat = fn (op[0]);
      break;
    case 2:
      pat = fn (op[0], op[1]);
      break;
    case 3:
      pat = fn (op[0], op[1], op[2]);
      break;
    case 4:
      pat = fn (op[0], op[1], op[2], op[3]);
      break;
    case 5:
      pat = fn (op[0], op[1], op[2], op[3], op[4]);
      break;
    default:
      gcc_unreachable ();
    }
  if (!pat)
    return NULL_RTX;
  emit_insn (pat);

  if (nonvoid)
    return target;
  else
    return const0_rtx;
}
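
/* As a usage sketch (illustrative, not from the original source): a
   call such as

     unsigned long long v = __insn_fetchadd (p, 1);

   arrives here with fcode == TILEGX_INSN_FETCHADD; the result
   register becomes op[0], the two arguments become op[1] and op[2],
   and the pattern is emitted through the three-operand case of the
   switch above.  */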
 

/* Implement TARGET_BUILTIN_DECL.  */
static tree
tilegx_builtin_decl (unsigned code, bool initialize_p ATTRIBUTE_UNUSED)
{
  if (code >= TILEGX_BUILTIN_max)
    return error_mark_node;

  return tilegx_builtin_info[code].fndecl;
}



/* Stack frames  */

/* Return whether REGNO needs to be saved in the stack frame.  */
static bool
need_to_save_reg (unsigned int regno)
{
  if (!fixed_regs[regno] && !call_used_regs[regno]
      && df_regs_ever_live_p (regno))
    return true;

  if (flag_pic
      && (regno == PIC_OFFSET_TABLE_REGNUM
          || regno == TILEGX_PIC_TEXT_LABEL_REGNUM)
      && (crtl->uses_pic_offset_table || crtl->saves_all_registers))
    return true;

  if (crtl->calls_eh_return)
    {
      unsigned i;
      for (i = 0; EH_RETURN_DATA_REGNO (i) != INVALID_REGNUM; i++)
        {
          if (regno == EH_RETURN_DATA_REGNO (i))
            return true;
        }
    }

  return false;
}
 

/* Return the size of the register save area.  This function is only
   correct starting with local register allocation.  */
static int
tilegx_saved_regs_size (void)
{
  int reg_save_size = 0;
  int regno;
  int offset_to_frame;
  int align_mask;

  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if (need_to_save_reg (regno))
      reg_save_size += UNITS_PER_WORD;

  /* Pad out the register save area if necessary to make
     frame_pointer_rtx be as aligned as the stack pointer.  */
  offset_to_frame = crtl->args.pretend_args_size + reg_save_size;
  align_mask = (STACK_BOUNDARY / BITS_PER_UNIT) - 1;
  reg_save_size += (-offset_to_frame) & align_mask;

  return reg_save_size;
}
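
/* A worked example of the padding above (illustrative, assuming a
   16 byte stack boundary): saving three registers gives a raw
   reg_save_size of 24; with no pretend args, offset_to_frame is 24,
   align_mask is 15, and (-24) & 15 adds 8 bytes of padding, for a
   32 byte save area.  */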
 

/* Round up frame size SIZE.  */
static int
round_frame_size (int size)
{
  return ((size + STACK_BOUNDARY / BITS_PER_UNIT - 1)
          & -STACK_BOUNDARY / BITS_PER_UNIT);
}
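
/* For instance (illustrative, again assuming a 16 byte boundary):
   round_frame_size (40) computes (40 + 15) & -16 = 48.  */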
 

/* Emit a store in the stack frame to save REGNO at address ADDR, and
   emit the corresponding REG_CFA_OFFSET note described by CFA and
   CFA_OFFSET.  Return the emitted insn.  */
static rtx
frame_emit_store (int regno, int regno_note, rtx addr, rtx cfa,
                  int cfa_offset)
{
  rtx reg = gen_rtx_REG (DImode, regno);
  rtx mem = gen_frame_mem (DImode, addr);
  rtx mov = gen_movdi (mem, reg);

  /* Describe what just happened in a way that dwarf understands.  We
     use temporary registers to hold the address to make scheduling
     easier, and use the REG_CFA_OFFSET to describe the address as an
     offset from the CFA.  */
  rtx reg_note = gen_rtx_REG (DImode, regno_note);
  rtx cfa_relative_addr = gen_rtx_PLUS (Pmode, cfa, GEN_INT (cfa_offset));
  rtx cfa_relative_mem = gen_frame_mem (DImode, cfa_relative_addr);
  rtx real = gen_rtx_SET (VOIDmode, cfa_relative_mem, reg_note);
  add_reg_note (mov, REG_CFA_OFFSET, real);

  return emit_insn (mov);
}


/* Emit a load in the stack frame to load REGNO from address ADDR.
   Add a REG_CFA_RESTORE note to CFA_RESTORES if CFA_RESTORES is
   non-null.  Return the emitted insn.  */
static rtx
frame_emit_load (int regno, rtx addr, rtx *cfa_restores)
{
  rtx reg = gen_rtx_REG (DImode, regno);
  rtx mem = gen_frame_mem (DImode, addr);
  if (cfa_restores)
    *cfa_restores = alloc_reg_note (REG_CFA_RESTORE, reg, *cfa_restores);
  return emit_insn (gen_movdi (reg, mem));
}


/* Helper function to set RTX_FRAME_RELATED_P on instructions,
   including sequences.  */
static rtx
set_frame_related_p (void)
{
  rtx seq = get_insns ();
  rtx insn;

  end_sequence ();

  if (!seq)
    return NULL_RTX;

  if (INSN_P (seq))
    {
      insn = seq;
      while (insn != NULL_RTX)
        {
          RTX_FRAME_RELATED_P (insn) = 1;
          insn = NEXT_INSN (insn);
        }
      seq = emit_insn (seq);
    }
  else
    {
      seq = emit_insn (seq);
      RTX_FRAME_RELATED_P (seq) = 1;
    }
  return seq;
}


#define FRP(exp)  (start_sequence (), exp, set_frame_related_p ())
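
/* FRP is used below as, e.g., FRP (emit_move_insn (...)): it opens a
   sequence around the emission and then marks every insn that was
   generated as frame related.  */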
 

/* This emits code for 'sp += offset'.

   The ABI only allows us to modify 'sp' with a single 'addi' or
   'addli', so that the backtracer understands it.  Larger amounts
   cannot use those instructions, so they are added by placing the
   offset into a scratch register and using 'add'.

   This happens after reload, so we need to expand it ourselves.  */
static rtx
emit_sp_adjust (int offset, int *next_scratch_regno, bool frame_related,
                rtx reg_notes)
{
  rtx to_add;
  rtx imm_rtx = GEN_INT (offset);

  rtx insn;
  if (satisfies_constraint_J (imm_rtx))
    {
      /* We can add this using a single immediate add.  */
      to_add = imm_rtx;
    }
  else
    {
      rtx tmp = gen_rtx_REG (Pmode, (*next_scratch_regno)--);
      tilegx_expand_set_const64 (tmp, imm_rtx);
      to_add = tmp;
    }

  /* Actually adjust the stack pointer.  */
  if (TARGET_32BIT)
    insn = gen_sp_adjust_32bit (stack_pointer_rtx, stack_pointer_rtx, to_add);
  else
    insn = gen_sp_adjust (stack_pointer_rtx, stack_pointer_rtx, to_add);

  insn = emit_insn (insn);
  REG_NOTES (insn) = reg_notes;

  /* Describe what just happened in a way that dwarf understands.  */
  if (frame_related)
    {
      rtx real = gen_rtx_SET (VOIDmode, stack_pointer_rtx,
                              gen_rtx_PLUS (Pmode, stack_pointer_rtx,
                                            imm_rtx));
      RTX_FRAME_RELATED_P (insn) = 1;
      add_reg_note (insn, REG_CFA_ADJUST_CFA, real);
    }

  return insn;
}
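
/* Illustratively (the exact immediate range is whatever constraint J
   accepts): a small adjustment such as 'sp += -32' is emitted
   directly as one immediate add, while a very large one such as
   'sp += -1000000' is first built in a scratch register with
   tilegx_expand_set_const64 and then applied with a register
   'add'.  */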
 

/* Return whether the current function is leaf.  This takes into
   account whether the function calls tls_get_addr.  */
static bool
tilegx_current_function_is_leaf (void)
{
  return current_function_is_leaf && !cfun->machine->calls_tls_get_addr;
}


/* Return the frame size.  */
static int
compute_total_frame_size (void)
{
  int total_size = (get_frame_size () + tilegx_saved_regs_size ()
                    + crtl->outgoing_args_size
                    + crtl->args.pretend_args_size);

  if (!tilegx_current_function_is_leaf () || cfun->calls_alloca)
    {
      /* Make room for save area in callee.  */
      total_size += STACK_POINTER_OFFSET;
    }

  return round_frame_size (total_size);
}
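
/* For example (illustrative): a nonleaf function with 8 bytes of
   locals, a 32 byte register save area, and no pretend or outgoing
   stack arguments accumulates 8 + 32 + STACK_POINTER_OFFSET bytes,
   which round_frame_size then rounds up to the stack boundary.  */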
 

/* Return nonzero if this function is known to have a null epilogue.
   This allows the optimizer to omit jumps to jumps if no stack was
   created.  */
bool
tilegx_can_use_return_insn_p (void)
{
  return (reload_completed
          && cfun->static_chain_decl == 0
          && compute_total_frame_size () == 0
          && tilegx_current_function_is_leaf ()
          && !crtl->profile && !df_regs_ever_live_p (TILEGX_LINK_REGNUM));
}


/* Returns an rtx for a stack slot at 'FP + offset_from_fp'.  If there
   is a frame pointer, it computes the value relative to that.
   Otherwise it uses the stack pointer.  */
static rtx
compute_frame_addr (int offset_from_fp, int *next_scratch_regno)
{
  rtx base_reg_rtx, tmp_reg_rtx, offset_rtx;
  int offset_from_base;

  if (frame_pointer_needed)
    {
      base_reg_rtx = hard_frame_pointer_rtx;
      offset_from_base = offset_from_fp;
    }
  else
    {
      int offset_from_sp = compute_total_frame_size () + offset_from_fp;
      offset_from_base = offset_from_sp;
      base_reg_rtx = stack_pointer_rtx;
    }

  if (offset_from_base == 0)
    return base_reg_rtx;

  /* Compute the requested frame address into a scratch register.  */
  tmp_reg_rtx = gen_rtx_REG (Pmode, (*next_scratch_regno)--);
  offset_rtx = GEN_INT (offset_from_base);

  if (!add_operand (offset_rtx, Pmode))
    {
      expand_set_cint64 (tmp_reg_rtx, offset_rtx);
      offset_rtx = tmp_reg_rtx;
    }

  emit_insn (gen_rtx_SET (VOIDmode, tmp_reg_rtx,
                          gen_rtx_PLUS (Pmode, base_reg_rtx, offset_rtx)));

  return tmp_reg_rtx;
}
 

/* The stack frame looks like this:
         +-------------+
         |    ...      |
         |  incoming   |
         | stack args  |
   AP -> +-------------+
         | caller's HFP|
         +-------------+
         | lr save     |
  HFP -> +-------------+
         |  var args   |
         |  reg save   | crtl->args.pretend_args_size bytes
         +-------------+
         |    ...      |
         | saved regs  | tilegx_saved_regs_size() bytes
   FP -> +-------------+
         |    ...      |
         |   vars      | get_frame_size() bytes
         +-------------+
         |    ...      |
         |  outgoing   |
         |  stack args | crtl->outgoing_args_size bytes
         +-------------+
         | HFP         | ptr_size bytes (only here if nonleaf / alloca)
         +-------------+
         | callee lr   | ptr_size bytes (only here if nonleaf / alloca)
         | save        |
   SP -> +-------------+

  HFP == incoming SP.

  For functions with a frame larger than 32767 bytes, or which use
  alloca (), r52 is used as a frame pointer.  Otherwise there is no
  frame pointer.

  FP is saved at SP+ptr_size before calling a subroutine so the callee
  can chain.  */
void
tilegx_expand_prologue (void)
{
#define ROUND_ROBIN_SIZE 4
  /* We round-robin through four scratch registers to hold temporary
     addresses for saving registers, to make instruction scheduling
     easier.  */
  rtx reg_save_addr[ROUND_ROBIN_SIZE] = {
    NULL_RTX, NULL_RTX, NULL_RTX, NULL_RTX
  };
  rtx insn, cfa;
  unsigned int which_scratch;
  int offset, start_offset, regno;

  /* A register that holds a copy of the incoming fp.  */
  int fp_copy_regno = -1;

  /* A register that holds a copy of the incoming sp.  */
  int sp_copy_regno = -1;

  /* Next scratch register number to hand out (postdecrementing).  */
  int next_scratch_regno = 29;

  int total_size = compute_total_frame_size ();

  if (flag_stack_usage_info)
    current_function_static_stack_size = total_size;

  /* Save lr first in its special location because code after this
     might use the link register as a scratch register.  */
  if (df_regs_ever_live_p (TILEGX_LINK_REGNUM) || crtl->calls_eh_return)
    FRP (frame_emit_store (TILEGX_LINK_REGNUM, TILEGX_LINK_REGNUM,
                           stack_pointer_rtx, stack_pointer_rtx, 0));

  if (total_size == 0)
    {
      /* Load the PIC register if needed.  */
      if (flag_pic && crtl->uses_pic_offset_table)
        load_pic_register (false);

      return;
    }

  cfa = stack_pointer_rtx;

  if (frame_pointer_needed)
    {
      fp_copy_regno = next_scratch_regno--;

      /* Copy the old frame pointer aside so we can save it later.  */
      insn =
        FRP (emit_move_insn (gen_rtx_REG (word_mode, fp_copy_regno),
                             gen_lowpart (word_mode, hard_frame_pointer_rtx)));
      add_reg_note (insn, REG_CFA_REGISTER, NULL_RTX);

      /* Set up the frame pointer.  */
      insn = FRP (emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx));
      add_reg_note (insn, REG_CFA_DEF_CFA, hard_frame_pointer_rtx);
      cfa = hard_frame_pointer_rtx;
      REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;

      /* fp holds a copy of the incoming sp, in case we need to store
         it.  */
      sp_copy_regno = HARD_FRAME_POINTER_REGNUM;
    }
  else if (!tilegx_current_function_is_leaf ())
    {
      /* Copy the old stack pointer aside so we can save it later.  */
      sp_copy_regno = next_scratch_regno--;
      insn = FRP (emit_move_insn (gen_rtx_REG (Pmode, sp_copy_regno),
                                  stack_pointer_rtx));
      add_reg_note (insn, REG_CFA_REGISTER, NULL_RTX);
    }

  if (tilegx_current_function_is_leaf ())
    {
      /* No need to store chain pointer to caller's frame.  */
      emit_sp_adjust (-total_size, &next_scratch_regno,
                      !frame_pointer_needed, NULL_RTX);
    }
  else
    {
      /* Save the frame pointer (incoming sp value) to support
         backtracing.  First we need to create an rtx with the store
         address.  */
      rtx chain_addr = gen_rtx_REG (Pmode, next_scratch_regno--);
      rtx size_rtx = GEN_INT (-(total_size - UNITS_PER_WORD));
      int cfa_offset =
        frame_pointer_needed ? UNITS_PER_WORD - total_size : UNITS_PER_WORD;

      if (add_operand (size_rtx, Pmode))
        {
          /* Expose more parallelism by computing this value from the
             original stack pointer, not the one after we have pushed
             the frame.  */
          rtx p = gen_rtx_PLUS (Pmode, stack_pointer_rtx, size_rtx);
          emit_insn (gen_rtx_SET (VOIDmode, chain_addr, p));
          emit_sp_adjust (-total_size, &next_scratch_regno,
                          !frame_pointer_needed, NULL_RTX);
        }
      else
        {
          /* The stack frame is large, so just store the incoming sp
             value at *(new_sp + UNITS_PER_WORD).  */
          rtx p;
          emit_sp_adjust (-total_size, &next_scratch_regno,
                          !frame_pointer_needed, NULL_RTX);
          p = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
                            GEN_INT (UNITS_PER_WORD));
          emit_insn (gen_rtx_SET (VOIDmode, chain_addr, p));
        }

      /* Save our frame pointer for backtrace chaining.  */
      FRP (frame_emit_store (sp_copy_regno, STACK_POINTER_REGNUM,
                             chain_addr, cfa, cfa_offset));
    }

  /* Compute where to start storing registers we need to save.  */
  start_offset = -crtl->args.pretend_args_size - UNITS_PER_WORD;
  offset = start_offset;

  /* Store all registers that need saving.  */
  which_scratch = 0;
  for (regno = FIRST_PSEUDO_REGISTER - 1; regno >= 0; regno--)
    if (need_to_save_reg (regno))
      {
        rtx r = reg_save_addr[which_scratch];
        int from_regno;
        int cfa_offset = frame_pointer_needed ? offset : total_size + offset;

        if (r == NULL_RTX)
          {
            rtx p = compute_frame_addr (offset, &next_scratch_regno);
            r = gen_rtx_REG (Pmode, next_scratch_regno--);
            reg_save_addr[which_scratch] = r;

            emit_insn (gen_rtx_SET (VOIDmode, r, p));
          }
        else
          {
            /* Advance to the next stack slot to store this
               register.  */
            int stride = ROUND_ROBIN_SIZE * -UNITS_PER_WORD;
            rtx p = gen_rtx_PLUS (Pmode, r, GEN_INT (stride));
            emit_insn (gen_rtx_SET (VOIDmode, r, p));
          }

        /* Save this register to the stack (but use the old fp value
           we copied aside if appropriate).  */
        from_regno =
          (fp_copy_regno >= 0 && regno == HARD_FRAME_POINTER_REGNUM)
          ? fp_copy_regno : regno;
        FRP (frame_emit_store (from_regno, regno, r, cfa, cfa_offset));

        offset -= UNITS_PER_WORD;
        which_scratch = (which_scratch + 1) % ROUND_ROBIN_SIZE;
      }

  /* If profiling, force that to happen after the frame is set up.  */
  if (crtl->profile)
    emit_insn (gen_blockage ());

  /* Load the PIC register if needed.  */
  if (flag_pic && crtl->uses_pic_offset_table)
    load_pic_register (false);
}
 

/* Implement the epilogue and sibcall_epilogue patterns.  SIBCALL_P is
   true for a sibcall_epilogue pattern, and false for an epilogue
   pattern.  */
void
tilegx_expand_epilogue (bool sibcall_p)
{
  /* We round-robin through four scratch registers to hold temporary
     addresses for saving registers, to make instruction scheduling
     easier.  */
  rtx reg_save_addr[ROUND_ROBIN_SIZE] = {
    NULL_RTX, NULL_RTX, NULL_RTX, NULL_RTX
  };
  rtx last_insn, insn;
  unsigned int which_scratch;
  int offset, start_offset, regno;
  rtx cfa_restores = NULL_RTX;

  /* A register that holds a copy of the incoming fp.  */
  int fp_copy_regno = -1;

  /* Next scratch register number to hand out (postdecrementing).  */
  int next_scratch_regno = 29;

  int total_size = compute_total_frame_size ();

  last_insn = get_last_insn ();

  /* Load lr first since we are going to need it first.  */
  insn = NULL;
  if (df_regs_ever_live_p (TILEGX_LINK_REGNUM))
    {
      insn = frame_emit_load (TILEGX_LINK_REGNUM,
                              compute_frame_addr (0, &next_scratch_regno),
                              &cfa_restores);
    }

  if (total_size == 0)
    {
      if (insn)
        {
          RTX_FRAME_RELATED_P (insn) = 1;
          REG_NOTES (insn) = cfa_restores;
        }
      goto done;
    }

  /* Compute where to start restoring registers.  */
  start_offset = -crtl->args.pretend_args_size - UNITS_PER_WORD;
  offset = start_offset;

  if (frame_pointer_needed)
    fp_copy_regno = next_scratch_regno--;

  /* Restore all callee-saved registers.  */
  which_scratch = 0;
  for (regno = FIRST_PSEUDO_REGISTER - 1; regno >= 0; regno--)
    if (need_to_save_reg (regno))
      {
        rtx r = reg_save_addr[which_scratch];
        if (r == NULL_RTX)
          {
            r = compute_frame_addr (offset, &next_scratch_regno);
            reg_save_addr[which_scratch] = r;
          }
        else
          {
            /* Advance to the next stack slot to restore this register.  */
            int stride = ROUND_ROBIN_SIZE * -UNITS_PER_WORD;
            rtx p = gen_rtx_PLUS (Pmode, r, GEN_INT (stride));
            emit_insn (gen_rtx_SET (VOIDmode, r, p));
          }

        if (fp_copy_regno >= 0 && regno == HARD_FRAME_POINTER_REGNUM)
          frame_emit_load (fp_copy_regno, r, NULL);
        else
          frame_emit_load (regno, r, &cfa_restores);

        offset -= UNITS_PER_WORD;
        which_scratch = (which_scratch + 1) % ROUND_ROBIN_SIZE;
      }

  if (!tilegx_current_function_is_leaf ())
    cfa_restores =
      alloc_reg_note (REG_CFA_RESTORE, stack_pointer_rtx, cfa_restores);

  emit_insn (gen_blockage ());

  if (crtl->calls_eh_return)
    {
      rtx r = compute_frame_addr (-total_size + UNITS_PER_WORD,
                                  &next_scratch_regno);
      insn = emit_move_insn (gen_lowpart (DImode, stack_pointer_rtx),
                             gen_frame_mem (DImode, r));
      RTX_FRAME_RELATED_P (insn) = 1;
      REG_NOTES (insn) = cfa_restores;
    }
  else if (frame_pointer_needed)
    {
      /* Restore the old stack pointer by copying from the frame
         pointer.  */
      if (TARGET_32BIT)
        {
          insn = emit_insn (gen_sp_restore_32bit (stack_pointer_rtx,
                                                  hard_frame_pointer_rtx));
        }
      else
        {
          insn = emit_insn (gen_sp_restore (stack_pointer_rtx,
                                            hard_frame_pointer_rtx));
        }
      RTX_FRAME_RELATED_P (insn) = 1;
      REG_NOTES (insn) = cfa_restores;
      add_reg_note (insn, REG_CFA_DEF_CFA, stack_pointer_rtx);
    }
  else
    {
      insn = emit_sp_adjust (total_size, &next_scratch_regno, true,
                             cfa_restores);
    }

  /* Restore the old frame pointer.  */
  if (frame_pointer_needed)
    {
      insn = emit_move_insn (gen_lowpart (DImode, hard_frame_pointer_rtx),
                             gen_rtx_REG (DImode, fp_copy_regno));
      add_reg_note (insn, REG_CFA_RESTORE, hard_frame_pointer_rtx);
    }

  /* Mark the PIC registers as live outside of the function.  */
  if (flag_pic)
    {
      emit_use (cfun->machine->text_label_rtx);
      emit_use (cfun->machine->got_rtx);
    }

done:
  if (!sibcall_p)
    {
      emit_jump_insn (gen__return ());
    }
  else
    {
      emit_use (gen_rtx_REG (Pmode, TILEGX_LINK_REGNUM));
    }

  /* Mark all insns we just emitted as frame-related.  */
  for (; last_insn != NULL_RTX; last_insn = next_insn (last_insn))
    RTX_FRAME_RELATED_P (last_insn) = 1;
}

#undef ROUND_ROBIN_SIZE
 

/* Implement INITIAL_ELIMINATION_OFFSET.  */
int
tilegx_initial_elimination_offset (int from, int to)
{
  int total_size = compute_total_frame_size ();

  if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
    {
      return (total_size - crtl->args.pretend_args_size
              - tilegx_saved_regs_size ());
    }
  else if (from == FRAME_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
    {
      return -(crtl->args.pretend_args_size + tilegx_saved_regs_size ());
    }
  else if (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
    {
      return STACK_POINTER_OFFSET + total_size;
    }
  else if (from == ARG_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
    {
      return STACK_POINTER_OFFSET;
    }
  else
    gcc_unreachable ();
}
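
/* Checking the first case against the frame diagram above
   (illustrative): FP sits the pretend-args plus saved-registers
   areas below HFP, and HFP sits total_size above SP, so the FP-to-SP
   distance is total_size minus those two areas.  */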
 

/* Return an RTX indicating where the return address to the calling
   function can be found.  */
rtx
tilegx_return_addr (int count, rtx frame ATTRIBUTE_UNUSED)
{
  if (count != 0)
    return const0_rtx;

  return get_hard_reg_initial_val (Pmode, TILEGX_LINK_REGNUM);
}


/* Implement EH_RETURN_HANDLER_RTX.  The MEM needs to be volatile to
   prevent it from being deleted.  */
rtx
tilegx_eh_return_handler_rtx (void)
{
  rtx tmp = gen_frame_mem (Pmode, hard_frame_pointer_rtx);
  MEM_VOLATILE_P (tmp) = true;
  return tmp;
}



/* Registers  */

/* Implement TARGET_CONDITIONAL_REGISTER_USAGE.  */
static void
tilegx_conditional_register_usage (void)
{
  global_regs[TILEGX_NETORDER_REGNUM] = 1;
  /* TILEGX_PIC_TEXT_LABEL_REGNUM is conditionally used.  It is a
     member of fixed_regs, and therefore must be a member of
     call_used_regs, but it is not a member of call_really_used_regs[]
     because it is not clobbered by a call.  */
  if (TILEGX_PIC_TEXT_LABEL_REGNUM != INVALID_REGNUM)
    {
      fixed_regs[TILEGX_PIC_TEXT_LABEL_REGNUM] = 1;
      call_used_regs[TILEGX_PIC_TEXT_LABEL_REGNUM] = 1;
    }
  if (PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
    {
      fixed_regs[PIC_OFFSET_TABLE_REGNUM] = 1;
      call_used_regs[PIC_OFFSET_TABLE_REGNUM] = 1;
    }
}
 
4211
 
4212
/* Implement TARGET_FRAME_POINTER_REQUIRED.  */
4213
static bool
4214
tilegx_frame_pointer_required (void)
4215
{
4216
  return crtl->calls_eh_return || cfun->calls_alloca;
4217
}
4218
 
4219
 
4220
 
4221
/* Scheduling and reorg  */
4222
 
4223
/* Return the length of INSN.  LENGTH is the initial length computed
4224
   by attributes in the machine-description file.  This is where we
4225
   account for bundles.  */
4226
int
4227
tilegx_adjust_insn_length (rtx insn, int length)
4228
{
4229
  enum machine_mode mode = GET_MODE (insn);
4230
 
4231
  /* A non-termininating instruction in a bundle has length 0.  */
4232
  if (mode == SImode)
4233
    return 0;
4234
 
4235
  /* By default, there is not length adjustment.  */
4236
  return length;
4237
}
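
/* Editor's illustration: in a bundle of three instructions, only the
   terminating insn (QImode) reports a nonzero length here; the two
   SImode members report 0, so the lengths of a bundle's members sum
   to the size of one encoded bundle rather than three.  */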


/* Implement TARGET_SCHED_ISSUE_RATE.  */
static int
tilegx_issue_rate (void)
{
  return 3;
}


/* Return the rtx for the jump target.  */
static rtx
get_jump_target (rtx branch)
{
  if (CALL_P (branch))
    {
      rtx call;
      call = PATTERN (branch);

      if (GET_CODE (call) == PARALLEL)
        call = XVECEXP (call, 0, 0);

      if (GET_CODE (call) == SET)
        call = SET_SRC (call);

      if (GET_CODE (call) == CALL)
        return XEXP (XEXP (call, 0), 0);
    }
  return 0;
}

/* Implement TARGET_SCHED_ADJUST_COST.  */
static int
tilegx_sched_adjust_cost (rtx insn, rtx link, rtx dep_insn, int cost)
{
  /* If we have a true dependence, INSN is a call, and DEP_INSN
     defines a register that is needed by the call (argument or stack
     pointer), set its latency to 0 so that it can be bundled with
     the call.  Explicitly check for and exclude the case when
     DEP_INSN defines the target of the jump.  */
  if (CALL_P (insn) && REG_NOTE_KIND (link) == REG_DEP_TRUE)
    {
      rtx target = get_jump_target (insn);
      if (!REG_P (target) || !set_of (target, dep_insn))
        return 0;
    }

  return cost;
}
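
/* Editor's illustration: a "move r0, r5" that sets up an outgoing
   argument for a following "jal foo" has its latency cut to 0 here,
   letting the two share a bundle, while a "move r11, r5" feeding an
   indirect "jalr r11" keeps its normal latency because r11 is the
   jump target itself.  */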


/* Skip over irrelevant NOTEs and such and look for the next insn we
   would consider bundling.  */
static rtx
next_insn_to_bundle (rtx r, rtx end)
{
  for (; r != end; r = NEXT_INSN (r))
    {
      if (NONDEBUG_INSN_P (r)
          && GET_CODE (PATTERN (r)) != USE
          && GET_CODE (PATTERN (r)) != CLOBBER)
        return r;
    }

  return NULL_RTX;
}


/* Go through all insns, and use the information generated during
   scheduling to mark the bundles of instructions issued
   simultaneously, recording each insn's position in its bundle in
   the insn's machine mode.  */
static void
tilegx_gen_bundles (void)
{
  basic_block bb;
  FOR_EACH_BB (bb)
  {
    rtx insn, next;
    rtx end = NEXT_INSN (BB_END (bb));

    for (insn = next_insn_to_bundle (BB_HEAD (bb), end); insn; insn = next)
      {
        next = next_insn_to_bundle (NEXT_INSN (insn), end);

        /* Never wrap {} around inline asm.  */
        if (GET_CODE (PATTERN (insn)) != ASM_INPUT)
          {
            if (next == NULL_RTX || GET_MODE (next) == TImode
                /* NOTE: The scheduler incorrectly believes a call
                   insn can execute in the same cycle as the insn
                   after the call.  This is of course impossible.
                   Really we need to fix the scheduler somehow, so
                   the code after the call gets scheduled
                   optimally.  */
                || CALL_P (insn))
              {
                /* Mark current insn as the end of a bundle.  */
                PUT_MODE (insn, QImode);
              }
            else
              {
                /* Mark it as part of a bundle.  */
                PUT_MODE (insn, SImode);
              }
          }
      }
  }
}
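
/* Editor's note summarizing the convention: after this pass an
   insn's mode encodes its bundling.  SImode means "inside a bundle,
   more members follow"; QImode means "last member of its bundle".
   tilegx_asm_output_opcode below turns those marks into the '{' and
   '}' the assembler expects.  */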


/* Replace OLD_INSN with NEW_INSNS.  */
static void
replace_insns (rtx old_insn, rtx new_insns)
{
  if (new_insns)
    emit_insn_before (new_insns, old_insn);

  delete_insn (old_insn);
}


/* Returns true if INSN is the first instruction of a pc-relative
   address computation.  */
static bool
match_pcrel_step1 (rtx insn)
{
  rtx pattern = PATTERN (insn);
  rtx src;

  if (GET_CODE (pattern) != SET)
    return false;

  src = SET_SRC (pattern);

  return (GET_CODE (src) == CONST
          && GET_CODE (XEXP (src, 0)) == UNSPEC
          && XINT (XEXP (src, 0), 1) == UNSPEC_HW1_LAST_PCREL);
}


/* Do the first replacement step in tilegx_fixup_pcrel_references.  */
static void
replace_mov_pcrel_step1 (rtx insn)
{
  rtx pattern = PATTERN (insn);
  rtx unspec;
  rtx opnds[2];
  rtx new_insns;

  gcc_assert (GET_CODE (pattern) == SET);
  opnds[0] = SET_DEST (pattern);

  gcc_assert (GET_CODE (SET_SRC (pattern)) == CONST);

  unspec = XEXP (SET_SRC (pattern), 0);
  gcc_assert (GET_CODE (unspec) == UNSPEC);
  gcc_assert (XINT (unspec, 1) == UNSPEC_HW1_LAST_PCREL);
  opnds[1] = XVECEXP (unspec, 0, 0);

  /* We only need to replace SYMBOL_REFs, not LABEL_REFs.  */
  if (GET_CODE (opnds[1]) != SYMBOL_REF)
    return;

  start_sequence ();

  if (flag_pic != 1)
    {
      if (TARGET_32BIT)
        emit_insn (gen_mov_got32_step1_32bit (opnds[0], opnds[1]));
      else
        emit_insn (gen_mov_got32_step1 (opnds[0], opnds[1]));
    }

  new_insns = get_insns ();
  end_sequence ();

  replace_insns (insn, new_insns);
}


/* Returns true if INSN is the second instruction of a pc-relative
   address computation.  */
static bool
match_pcrel_step2 (rtx insn)
{
  rtx unspec;
  rtx addr;

  if (TARGET_32BIT)
    {
      if (recog_memoized (insn) != CODE_FOR_insn_addr_shl16insli_32bit)
        return false;
    }
  else
    {
      if (recog_memoized (insn) != CODE_FOR_insn_addr_shl16insli)
        return false;
    }

  unspec = SET_SRC (PATTERN (insn));
  addr = XVECEXP (unspec, 0, 1);

  return (GET_CODE (addr) == CONST
          && GET_CODE (XEXP (addr, 0)) == UNSPEC
          && XINT (XEXP (addr, 0), 1) == UNSPEC_HW0_PCREL);
}


/* Do the second replacement step in tilegx_fixup_pcrel_references.  */
static void
replace_mov_pcrel_step2 (rtx insn)
{
  rtx pattern = PATTERN (insn);
  rtx unspec;
  rtx addr;
  rtx opnds[3];
  rtx new_insns;
  rtx got_rtx = tilegx_got_rtx ();

  gcc_assert (GET_CODE (pattern) == SET);
  opnds[0] = SET_DEST (pattern);

  unspec = SET_SRC (pattern);
  gcc_assert (GET_CODE (unspec) == UNSPEC);
  gcc_assert (XINT (unspec, 1) == UNSPEC_INSN_ADDR_SHL16INSLI);

  opnds[1] = XVECEXP (unspec, 0, 0);

  addr = XVECEXP (unspec, 0, 1);
  gcc_assert (GET_CODE (addr) == CONST);

  unspec = XEXP (addr, 0);
  gcc_assert (GET_CODE (unspec) == UNSPEC);
  gcc_assert (XINT (unspec, 1) == UNSPEC_HW0_PCREL);
  opnds[2] = XVECEXP (unspec, 0, 0);

  /* We only need to replace SYMBOL_REFs, not LABEL_REFs.  */
  if (GET_CODE (opnds[2]) != SYMBOL_REF)
    return;

  start_sequence ();

  if (flag_pic == 1)
    {
      if (TARGET_32BIT)
        emit_insn (gen_add_got16_32bit (opnds[0], got_rtx, opnds[2]));
      else
        emit_insn (gen_add_got16 (opnds[0], got_rtx, opnds[2]));
    }
  else
    {
      if (TARGET_32BIT)
        emit_insn (gen_mov_got32_step2_32bit
                   (opnds[0], opnds[1], opnds[2]));
      else
        emit_insn (gen_mov_got32_step2 (opnds[0], opnds[1], opnds[2]));
    }

  new_insns = get_insns ();
  end_sequence ();

  replace_insns (insn, new_insns);
}


/* Do the third replacement step in tilegx_fixup_pcrel_references.  */
static void
replace_mov_pcrel_step3 (rtx insn)
{
  rtx pattern = PATTERN (insn);
  rtx unspec;
  rtx opnds[4];
  rtx new_insns;
  rtx got_rtx = tilegx_got_rtx ();
  rtx text_label_rtx = tilegx_text_label_rtx ();

  gcc_assert (GET_CODE (pattern) == SET);
  opnds[0] = SET_DEST (pattern);

  unspec = SET_SRC (pattern);
  gcc_assert (GET_CODE (unspec) == UNSPEC);
  gcc_assert (XINT (unspec, 1) == UNSPEC_MOV_PCREL_STEP3);

  opnds[1] = got_rtx;

  if (XVECEXP (unspec, 0, 0) == text_label_rtx)
    opnds[2] = XVECEXP (unspec, 0, 1);
  else
    {
      gcc_assert (XVECEXP (unspec, 0, 1) == text_label_rtx);
      opnds[2] = XVECEXP (unspec, 0, 0);
    }

  opnds[3] = XVECEXP (unspec, 0, 2);

  /* We only need to replace SYMBOL_REFs, not LABEL_REFs.  */
  if (GET_CODE (opnds[3]) != SYMBOL_REF)
    return;

  start_sequence ();

  if (flag_pic == 1)
    {
      emit_move_insn (opnds[0], gen_const_mem (Pmode, opnds[2]));
    }
  else
    {
      emit_move_insn (opnds[0], gen_rtx_PLUS (Pmode, opnds[1], opnds[2]));
      emit_move_insn (opnds[0], gen_const_mem (Pmode, opnds[0]));
    }

  new_insns = get_insns ();
  end_sequence ();

  replace_insns (insn, new_insns);
}


/* We generate PC relative SYMBOL_REFs as an optimization, to avoid
   going through the GOT when the symbol is local to the compilation
   unit.  But such a symbol requires that the common text_label that
   we generate at the beginning of the function be in the same section
   as the reference to the SYMBOL_REF.  This may not be true if we
   generate hot/cold sections.  This function looks for such cases and
   replaces such references with the longer sequence going through the
   GOT.

   We expect the following instruction sequence:
   moveli      tmp1, hw1_last(x-.L_PICLNK)          [1]
   shl16insli  tmp2, tmp1, hw0(x-.L_PICLNK)         [2]
   add<x>      tmp3, txt_label_reg, tmp2            [3]

   If we're compiling -fpic, we replace it with the following sequence
   (the numbers in brackets match the instructions they're replacing
   above).

   add<x>li    tmp2, got_reg, hw0_last_got(x)       [2]
   ld<4>       tmp3, tmp2                           [3]

   If we're compiling -fPIC, we replace the first instruction with:

   moveli      tmp1, hw1_last_got(x)                [1]
   shl16insli  tmp2, tmp1, hw0_got(x)               [2]
   add<x>      tmp3, got_reg, tmp2                  [3]
   ld<4>       tmp3, tmp3                           [3]

   Note that we're careful to disturb the instruction sequence as
   little as possible, since it's very late in the compilation
   process.  */
static void
tilegx_fixup_pcrel_references (void)
{
  rtx insn, next_insn;
  bool same_section_as_entry = true;

  for (insn = get_insns (); insn; insn = next_insn)
    {
      next_insn = NEXT_INSN (insn);

      if (NOTE_P (insn) && NOTE_KIND (insn) == NOTE_INSN_SWITCH_TEXT_SECTIONS)
        {
          same_section_as_entry = !same_section_as_entry;
          continue;
        }

      if (same_section_as_entry)
        continue;

      if (!(INSN_P (insn)
            && GET_CODE (PATTERN (insn)) != USE
            && GET_CODE (PATTERN (insn)) != CLOBBER))
        continue;

      if (TARGET_32BIT)
        {
          if (match_pcrel_step1 (insn))
            replace_mov_pcrel_step1 (insn);
          else if (match_pcrel_step2 (insn))
            replace_mov_pcrel_step2 (insn);
          else if (recog_memoized (insn) == CODE_FOR_mov_pcrel_step3_32bit)
            replace_mov_pcrel_step3 (insn);
        }
      else
        {
          if (match_pcrel_step1 (insn))
            replace_mov_pcrel_step1 (insn);
          else if (match_pcrel_step2 (insn))
            replace_mov_pcrel_step2 (insn);
          else if (recog_memoized (insn) == CODE_FOR_mov_pcrel_step3)
            replace_mov_pcrel_step3 (insn);
        }
    }
}
4631
 
4632
 
4633
/* Ensure that no var tracking notes are emitted in the middle of a
4634
   three-instruction bundle.  */
4635
static void
4636
reorder_var_tracking_notes (void)
4637
{
4638
  basic_block bb;
4639
  FOR_EACH_BB (bb)
4640
  {
4641
    rtx insn, next;
4642
    rtx queue = NULL_RTX;
4643
    bool in_bundle = false;
4644
 
4645
    for (insn = BB_HEAD (bb); insn != BB_END (bb); insn = next)
4646
      {
4647
        next = NEXT_INSN (insn);
4648
 
4649
        if (INSN_P (insn))
4650
          {
4651
            /* Emit queued up notes at the last instruction of a
4652
               bundle.  */
4653
            if (GET_MODE (insn) == QImode)
4654
              {
4655
                while (queue)
4656
                  {
4657
                    rtx next_queue = PREV_INSN (queue);
4658
                    PREV_INSN (NEXT_INSN (insn)) = queue;
4659
                    NEXT_INSN (queue) = NEXT_INSN (insn);
4660
                    NEXT_INSN (insn) = queue;
4661
                    PREV_INSN (queue) = insn;
4662
                    queue = next_queue;
4663
                  }
4664
                in_bundle = false;
4665
              }
4666
            else if (GET_MODE (insn) == SImode)
4667
              in_bundle = true;
4668
          }
4669
        else if (NOTE_P (insn) && NOTE_KIND (insn) == NOTE_INSN_VAR_LOCATION)
4670
          {
4671
            if (in_bundle)
4672
              {
4673
                rtx prev = PREV_INSN (insn);
4674
                PREV_INSN (next) = prev;
4675
                NEXT_INSN (prev) = next;
4676
 
4677
                PREV_INSN (insn) = queue;
4678
                queue = insn;
4679
              }
4680
          }
4681
      }
4682
  }
4683
}
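
/* Editor's illustration: a NOTE_INSN_VAR_LOCATION that falls between
   two SImode members of a bundle would otherwise be printed inside
   the '{' ... '}' text, so the loop above unlinks such notes, queues
   them, and splices them back in just after the QImode insn that
   closes the bundle.  */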


/* Perform machine dependent operations on the rtl chain INSNS.  */
static void
tilegx_reorg (void)
{
  /* We are freeing block_for_insn in the toplev to keep compatibility
     with old MDEP_REORGS that are not CFG based.  Recompute it
     now.  */
  compute_bb_for_insn ();

  if (flag_reorder_blocks_and_partition)
    {
      tilegx_fixup_pcrel_references ();
    }

  if (flag_schedule_insns_after_reload)
    {
      split_all_insns ();

      timevar_push (TV_SCHED2);
      schedule_insns ();
      timevar_pop (TV_SCHED2);

      /* Examine the schedule to group into bundles.  */
      tilegx_gen_bundles ();
    }

  df_analyze ();

  if (flag_var_tracking)
    {
      timevar_push (TV_VAR_TRACKING);
      variable_tracking_main ();
      reorder_var_tracking_notes ();
      timevar_pop (TV_VAR_TRACKING);
    }

  df_finish_pass (false);
}



/* Assembly  */

/* Select a format to encode pointers in exception handling data.
   CODE is 0 for data, 1 for code labels, 2 for function pointers.
   GLOBAL is true if the symbol may be affected by dynamic
   relocations.  */
int
tilegx_asm_preferred_eh_data_format (int code ATTRIBUTE_UNUSED, int global)
{
  if (flag_pic)
    {
      int type = TARGET_32BIT ? DW_EH_PE_sdata4 : DW_EH_PE_sdata8;
      return (global ? DW_EH_PE_indirect : 0) | DW_EH_PE_pcrel | type;
    }
  else
    return DW_EH_PE_absptr;
}


/* Implement TARGET_ASM_OUTPUT_MI_THUNK.  */
static void
tilegx_output_mi_thunk (FILE *file, tree thunk_fndecl ATTRIBUTE_UNUSED,
                        HOST_WIDE_INT delta, HOST_WIDE_INT vcall_offset,
                        tree function)
{
  rtx this_rtx, insn, funexp, addend;

  /* Pretend to be a post-reload pass while generating rtl.  */
  reload_completed = 1;

  /* Mark the end of the (empty) prologue.  */
  emit_note (NOTE_INSN_PROLOGUE_END);

  /* Find the "this" pointer.  If the function returns a structure,
     the structure return pointer is in $1.  */
  if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function)), function))
    this_rtx = gen_rtx_REG (Pmode, 1);
  else
    this_rtx = gen_rtx_REG (Pmode, 0);

  /* Add DELTA to THIS_RTX, using a scratch register if DELTA does
     not fit in a signed 16-bit immediate.  */
  if (!(delta >= -32768 && delta <= 32767))
    {
      addend = gen_rtx_REG (Pmode, 29);
      emit_move_insn (addend, GEN_INT (delta));
    }
  else
    addend = GEN_INT (delta);

  if (TARGET_32BIT)
    emit_insn (gen_addsi3 (this_rtx, this_rtx, addend));
  else
    emit_insn (gen_adddi3 (this_rtx, this_rtx, addend));

  /* If needed, add *(*THIS_RTX + VCALL_OFFSET) to THIS_RTX.  */
  if (vcall_offset)
    {
      rtx tmp;

      tmp = gen_rtx_REG (Pmode, 29);
      emit_move_insn (tmp, gen_rtx_MEM (Pmode, this_rtx));

      if (!(vcall_offset >= -32768 && vcall_offset <= 32767))
        {
          addend = gen_rtx_REG (Pmode, 28);
          emit_move_insn (addend, GEN_INT (vcall_offset));
        }
      else
        addend = GEN_INT (vcall_offset);

      if (TARGET_32BIT)
        emit_insn (gen_addsi3 (tmp, tmp, addend));
      else
        emit_insn (gen_adddi3 (tmp, tmp, addend));

      emit_move_insn (tmp, gen_rtx_MEM (Pmode, tmp));

      if (TARGET_32BIT)
        emit_insn (gen_addsi3 (this_rtx, this_rtx, tmp));
      else
        emit_insn (gen_adddi3 (this_rtx, this_rtx, tmp));
    }

  /* Generate a tail call to the target function.  */
  if (!TREE_USED (function))
    {
      assemble_external (function);
      TREE_USED (function) = 1;
    }
  funexp = XEXP (DECL_RTL (function), 0);
  funexp = gen_rtx_MEM (FUNCTION_MODE, funexp);
  insn = emit_call_insn (gen_sibcall (funexp, const0_rtx));
  SIBLING_CALL_P (insn) = 1;

  /* Run just enough of rest_of_compilation to get the insns emitted.
     There's not really enough bulk here to make other passes such as
     instruction scheduling worthwhile.  Note that use_thunk calls
     assemble_start_function and assemble_end_function.

     We don't currently bundle, but the instruction sequence is all
     serial except for the tail call, so we're only wasting one
     cycle.  */
  insn = get_insns ();
  insn_locators_alloc ();
  shorten_branches (insn);
  final_start_function (insn, file, 1);
  final (insn, file, 1);
  final_end_function ();

  /* Stop pretending to be a post-reload pass.  */
  reload_completed = 0;
}
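
/* Editor's note: the net effect of the thunk emitted above is
   this += delta; if (vcall_offset != 0) this += *(*this + vcall_offset);
   followed by a tail call to FUNCTION, with r29 (and r28) used as
   scratch whenever an offset does not fit in a signed 16-bit
   immediate.  */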


/* Implement TARGET_ASM_TRAMPOLINE_TEMPLATE.  */
static void
tilegx_asm_trampoline_template (FILE *file)
{
  int ptr_mode_size = GET_MODE_SIZE (ptr_mode);
  if (TARGET_32BIT)
    {
      fprintf (file, "\tlnk      r10\n");
      fprintf (file, "\taddxi    r10, r10, 32\n");
      fprintf (file, "\tld4s_add r11, r10, %d\n", ptr_mode_size);
      fprintf (file, "\tld4s     r10, r10\n");
      fprintf (file, "\tjr       r11\n");
      fprintf (file, "\t.word 0 # <function address>\n");
      fprintf (file, "\t.word 0 # <static chain value>\n");
    }
  else
    {
      fprintf (file, "\tlnk      r10\n");
      fprintf (file, "\taddi     r10, r10, 32\n");
      fprintf (file, "\tld_add   r11, r10, %d\n", ptr_mode_size);
      fprintf (file, "\tld       r10, r10\n");
      fprintf (file, "\tjr       r11\n");
      fprintf (file, "\t.quad 0 # <function address>\n");
      fprintf (file, "\t.quad 0 # <static chain value>\n");
    }
}
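
/* Editor's sketch of the template above: lnk places the address of
   the following instruction in r10, the add skips past the remaining
   code to the data words appended after it, the first load fetches
   the target function's address into r11 (post-incrementing r10 past
   that slot), the second load fetches the static chain value into
   r10, and jr r11 enters the target.  */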


/* Implement TARGET_TRAMPOLINE_INIT.  */
static void
tilegx_trampoline_init (rtx m_tramp, tree fndecl, rtx static_chain)
{
  rtx fnaddr, chaddr;
  rtx mem;
  rtx begin_addr, end_addr;
  int ptr_mode_size = GET_MODE_SIZE (ptr_mode);

  fnaddr = copy_to_reg (XEXP (DECL_RTL (fndecl), 0));
  chaddr = copy_to_reg (static_chain);

  emit_block_move (m_tramp, assemble_trampoline_template (),
                   GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);

  mem = adjust_address (m_tramp, ptr_mode,
                        TRAMPOLINE_SIZE - 2 * ptr_mode_size);
  emit_move_insn (mem, fnaddr);
  mem = adjust_address (m_tramp, ptr_mode,
                        TRAMPOLINE_SIZE - ptr_mode_size);
  emit_move_insn (mem, chaddr);

  /* Get pointers to the beginning and end of the code block.  */
  begin_addr = force_reg (Pmode, XEXP (m_tramp, 0));
  end_addr = force_reg (Pmode, plus_constant (XEXP (m_tramp, 0),
                                              TRAMPOLINE_SIZE));

  emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__clear_cache"),
                     LCT_NORMAL, VOIDmode, 2, begin_addr, Pmode,
                     end_addr, Pmode);
}


/* Implement TARGET_PRINT_OPERAND.  */
static void
tilegx_print_operand (FILE *file, rtx x, int code)
{
  switch (code)
    {
    case 'c':
      /* Print the compare operator opcode for conditional moves.  */
      switch (GET_CODE (x))
        {
        case EQ:
          fputs ("z", file);
          break;
        case NE:
          fputs ("nz", file);
          break;
        default:
          output_operand_lossage ("invalid %%c operand");
        }
      return;

    case 'C':
      /* Print the inverse compare operator opcode for conditional
         moves.  */
      switch (GET_CODE (x))
        {
        case EQ:
          fputs ("nz", file);
          break;
        case NE:
          fputs ("z", file);
          break;
        default:
          output_operand_lossage ("invalid %%C operand");
        }
      return;

    case 'd':
      {
        /* Print the compare operator opcode for conditional moves.  */
        switch (GET_CODE (x))
          {
          case EQ:
            fputs ("eq", file);
            break;
          case NE:
            fputs ("ne", file);
            break;
          default:
            output_operand_lossage ("invalid %%d operand");
          }
        return;
      }

    case 'D':
      {
        /* Print the inverse compare operator opcode for conditional
           moves.  */
        switch (GET_CODE (x))
          {
          case EQ:
            fputs ("ne", file);
            break;
          case NE:
            fputs ("eq", file);
            break;
          default:
            output_operand_lossage ("invalid %%D operand");
          }
        return;
      }

    case 'H':
      {
      if (GET_CODE (x) == CONST
          && GET_CODE (XEXP (x, 0)) == UNSPEC)
        {
          rtx addr = XVECEXP (XEXP (x, 0), 0, 0);
          int unspec = XINT (XEXP (x, 0), 1);
          const char *opstr = NULL;
          switch (unspec)
            {
            case UNSPEC_HW0:
            case UNSPEC_HW0_PCREL:
              opstr = "hw0";
              break;
            case UNSPEC_HW1:
              opstr = "hw1";
              break;
            case UNSPEC_HW2:
              opstr = "hw2";
              break;
            case UNSPEC_HW3:
              opstr = "hw3";
              break;
            case UNSPEC_HW0_LAST:
              opstr = "hw0_last";
              break;
            case UNSPEC_HW1_LAST:
            case UNSPEC_HW1_LAST_PCREL:
              opstr = "hw1_last";
              break;
            case UNSPEC_HW2_LAST:
              opstr = "hw2_last";
              break;
            case UNSPEC_HW0_GOT:
              opstr = "hw0_got";
              break;
            case UNSPEC_HW0_LAST_GOT:
              opstr = "hw0_last_got";
              break;
            case UNSPEC_HW1_LAST_GOT:
              opstr = "hw1_last_got";
              break;
            case UNSPEC_HW0_TLS_GD:
              opstr = "hw0_tls_gd";
              break;
            case UNSPEC_HW1_LAST_TLS_GD:
              opstr = "hw1_last_tls_gd";
              break;
            case UNSPEC_HW0_TLS_IE:
              opstr = "hw0_tls_ie";
              break;
            case UNSPEC_HW1_LAST_TLS_IE:
              opstr = "hw1_last_tls_ie";
              break;
            case UNSPEC_HW0_TLS_LE:
              opstr = "hw0_tls_le";
              break;
            case UNSPEC_HW1_LAST_TLS_LE:
              opstr = "hw1_last_tls_le";
              break;
            default:
              output_operand_lossage ("invalid %%H specifier");
            }

          fputs (opstr, file);
          fputc ('(', file);
          output_addr_const (file, addr);

          if (unspec == UNSPEC_HW0_PCREL
              || unspec == UNSPEC_HW1_LAST_PCREL)
            {
              rtx addr2 = XVECEXP (XEXP (x, 0), 0, 1);
              fputs (" - ", file);
              output_addr_const (file, addr2);
            }

          fputc (')', file);
          return;
        }
      else if (symbolic_operand (x, VOIDmode))
        {
          output_addr_const (file, x);
          return;
        }
      }
      /* FALLTHRU */

    case 'h':
      {
        /* Print the low 16 bits of a constant.  */
        HOST_WIDE_INT i;
        if (CONST_INT_P (x))
          i = INTVAL (x);
        else if (GET_CODE (x) == CONST_DOUBLE)
          i = CONST_DOUBLE_LOW (x);
        else
          {
            output_operand_lossage ("invalid %%h operand");
            return;
          }
        i = trunc_int_for_mode (i, HImode);
        fprintf (file, HOST_WIDE_INT_PRINT_DEC, i);
        return;
      }

    case 'I':
      /* Print the register half of an auto-inc memory operand.  */
      if (!MEM_P (x))
        {
          output_operand_lossage ("invalid %%I operand");
          return;
        }

      output_memory_reference_mode = GET_MODE (x);
      output_memory_autoinc_first = true;
      output_address (XEXP (x, 0));
      output_memory_reference_mode = VOIDmode;
      return;

    case 'i':
      /* Print the increment half of an auto-inc memory operand.  */
      if (!MEM_P (x))
        {
          output_operand_lossage ("invalid %%i operand");
          return;
        }

      output_memory_reference_mode = GET_MODE (x);
      output_memory_autoinc_first = false;
      output_address (XEXP (x, 0));
      output_memory_reference_mode = VOIDmode;
      return;

    case 'j':
      {
        /* Print the low 8 bits of a constant.  */
        HOST_WIDE_INT i;
        if (CONST_INT_P (x))
          i = INTVAL (x);
        else if (GET_CODE (x) == CONST_DOUBLE)
          i = CONST_DOUBLE_LOW (x);
        else if (GET_CODE (x) == CONST_VECTOR
                 && CONST_INT_P (CONST_VECTOR_ELT (x, 0)))
          i = INTVAL (CONST_VECTOR_ELT (x, 0));
        else
          {
            output_operand_lossage ("invalid %%j operand");
            return;
          }
        i = trunc_int_for_mode (i, QImode);
        fprintf (file, HOST_WIDE_INT_PRINT_DEC, i);
        return;
      }

    case 'P':
      {
        /* Print a constant plus one.  */
        if (!CONST_INT_P (x))
          {
            output_operand_lossage ("invalid %%P operand");
            return;
          }
        fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) + 1);
        return;
      }

    case 'm':
    case 'M':
      {
        /* Print a bfextu-style bit range.  */
        int first_bit, last_bit;
        HOST_WIDE_INT flip = (code == 'm') ? ~0 : 0;

        if (!CONST_INT_P (x)
            || !tilegx_bitfield_operand_p (INTVAL (x) ^ flip,
                                           &first_bit, &last_bit))
          {
            output_operand_lossage ("invalid %%%c operand", code);
            return;
          }

        fprintf (file, "%d, %d", first_bit, last_bit);
        return;
      }

    case 'N':
      {
        const char *reg = NULL;

        /* Print a network register.  */
        if (!CONST_INT_P (x))
          {
            output_operand_lossage ("invalid %%N operand");
            return;
          }

        switch (INTVAL (x))
          {
          case TILEGX_NETREG_IDN0: reg = "idn0"; break;
          case TILEGX_NETREG_IDN1: reg = "idn1"; break;
          case TILEGX_NETREG_UDN0: reg = "udn0"; break;
          case TILEGX_NETREG_UDN1: reg = "udn1"; break;
          case TILEGX_NETREG_UDN2: reg = "udn2"; break;
          case TILEGX_NETREG_UDN3: reg = "udn3"; break;
          default:
            gcc_unreachable ();
          }

        fputs (reg, file);
        return;
      }

    case 'p':
      if (GET_CODE (x) == SYMBOL_REF)
        {
          if (flag_pic && !SYMBOL_REF_LOCAL_P (x))
            fprintf (file, "plt(");
          output_addr_const (file, x);
          if (flag_pic && !SYMBOL_REF_LOCAL_P (x))
            fprintf (file, ")");
        }
      else
        output_addr_const (file, x);
      return;

    case 'r':
      /* In this case we need a register.  Use 'zero' if the operand
         is const0_rtx.  */
      if (x == const0_rtx
          || (GET_MODE (x) != VOIDmode && x == CONST0_RTX (GET_MODE (x))))
        {
          fputs ("zero", file);
          return;
        }
      else if (!REG_P (x))
        {
          output_operand_lossage ("invalid operand for 'r' specifier");
          return;
        }
      /* FALLTHRU */

    case 0:
      if (REG_P (x))
        {
          fprintf (file, "%s", reg_names[REGNO (x)]);
          return;
        }
      else if (MEM_P (x))
        {
          output_memory_reference_mode = VOIDmode;
          output_address (XEXP (x, 0));
          return;
        }
      else
        {
          output_addr_const (file, x);
          return;
        }
    }

  debug_rtx (x);
  output_operand_lossage ("unable to print out operand yet; code == %d (%c)",
                          code, code);
}
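
/* Editor's illustration of the %H specifier above: given a CONST
   wrapping (unspec [sym] UNSPEC_HW0_GOT) it prints "hw0_got(sym)",
   and for the pc-relative unspecs it prints the difference form,
   e.g. "hw1_last(sym - label)".  */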


/* Implement TARGET_PRINT_OPERAND_ADDRESS.  */
static void
tilegx_print_operand_address (FILE *file, rtx addr)
{
  if (GET_CODE (addr) == POST_DEC
      || GET_CODE (addr) == POST_INC)
    {
      int offset = GET_MODE_SIZE (output_memory_reference_mode);

      gcc_assert (output_memory_reference_mode != VOIDmode);

      if (output_memory_autoinc_first)
        fprintf (file, "%s", reg_names[REGNO (XEXP (addr, 0))]);
      else
        fprintf (file, "%d",
                 GET_CODE (addr) == POST_DEC ? -offset : offset);
    }
  else if (GET_CODE (addr) == POST_MODIFY)
    {
      gcc_assert (output_memory_reference_mode != VOIDmode);

      gcc_assert (GET_CODE (XEXP (addr, 1)) == PLUS);

      if (output_memory_autoinc_first)
        fprintf (file, "%s", reg_names[REGNO (XEXP (addr, 0))]);
      else
        fprintf (file, HOST_WIDE_INT_PRINT_DEC,
                 INTVAL (XEXP (XEXP (addr, 1), 1)));
    }
  else
    tilegx_print_operand (file, addr, 'r');
}
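
/* Editor's illustration: for a DImode POST_INC address based on
   register r5, the '%I' fragment prints the base register name "r5"
   and the '%i' fragment prints the increment "8" ("-8" for
   POST_DEC), the two halves of an auto-inc load or store operand.  */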


/* Machine mode of current insn, for determining curly brace
   placement.  */
static enum machine_mode insn_mode;


/* Implement FINAL_PRESCAN_INSN.  This is used to emit bundles.  */
void
tilegx_final_prescan_insn (rtx insn)
{
  /* Record this for tilegx_asm_output_opcode to examine.  */
  insn_mode = GET_MODE (insn);
}


/* While emitting asm, are we currently inside '{' for a bundle?  */
static bool tilegx_in_bundle = false;

/* Implement ASM_OUTPUT_OPCODE.  Prepend/append curly braces as
   appropriate given the bundling information recorded by
   tilegx_gen_bundles.  */
const char *
tilegx_asm_output_opcode (FILE *stream, const char *code)
{
  bool pseudo = !strcmp (code, "pseudo");

  if (!tilegx_in_bundle && insn_mode == SImode)
    {
      /* Start a new bundle.  */
      fprintf (stream, "{\n\t");
      tilegx_in_bundle = true;
    }

  if (tilegx_in_bundle && insn_mode == QImode)
    {
      /* Close an existing bundle.  */
      static char buf[100];

      gcc_assert (strlen (code) + 3 + 1 < sizeof (buf));

      strcpy (buf, pseudo ? "" : code);
      strcat (buf, "\n\t}");
      tilegx_in_bundle = false;

      return buf;
    }
  else
    {
      return pseudo ? "" : code;
    }
}
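
/* Editor's illustration: a bundle whose members were marked SImode,
   SImode, QImode by tilegx_gen_bundles is printed roughly as

       {
       add r1, r2, r3
       ld r4, r5
       jal foo
       }

   with '{' emitted before the first member and "\n\t}" appended to
   the opcode of the closing QImode insn.  */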


/* Output assembler code to FILE to increment profiler label # LABELNO
   for profiling a function entry.  */
void
tilegx_function_profiler (FILE *file, int labelno ATTRIBUTE_UNUSED)
{
  if (tilegx_in_bundle)
    {
      fprintf (file, "\t}\n");
    }

  if (flag_pic)
    {
      fprintf (file,
               "\t{\n"
               "\tmove\tr10, lr\n"
               "\tjal\t%s@plt\n"
               "\t}\n", MCOUNT_NAME);
    }
  else
    {
      fprintf (file,
               "\t{\n"
               "\tmove\tr10, lr\n"
               "\tjal\t%s\n"
               "\t}\t\n", MCOUNT_NAME);
    }

  tilegx_in_bundle = false;
}


/* Implement TARGET_ASM_FILE_END.  */
static void
tilegx_file_end (void)
{
  if (NEED_INDICATE_EXEC_STACK)
    file_end_indicate_exec_stack ();
}



#undef  TARGET_HAVE_TLS
#define TARGET_HAVE_TLS HAVE_AS_TLS

#undef  TARGET_OPTION_OVERRIDE
#define TARGET_OPTION_OVERRIDE tilegx_option_override

#undef  TARGET_SCALAR_MODE_SUPPORTED_P
#define TARGET_SCALAR_MODE_SUPPORTED_P tilegx_scalar_mode_supported_p

#undef  TARGET_VECTOR_MODE_SUPPORTED_P
#define TARGET_VECTOR_MODE_SUPPORTED_P tilegx_vector_mode_supported_p

#undef  TARGET_CANNOT_FORCE_CONST_MEM
#define TARGET_CANNOT_FORCE_CONST_MEM tilegx_cannot_force_const_mem

#undef  TARGET_FUNCTION_OK_FOR_SIBCALL
#define TARGET_FUNCTION_OK_FOR_SIBCALL tilegx_function_ok_for_sibcall

#undef  TARGET_PASS_BY_REFERENCE
#define TARGET_PASS_BY_REFERENCE tilegx_pass_by_reference

#undef  TARGET_RETURN_IN_MEMORY
#define TARGET_RETURN_IN_MEMORY tilegx_return_in_memory

#undef  TARGET_MODE_REP_EXTENDED
#define TARGET_MODE_REP_EXTENDED tilegx_mode_rep_extended

#undef  TARGET_FUNCTION_ARG_BOUNDARY
#define TARGET_FUNCTION_ARG_BOUNDARY tilegx_function_arg_boundary

#undef  TARGET_FUNCTION_ARG
#define TARGET_FUNCTION_ARG tilegx_function_arg

#undef  TARGET_FUNCTION_ARG_ADVANCE
#define TARGET_FUNCTION_ARG_ADVANCE tilegx_function_arg_advance

#undef  TARGET_FUNCTION_VALUE
#define TARGET_FUNCTION_VALUE tilegx_function_value

#undef  TARGET_LIBCALL_VALUE
#define TARGET_LIBCALL_VALUE tilegx_libcall_value

#undef  TARGET_FUNCTION_VALUE_REGNO_P
#define TARGET_FUNCTION_VALUE_REGNO_P tilegx_function_value_regno_p

#undef  TARGET_PROMOTE_FUNCTION_MODE
#define TARGET_PROMOTE_FUNCTION_MODE default_promote_function_mode_always_promote

#undef  TARGET_PROMOTE_PROTOTYPES
#define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_false

#undef  TARGET_BUILD_BUILTIN_VA_LIST
#define TARGET_BUILD_BUILTIN_VA_LIST tilegx_build_builtin_va_list

#undef  TARGET_EXPAND_BUILTIN_VA_START
#define TARGET_EXPAND_BUILTIN_VA_START tilegx_va_start

#undef  TARGET_SETUP_INCOMING_VARARGS
#define TARGET_SETUP_INCOMING_VARARGS tilegx_setup_incoming_varargs

#undef  TARGET_GIMPLIFY_VA_ARG_EXPR
#define TARGET_GIMPLIFY_VA_ARG_EXPR tilegx_gimplify_va_arg_expr

#undef  TARGET_RTX_COSTS
#define TARGET_RTX_COSTS tilegx_rtx_costs

#undef  TARGET_SHIFT_TRUNCATION_MASK
#define TARGET_SHIFT_TRUNCATION_MASK tilegx_shift_truncation_mask

#undef  TARGET_INIT_LIBFUNCS
#define TARGET_INIT_LIBFUNCS tilegx_init_libfuncs

/* Limit to what we can reach in one addli.  */
#undef  TARGET_MIN_ANCHOR_OFFSET
#define TARGET_MIN_ANCHOR_OFFSET -32768
#undef  TARGET_MAX_ANCHOR_OFFSET
#define TARGET_MAX_ANCHOR_OFFSET 32767

#undef  TARGET_LEGITIMATE_CONSTANT_P
#define TARGET_LEGITIMATE_CONSTANT_P tilegx_legitimate_constant_p

#undef  TARGET_LEGITIMATE_ADDRESS_P
#define TARGET_LEGITIMATE_ADDRESS_P tilegx_legitimate_address_p

#undef  TARGET_LEGITIMIZE_ADDRESS
#define TARGET_LEGITIMIZE_ADDRESS tilegx_legitimize_address

#undef  TARGET_DELEGITIMIZE_ADDRESS
#define TARGET_DELEGITIMIZE_ADDRESS tilegx_delegitimize_address

#undef  TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS  tilegx_init_builtins

#undef  TARGET_BUILTIN_DECL
#define TARGET_BUILTIN_DECL tilegx_builtin_decl

#undef  TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN tilegx_expand_builtin

#undef  TARGET_CONDITIONAL_REGISTER_USAGE
#define TARGET_CONDITIONAL_REGISTER_USAGE tilegx_conditional_register_usage

#undef  TARGET_FRAME_POINTER_REQUIRED
#define TARGET_FRAME_POINTER_REQUIRED tilegx_frame_pointer_required

#undef  TARGET_DELAY_SCHED2
#define TARGET_DELAY_SCHED2 true

#undef  TARGET_DELAY_VARTRACK
#define TARGET_DELAY_VARTRACK true

#undef  TARGET_SCHED_ISSUE_RATE
#define TARGET_SCHED_ISSUE_RATE tilegx_issue_rate

#undef  TARGET_SCHED_ADJUST_COST
#define TARGET_SCHED_ADJUST_COST tilegx_sched_adjust_cost

#undef  TARGET_MACHINE_DEPENDENT_REORG
#define TARGET_MACHINE_DEPENDENT_REORG tilegx_reorg

#undef  TARGET_ASM_CAN_OUTPUT_MI_THUNK
#define TARGET_ASM_CAN_OUTPUT_MI_THUNK \
  hook_bool_const_tree_hwi_hwi_const_tree_true

#undef  TARGET_ASM_OUTPUT_MI_THUNK
#define TARGET_ASM_OUTPUT_MI_THUNK tilegx_output_mi_thunk

#undef  TARGET_ASM_TRAMPOLINE_TEMPLATE
#define TARGET_ASM_TRAMPOLINE_TEMPLATE tilegx_asm_trampoline_template

#undef  TARGET_TRAMPOLINE_INIT
#define TARGET_TRAMPOLINE_INIT tilegx_trampoline_init

#undef  TARGET_PRINT_OPERAND
#define TARGET_PRINT_OPERAND tilegx_print_operand

#undef  TARGET_PRINT_OPERAND_ADDRESS
#define TARGET_PRINT_OPERAND_ADDRESS tilegx_print_operand_address

#undef  TARGET_ASM_FILE_END
#define TARGET_ASM_FILE_END tilegx_file_end

#undef  TARGET_ASM_ALIGNED_DI_OP
#define TARGET_ASM_ALIGNED_DI_OP "\t.quad\t"


struct gcc_target targetm = TARGET_INITIALIZER;

#include "gt-tilegx.h"
