OpenCores
URL https://opencores.org/ocsvn/openrisc/openrisc/trunk

Subversion Repositories openrisc

[/] [openrisc/] [trunk/] [gnu-dev/] [or1k-gcc/] [gcc/] [config/] [tilepro/] [tilepro.c] - Blame information for rev 713

Go to most recent revision | Details | Compare with Previous | View Log

Line No. Rev Author Line
1 709 jeremybenn
/* Subroutines used for code generation on the Tilera TILEPro.
2
   Copyright (C) 2011, 2012
3
   Free Software Foundation, Inc.
4
   Contributed by Walter Lee (walt@tilera.com)
5
 
6
   This file is part of GCC.
7
 
8
   GCC is free software; you can redistribute it and/or modify it
9
   under the terms of the GNU General Public License as published
10
   by the Free Software Foundation; either version 3, or (at your
11
   option) any later version.
12
 
13
   GCC is distributed in the hope that it will be useful, but WITHOUT
14
   ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
15
   or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public
16
   License for more details.
17
 
18
   You should have received a copy of the GNU General Public License
19
   along with GCC; see the file COPYING3.  If not see
20
   <http://www.gnu.org/licenses/>.  */
21
 
22
#include "config.h"
23
#include "system.h"
24
#include "coretypes.h"
25
#include "tm.h"
26
#include "rtl.h"
27
#include "regs.h"
28
#include "insn-config.h"
29
#include "output.h"
30
#include "insn-attr.h"
31
#include "recog.h"
32
#include "expr.h"
33
#include "langhooks.h"
34
#include "optabs.h"
35
#include "sched-int.h"
36
#include "sel-sched.h"
37
#include "tm_p.h"
38
#include "tm-constrs.h"
39
#include "target.h"
40
#include "target-def.h"
41
#include "integrate.h"
42
#include "dwarf2.h"
43
#include "timevar.h"
44
#include "gimple.h"
45
#include "cfgloop.h"
46
#include "tilepro-builtins.h"
47
#include "tilepro-multiply.h"
48
#include "diagnostic.h"
49
 
50
/* SYMBOL_REF for GOT.  Created lazily by tilepro_got_symbol below and
   GC-rooted via GTY.  */
static GTY(()) rtx g_got_symbol = NULL;

/* In case of a POST_INC or POST_DEC memory reference, we must report
   the mode of the memory reference from TARGET_PRINT_OPERAND to
   TARGET_PRINT_OPERAND_ADDRESS.  */
static enum machine_mode output_memory_reference_mode;

/* Report whether we're printing out the first address fragment of a
   POST_INC or POST_DEC memory reference, from TARGET_PRINT_OPERAND to
   TARGET_PRINT_OPERAND_ADDRESS.  */
static bool output_memory_autoinc_first;
62
 
63
 
64
 
65
/* Option handling  */
66
 
67
/* Implement TARGET_OPTION_OVERRIDE.  */
68
static void
69
tilepro_option_override (void)
70
{
71
  /* When modulo scheduling is enabled, we still rely on regular
72
     scheduler for bundling.  */
73
  if (flag_modulo_sched)
74
    flag_resched_modulo_sched = 1;
75
}
76
 
77
 
78
 
79
/* Implement TARGET_SCALAR_MODE_SUPPORTED_P.  */
80
static bool
81
tilepro_scalar_mode_supported_p (enum machine_mode mode)
82
{
83
  switch (mode)
84
    {
85
    case QImode:
86
    case HImode:
87
    case SImode:
88
    case DImode:
89
      return true;
90
 
91
    case SFmode:
92
    case DFmode:
93
      return true;
94
 
95
    default:
96
      return false;
97
    }
98
}
99
 
100
 
101
/* Implement TARGET_VECTOR_MODE_SUPPORTED_P.  */
102
static bool
103
tile_vector_mode_supported_p (enum machine_mode mode)
104
{
105
  return mode == V4QImode || mode == V2HImode;
106
}
107
 
108
 
109
/* Implement TARGET_CANNOT_FORCE_CONST_MEM.  */
110
static bool
111
tilepro_cannot_force_const_mem (enum machine_mode mode ATTRIBUTE_UNUSED,
112
                                rtx x ATTRIBUTE_UNUSED)
113
{
114
  return true;
115
}
116
 
117
 
118
/* Implement TARGET_FUNCTION_OK_FOR_SIBCALL.  */
119
static bool
120
tilepro_function_ok_for_sibcall (tree decl, tree exp ATTRIBUTE_UNUSED)
121
{
122
  return decl != NULL;
123
}
124
 
125
 
126
/* Implement TARGET_PASS_BY_REFERENCE.  Variable sized types are
127
   passed by reference.  */
128
static bool
129
tilepro_pass_by_reference (cumulative_args_t cum ATTRIBUTE_UNUSED,
130
                           enum machine_mode mode ATTRIBUTE_UNUSED,
131
                           const_tree type, bool named ATTRIBUTE_UNUSED)
132
{
133
  return (type && TYPE_SIZE (type)
134
          && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST);
135
}
136
 
137
 
138
/* Implement TARGET_RETURN_IN_MEMORY.  */
139
static bool
140
tilepro_return_in_memory (const_tree type, const_tree fndecl ATTRIBUTE_UNUSED)
141
{
142
  return !IN_RANGE (int_size_in_bytes (type),
143
                    0, TILEPRO_NUM_RETURN_REGS * UNITS_PER_WORD);
144
}
145
 
146
 
147
/* Implement TARGET_FUNCTION_ARG_BOUNDARY.  */
148
static unsigned int
149
tilepro_function_arg_boundary (enum machine_mode mode, const_tree type)
150
{
151
  unsigned int alignment;
152
 
153
  alignment = type ? TYPE_ALIGN (type) : GET_MODE_ALIGNMENT (mode);
154
  if (alignment < PARM_BOUNDARY)
155
    alignment = PARM_BOUNDARY;
156
  if (alignment > STACK_BOUNDARY)
157
    alignment = STACK_BOUNDARY;
158
  return alignment;
159
}
160
 
161
 
162
/* Implement TARGET_FUNCTION_ARG.  Return the register in which to
   pass the current argument, or NULL_RTX if it is passed on the
   stack.  CUM counts argument-register words already consumed.  */
static rtx
tilepro_function_arg (cumulative_args_t cum_v,
                      enum machine_mode mode,
                      const_tree type, bool named ATTRIBUTE_UNUSED)
{
  /* Work on a copy -- this hook must not advance the caller's CUM.  */
  CUMULATIVE_ARGS cum = *get_cumulative_args (cum_v);
  int byte_size = ((mode == BLKmode)
                   ? int_size_in_bytes (type) : GET_MODE_SIZE (mode));
  bool doubleword_aligned_p;

  /* All argument registers already used up.  */
  if (cum >= TILEPRO_NUM_ARG_REGS)
    return NULL_RTX;

  /* See whether the argument has doubleword alignment.  */
  doubleword_aligned_p =
    tilepro_function_arg_boundary (mode, type) > BITS_PER_WORD;

  /* Round up to an even register number if doubleword aligned.  */
  if (doubleword_aligned_p)
    cum += cum & 1;

  /* The ABI does not allow parameters to be passed partially in reg
     and partially in stack.  */
  if ((cum + (byte_size + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
      > TILEPRO_NUM_ARG_REGS)
    return NULL_RTX;

  return gen_rtx_REG (mode, cum);
}
191
 
192
 
193
/* Implement TARGET_FUNCTION_ARG_ADVANCE.  Advance *CUM past the
   current argument; mirrors the register-assignment decisions made by
   tilepro_function_arg above.  */
static void
tilepro_function_arg_advance (cumulative_args_t cum_v,
                              enum machine_mode mode,
                              const_tree type, bool named ATTRIBUTE_UNUSED)
{
  CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);

  int byte_size = ((mode == BLKmode)
                   ? int_size_in_bytes (type) : GET_MODE_SIZE (mode));
  int word_size = (byte_size + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
  bool doubleword_aligned_p;

  /* See whether the argument has doubleword alignment.  */
  doubleword_aligned_p =
    tilepro_function_arg_boundary (mode, type) > BITS_PER_WORD;

  /* Round up to an even register number if doubleword aligned.  */
  if (doubleword_aligned_p)
    *cum += *cum & 1;

  /* If the current argument does not fit in the pretend_args space,
     skip over it.  */
  if (*cum < TILEPRO_NUM_ARG_REGS
      && *cum + word_size > TILEPRO_NUM_ARG_REGS)
    *cum = TILEPRO_NUM_ARG_REGS;

  *cum += word_size;
}
221
 
222
 
223
/* Implement TARGET_FUNCTION_VALUE.  */
224
static rtx
225
tilepro_function_value (const_tree valtype, const_tree fn_decl_or_type,
226
                        bool outgoing ATTRIBUTE_UNUSED)
227
{
228
  enum machine_mode mode;
229
  int unsigned_p;
230
 
231
  mode = TYPE_MODE (valtype);
232
  unsigned_p = TYPE_UNSIGNED (valtype);
233
 
234
  mode = promote_function_mode (valtype, mode, &unsigned_p,
235
                                fn_decl_or_type, 1);
236
 
237
  return gen_rtx_REG (mode, 0);
238
}
239
 
240
 
241
/* Implement TARGET_LIBCALL_VALUE.  */
242
static rtx
243
tilepro_libcall_value (enum machine_mode mode,
244
                       const_rtx fun ATTRIBUTE_UNUSED)
245
{
246
  return gen_rtx_REG (mode, 0);
247
}
248
 
249
 
250
/* Implement FUNCTION_VALUE_REGNO_P.  */
251
static bool
252
tilepro_function_value_regno_p (const unsigned int regno)
253
{
254
  return regno < TILEPRO_NUM_RETURN_REGS;
255
}
256
 
257
 
258
/* Implement TARGET_BUILD_BUILTIN_VA_LIST.  Build the va_list record
   type: a struct with two pointer fields, __args (next argument save
   slot) and __skip (boundary of the register-save area).  */
static tree
tilepro_build_builtin_va_list (void)
{
  tree f_args, f_skip, record, type_decl;
  bool owp;

  record = lang_hooks.types.make_type (RECORD_TYPE);

  type_decl = build_decl (BUILTINS_LOCATION, TYPE_DECL,
                          get_identifier ("__va_list_tag"), record);

  f_args = build_decl (BUILTINS_LOCATION, FIELD_DECL,
                       get_identifier ("__args"), ptr_type_node);
  f_skip = build_decl (BUILTINS_LOCATION, FIELD_DECL,
                       get_identifier ("__skip"), ptr_type_node);

  DECL_FIELD_CONTEXT (f_args) = record;

  DECL_FIELD_CONTEXT (f_skip) = record;

  /* Chain the fields into the record and name it.  */
  TREE_CHAIN (record) = type_decl;
  TYPE_NAME (record) = type_decl;
  TYPE_FIELDS (record) = f_args;
  TREE_CHAIN (f_args) = f_skip;

  /* We know this is being padded and we want it too.  It is an
     internal type so hide the warnings from the user.  */
  owp = warn_padded;
  warn_padded = false;

  layout_type (record);

  warn_padded = owp;

  /* The correct type is an array type of one element.  */
  return record;
}
296
 
297
 
298
/* Implement TARGET_EXPAND_BUILTIN_VA_START.  Initialize VALIST's
   __args and __skip fields relative to the incoming-argument
   pointer.  */
static void
tilepro_va_start (tree valist, rtx nextarg ATTRIBUTE_UNUSED)
{
  tree f_args, f_skip;
  tree args, skip, t;

  /* The two fields of the va_list record built by
     tilepro_build_builtin_va_list.  */
  f_args = TYPE_FIELDS (TREE_TYPE (valist));
  f_skip = TREE_CHAIN (f_args);

  args =
    build3 (COMPONENT_REF, TREE_TYPE (f_args), valist, f_args, NULL_TREE);
  skip =
    build3 (COMPONENT_REF, TREE_TYPE (f_skip), valist, f_skip, NULL_TREE);

  /* Find the __args area.  */
  t = make_tree (TREE_TYPE (args), virtual_incoming_args_rtx);
  /* Back up over the named-argument registers already consumed
     (crtl->args.info counts words used, so the offset is negative or
     zero).  */
  t = fold_build_pointer_plus_hwi (t,
                                   UNITS_PER_WORD *
                                   (crtl->args.info - TILEPRO_NUM_ARG_REGS));

  if (crtl->args.pretend_args_size > 0)
    t = fold_build_pointer_plus_hwi (t, -STACK_POINTER_OFFSET);

  t = build2 (MODIFY_EXPR, TREE_TYPE (args), args, t);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  /* Find the __skip area.  */
  t = make_tree (TREE_TYPE (skip), virtual_incoming_args_rtx);
  t = fold_build_pointer_plus_hwi (t, -STACK_POINTER_OFFSET);
  t = build2 (MODIFY_EXPR, TREE_TYPE (skip), skip, t);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
}
333
 
334
 
335
/* Implement TARGET_SETUP_INCOMING_VARARGS.  Spill the unnamed
   argument registers of a varargs function to the pretend-args stack
   area so va_arg can walk them in memory.  */
static void
tilepro_setup_incoming_varargs (cumulative_args_t cum,
                                enum machine_mode mode,
                                tree type, int *pretend_args, int no_rtl)
{
  CUMULATIVE_ARGS local_cum = *get_cumulative_args (cum);
  int first_reg;

  /* The caller has advanced CUM up to, but not beyond, the last named
     argument.  Advance a local copy of CUM past the last "real" named
     argument, to find out how many registers are left over.  */
  targetm.calls.function_arg_advance (pack_cumulative_args (&local_cum),
                                      mode, type, true);
  first_reg = local_cum;

  if (local_cum < TILEPRO_NUM_ARG_REGS)
    {
      /* Reserve space for every remaining argument register.  */
      *pretend_args = UNITS_PER_WORD * (TILEPRO_NUM_ARG_REGS - first_reg);

      if (!no_rtl)
        {
          alias_set_type set = get_varargs_alias_set ();
          /* BLKmode MEM covering the pretend-args save area, placed
             just below the incoming arguments.  */
          rtx tmp =
            gen_rtx_MEM (BLKmode, plus_constant (virtual_incoming_args_rtx,
                                                 -STACK_POINTER_OFFSET -
                                                 UNITS_PER_WORD *
                                                 (TILEPRO_NUM_ARG_REGS -
                                                  first_reg)));
          MEM_NOTRAP_P (tmp) = 1;
          set_mem_alias_set (tmp, set);
          move_block_from_reg (first_reg, tmp,
                               TILEPRO_NUM_ARG_REGS - first_reg);
        }
    }
  else
    *pretend_args = 0;
}
373
 
374
 
375
/* Implement TARGET_GIMPLIFY_VA_ARG_EXPR.  Gimplify va_arg by updating
   the va_list structure VALIST as required to retrieve an argument of
   type TYPE, and returning that argument.

   ret = va_arg(VALIST, TYPE);

   generates code equivalent to:

    paddedsize = (sizeof(TYPE) + 3) & -4;
    if ((VALIST.__args + paddedsize > VALIST.__skip)
        & (VALIST.__args <= VALIST.__skip))
      addr = VALIST.__skip + STACK_POINTER_OFFSET;
    else
      addr = VALIST.__args;
    VALIST.__args = addr + paddedsize;
    ret = *(TYPE *)addr;                                          */
static tree
tilepro_gimplify_va_arg_expr (tree valist, tree type, gimple_seq * pre_p,
                              gimple_seq * post_p ATTRIBUTE_UNUSED)
{
  tree f_args, f_skip;
  tree args, skip;
  HOST_WIDE_INT size, rsize;
  tree addr, tmp;
  bool pass_by_reference_p;

  f_args = TYPE_FIELDS (va_list_type_node);
  f_skip = TREE_CHAIN (f_args);

  args =
    build3 (COMPONENT_REF, TREE_TYPE (f_args), valist, f_args, NULL_TREE);
  skip =
    build3 (COMPONENT_REF, TREE_TYPE (f_skip), valist, f_skip, NULL_TREE);

  addr = create_tmp_var (ptr_type_node, "va_arg");

  /* if an object is dynamically sized, a pointer to it is passed
     instead of the object itself.  */
  pass_by_reference_p = pass_by_reference (NULL, TYPE_MODE (type), type,
                                           false);

  if (pass_by_reference_p)
    type = build_pointer_type (type);

  /* Size of the argument rounded up to a whole number of words.  */
  size = int_size_in_bytes (type);
  rsize = ((size + UNITS_PER_WORD - 1) / UNITS_PER_WORD) * UNITS_PER_WORD;

  /* If the alignment of the type is greater than the default for a
     parameter, align to STACK_BOUNDARY.  */
  if (TYPE_ALIGN (type) > PARM_BOUNDARY)
    {
      /* Assert the only case we generate code for: when
         stack boundary = 2 * parm boundary.  */
      gcc_assert (STACK_BOUNDARY == PARM_BOUNDARY * 2);

      /* __args is always at least parm-boundary aligned, so adding
         (__args & (PARM_BOUNDARY/8)) rounds it up to the next
         stack-boundary multiple.  */
      tmp = build2 (BIT_AND_EXPR, sizetype,
                    fold_convert (sizetype, unshare_expr (args)),
                    size_int (PARM_BOUNDARY / 8));
      tmp = build2 (POINTER_PLUS_EXPR, ptr_type_node,
                    unshare_expr (args), tmp);

      gimplify_assign (unshare_expr (args), tmp, pre_p);
    }

  /* Build conditional expression to calculate addr. The expression
     will be gimplified later.  */
  tmp = fold_build_pointer_plus_hwi (unshare_expr (args), rsize);
  tmp = build2 (TRUTH_AND_EXPR, boolean_type_node,
                build2 (GT_EXPR, boolean_type_node, tmp, unshare_expr (skip)),
                build2 (LE_EXPR, boolean_type_node, unshare_expr (args),
                        unshare_expr (skip)));

  tmp = build3 (COND_EXPR, ptr_type_node, tmp,
                build2 (POINTER_PLUS_EXPR, ptr_type_node, unshare_expr (skip),
                        size_int (STACK_POINTER_OFFSET)),
                unshare_expr (args));

  gimplify_assign (addr, tmp, pre_p);

  /* Update VALIST.__args.  */
  tmp = fold_build_pointer_plus_hwi (addr, rsize);
  gimplify_assign (unshare_expr (args), tmp, pre_p);

  addr = fold_convert (build_pointer_type (type), addr);

  /* For pass-by-reference, dereference twice: once to get the pointer
     slot, once to get the object.  */
  if (pass_by_reference_p)
    addr = build_va_arg_indirect_ref (addr);

  return build_va_arg_indirect_ref (addr);
}
465
 
466
 
467
 
468
/* Implement TARGET_RTX_COSTS.  Estimate the cost of rtx X in *TOTAL;
   return true when *TOTAL is final and the subexpressions of X should
   not be costed recursively.  */
static bool
tilepro_rtx_costs (rtx x, int code, int outer_code, int opno, int *total,
                   bool speed)
{
  switch (code)
    {
    case CONST_INT:
      /* If this is an 8-bit constant, return zero since it can be
         used nearly anywhere with no cost.  If it is a valid operand
         for an ADD or AND, likewise return 0 if we know it will be
         used in that context.  Otherwise, return 2 since it might be
         used there later.  All other constants take at least two
         insns.  */
      if (satisfies_constraint_I (x))
        {
          *total = 0;
          return true;
        }
      else if (outer_code == PLUS && add_operand (x, VOIDmode))
        {
          /* Slightly penalize large constants even though we can add
             them in one instruction, because it forces the use of
             2-wide bundling mode.  */
          *total = 1;
          return true;
        }
      else if (move_operand (x, SImode))
        {
          /* We can materialize in one move.  */
          *total = COSTS_N_INSNS (1);
          return true;
        }
      else
        {
          /* We can materialize in two moves.  */
          *total = COSTS_N_INSNS (2);
          return true;
        }

      /* NOTE(review): unreachable -- every branch above returns.  */
      return false;

    case CONST:
    case LABEL_REF:
    case SYMBOL_REF:
      *total = COSTS_N_INSNS (2);
      return true;

    case CONST_DOUBLE:
      *total = COSTS_N_INSNS (4);
      return true;

    case HIGH:
      *total = 0;
      return true;

    case MEM:
      /* If outer-code was a sign or zero extension, a cost of
         COSTS_N_INSNS (1) was already added in, so account for
         that.  */
      if (outer_code == ZERO_EXTEND || outer_code == SIGN_EXTEND)
        *total = COSTS_N_INSNS (1);
      else
        *total = COSTS_N_INSNS (2);
      return true;

    case PLUS:
      /* Convey that s[123]a are efficient.  */
      if (GET_CODE (XEXP (x, 0)) == MULT
          && cint_248_operand (XEXP (XEXP (x, 0), 1), VOIDmode))
        {
          *total = (rtx_cost (XEXP (XEXP (x, 0), 0),
                              (enum rtx_code) outer_code, opno, speed)
                    + rtx_cost (XEXP (x, 1),
                                (enum rtx_code) outer_code, opno, speed)
                    + COSTS_N_INSNS (1));
          return true;
        }
      return false;

    case MULT:
      *total = COSTS_N_INSNS (2);
      return false;

    case SIGN_EXTEND:
    case ZERO_EXTEND:
      /* Extensions fold into multiplies for free.  */
      if (outer_code == MULT)
        *total = 0;
      else
        *total = COSTS_N_INSNS (1);
      return false;

    case DIV:
    case UDIV:
    case MOD:
    case UMOD:
      /* These are handled by software and are very expensive.  */
      *total = COSTS_N_INSNS (100);
      return false;

    case UNSPEC:
    case UNSPEC_VOLATILE:
      {
        /* Cost unspecs by their latency class, with specific
           overrides below.  */
        int num = XINT (x, 1);

        if (num <= TILEPRO_LAST_LATENCY_1_INSN)
          *total = COSTS_N_INSNS (1);
        else if (num <= TILEPRO_LAST_LATENCY_2_INSN)
          *total = COSTS_N_INSNS (2);
        else if (num > TILEPRO_LAST_LATENCY_INSN)
          {
            if (outer_code == PLUS)
              *total = 0;
            else
              *total = COSTS_N_INSNS (1);
          }
        else
          {
            switch (num)
              {
              case UNSPEC_BLOCKAGE:
              case UNSPEC_NETWORK_BARRIER:
                *total = 0;
                break;

              case UNSPEC_LNK_AND_LABEL:
              case UNSPEC_MF:
              case UNSPEC_NETWORK_RECEIVE:
              case UNSPEC_NETWORK_SEND:
              case UNSPEC_TLS_GD_ADD:
                *total = COSTS_N_INSNS (1);
                break;

              case UNSPEC_TLS_IE_LOAD:
                *total = COSTS_N_INSNS (2);
                break;

              case UNSPEC_SP_SET:
                *total = COSTS_N_INSNS (3);
                break;

              case UNSPEC_SP_TEST:
                *total = COSTS_N_INSNS (4);
                break;

              case UNSPEC_LATENCY_L2:
                *total = COSTS_N_INSNS (8);
                break;

              case UNSPEC_TLS_GD_CALL:
                *total = COSTS_N_INSNS (30);
                break;

              case UNSPEC_LATENCY_MISS:
                *total = COSTS_N_INSNS (80);
                break;

              default:
                *total = COSTS_N_INSNS (1);
              }
          }
        return true;
      }

    default:
      return false;
    }
}
636
 
637
 
638
 
639
/* Returns an SImode integer rtx with value VAL.  */
640
static rtx
641
gen_int_si (HOST_WIDE_INT val)
642
{
643
  return gen_int_mode (val, SImode);
644
}
645
 
646
 
647
/* Create a temporary variable to hold a partial result, to enable
648
   CSE.  */
649
static rtx
650
create_temp_reg_if_possible (enum machine_mode mode, rtx default_reg)
651
{
652
  return can_create_pseudo_p ()? gen_reg_rtx (mode) : default_reg;
653
}
654
 
655
 
656
/* Allocate a fresh, zeroed per-function machine_function record
   (installed as init_machine_status in tilepro_init_expanders).  */
static struct machine_function *
tilepro_init_machine_status (void)
{
  return ggc_alloc_cleared_machine_function ();
}
662
 
663
 
664
/* Do anything needed before RTL is emitted for each function.  For
   PIC, set up this function's text label symbol and the registers
   used for the text label and GOT pointer.  */
void
tilepro_init_expanders (void)
{
  /* Arrange to initialize and mark the machine per-function
     status.  */
  init_machine_status = tilepro_init_machine_status;

  if (cfun && cfun->machine && flag_pic)
    {
      static int label_num = 0;

      char text_label_name[32];

      struct machine_function *machine = cfun->machine;

      /* Unique per-function "L_PICLNK" label for pc-relative
         addressing.  */
      ASM_GENERATE_INTERNAL_LABEL (text_label_name, "L_PICLNK", label_num++);

      machine->text_label_symbol =
        gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (text_label_name));

      machine->text_label_rtx =
        gen_rtx_REG (Pmode, TILEPRO_PIC_TEXT_LABEL_REGNUM);

      machine->got_rtx = gen_rtx_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);

      machine->calls_tls_get_addr = false;
    }
}
693
 
694
 
695
/* Return true if X contains a thread-local symbol.  */
696
static bool
697
tilepro_tls_referenced_p (rtx x)
698
{
699
  if (GET_CODE (x) == CONST && GET_CODE (XEXP (x, 0)) == PLUS)
700
    x = XEXP (XEXP (x, 0), 0);
701
 
702
  if (GET_CODE (x) == SYMBOL_REF && SYMBOL_REF_TLS_MODEL (x))
703
    return true;
704
 
705
  /* That's all we handle in tilepro_legitimize_tls_address for
706
     now.  */
707
  return false;
708
}
709
 
710
 
711
/* Return true if X requires a scratch register.  It is given that
712
   flag_pic is on and that X satisfies CONSTANT_P.  */
713
static int
714
tilepro_pic_address_needs_scratch (rtx x)
715
{
716
  if (GET_CODE (x) == CONST
717
      && GET_CODE (XEXP (x, 0)) == PLUS
718
      && (GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
719
          || GET_CODE (XEXP (XEXP (x, 0), 0)) == LABEL_REF)
720
      && CONST_INT_P (XEXP (XEXP (x, 0), 1)))
721
    return true;
722
 
723
  return false;
724
}
725
 
726
 
727
/* Implement TARGET_LEGITIMATE_CONSTANT_P.  This is all constants for
728
   which we are willing to load the value into a register via a move
729
   pattern.  TLS cannot be treated as a constant because it can
730
   include a function call.  */
731
static bool
732
tilepro_legitimate_constant_p (enum machine_mode mode ATTRIBUTE_UNUSED, rtx x)
733
{
734
  switch (GET_CODE (x))
735
    {
736
    case CONST:
737
    case SYMBOL_REF:
738
      return !tilepro_tls_referenced_p (x);
739
 
740
    default:
741
      return true;
742
    }
743
}
744
 
745
 
746
/* Return true if the constant value X is a legitimate general operand
747
   when generating PIC code.  It is given that flag_pic is on and that
748
   X satisfies CONSTANT_P.  */
749
bool
750
tilepro_legitimate_pic_operand_p (rtx x)
751
{
752
  if (tilepro_pic_address_needs_scratch (x))
753
    return false;
754
 
755
  if (tilepro_tls_referenced_p (x))
756
    return false;
757
 
758
  return true;
759
}
760
 
761
 
762
/* Return true if the rtx X can be used as an address operand.
   Valid addresses are a base register, optionally wrapped in
   POST_INC, POST_DEC, or a constrained POST_MODIFY.  */
static bool
tilepro_legitimate_address_p (enum machine_mode ARG_UNUSED (mode), rtx x,
                              bool strict)
{
  if (GET_CODE (x) == SUBREG)
    x = SUBREG_REG (x);

  switch (GET_CODE (x))
    {
    case POST_INC:
    case POST_DEC:
      /* Auto-modify only works on word-sized (or smaller) data.  */
      if (GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD)
        return false;

      x = XEXP (x, 0);
      break;

    case POST_MODIFY:
      if (GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD)
        return false;

      /* Must be (post_modify REG (plus REG const)) with the same
         base register and an 8-bit immediate ("I" constraint).  */
      if (GET_CODE (XEXP (x, 1)) != PLUS)
        return false;

      if (!rtx_equal_p (XEXP (x, 0), XEXP (XEXP (x, 1), 0)))
        return false;

      if (!satisfies_constraint_I (XEXP (XEXP (x, 1), 1)))
        return false;

      x = XEXP (x, 0);
      break;

    case REG:
      break;

    default:
      return false;
    }

  /* Check if x is a valid reg.  */
  if (!REG_P (x))
    return false;

  if (strict)
    return REGNO_OK_FOR_BASE_P (REGNO (x));
  else
    return true;
}
812
 
813
 
814
/* Return the rtx containing SYMBOL_REF to the text label.  */
815
static rtx
816
tilepro_text_label_symbol (void)
817
{
818
  return cfun->machine->text_label_symbol;
819
}
820
 
821
 
822
/* Return the register storing the value of the text label.  */
823
static rtx
824
tilepro_text_label_rtx (void)
825
{
826
  return cfun->machine->text_label_rtx;
827
}
828
 
829
 
830
/* Return the register storing the value of the global offset
831
   table.  */
832
static rtx
833
tilepro_got_rtx (void)
834
{
835
  return cfun->machine->got_rtx;
836
}
837
 
838
 
839
/* Return the SYMBOL_REF for _GLOBAL_OFFSET_TABLE_.  */
840
static rtx
841
tilepro_got_symbol (void)
842
{
843
  if (g_got_symbol == NULL)
844
    g_got_symbol = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
845
 
846
  return g_got_symbol;
847
}
848
 
849
 
850
/* Return a reference to the got to be used by tls references.  */
851
static rtx
852
tilepro_tls_got (void)
853
{
854
  rtx temp;
855
  if (flag_pic)
856
    {
857
      crtl->uses_pic_offset_table = 1;
858
      return tilepro_got_rtx ();
859
    }
860
 
861
  temp = gen_reg_rtx (Pmode);
862
  emit_move_insn (temp, tilepro_got_symbol ());
863
 
864
  return temp;
865
}
866
 
867
 
868
/* ADDR contains a thread-local SYMBOL_REF.  Generate code to compute
   this (thread-local) address.  Emits the access sequence for the
   symbol's TLS model and returns a pseudo holding the address.  */
static rtx
tilepro_legitimize_tls_address (rtx addr)
{
  rtx ret;

  gcc_assert (can_create_pseudo_p ());

  if (GET_CODE (addr) == SYMBOL_REF)
    switch (SYMBOL_REF_TLS_MODEL (addr))
      {
      case TLS_MODEL_GLOBAL_DYNAMIC:
      case TLS_MODEL_LOCAL_DYNAMIC:
        {
          /* General/local dynamic: build the GOT-relative address,
             call the TLS resolver (via tls_gd_call), and add the
             result.  */
          rtx r0, temp1, temp2, temp3, got, last;

          ret = gen_reg_rtx (Pmode);
          r0 = gen_rtx_REG (Pmode, 0);
          temp1 = gen_reg_rtx (Pmode);
          temp2 = gen_reg_rtx (Pmode);
          temp3 = gen_reg_rtx (Pmode);

          got = tilepro_tls_got ();
          emit_insn (gen_tls_gd_addhi (temp1, got, addr));
          emit_insn (gen_tls_gd_addlo (temp2, temp1, addr));
          emit_move_insn (r0, temp2);
          emit_insn (gen_tls_gd_call (addr));
          emit_move_insn (temp3, r0);
          last = emit_insn (gen_tls_gd_add (ret, temp3, addr));
          set_unique_reg_note (last, REG_EQUAL, copy_rtx (addr));
          break;
        }
      case TLS_MODEL_INITIAL_EXEC:
        {
          /* Initial exec: load the TP offset from the GOT and add the
             thread pointer.  */
          rtx temp1, temp2, temp3, got, last;

          ret = gen_reg_rtx (Pmode);
          temp1 = gen_reg_rtx (Pmode);
          temp2 = gen_reg_rtx (Pmode);
          temp3 = gen_reg_rtx (Pmode);

          got = tilepro_tls_got ();
          emit_insn (gen_tls_ie_addhi (temp1, got, addr));
          emit_insn (gen_tls_ie_addlo (temp2, temp1, addr));
          emit_insn (gen_tls_ie_load (temp3, temp2, addr));
          last =
            emit_move_insn(ret,
                           gen_rtx_PLUS (Pmode,
                                         gen_rtx_REG (Pmode,
                                                      THREAD_POINTER_REGNUM),
                                         temp3));
          set_unique_reg_note (last, REG_EQUAL, copy_rtx (addr));
          break;
        }
      case TLS_MODEL_LOCAL_EXEC:
        {
          /* Local exec: thread pointer plus a link-time constant
             offset.  */
          rtx temp1, last;

          ret = gen_reg_rtx (Pmode);
          temp1 = gen_reg_rtx (Pmode);

          emit_insn (gen_tls_le_addhi (temp1,
                                       gen_rtx_REG (Pmode,
                                                    THREAD_POINTER_REGNUM),
                                       addr));
          last = emit_insn (gen_tls_le_addlo (ret, temp1, addr));
          set_unique_reg_note (last, REG_EQUAL, copy_rtx (addr));
          break;
        }
      default:
        gcc_unreachable ();
      }
  else if (GET_CODE (addr) == CONST)
    {
      /* (const (plus SYM OFS)): legitimize the symbol, then add the
         offset back on.  */
      rtx base, offset;

      gcc_assert (GET_CODE (XEXP (addr, 0)) == PLUS);

      base = tilepro_legitimize_tls_address (XEXP (XEXP (addr, 0), 0));
      offset = XEXP (XEXP (addr, 0), 1);

      base = force_operand (base, NULL_RTX);
      ret = force_reg (Pmode, gen_rtx_PLUS (Pmode, base, offset));
    }
  else
    gcc_unreachable ();

  return ret;
}
958
 
959
 
960
/* Legitimize PIC addresses.  If the address is already
   position-independent, we return ORIG.  Newly generated
   position-independent addresses go into a reg.  This is REG if
   nonzero, otherwise we allocate register(s) as necessary.  */
static rtx
tilepro_legitimize_pic_address (rtx orig,
                                enum machine_mode mode ATTRIBUTE_UNUSED,
                                rtx reg)
{
  if (GET_CODE (orig) == SYMBOL_REF)
    {
      rtx address, pic_ref;

      if (reg == 0)
        {
          gcc_assert (can_create_pseudo_p ());
          reg = gen_reg_rtx (Pmode);
        }

      if (SYMBOL_REF_LOCAL_P (orig))
        {
          /* If not during reload, allocate another temp reg here for
             loading in the address, so that these instructions can be
             optimized properly.  */
          rtx temp_reg = create_temp_reg_if_possible (Pmode, reg);
          rtx text_label_symbol = tilepro_text_label_symbol ();
          rtx text_label_rtx = tilepro_text_label_rtx ();

          /* Local symbol: compute its address pc-relative to the text
             label, adding the low then the high 16 bits of the
             offset.  */
          emit_insn (gen_addli_pcrel (temp_reg, text_label_rtx, orig,
                                      text_label_symbol));
          emit_insn (gen_auli_pcrel (temp_reg, temp_reg, orig,
                                     text_label_symbol));

          /* Note: this is conservative.  We use the text_label but we
             don't use the pic_offset_table.  However, in some cases
             we may need the pic_offset_table (see
             tilepro_fixup_pcrel_references).  */
          crtl->uses_pic_offset_table = 1;

          address = temp_reg;

          emit_move_insn (reg, address);
          return reg;
        }
      else
        {
          /* Non-local symbol: its address must come from the GOT.
             If not during reload, allocate another temp reg here for
             loading in the address, so that these instructions can be
             optimized properly.  */
          rtx temp_reg = create_temp_reg_if_possible (Pmode, reg);

          gcc_assert (flag_pic);
          if (flag_pic == 1)
            {
              /* -fpic: a 16-bit GOT offset suffices.  */
              emit_insn (gen_add_got16 (temp_reg,
                                        tilepro_got_rtx (), orig));
            }
          else
            {
              /* -fPIC: build a 32-bit GOT offset in two halves.  */
              rtx temp_reg2 = create_temp_reg_if_possible (Pmode, reg);
              emit_insn (gen_addhi_got32 (temp_reg2,
                                          tilepro_got_rtx (), orig));
              emit_insn (gen_addlo_got32 (temp_reg, temp_reg2, orig));
            }

          address = temp_reg;

          /* Load the symbol's address out of its GOT slot.  */
          pic_ref = gen_const_mem (Pmode, address);
          crtl->uses_pic_offset_table = 1;
          emit_move_insn (reg, pic_ref);
          /* The following put a REG_EQUAL note on this insn, so that
             it can be optimized by loop.  But it causes the label to
             be optimized away.  */
          /* set_unique_reg_note (insn, REG_EQUAL, orig); */
          return reg;
        }
    }
  else if (GET_CODE (orig) == CONST)
    {
      rtx base, offset;

      /* A GOT-register-relative offset is already legitimate.  */
      if (GET_CODE (XEXP (orig, 0)) == PLUS
          && XEXP (XEXP (orig, 0), 0) == tilepro_got_rtx ())
        return orig;

      if (reg == 0)
        {
          gcc_assert (can_create_pseudo_p ());
          reg = gen_reg_rtx (Pmode);
        }

      /* Recursively legitimize both halves of the PLUS, then add
         them back together.  */
      gcc_assert (GET_CODE (XEXP (orig, 0)) == PLUS);
      base = tilepro_legitimize_pic_address (XEXP (XEXP (orig, 0), 0), Pmode,
                                             reg);
      offset =
        tilepro_legitimize_pic_address (XEXP (XEXP (orig, 0), 1), Pmode,
                                        base == reg ? 0 : reg);

      if (CONST_INT_P (offset))
        {
          if (can_create_pseudo_p ())
            offset = force_reg (Pmode, offset);
          else
            /* If we reach here, then something is seriously
               wrong.  */
            gcc_unreachable ();
        }

      if (can_create_pseudo_p ())
        return force_reg (Pmode, gen_rtx_PLUS (Pmode, base, offset));
      else
        gcc_unreachable ();
    }
  else if (GET_CODE (orig) == LABEL_REF)
    {
      rtx address, temp_reg;
      rtx text_label_symbol;
      rtx text_label_rtx;

      if (reg == 0)
        {
          gcc_assert (can_create_pseudo_p ());
          reg = gen_reg_rtx (Pmode);
        }

      /* If not during reload, allocate another temp reg here for
         loading in the address, so that these instructions can be
         optimized properly.  */
      temp_reg = create_temp_reg_if_possible (Pmode, reg);
      text_label_symbol = tilepro_text_label_symbol ();
      text_label_rtx = tilepro_text_label_rtx ();

      /* Labels are always local: compute the address pc-relative to
         the text label, as in the SYMBOL_REF_LOCAL_P case above.  */
      emit_insn (gen_addli_pcrel (temp_reg, text_label_rtx, orig,
                                  text_label_symbol));
      emit_insn (gen_auli_pcrel (temp_reg, temp_reg, orig,
                                 text_label_symbol));

      /* Note: this is conservative.  We use the text_label but we
         don't use the pic_offset_table.  */
      crtl->uses_pic_offset_table = 1;

      address = temp_reg;

      emit_move_insn (reg, address);

      return reg;
    }

  /* Anything else is already position-independent.  */
  return orig;
}
1110
 
1111
 
1112
/* Implement TARGET_LEGITIMIZE_ADDRESS.  */
1113
static rtx
1114
tilepro_legitimize_address (rtx x, rtx oldx ATTRIBUTE_UNUSED,
1115
                            enum machine_mode mode)
1116
{
1117
  if (GET_MODE_SIZE (mode) <= UNITS_PER_WORD
1118
      && symbolic_operand (x, Pmode) && tilepro_tls_referenced_p (x))
1119
    {
1120
      return tilepro_legitimize_tls_address (x);
1121
    }
1122
  else if (flag_pic)
1123
    {
1124
      return tilepro_legitimize_pic_address (x, mode, 0);
1125
    }
1126
  else
1127
    return x;
1128
}
1129
 
1130
 
1131
/* Implement TARGET_DELEGITIMIZE_ADDRESS.  */
1132
static rtx
1133
tilepro_delegitimize_address (rtx x)
1134
{
1135
  x = delegitimize_mem_from_attrs (x);
1136
 
1137
  if (GET_CODE (x) == CONST && GET_CODE (XEXP (x, 0)) == UNSPEC)
1138
    {
1139
      switch (XINT (XEXP (x, 0), 1))
1140
        {
1141
        case UNSPEC_PCREL_SYM:
1142
        case UNSPEC_GOT16_SYM:
1143
        case UNSPEC_GOT32_SYM:
1144
        case UNSPEC_TLS_GD:
1145
        case UNSPEC_TLS_IE:
1146
          x = XVECEXP (XEXP (x, 0), 0, 0);
1147
          break;
1148
        }
1149
    }
1150
 
1151
  return x;
1152
}
1153
 
1154
 
1155
/* Emit code to load the PIC register.  */
static void
load_pic_register (bool delay_pic_helper ATTRIBUTE_UNUSED)
{
  /* Temporarily clear flag_pic so the moves emitted below are not
     themselves rerouted through the PIC legitimizer.  */
  int orig_flag_pic = flag_pic;

  rtx got_symbol = tilepro_got_symbol ();
  rtx text_label_symbol = tilepro_text_label_symbol ();
  rtx text_label_rtx = tilepro_text_label_rtx ();
  flag_pic = 0;

  /* Capture the current pc into the text label register.  */
  emit_insn (gen_insn_lnk_and_label (text_label_rtx, text_label_symbol));

  /* Add the low then the high 16 bits of the GOT's pc-relative
     offset, leaving the GOT's absolute address in the GOT
     register.  */
  emit_insn (gen_addli_pcrel (tilepro_got_rtx (),
                              text_label_rtx, got_symbol, text_label_symbol));

  emit_insn (gen_auli_pcrel (tilepro_got_rtx (),
                             tilepro_got_rtx (),
                             got_symbol, text_label_symbol));

  flag_pic = orig_flag_pic;

  /* Need to emit this whether or not we obey regdecls, since
     setjmp/longjmp can cause life info to screw up.  ??? In the case
     where we don't obey regdecls, this is not sufficient since we may
     not fall out the bottom.  */
  emit_use (tilepro_got_rtx ());
}
1183
 
1184
 
1185
/* Return the simd variant of the constant NUM of mode MODE, by
1186
   replicating it to fill an interger of mode SImode.  NUM is first
1187
   truncated to fit in MODE.  */
1188
rtx
1189
tilepro_simd_int (rtx num, enum machine_mode mode)
1190
{
1191
  HOST_WIDE_INT n = 0;
1192
 
1193
  gcc_assert (CONST_INT_P (num));
1194
 
1195
  n = INTVAL (num);
1196
 
1197
  switch (mode)
1198
    {
1199
    case QImode:
1200
      n = 0x01010101 * (n & 0x000000FF);
1201
      break;
1202
    case HImode:
1203
      n = 0x00010001 * (n & 0x0000FFFF);
1204
      break;
1205
    case SImode:
1206
      break;
1207
    case DImode:
1208
      break;
1209
    default:
1210
      gcc_unreachable ();
1211
    }
1212
 
1213
  return gen_int_si (n);
1214
}
1215
 
1216
 
1217
/* Split one or more DImode RTL references into pairs of SImode
1218
   references.  The RTL can be REG, offsettable MEM, integer constant,
1219
   or CONST_DOUBLE.  "operands" is a pointer to an array of DImode RTL
1220
   to split and "num" is its length.  lo_half and hi_half are output
1221
   arrays that parallel "operands".  */
1222
void
1223
split_di (rtx operands[], int num, rtx lo_half[], rtx hi_half[])
1224
{
1225
  while (num--)
1226
    {
1227
      rtx op = operands[num];
1228
 
1229
      /* simplify_subreg refuse to split volatile memory addresses,
1230
         but we still have to handle it.  */
1231
      if (MEM_P (op))
1232
        {
1233
          lo_half[num] = adjust_address (op, SImode, 0);
1234
          hi_half[num] = adjust_address (op, SImode, 4);
1235
        }
1236
      else
1237
        {
1238
          lo_half[num] = simplify_gen_subreg (SImode, op,
1239
                                              GET_MODE (op) == VOIDmode
1240
                                              ? DImode : GET_MODE (op), 0);
1241
          hi_half[num] = simplify_gen_subreg (SImode, op,
1242
                                              GET_MODE (op) == VOIDmode
1243
                                              ? DImode : GET_MODE (op), 4);
1244
        }
1245
    }
1246
}
1247
 
1248
 
1249
/* Returns true iff val can be moved into a register in one
1250
   instruction.  And if it can, it emits the code to move the
1251
   constant.
1252
 
1253
   If three_wide_only is true, this insists on an instruction that
1254
   works in a bundle containing three instructions.  */
1255
static bool
1256
expand_set_cint32_one_inst (rtx dest_reg,
1257
                            HOST_WIDE_INT val, bool three_wide_only)
1258
{
1259
  val = trunc_int_for_mode (val, SImode);
1260
 
1261
  if (val == trunc_int_for_mode (val, QImode))
1262
    {
1263
      /* Success! */
1264
      emit_move_insn (dest_reg, GEN_INT (val));
1265
      return true;
1266
    }
1267
  else if (!three_wide_only)
1268
    {
1269
      rtx imm_op = GEN_INT (val);
1270
 
1271
      if (satisfies_constraint_J (imm_op)
1272
          || satisfies_constraint_K (imm_op)
1273
          || satisfies_constraint_N (imm_op)
1274
          || satisfies_constraint_P (imm_op))
1275
        {
1276
          emit_move_insn (dest_reg, imm_op);
1277
          return true;
1278
        }
1279
    }
1280
 
1281
  return false;
1282
}
1283
 
1284
 
1285
/* Implement SImode rotatert.  */
1286
static HOST_WIDE_INT
1287
rotate_right (HOST_WIDE_INT n, int count)
1288
{
1289
  unsigned HOST_WIDE_INT x = n & 0xFFFFFFFF;
1290
  if (count == 0)
1291
    return x;
1292
  return ((x >> count) | (x << (32 - count))) & 0xFFFFFFFF;
1293
}
1294
 
1295
 
1296
/* Return true iff n contains exactly one contiguous sequence of 1
1297
   bits, possibly wrapping around from high bits to low bits.  */
1298
bool
1299
tilepro_bitfield_operand_p (HOST_WIDE_INT n, int *first_bit, int *last_bit)
1300
{
1301
  int i;
1302
 
1303
  if (n == 0)
1304
    return false;
1305
 
1306
  for (i = 0; i < 32; i++)
1307
    {
1308
      unsigned HOST_WIDE_INT x = rotate_right (n, i);
1309
      if (!(x & 1))
1310
        continue;
1311
 
1312
      /* See if x is a power of two minus one, i.e. only consecutive 1
1313
         bits starting from bit 0.  */
1314
      if ((x & (x + 1)) == 0)
1315
        {
1316
          if (first_bit != NULL)
1317
            *first_bit = i;
1318
          if (last_bit != NULL)
1319
            *last_bit = (i + exact_log2 (x ^ (x >> 1))) & 31;
1320
 
1321
          return true;
1322
        }
1323
    }
1324
 
1325
  return false;
1326
}
1327
 
1328
 
1329
/* Create code to move the CONST_INT value in src_val to dest_reg.  */
static void
expand_set_cint32 (rtx dest_reg, rtx src_val)
{
  HOST_WIDE_INT val;
  int leading_zeroes, trailing_zeroes;
  int lower, upper;
  int three_wide_only;
  rtx temp;

  gcc_assert (CONST_INT_P (src_val));
  val = trunc_int_for_mode (INTVAL (src_val), SImode);

  /* See if we can generate the constant in one instruction.  */
  if (expand_set_cint32_one_inst (dest_reg, val, false))
    return;

  /* Create a temporary variable to hold a partial result, to enable
     CSE.  */
  temp = create_temp_reg_if_possible (SImode, dest_reg);

  leading_zeroes = 31 - floor_log2 (val & 0xFFFFFFFF);
  trailing_zeroes = exact_log2 (val & -val);

  /* Sign-extended 16-bit halves such that (upper << 16) + lower ==
     val; used by the auli + addli fallback at the bottom.  */
  lower = trunc_int_for_mode (val, HImode);
  upper = trunc_int_for_mode ((val - lower) >> 16, HImode);

  /* First try all three-wide instructions that generate a constant
     (i.e. movei) followed by various shifts and rotates. If none of
     those work, try various two-wide ways of generating a constant
     followed by various shifts and rotates.  */
  for (three_wide_only = 1; three_wide_only >= 0; three_wide_only--)
    {
      int count;

      /* Constant with only trailing zeroes: shift it left into
         place.  */
      if (expand_set_cint32_one_inst (temp, val >> trailing_zeroes,
                                      three_wide_only))
        {
          /* 0xFFFFA500 becomes:
             movei temp, 0xFFFFFFA5
             shli dest, temp, 8  */
          emit_move_insn (dest_reg,
                          gen_rtx_ASHIFT (SImode, temp,
                                          GEN_INT (trailing_zeroes)));
          return;
        }

      /* Constant with only leading zeroes: shift it right into
         place.  */
      if (expand_set_cint32_one_inst (temp, val << leading_zeroes,
                                      three_wide_only))
        {
          /* 0x7FFFFFFF becomes:
             movei temp, -2
             shri dest, temp, 1  */
          emit_move_insn (dest_reg,
                          gen_rtx_LSHIFTRT (SImode, temp,
                                            GEN_INT (leading_zeroes)));
          return;
        }

      /* Try rotating a one-instruction immediate, since rotate is
         3-wide.  */
      for (count = 1; count < 32; count++)
        {
          HOST_WIDE_INT r = rotate_right (val, count);
          if (expand_set_cint32_one_inst (temp, r, three_wide_only))
            {
              /* 0xFFA5FFFF becomes:
                 movei temp, 0xFFFFFFA5
                 rli dest, temp, 16  */
              emit_move_insn (dest_reg,
                              gen_rtx_ROTATE (SImode, temp, GEN_INT (count)));
              return;
            }
        }

      if (lower == trunc_int_for_mode (lower, QImode))
        {
          /* We failed to use two 3-wide instructions, but the low 16
             bits are a small number so just use a 2-wide + 3-wide
             auli + addi pair rather than anything more exotic.

             0x12340056 becomes:
             auli temp, zero, 0x1234
             addi dest, temp, 0x56  */
          break;
        }
    }

  /* Fallback case: use a auli + addli/addi pair.  */
  emit_move_insn (temp, GEN_INT (upper << 16));
  emit_move_insn (dest_reg, (gen_rtx_PLUS (SImode, temp, GEN_INT (lower))));
}
1421
 
1422
 
1423
/* Load OP1, a 32-bit constant, into OP0, a register.  We know it
1424
   can't be done in one insn when we get here, the move expander
1425
   guarantees this.  */
1426
void
1427
tilepro_expand_set_const32 (rtx op0, rtx op1)
1428
{
1429
  enum machine_mode mode = GET_MODE (op0);
1430
  rtx temp;
1431
 
1432
  if (CONST_INT_P (op1))
1433
    {
1434
      /* TODO: I don't know if we want to split large constants now,
1435
         or wait until later (with a define_split).
1436
 
1437
         Does splitting early help CSE?  Does it harm other
1438
         optimizations that might fold loads? */
1439
      expand_set_cint32 (op0, op1);
1440
    }
1441
  else
1442
    {
1443
      temp = create_temp_reg_if_possible (mode, op0);
1444
 
1445
      /* A symbol, emit in the traditional way.  */
1446
      emit_move_insn (temp, gen_rtx_HIGH (mode, op1));
1447
      emit_move_insn (op0, gen_rtx_LO_SUM (mode, temp, op1));
1448
    }
1449
}
1450
 
1451
 
1452
/* Expand a move instruction.  Return true if all work is done.  */
bool
tilepro_expand_mov (enum machine_mode mode, rtx *operands)
{
  /* Handle sets of MEM first.  */
  if (MEM_P (operands[0]))
    {
      if (can_create_pseudo_p ())
        operands[0] = validize_mem (operands[0]);

      if (reg_or_0_operand (operands[1], mode))
        return false;

      /* Stores need a register (or zero) source; force it in.  */
      if (!reload_in_progress)
        operands[1] = force_reg (mode, operands[1]);
    }

  /* Fixup TLS cases.  */
  if (CONSTANT_P (operands[1]) && tilepro_tls_referenced_p (operands[1]))
    {
      operands[1] = tilepro_legitimize_tls_address (operands[1]);
      return false;
    }

  /* Fixup PIC cases.  */
  if (flag_pic && CONSTANT_P (operands[1]))
    {
      if (tilepro_pic_address_needs_scratch (operands[1]))
        operands[1] = tilepro_legitimize_pic_address (operands[1], mode, 0);

      if (symbolic_operand (operands[1], mode))
        {
          /* During reload no new pseudos may be created, so reuse
             the destination as the scratch register.  */
          operands[1] = tilepro_legitimize_pic_address (operands[1],
                                                        mode,
                                                        (reload_in_progress ?
                                                         operands[0] :
                                                         NULL_RTX));
          return false;
        }
    }

  /* Fixup for UNSPEC addresses: emit the high-half instruction
     matching the unspec wrapped in the HIGH.  */
  if (flag_pic
      && GET_CODE (operands[1]) == HIGH
      && GET_CODE (XEXP (operands[1], 0)) == CONST
      && GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == UNSPEC)
    {
      rtx unspec = XEXP (XEXP (operands[1], 0), 0);
      int unspec_num = XINT (unspec, 1);
      if (unspec_num == UNSPEC_PCREL_SYM)
        {
          emit_insn (gen_auli_pcrel (operands[0], const0_rtx,
                                     XVECEXP (unspec, 0, 0),
                                     XVECEXP (unspec, 0, 1)));
          return true;
        }
      else if (flag_pic == 2 && unspec_num == UNSPEC_GOT32_SYM)
        {
          emit_insn (gen_addhi_got32 (operands[0], const0_rtx,
                                      XVECEXP (unspec, 0, 0)));
          return true;
        }
      else if (HAVE_AS_TLS && unspec_num == UNSPEC_TLS_GD)
        {
          emit_insn (gen_tls_gd_addhi (operands[0], const0_rtx,
                                       XVECEXP (unspec, 0, 0)));
          return true;
        }
      else if (HAVE_AS_TLS && unspec_num == UNSPEC_TLS_IE)
        {
          emit_insn (gen_tls_ie_addhi (operands[0], const0_rtx,
                                       XVECEXP (unspec, 0, 0)));
          return true;
        }
      else if (HAVE_AS_TLS && unspec_num == UNSPEC_TLS_LE)
        {
          emit_insn (gen_tls_le_addhi (operands[0], const0_rtx,
                                       XVECEXP (unspec, 0, 0)));
          return true;
        }
    }

  /* Accept non-constants and valid constants unmodified.  */
  if (!CONSTANT_P (operands[1])
      || GET_CODE (operands[1]) == HIGH || move_operand (operands[1], mode))
    return false;

  /* Split large integers.  */
  if (GET_MODE_SIZE (mode) <= 4)
    {
      tilepro_expand_set_const32 (operands[0], operands[1]);
      return true;
    }

  return false;
}
1548
 
1549
 
1550
/* Expand the "insv" pattern.  */
1551
void
1552
tilepro_expand_insv (rtx operands[4])
1553
{
1554
  rtx first_rtx = operands[2];
1555
  HOST_WIDE_INT first = INTVAL (first_rtx);
1556
  HOST_WIDE_INT width = INTVAL (operands[1]);
1557
  rtx v = operands[3];
1558
 
1559
  /* Shift the inserted bits into position.  */
1560
  if (first != 0)
1561
    {
1562
      if (CONST_INT_P (v))
1563
        {
1564
          /* Shift the constant into mm position.  */
1565
          v = gen_int_si (INTVAL (v) << first);
1566
        }
1567
      else
1568
        {
1569
          /* Shift over the value to be inserted.  */
1570
          rtx tmp = gen_reg_rtx (SImode);
1571
          emit_insn (gen_ashlsi3 (tmp, v, first_rtx));
1572
          v = tmp;
1573
        }
1574
    }
1575
 
1576
  /* Insert the shifted bits using an 'mm' insn.  */
1577
  emit_insn (gen_insn_mm (operands[0], v, operands[0], first_rtx,
1578
                          GEN_INT (first + width - 1)));
1579
}
1580
 
1581
 
1582
/* Expand unaligned loads: load BITSIZE bits of MEM starting at bit
   BIT_OFFSET into DEST_REG, sign-extending iff SIGN is true.  */
void
tilepro_expand_unaligned_load (rtx dest_reg, rtx mem, HOST_WIDE_INT bitsize,
                               HOST_WIDE_INT bit_offset, bool sign)
{
  enum machine_mode mode;
  rtx addr_lo, addr_hi;
  rtx mem_lo, mem_hi, hi;
  rtx mema, wide_result;
  int last_byte_offset;
  HOST_WIDE_INT byte_offset = bit_offset / BITS_PER_UNIT;

  mode = GET_MODE (dest_reg);

  hi = gen_reg_rtx (mode);

  if (bitsize == 2 * BITS_PER_UNIT && (bit_offset % BITS_PER_UNIT) == 0)
    {
      rtx lo;

      /* When just loading a two byte value, we can load the two bytes
         individually and combine them efficiently.  */

      mem_lo = adjust_address (mem, QImode, byte_offset);
      mem_hi = adjust_address (mem, QImode, byte_offset + 1);

      lo = gen_reg_rtx (mode);
      emit_insn (gen_zero_extendqisi2 (lo, mem_lo));

      if (sign)
        {
          rtx tmp = gen_reg_rtx (mode);

          /* Do a signed load of the second byte then shift and OR it
             in.  */
          emit_insn (gen_extendqisi2 (gen_lowpart (SImode, hi), mem_hi));
          emit_insn (gen_ashlsi3 (gen_lowpart (SImode, tmp),
                                  gen_lowpart (SImode, hi), GEN_INT (8)));
          emit_insn (gen_iorsi3 (gen_lowpart (SImode, dest_reg),
                                 gen_lowpart (SImode, lo),
                                 gen_lowpart (SImode, tmp)));
        }
      else
        {
          /* Do two unsigned loads and use intlb to interleave
             them.  */
          emit_insn (gen_zero_extendqisi2 (gen_lowpart (SImode, hi), mem_hi));
          emit_insn (gen_insn_intlb (gen_lowpart (SImode, dest_reg),
                                     gen_lowpart (SImode, hi),
                                     gen_lowpart (SImode, lo)));
        }

      return;
    }

  /* General case: load the two word-aligned words covering the
     unaligned value, then merge them.  */
  mema = XEXP (mem, 0);

  /* AND addresses cannot be in any alias set, since they may
     implicitly alias surrounding code.  Ideally we'd have some alias
     set that covered all types except those with alignment 8 or
     higher.  */
  addr_lo = force_reg (Pmode, plus_constant (mema, byte_offset));
  mem_lo = change_address (mem, mode,
                           gen_rtx_AND (Pmode, addr_lo, GEN_INT (-4)));
  set_mem_alias_set (mem_lo, 0);

  /* Load the high word at an address that will not fault if the low
     address is aligned and at the very end of a page.  */
  last_byte_offset = (bit_offset + bitsize - 1) / BITS_PER_UNIT;
  addr_hi = force_reg (Pmode, plus_constant (mema, last_byte_offset));
  mem_hi = change_address (mem, mode,
                           gen_rtx_AND (Pmode, addr_hi, GEN_INT (-4)));
  set_mem_alias_set (mem_hi, 0);

  if (bitsize == 32)
    {
      /* A full-word load can target dest_reg directly; make sure
         writing it cannot clobber addr_lo, which is still needed
         below.  */
      addr_lo = make_safe_from (addr_lo, dest_reg);
      wide_result = dest_reg;
    }
  else
    {
      wide_result = gen_reg_rtx (mode);
    }

  /* Load hi first in case dest_reg is used in mema.  */
  emit_move_insn (hi, mem_hi);
  emit_move_insn (wide_result, mem_lo);

  /* Merge the two aligned words into the unaligned word, steered by
     the low bits of addr_lo.  */
  emit_insn (gen_insn_dword_align (gen_lowpart (SImode, wide_result),
                                   gen_lowpart (SImode, wide_result),
                                   gen_lowpart (SImode, hi), addr_lo));

  if (bitsize != 32)
    {
      /* Extract the requested sub-word bitfield into dest_reg.  */
      rtx extracted =
        extract_bit_field (gen_lowpart (SImode, wide_result),
                           bitsize, bit_offset % BITS_PER_UNIT,
                           !sign, false, gen_lowpart (SImode, dest_reg),
                           SImode, SImode);

      if (extracted != dest_reg)
        emit_move_insn (dest_reg, gen_lowpart (SImode, extracted));
    }
}
1686
 
1687
 
1688
/* Expand unaligned stores.  */
1689
static void
1690
tilepro_expand_unaligned_store (rtx mem, rtx src, HOST_WIDE_INT bitsize,
1691
                                HOST_WIDE_INT bit_offset)
1692
{
1693
  HOST_WIDE_INT byte_offset = bit_offset / BITS_PER_UNIT;
1694
  HOST_WIDE_INT bytesize = bitsize / BITS_PER_UNIT;
1695
  HOST_WIDE_INT shift_amt;
1696
  HOST_WIDE_INT i;
1697
  rtx mem_addr;
1698
  rtx store_val;
1699
 
1700
  for (i = 0, shift_amt = 0; i < bytesize; i++, shift_amt += BITS_PER_UNIT)
1701
    {
1702
      mem_addr = adjust_address (mem, QImode, byte_offset + i);
1703
 
1704
      if (shift_amt)
1705
        {
1706
          store_val = expand_simple_binop (SImode, LSHIFTRT,
1707
                                           gen_lowpart (SImode, src),
1708
                                           GEN_INT (shift_amt), NULL, 1,
1709
                                           OPTAB_LIB_WIDEN);
1710
          store_val = gen_lowpart (QImode, store_val);
1711
        }
1712
      else
1713
        {
1714
          store_val = gen_lowpart (QImode, src);
1715
        }
1716
 
1717
      emit_move_insn (mem_addr, store_val);
1718
    }
1719
}
1720
 
1721
 
1722
/* Implement the movmisalign patterns.  One of the operands is a
1723
   memory that is not naturally aligned.  Emit instructions to load
1724
   it.  */
1725
void
1726
tilepro_expand_movmisalign (enum machine_mode mode, rtx *operands)
1727
{
1728
  if (MEM_P (operands[1]))
1729
    {
1730
      rtx tmp;
1731
 
1732
      if (register_operand (operands[0], mode))
1733
        tmp = operands[0];
1734
      else
1735
        tmp = gen_reg_rtx (mode);
1736
 
1737
      tilepro_expand_unaligned_load (tmp, operands[1],
1738
                                     GET_MODE_BITSIZE (mode), 0, true);
1739
 
1740
      if (tmp != operands[0])
1741
        emit_move_insn (operands[0], tmp);
1742
    }
1743
  else if (MEM_P (operands[0]))
1744
    {
1745
      if (!reg_or_0_operand (operands[1], mode))
1746
        operands[1] = force_reg (mode, operands[1]);
1747
 
1748
      tilepro_expand_unaligned_store (operands[0], operands[1],
1749
                                      GET_MODE_BITSIZE (mode), 0);
1750
    }
1751
  else
1752
    gcc_unreachable ();
1753
}
1754
 
1755
 
1756
/* Implement the addsi3 pattern.  */
1757
bool
1758
tilepro_expand_addsi (rtx op0, rtx op1, rtx op2)
1759
{
1760
  rtx temp;
1761
  HOST_WIDE_INT n;
1762
  HOST_WIDE_INT high;
1763
 
1764
  /* Skip anything that only takes one instruction.  */
1765
  if (add_operand (op2, SImode))
1766
    return false;
1767
 
1768
  /* We can only optimize ints here (it should be impossible to get
1769
     here with any other type, but it is harmless to check.  */
1770
  if (!CONST_INT_P (op2))
1771
    return false;
1772
 
1773
  temp = create_temp_reg_if_possible (SImode, op0);
1774
  n = INTVAL (op2);
1775
  high = (n + (n & 0x8000)) & ~0xffff;
1776
 
1777
  emit_move_insn (temp, gen_rtx_PLUS (SImode, op1, gen_int_si (high)));
1778
  emit_move_insn (op0, gen_rtx_PLUS (SImode, temp, gen_int_si (n - high)));
1779
 
1780
  return true;
1781
}
1782
 
1783
 
1784
/* Implement the allocate_stack pattern (alloca).  OP0 receives the
   address of the dynamically allocated memory; OP1 is the number of
   bytes to allocate.  */
void
tilepro_allocate_stack (rtx op0, rtx op1)
{
  /* Technically the correct way to initialize chain_loc is with
   * gen_frame_mem() instead of gen_rtx_MEM(), but gen_frame_mem()
   * sets the alias_set to that of a frame reference.  Some of our
   * tests rely on some unsafe assumption about when the chaining
   * update is done, we need to be conservative about reordering the
   * chaining instructions.
   */
  rtx fp_addr = gen_reg_rtx (Pmode);
  rtx fp_value = gen_reg_rtx (Pmode);
  rtx fp_loc;

  /* Save the word stored just above the stack pointer (the chaining
     word mentioned above).  */
  emit_move_insn (fp_addr, gen_rtx_PLUS (Pmode, stack_pointer_rtx,
                                         GEN_INT (UNITS_PER_WORD)));

  fp_loc = gen_frame_mem (Pmode, fp_addr);

  emit_move_insn (fp_value, fp_loc);

  op1 = force_reg (Pmode, op1);

  /* Grow the stack downward by OP1 bytes.  */
  emit_move_insn (stack_pointer_rtx,
                  gen_rtx_MINUS (Pmode, stack_pointer_rtx, op1));

  /* Re-store the saved word above the new stack pointer.  */
  emit_move_insn (fp_addr, gen_rtx_PLUS (Pmode, stack_pointer_rtx,
                                         GEN_INT (UNITS_PER_WORD)));

  fp_loc = gen_frame_mem (Pmode, fp_addr);

  emit_move_insn (fp_loc, fp_value);

  emit_move_insn (op0, virtual_stack_dynamic_rtx);
}
1820
 
1821
 
1822
 
1823
/* Multiplies */
1824
 
1825
/* Returns the insn_code in ENTRY.  */
static enum insn_code
tilepro_multiply_get_opcode (const struct tilepro_multiply_insn_seq_entry
                             *entry)
{
  /* Opcodes are stored in compressed form; decode through the lookup
     table.  */
  return tilepro_multiply_insn_seq_decode_opcode[entry->compressed_opcode];
}
1832
 
1833
 
1834
/* Returns the length of the 'op' array.  */
1835
static int
1836
tilepro_multiply_get_num_ops (const struct tilepro_multiply_insn_seq *seq)
1837
{
1838
  /* The array either uses all of its allocated slots or is terminated
1839
     by a bogus opcode. Either way, the array size is the index of the
1840
     last valid opcode plus one.  */
1841
  int i;
1842
  for (i = tilepro_multiply_insn_seq_MAX_OPERATIONS - 1; i >= 0; i--)
1843
    if (tilepro_multiply_get_opcode (&seq->op[i]) != CODE_FOR_nothing)
1844
      return i + 1;
1845
 
1846
  /* An empty array is not allowed.  */
1847
  gcc_unreachable ();
1848
}
1849
 
1850
 
1851
/* We precompute a number of expression trees for multiplying by
   constants.  This generates code for such an expression tree by
   walking through the nodes in the tree (which are conveniently
   pre-linearized) and emitting an instruction for each one.
   RESULT receives the product, SRC is the value being multiplied,
   and SEQ is the precomputed instruction sequence to emit.  */
static void
tilepro_expand_constant_multiply_given_sequence (rtx result, rtx src,
                                                 const struct
                                                 tilepro_multiply_insn_seq
                                                 *seq)
{
  int i;
  int num_ops;

  /* Keep track of the subexpressions computed so far, so later
     instructions can refer to them.  We seed the array with zero and
     the value being multiplied.  */
  int num_subexprs = 2;
  rtx subexprs[tilepro_multiply_insn_seq_MAX_OPERATIONS + 2];
  subexprs[0] = const0_rtx;
  subexprs[1] = src;

  /* Determine how many instructions we are going to generate.  */
  num_ops = tilepro_multiply_get_num_ops (seq);
  gcc_assert (num_ops > 0
              && num_ops <= tilepro_multiply_insn_seq_MAX_OPERATIONS);

  for (i = 0; i < num_ops; i++)
    {
      const struct tilepro_multiply_insn_seq_entry *entry = &seq->op[i];

      /* Figure out where to store the output of this instruction.
	 Only the final instruction writes RESULT; intermediates get
	 fresh pseudos.  */
      const bool is_last_op = (i + 1 == num_ops);
      rtx out = is_last_op ? result : gen_reg_rtx (SImode);

      enum insn_code opcode = tilepro_multiply_get_opcode (entry);
      if (opcode == CODE_FOR_ashlsi3)
        {
          /* Handle shift by immediate. This is a special case because
             the meaning of the second operand is a constant shift
             count rather than an operand index.  */

          /* Make sure the shift count is in range. Zero should not
             happen.  */
          const int shift_count = entry->rhs;
          gcc_assert (shift_count > 0 && shift_count < 32);

          /* Emit the actual instruction.  */
          emit_insn (GEN_FCN (opcode)
                     (out, subexprs[entry->lhs],
                      gen_rtx_CONST_INT (SImode, shift_count)));
        }
      else
        {
          /* Handle a normal two-operand instruction, such as add or
             s1a.  */

          /* Make sure we are referring to a previously computed
             subexpression.  */
          gcc_assert (entry->rhs < num_subexprs);

          /* Emit the actual instruction.  */
          emit_insn (GEN_FCN (opcode)
                     (out, subexprs[entry->lhs], subexprs[entry->rhs]));
        }

      /* Record this subexpression for use by later expressions.  */
      subexprs[num_subexprs++] = out;
    }
}
1920
 
1921
 
1922
/* bsearch helper function.  */
1923
static int
1924
tilepro_compare_multipliers (const void *key, const void *t)
1925
{
1926
  return *(const int *) key -
1927
    ((const struct tilepro_multiply_insn_seq *) t)->multiplier;
1928
}
1929
 
1930
 
1931
/* Returns the tilepro_multiply_insn_seq for multiplier, or NULL if
1932
   none exists.  */
1933
static const struct tilepro_multiply_insn_seq *
1934
tilepro_find_multiply_insn_seq_for_constant (int multiplier)
1935
{
1936
  return ((const struct tilepro_multiply_insn_seq *)
1937
          bsearch (&multiplier, tilepro_multiply_insn_seq_table,
1938
                   tilepro_multiply_insn_seq_table_size,
1939
                   sizeof tilepro_multiply_insn_seq_table[0],
1940
                   tilepro_compare_multipliers));
1941
}
1942
 
1943
 
1944
/* Try to a expand constant multiply in SImode by looking it up in a
1945
   precompiled table.  OP0 is the result operand, OP1 is the source
1946
   operand, and MULTIPLIER is the value of the constant.  Return true
1947
   if it succeeds.  */
1948
static bool
1949
tilepro_expand_const_mulsi (rtx op0, rtx op1, int multiplier)
1950
{
1951
  /* See if we have precomputed an efficient way to multiply by this
1952
     constant.  */
1953
  const struct tilepro_multiply_insn_seq *seq =
1954
    tilepro_find_multiply_insn_seq_for_constant (multiplier);
1955
  if (seq != NULL)
1956
    {
1957
      tilepro_expand_constant_multiply_given_sequence (op0, op1, seq);
1958
      return true;
1959
    }
1960
  else
1961
    return false;
1962
}
1963
 
1964
 
1965
/* Expand the mulsi pattern.  */
1966
bool
1967
tilepro_expand_mulsi (rtx op0, rtx op1, rtx op2)
1968
{
1969
  if (CONST_INT_P (op2))
1970
    {
1971
      HOST_WIDE_INT n = trunc_int_for_mode (INTVAL (op2), SImode);
1972
      return tilepro_expand_const_mulsi (op0, op1, n);
1973
    }
1974
  return false;
1975
}
1976
 
1977
 
1978
/* Expand a high multiply pattern in SImode.  RESULT, OP1, OP2 are the
   operands, and SIGN is true if it's a signed multiply, and false if
   it's an unsigned multiply.  Builds the high 32 bits of the 64-bit
   product out of 16x16 partial products.  */
static void
tilepro_expand_high_multiply (rtx result, rtx op1, rtx op2, bool sign)
{
  /* Pseudos for the partial products, carries, and running sums.  */
  rtx tmp0 = gen_reg_rtx (SImode);
  rtx tmp1 = gen_reg_rtx (SImode);
  rtx tmp2 = gen_reg_rtx (SImode);
  rtx tmp3 = gen_reg_rtx (SImode);
  rtx tmp4 = gen_reg_rtx (SImode);
  rtx tmp5 = gen_reg_rtx (SImode);
  rtx tmp6 = gen_reg_rtx (SImode);
  rtx tmp7 = gen_reg_rtx (SImode);
  rtx tmp8 = gen_reg_rtx (SImode);
  rtx tmp9 = gen_reg_rtx (SImode);
  rtx tmp10 = gen_reg_rtx (SImode);
  rtx tmp11 = gen_reg_rtx (SImode);
  rtx tmp12 = gen_reg_rtx (SImode);
  rtx tmp13 = gen_reg_rtx (SImode);
  rtx result_lo = gen_reg_rtx (SImode);

  /* Emit the four 16x16 partial products.  Going by the insn
     mnemonics (mulhl/mulll/mulhh = high*low, low*low, high*high):
     tmp0/tmp1 are the two cross products, tmp2 the low product and
     tmp3 the high product; only the operands that contribute a high
     half use signed variants in the signed case.  */
  if (sign)
    {
      emit_insn (gen_insn_mulhl_su (tmp0, op1, op2));
      emit_insn (gen_insn_mulhl_su (tmp1, op2, op1));
      emit_insn (gen_insn_mulll_uu (tmp2, op1, op2));
      emit_insn (gen_insn_mulhh_ss (tmp3, op1, op2));
    }
  else
    {
      emit_insn (gen_insn_mulhl_uu (tmp0, op1, op2));
      emit_insn (gen_insn_mulhl_uu (tmp1, op2, op1));
      emit_insn (gen_insn_mulll_uu (tmp2, op1, op2));
      emit_insn (gen_insn_mulhh_uu (tmp3, op1, op2));
    }

  /* Shift the cross products into position and accumulate the low
     word of the 64-bit result.  */
  emit_move_insn (tmp4, (gen_rtx_ASHIFT (SImode, tmp0, GEN_INT (16))));

  emit_move_insn (tmp5, (gen_rtx_ASHIFT (SImode, tmp1, GEN_INT (16))));

  emit_move_insn (tmp6, (gen_rtx_PLUS (SImode, tmp4, tmp5)));
  emit_move_insn (result_lo, (gen_rtx_PLUS (SImode, tmp2, tmp6)));

  /* Capture the carry out of each addition above: an unsigned sum is
     less than an addend exactly when the addition wrapped.  */
  emit_move_insn (tmp7, gen_rtx_LTU (SImode, tmp6, tmp4));
  emit_move_insn (tmp8, gen_rtx_LTU (SImode, result_lo, tmp2));

  /* High halves of the cross products; arithmetic shift to preserve
     their sign in the signed case.  */
  if (sign)
    {
      emit_move_insn (tmp9, (gen_rtx_ASHIFTRT (SImode, tmp0, GEN_INT (16))));
      emit_move_insn (tmp10, (gen_rtx_ASHIFTRT (SImode, tmp1, GEN_INT (16))));
    }
  else
    {
      emit_move_insn (tmp9, (gen_rtx_LSHIFTRT (SImode, tmp0, GEN_INT (16))));
      emit_move_insn (tmp10, (gen_rtx_LSHIFTRT (SImode, tmp1, GEN_INT (16))));
    }

  /* High word = high product + both carries + high halves of the
     cross products.  */
  emit_move_insn (tmp11, (gen_rtx_PLUS (SImode, tmp3, tmp7)));
  emit_move_insn (tmp12, (gen_rtx_PLUS (SImode, tmp8, tmp9)));
  emit_move_insn (tmp13, (gen_rtx_PLUS (SImode, tmp11, tmp12)));
  emit_move_insn (result, (gen_rtx_PLUS (SImode, tmp13, tmp10)));
}
2041
 
2042
 
2043
/* Implement smulsi3_highpart: OP0 = high 32 bits of the signed
   product OP1 * OP2.  */
void
tilepro_expand_smulsi3_highpart (rtx op0, rtx op1, rtx op2)
{
  tilepro_expand_high_multiply (op0, op1, op2, true);
}
2049
 
2050
 
2051
/* Implement umulsi3_highpart: OP0 = high 32 bits of the unsigned
   product OP1 * OP2.  */
void
tilepro_expand_umulsi3_highpart (rtx op0, rtx op1, rtx op2)
{
  tilepro_expand_high_multiply (op0, op1, op2, false);
}
2057
 
2058
 
2059
 
2060
/* Compare and branches  */
2061
 
2062
/* Helper function to handle DImode for tilepro_emit_setcc_internal.  */
2063
static bool
2064
tilepro_emit_setcc_internal_di (rtx res, enum rtx_code code, rtx op0, rtx op1)
2065
{
2066
  rtx operands[2], lo_half[2], hi_half[2];
2067
  rtx tmp, tmp0, tmp1, tmp2;
2068
  bool swap = false;
2069
 
2070
  /* Reduce the number of cases we need to handle by reversing the
2071
     operands.  */
2072
  switch (code)
2073
    {
2074
    case EQ:
2075
    case NE:
2076
    case LE:
2077
    case LT:
2078
    case LEU:
2079
    case LTU:
2080
      /* We handle these compares directly.  */
2081
      break;
2082
 
2083
    case GE:
2084
    case GT:
2085
    case GEU:
2086
    case GTU:
2087
      /* Reverse the operands.  */
2088
      swap = true;
2089
      break;
2090
 
2091
    default:
2092
      /* We should not have called this with any other code.  */
2093
      gcc_unreachable ();
2094
    }
2095
 
2096
  if (swap)
2097
    {
2098
      code = swap_condition (code);
2099
      tmp = op0, op0 = op1, op1 = tmp;
2100
    }
2101
 
2102
  operands[0] = op0;
2103
  operands[1] = op1;
2104
 
2105
  split_di (operands, 2, lo_half, hi_half);
2106
 
2107
  if (!reg_or_0_operand (lo_half[0], SImode))
2108
    lo_half[0] = force_reg (SImode, lo_half[0]);
2109
 
2110
  if (!reg_or_0_operand (hi_half[0], SImode))
2111
    hi_half[0] = force_reg (SImode, hi_half[0]);
2112
 
2113
  if (!CONST_INT_P (lo_half[1]) && !register_operand (lo_half[1], SImode))
2114
    lo_half[1] = force_reg (SImode, lo_half[1]);
2115
 
2116
  if (!CONST_INT_P (hi_half[1]) && !register_operand (hi_half[1], SImode))
2117
    hi_half[1] = force_reg (SImode, hi_half[1]);
2118
 
2119
  tmp0 = gen_reg_rtx (SImode);
2120
  tmp1 = gen_reg_rtx (SImode);
2121
  tmp2 = gen_reg_rtx (SImode);
2122
 
2123
  switch (code)
2124
    {
2125
    case EQ:
2126
      emit_insn (gen_insn_seq (tmp0, lo_half[0], lo_half[1]));
2127
      emit_insn (gen_insn_seq (tmp1, hi_half[0], hi_half[1]));
2128
      emit_insn (gen_andsi3 (res, tmp0, tmp1));
2129
      return true;
2130
      break;
2131
    case NE:
2132
      emit_insn (gen_insn_sne (tmp0, lo_half[0], lo_half[1]));
2133
      emit_insn (gen_insn_sne (tmp1, hi_half[0], hi_half[1]));
2134
      emit_insn (gen_iorsi3 (res, tmp0, tmp1));
2135
      return true;
2136
      break;
2137
    case LE:
2138
      emit_insn (gen_insn_slte (tmp0, hi_half[0], hi_half[1]));
2139
      emit_insn (gen_insn_seq (tmp1, hi_half[0], hi_half[1]));
2140
      emit_insn (gen_insn_slte_u (tmp2, lo_half[0], lo_half[1]));
2141
      emit_insn (gen_insn_mvnz (res, tmp0, tmp1, tmp2));
2142
      return true;
2143
    case LT:
2144
      if (operands[1] == const0_rtx)
2145
        {
2146
          emit_insn (gen_lshrsi3 (res, hi_half[0], GEN_INT (31)));
2147
          return true;
2148
        }
2149
      else
2150
        {
2151
          emit_insn (gen_insn_slt (tmp0, hi_half[0], hi_half[1]));
2152
          emit_insn (gen_insn_seq (tmp1, hi_half[0], hi_half[1]));
2153
          emit_insn (gen_insn_slt_u (tmp2, lo_half[0], lo_half[1]));
2154
          emit_insn (gen_insn_mvnz (res, tmp0, tmp1, tmp2));
2155
        }
2156
      return true;
2157
    case LEU:
2158
      emit_insn (gen_insn_slte_u (tmp0, hi_half[0], hi_half[1]));
2159
      emit_insn (gen_insn_seq (tmp1, hi_half[0], hi_half[1]));
2160
      emit_insn (gen_insn_slte_u (tmp2, lo_half[0], lo_half[1]));
2161
      emit_insn (gen_insn_mvnz (res, tmp0, tmp1, tmp2));
2162
      return true;
2163
    case LTU:
2164
      emit_insn (gen_insn_slt_u (tmp0, hi_half[0], hi_half[1]));
2165
      emit_insn (gen_insn_seq (tmp1, hi_half[0], hi_half[1]));
2166
      emit_insn (gen_insn_slt_u (tmp2, lo_half[0], lo_half[1]));
2167
      emit_insn (gen_insn_mvnz (res, tmp0, tmp1, tmp2));
2168
      return true;
2169
    default:
2170
      gcc_unreachable ();
2171
    }
2172
 
2173
  return false;
2174
}
2175
 
2176
 
2177
/* Certain simplifications can be done to make invalid setcc
2178
   operations valid.  Return the final comparison, or NULL if we can't
2179
   work.  */
2180
static bool
2181
tilepro_emit_setcc_internal (rtx res, enum rtx_code code, rtx op0, rtx op1,
2182
                             enum machine_mode cmp_mode)
2183
{
2184
  rtx tmp;
2185
  bool swap = false;
2186
 
2187
  if (cmp_mode == DImode)
2188
    {
2189
      return tilepro_emit_setcc_internal_di (res, code, op0, op1);
2190
    }
2191
 
2192
  /* The general case: fold the comparison code to the types of
2193
     compares that we have, choosing the branch as necessary.  */
2194
 
2195
  switch (code)
2196
    {
2197
    case EQ:
2198
    case NE:
2199
    case LE:
2200
    case LT:
2201
    case LEU:
2202
    case LTU:
2203
      /* We have these compares.  */
2204
      break;
2205
 
2206
    case GE:
2207
    case GT:
2208
    case GEU:
2209
    case GTU:
2210
      /* We do not have these compares, so we reverse the
2211
         operands.  */
2212
      swap = true;
2213
      break;
2214
 
2215
    default:
2216
      /* We should not have called this with any other code.  */
2217
      gcc_unreachable ();
2218
    }
2219
 
2220
  if (swap)
2221
    {
2222
      code = swap_condition (code);
2223
      tmp = op0, op0 = op1, op1 = tmp;
2224
    }
2225
 
2226
  if (!reg_or_0_operand (op0, SImode))
2227
    op0 = force_reg (SImode, op0);
2228
 
2229
  if (!CONST_INT_P (op1) && !register_operand (op1, SImode))
2230
    op1 = force_reg (SImode, op1);
2231
 
2232
  /* Return the setcc comparison.  */
2233
  emit_insn (gen_rtx_SET (VOIDmode, res,
2234
                          gen_rtx_fmt_ee (code, SImode, op0, op1)));
2235
 
2236
  return true;
2237
}
2238
 
2239
 
2240
/* Implement cstore patterns.  OPERANDS[0] is the destination,
   OPERANDS[1] the comparison rtx (only its code is used), and
   OPERANDS[2]/OPERANDS[3] the compared values; CMP_MODE is the mode
   of the comparison.  */
bool
tilepro_emit_setcc (rtx operands[], enum machine_mode cmp_mode)
{
  return
    tilepro_emit_setcc_internal (operands[0], GET_CODE (operands[1]),
                                 operands[2], operands[3], cmp_mode);
}
2248
 
2249
 
2250
/* Return whether CODE is a signed comparison.  */
2251
static bool
2252
signed_compare_p (enum rtx_code code)
2253
{
2254
  return (code == EQ || code == NE || code == LT || code == LE
2255
          || code == GT || code == GE);
2256
}
2257
 
2258
 
2259
/* Generate the comparison for an SImode conditional branch.
   CODE/OP0/OP1 describe the comparison and CMP_MODE its mode;
   EQ_NE_ONLY restricts the direct compare-against-zero shortcut to
   equality codes.  Returns a (cond reg const0) rtx suitable as a
   branch condition, emitting any setup insns needed.  */
static rtx
tilepro_emit_cc_test (enum rtx_code code, rtx op0, rtx op1,
                      enum machine_mode cmp_mode, bool eq_ne_only)
{
  enum rtx_code branch_code;
  rtx temp;

  /* Check for a compare against zero using a comparison we can do
     directly.  */
  if (cmp_mode != DImode
      && op1 == const0_rtx
      && (code == EQ || code == NE
          || (!eq_ne_only && signed_compare_p (code))))
    {
      op0 = force_reg (SImode, op0);
      return gen_rtx_fmt_ee (code, VOIDmode, op0, const0_rtx);
    }

  /* The general case: fold the comparison code to the types of
     compares that we have, choosing the branch as necessary.  */
  switch (code)
    {
    case EQ:
    case LE:
    case LT:
    case LEU:
    case LTU:
      /* We have these compares.  */
      branch_code = NE;
      break;

    case NE:
    case GE:
    case GT:
    case GEU:
    case GTU:
      /* These must be reversed (except NE, but let's
         canonicalize).  */
      code = reverse_condition (code);
      branch_code = EQ;
      break;

    default:
      gcc_unreachable ();
    }

  /* For constants that don't fit the insn's immediate field (or LEU,
     which has no immediate form we can use here), try cheaper
     reductions before falling back to a setcc into a register.  */
  if (cmp_mode != DImode
      && CONST_INT_P (op1) && (!satisfies_constraint_I (op1) || code == LEU))
    {
      HOST_WIDE_INT n = trunc_int_for_mode (INTVAL (op1), SImode);

      switch (code)
        {
        case EQ:
          /* Subtract off the value we want to compare against and see
             if we get zero.  This is cheaper than creating a constant
             in a register. Except that subtracting -128 is more
             expensive than seqi to -128, so we leave that alone.  */
          /* ??? Don't do this when comparing against symbols,
             otherwise we'll reduce (&x == 0x1234) to (&x-0x1234 ==
             0), which will be declared false out of hand (at least
             for non-weak).  */
          if (!(symbolic_operand (op0, VOIDmode)
                || (REG_P (op0) && REG_POINTER (op0))))
            {
              /* To compare against MIN_INT, we add MIN_INT and check
                 for 0.  (Negating MIN_INT would overflow.)  */
              HOST_WIDE_INT add;
              if (n != -2147483647 - 1)
                add = -n;
              else
                add = n;

              op0 = force_reg (SImode, op0);
              temp = gen_reg_rtx (SImode);
              emit_insn (gen_addsi3 (temp, op0, gen_int_si (add)));
              return gen_rtx_fmt_ee (reverse_condition (branch_code),
                                     VOIDmode, temp, const0_rtx);
            }
          break;

        case LEU:
          if (n == -1)
            break;
          /* FALLTHRU */

        case LTU:
          /* Change ((unsigned)x < 0x1000) into !((unsigned)x >> 12),
             etc.  */
          {
            int first = exact_log2 (code == LTU ? n : n + 1);
            if (first != -1)
              {
                op0 = force_reg (SImode, op0);
                temp = gen_reg_rtx (SImode);
                emit_move_insn (temp,
                                gen_rtx_LSHIFTRT (SImode, op0,
                                                  gen_int_si (first)));
                return gen_rtx_fmt_ee (reverse_condition (branch_code),
                                       VOIDmode, temp, const0_rtx);
              }
          }
          break;

        default:
          break;
        }
    }

  /* Compute a flag saying whether we should branch.  */
  temp = gen_reg_rtx (SImode);
  tilepro_emit_setcc_internal (temp, code, op0, op1, cmp_mode);

  /* Return the branch comparison.  */
  return gen_rtx_fmt_ee (branch_code, VOIDmode, temp, const0_rtx);
}
2376
 
2377
 
2378
/* Generate the comparison for a conditional branch.  */
2379
void
2380
tilepro_emit_conditional_branch (rtx operands[], enum machine_mode cmp_mode)
2381
{
2382
  rtx cmp_rtx =
2383
    tilepro_emit_cc_test (GET_CODE (operands[0]), operands[1], operands[2],
2384
                          cmp_mode, false);
2385
  rtx branch_rtx = gen_rtx_SET (VOIDmode, pc_rtx,
2386
                                gen_rtx_IF_THEN_ELSE (VOIDmode, cmp_rtx,
2387
                                                      gen_rtx_LABEL_REF
2388
                                                      (VOIDmode,
2389
                                                       operands[3]),
2390
                                                      pc_rtx));
2391
  emit_jump_insn (branch_rtx);
2392
}
2393
 
2394
 
2395
/* Implement the movsicc pattern.  CMP is the comparison rtx; returns
   the condition to use in the conditional move, restricted to
   eq/ne-style tests (see tilepro_emit_cc_test's eq_ne_only flag).  */
rtx
tilepro_emit_conditional_move (rtx cmp)
{
  return
    tilepro_emit_cc_test (GET_CODE (cmp), XEXP (cmp, 0), XEXP (cmp, 1),
                          GET_MODE (XEXP (cmp, 0)), true);
}
2403
 
2404
 
2405
/* Return true if INSN is annotated with a REG_BR_PROB note that
2406
   indicates it's a branch that's predicted taken.  */
2407
static bool
2408
cbranch_predicted_p (rtx insn)
2409
{
2410
  rtx x = find_reg_note (insn, REG_BR_PROB, 0);
2411
 
2412
  if (x)
2413
    {
2414
      int pred_val = INTVAL (XEXP (x, 0));
2415
 
2416
      return pred_val >= REG_BR_PROB_BASE / 2;
2417
    }
2418
 
2419
  return false;
2420
}
2421
 
2422
 
2423
/* Output assembly code for a specific branch instruction, appending
2424
   the branch prediction flag to the opcode if appropriate.  */
2425
static const char *
2426
tilepro_output_simple_cbranch_with_opcode (rtx insn, const char *opcode,
2427
                                           int regop, bool netreg_p,
2428
                                           bool reverse_predicted)
2429
{
2430
  static char buf[64];
2431
  sprintf (buf, "%s%s\t%%%c%d, %%l0", opcode,
2432
           (cbranch_predicted_p (insn) ^ reverse_predicted) ? "t" : "",
2433
           netreg_p ? 'N' : 'r', regop);
2434
  return buf;
2435
}
2436
 
2437
 
2438
/* Output assembly code for a specific branch instruction, appending
   the branch prediction flag to the opcode if appropriate.  OPCODE /
   REV_OPCODE are the direct and reversed mnemonics; REGOP and
   NETREG_P are passed through to the simple-branch formatter.  When
   the target is out of direct-branch range, emits a reversed branch
   around an unconditional jump instead.  Note: temporarily rewrites
   OPERANDS[0] in the long-branch path.  */
const char *
tilepro_output_cbranch_with_opcode (rtx insn, rtx *operands,
                                    const char *opcode,
                                    const char *rev_opcode,
                                    int regop, bool netreg_p)
{
  const char *branch_if_false;
  rtx taken, not_taken;
  bool is_simple_branch;

  gcc_assert (LABEL_P (operands[0]));

  /* If insn addresses are known, check the branch displacement is in
     direct range; otherwise assume it is.  */
  is_simple_branch = true;
  if (INSN_ADDRESSES_SET_P ())
    {
      int from_addr = INSN_ADDRESSES (INSN_UID (insn));
      int to_addr = INSN_ADDRESSES (INSN_UID (operands[0]));
      int delta = to_addr - from_addr;
      is_simple_branch = IN_RANGE (delta, -524288, 524280);
    }

  if (is_simple_branch)
    {
      /* Just a simple conditional branch.  */
      return
        tilepro_output_simple_cbranch_with_opcode (insn, opcode, regop,
                                                   netreg_p, false);
    }

  /* Generate a reversed branch around a direct jump.  This fallback
     does not use branch-likely instructions.  */
  not_taken = gen_label_rtx ();
  taken = operands[0];

  /* Generate the reversed branch to NOT_TAKEN.  */
  operands[0] = not_taken;
  branch_if_false =
    tilepro_output_simple_cbranch_with_opcode (insn, rev_opcode, regop,
                                               netreg_p, true);
  output_asm_insn (branch_if_false, operands);

  /* Unconditional jump to the real target.  */
  output_asm_insn ("j\t%l0", &taken);

  /* Output NOT_TAKEN.  */
  targetm.asm_out.internal_label (asm_out_file, "L",
                                  CODE_LABEL_NUMBER (not_taken));
  return "";
}
2488
 
2489
 
2490
/* Output assembly code for a conditional branch instruction.  */
2491
const char *
2492
tilepro_output_cbranch (rtx insn, rtx *operands, bool reversed)
2493
{
2494
  enum rtx_code code = GET_CODE (operands[1]);
2495
  const char *opcode;
2496
  const char *rev_opcode;
2497
 
2498
  if (reversed)
2499
    code = reverse_condition (code);
2500
 
2501
  switch (code)
2502
    {
2503
    case NE:
2504
      opcode = "bnz";
2505
      rev_opcode = "bz";
2506
      break;
2507
    case EQ:
2508
      opcode = "bz";
2509
      rev_opcode = "bnz";
2510
      break;
2511
    case GE:
2512
      opcode = "bgez";
2513
      rev_opcode = "blz";
2514
      break;
2515
    case GT:
2516
      opcode = "bgz";
2517
      rev_opcode = "blez";
2518
      break;
2519
    case LE:
2520
      opcode = "blez";
2521
      rev_opcode = "bgz";
2522
      break;
2523
    case LT:
2524
      opcode = "blz";
2525
      rev_opcode = "bgez";
2526
      break;
2527
    default:
2528
      gcc_unreachable ();
2529
    }
2530
 
2531
  return
2532
    tilepro_output_cbranch_with_opcode (insn, operands, opcode, rev_opcode,
2533
                                        2, false);
2534
}
2535
 
2536
 
2537
/* Implement the tablejump pattern.  */
2538
void
2539
tilepro_expand_tablejump (rtx op0, rtx op1)
2540
{
2541
  if (flag_pic)
2542
    {
2543
      rtx table = gen_rtx_LABEL_REF (Pmode, op1);
2544
      rtx temp = gen_reg_rtx (Pmode);
2545
      rtx text_label_symbol = tilepro_text_label_symbol ();
2546
      rtx text_label_rtx = tilepro_text_label_rtx ();
2547
 
2548
      emit_insn (gen_addli_pcrel (temp, text_label_rtx,
2549
                                  table, text_label_symbol));
2550
      emit_insn (gen_auli_pcrel (temp, temp, table, text_label_symbol));
2551
      emit_move_insn (temp,
2552
                      gen_rtx_PLUS (Pmode,
2553
                                    convert_to_mode (Pmode, op0, false),
2554
                                    temp));
2555
      op0 = temp;
2556
    }
2557
 
2558
  emit_jump_insn (gen_tablejump_aux (op0, op1));
2559
}
2560
 
2561
 
2562
/* Expand a builtin vector binary op, by calling gen function GEN with
2563
   operands in the proper modes.  DEST is converted to DEST_MODE, and
2564
   src0 and src1 (if DO_SRC1 is true) is converted to SRC_MODE.  */
2565
void
2566
tilepro_expand_builtin_vector_binop (rtx (*gen) (rtx, rtx, rtx),
2567
                                     enum machine_mode dest_mode,
2568
                                     rtx dest,
2569
                                     enum machine_mode src_mode,
2570
                                     rtx src0, rtx src1, bool do_src1)
2571
{
2572
  dest = gen_lowpart (dest_mode, dest);
2573
 
2574
  if (src0 == const0_rtx)
2575
    src0 = CONST0_RTX (src_mode);
2576
  else
2577
    src0 = gen_lowpart (src_mode, src0);
2578
 
2579
  if (do_src1)
2580
    {
2581
      if (src1 == const0_rtx)
2582
        src1 = CONST0_RTX (src_mode);
2583
      else
2584
        src1 = gen_lowpart (src_mode, src1);
2585
    }
2586
 
2587
  emit_insn ((*gen) (dest, src0, src1));
2588
}
2589
 
2590
 
2591
 
2592
/* Intrinsics  */
2593
 
2594
/* Per-builtin record pairing an insn pattern with its decl.  */
struct tile_builtin_info
{
  /* The insn pattern implementing this builtin.  */
  enum insn_code icode;
  /* The builtin's FUNCTION_DECL; NULL in the static table below —
     presumably filled in when the builtin is registered (confirm
     against the builtin-init code, which is outside this chunk).  */
  tree fndecl;
};
2599
 
2600
static struct tile_builtin_info tilepro_builtin_info[TILEPRO_BUILTIN_max] = {
2601
  { CODE_FOR_addsi3,                    NULL }, /* add */
2602
  { CODE_FOR_insn_addb,                 NULL }, /* addb */
2603
  { CODE_FOR_insn_addbs_u,              NULL }, /* addbs_u */
2604
  { CODE_FOR_insn_addh,                 NULL }, /* addh */
2605
  { CODE_FOR_insn_addhs,                NULL }, /* addhs */
2606
  { CODE_FOR_insn_addib,                NULL }, /* addib */
2607
  { CODE_FOR_insn_addih,                NULL }, /* addih */
2608
  { CODE_FOR_insn_addlis,               NULL }, /* addlis */
2609
  { CODE_FOR_ssaddsi3,                  NULL }, /* adds */
2610
  { CODE_FOR_insn_adiffb_u,             NULL }, /* adiffb_u */
2611
  { CODE_FOR_insn_adiffh,               NULL }, /* adiffh */
2612
  { CODE_FOR_andsi3,                    NULL }, /* and */
2613
  { CODE_FOR_insn_auli,                 NULL }, /* auli */
2614
  { CODE_FOR_insn_avgb_u,               NULL }, /* avgb_u */
2615
  { CODE_FOR_insn_avgh,                 NULL }, /* avgh */
2616
  { CODE_FOR_insn_bitx,                 NULL }, /* bitx */
2617
  { CODE_FOR_bswapsi2,                  NULL }, /* bytex */
2618
  { CODE_FOR_clzsi2,                    NULL }, /* clz */
2619
  { CODE_FOR_insn_crc32_32,             NULL }, /* crc32_32 */
2620
  { CODE_FOR_insn_crc32_8,              NULL }, /* crc32_8 */
2621
  { CODE_FOR_ctzsi2,                    NULL }, /* ctz */
2622
  { CODE_FOR_insn_drain,                NULL }, /* drain */
2623
  { CODE_FOR_insn_dtlbpr,               NULL }, /* dtlbpr */
2624
  { CODE_FOR_insn_dword_align,          NULL }, /* dword_align */
2625
  { CODE_FOR_insn_finv,                 NULL }, /* finv */
2626
  { CODE_FOR_insn_flush,                NULL }, /* flush */
2627
  { CODE_FOR_insn_fnop,                 NULL }, /* fnop */
2628
  { CODE_FOR_insn_icoh,                 NULL }, /* icoh */
2629
  { CODE_FOR_insn_ill,                  NULL }, /* ill */
2630
  { CODE_FOR_insn_info,                 NULL }, /* info */
2631
  { CODE_FOR_insn_infol,                NULL }, /* infol */
2632
  { CODE_FOR_insn_inthb,                NULL }, /* inthb */
2633
  { CODE_FOR_insn_inthh,                NULL }, /* inthh */
2634
  { CODE_FOR_insn_intlb,                NULL }, /* intlb */
2635
  { CODE_FOR_insn_intlh,                NULL }, /* intlh */
2636
  { CODE_FOR_insn_inv,                  NULL }, /* inv */
2637
  { CODE_FOR_insn_lb,                   NULL }, /* lb */
2638
  { CODE_FOR_insn_lb_u,                 NULL }, /* lb_u */
2639
  { CODE_FOR_insn_lh,                   NULL }, /* lh */
2640
  { CODE_FOR_insn_lh_u,                 NULL }, /* lh_u */
2641
  { CODE_FOR_insn_lnk,                  NULL }, /* lnk */
2642
  { CODE_FOR_insn_lw,                   NULL }, /* lw */
2643
  { CODE_FOR_insn_lw_na,                NULL }, /* lw_na */
2644
  { CODE_FOR_insn_lb_L2,                NULL }, /* lb_L2 */
2645
  { CODE_FOR_insn_lb_u_L2,              NULL }, /* lb_u_L2 */
2646
  { CODE_FOR_insn_lh_L2,                NULL }, /* lh_L2 */
2647
  { CODE_FOR_insn_lh_u_L2,              NULL }, /* lh_u_L2 */
2648
  { CODE_FOR_insn_lw_L2,                NULL }, /* lw_L2 */
2649
  { CODE_FOR_insn_lw_na_L2,             NULL }, /* lw_na_L2 */
2650
  { CODE_FOR_insn_lb_miss,              NULL }, /* lb_miss */
2651
  { CODE_FOR_insn_lb_u_miss,            NULL }, /* lb_u_miss */
2652
  { CODE_FOR_insn_lh_miss,              NULL }, /* lh_miss */
2653
  { CODE_FOR_insn_lh_u_miss,            NULL }, /* lh_u_miss */
2654
  { CODE_FOR_insn_lw_miss,              NULL }, /* lw_miss */
2655
  { CODE_FOR_insn_lw_na_miss,           NULL }, /* lw_na_miss */
2656
  { CODE_FOR_insn_maxb_u,               NULL }, /* maxb_u */
2657
  { CODE_FOR_insn_maxh,                 NULL }, /* maxh */
2658
  { CODE_FOR_insn_maxib_u,              NULL }, /* maxib_u */
2659
  { CODE_FOR_insn_maxih,                NULL }, /* maxih */
2660
  { CODE_FOR_memory_barrier,            NULL }, /* mf */
2661
  { CODE_FOR_insn_mfspr,                NULL }, /* mfspr */
2662
  { CODE_FOR_insn_minb_u,               NULL }, /* minb_u */
2663
  { CODE_FOR_insn_minh,                 NULL }, /* minh */
2664
  { CODE_FOR_insn_minib_u,              NULL }, /* minib_u */
2665
  { CODE_FOR_insn_minih,                NULL }, /* minih */
2666
  { CODE_FOR_insn_mm,                   NULL }, /* mm */
2667
  { CODE_FOR_insn_mnz,                  NULL }, /* mnz */
2668
  { CODE_FOR_insn_mnzb,                 NULL }, /* mnzb */
2669
  { CODE_FOR_insn_mnzh,                 NULL }, /* mnzh */
2670
  { CODE_FOR_movsi,                     NULL }, /* move */
2671
  { CODE_FOR_insn_movelis,              NULL }, /* movelis */
2672
  { CODE_FOR_insn_mtspr,                NULL }, /* mtspr */
2673
  { CODE_FOR_insn_mulhh_ss,             NULL }, /* mulhh_ss */
2674
  { CODE_FOR_insn_mulhh_su,             NULL }, /* mulhh_su */
2675
  { CODE_FOR_insn_mulhh_uu,             NULL }, /* mulhh_uu */
2676
  { CODE_FOR_insn_mulhha_ss,            NULL }, /* mulhha_ss */
2677
  { CODE_FOR_insn_mulhha_su,            NULL }, /* mulhha_su */
2678
  { CODE_FOR_insn_mulhha_uu,            NULL }, /* mulhha_uu */
2679
  { CODE_FOR_insn_mulhhsa_uu,           NULL }, /* mulhhsa_uu */
2680
  { CODE_FOR_insn_mulhl_ss,             NULL }, /* mulhl_ss */
2681
  { CODE_FOR_insn_mulhl_su,             NULL }, /* mulhl_su */
2682
  { CODE_FOR_insn_mulhl_us,             NULL }, /* mulhl_us */
2683
  { CODE_FOR_insn_mulhl_uu,             NULL }, /* mulhl_uu */
2684
  { CODE_FOR_insn_mulhla_ss,            NULL }, /* mulhla_ss */
2685
  { CODE_FOR_insn_mulhla_su,            NULL }, /* mulhla_su */
2686
  { CODE_FOR_insn_mulhla_us,            NULL }, /* mulhla_us */
2687
  { CODE_FOR_insn_mulhla_uu,            NULL }, /* mulhla_uu */
2688
  { CODE_FOR_insn_mulhlsa_uu,           NULL }, /* mulhlsa_uu */
2689
  { CODE_FOR_insn_mulll_ss,             NULL }, /* mulll_ss */
2690
  { CODE_FOR_insn_mulll_su,             NULL }, /* mulll_su */
2691
  { CODE_FOR_insn_mulll_uu,             NULL }, /* mulll_uu */
2692
  { CODE_FOR_insn_mullla_ss,            NULL }, /* mullla_ss */
2693
  { CODE_FOR_insn_mullla_su,            NULL }, /* mullla_su */
2694
  { CODE_FOR_insn_mullla_uu,            NULL }, /* mullla_uu */
2695
  { CODE_FOR_insn_mulllsa_uu,           NULL }, /* mulllsa_uu */
2696
  { CODE_FOR_insn_mvnz,                 NULL }, /* mvnz */
2697
  { CODE_FOR_insn_mvz,                  NULL }, /* mvz */
2698
  { CODE_FOR_insn_mz,                   NULL }, /* mz */
2699
  { CODE_FOR_insn_mzb,                  NULL }, /* mzb */
2700
  { CODE_FOR_insn_mzh,                  NULL }, /* mzh */
2701
  { CODE_FOR_insn_nap,                  NULL }, /* nap */
2702
  { CODE_FOR_nop,                       NULL }, /* nop */
2703
  { CODE_FOR_insn_nor,                  NULL }, /* nor */
2704
  { CODE_FOR_iorsi3,                    NULL }, /* or */
2705
  { CODE_FOR_insn_packbs_u,             NULL }, /* packbs_u */
2706
  { CODE_FOR_insn_packhb,               NULL }, /* packhb */
2707
  { CODE_FOR_insn_packhs,               NULL }, /* packhs */
2708
  { CODE_FOR_insn_packlb,               NULL }, /* packlb */
2709
  { CODE_FOR_popcountsi2,               NULL }, /* pcnt */
2710
  { CODE_FOR_insn_prefetch,             NULL }, /* prefetch */
2711
  { CODE_FOR_insn_prefetch_L1,          NULL }, /* prefetch_L1 */
2712
  { CODE_FOR_rotlsi3,                   NULL }, /* rl */
2713
  { CODE_FOR_insn_s1a,                  NULL }, /* s1a */
2714
  { CODE_FOR_insn_s2a,                  NULL }, /* s2a */
2715
  { CODE_FOR_insn_s3a,                  NULL }, /* s3a */
2716
  { CODE_FOR_insn_sadab_u,              NULL }, /* sadab_u */
2717
  { CODE_FOR_insn_sadah,                NULL }, /* sadah */
2718
  { CODE_FOR_insn_sadah_u,              NULL }, /* sadah_u */
2719
  { CODE_FOR_insn_sadb_u,               NULL }, /* sadb_u */
2720
  { CODE_FOR_insn_sadh,                 NULL }, /* sadh */
2721
  { CODE_FOR_insn_sadh_u,               NULL }, /* sadh_u */
2722
  { CODE_FOR_insn_sb,                   NULL }, /* sb */
2723
  { CODE_FOR_insn_seq,                  NULL }, /* seq */
2724
  { CODE_FOR_insn_seqb,                 NULL }, /* seqb */
2725
  { CODE_FOR_insn_seqh,                 NULL }, /* seqh */
2726
  { CODE_FOR_insn_seqib,                NULL }, /* seqib */
2727
  { CODE_FOR_insn_seqih,                NULL }, /* seqih */
2728
  { CODE_FOR_insn_sh,                   NULL }, /* sh */
2729
  { CODE_FOR_ashlsi3,                   NULL }, /* shl */
2730
  { CODE_FOR_insn_shlb,                 NULL }, /* shlb */
2731
  { CODE_FOR_insn_shlh,                 NULL }, /* shlh */
2732
  { CODE_FOR_insn_shlb,                 NULL }, /* shlib */
2733
  { CODE_FOR_insn_shlh,                 NULL }, /* shlih */
2734
  { CODE_FOR_lshrsi3,                   NULL }, /* shr */
2735
  { CODE_FOR_insn_shrb,                 NULL }, /* shrb */
2736
  { CODE_FOR_insn_shrh,                 NULL }, /* shrh */
2737
  { CODE_FOR_insn_shrb,                 NULL }, /* shrib */
2738
  { CODE_FOR_insn_shrh,                 NULL }, /* shrih */
2739
  { CODE_FOR_insn_slt,                  NULL }, /* slt */
2740
  { CODE_FOR_insn_slt_u,                NULL }, /* slt_u */
2741
  { CODE_FOR_insn_sltb,                 NULL }, /* sltb */
2742
  { CODE_FOR_insn_sltb_u,               NULL }, /* sltb_u */
2743
  { CODE_FOR_insn_slte,                 NULL }, /* slte */
2744
  { CODE_FOR_insn_slte_u,               NULL }, /* slte_u */
2745
  { CODE_FOR_insn_slteb,                NULL }, /* slteb */
2746
  { CODE_FOR_insn_slteb_u,              NULL }, /* slteb_u */
2747
  { CODE_FOR_insn_slteh,                NULL }, /* slteh */
2748
  { CODE_FOR_insn_slteh_u,              NULL }, /* slteh_u */
2749
  { CODE_FOR_insn_slth,                 NULL }, /* slth */
2750
  { CODE_FOR_insn_slth_u,               NULL }, /* slth_u */
2751
  { CODE_FOR_insn_sltib,                NULL }, /* sltib */
2752
  { CODE_FOR_insn_sltib_u,              NULL }, /* sltib_u */
2753
  { CODE_FOR_insn_sltih,                NULL }, /* sltih */
2754
  { CODE_FOR_insn_sltih_u,              NULL }, /* sltih_u */
2755
  { CODE_FOR_insn_sne,                  NULL }, /* sne */
2756
  { CODE_FOR_insn_sneb,                 NULL }, /* sneb */
2757
  { CODE_FOR_insn_sneh,                 NULL }, /* sneh */
2758
  { CODE_FOR_ashrsi3,                   NULL }, /* sra */
2759
  { CODE_FOR_insn_srab,                 NULL }, /* srab */
2760
  { CODE_FOR_insn_srah,                 NULL }, /* srah */
2761
  { CODE_FOR_insn_srab,                 NULL }, /* sraib */
2762
  { CODE_FOR_insn_srah,                 NULL }, /* sraih */
2763
  { CODE_FOR_subsi3,                    NULL }, /* sub */
2764
  { CODE_FOR_insn_subb,                 NULL }, /* subb */
2765
  { CODE_FOR_insn_subbs_u,              NULL }, /* subbs_u */
2766
  { CODE_FOR_insn_subh,                 NULL }, /* subh */
2767
  { CODE_FOR_insn_subhs,                NULL }, /* subhs */
2768
  { CODE_FOR_sssubsi3,                  NULL }, /* subs */
2769
  { CODE_FOR_insn_sw,                   NULL }, /* sw */
2770
  { CODE_FOR_insn_tblidxb0,             NULL }, /* tblidxb0 */
2771
  { CODE_FOR_insn_tblidxb1,             NULL }, /* tblidxb1 */
2772
  { CODE_FOR_insn_tblidxb2,             NULL }, /* tblidxb2 */
2773
  { CODE_FOR_insn_tblidxb3,             NULL }, /* tblidxb3 */
2774
  { CODE_FOR_insn_tns,                  NULL }, /* tns */
2775
  { CODE_FOR_insn_wh64,                 NULL }, /* wh64 */
2776
  { CODE_FOR_xorsi3,                    NULL }, /* xor */
2777
  { CODE_FOR_tilepro_network_barrier,   NULL }, /* network_barrier */
2778
  { CODE_FOR_tilepro_idn0_receive,      NULL }, /* idn0_receive */
2779
  { CODE_FOR_tilepro_idn1_receive,      NULL }, /* idn1_receive */
2780
  { CODE_FOR_tilepro_idn_send,          NULL }, /* idn_send */
2781
  { CODE_FOR_tilepro_sn_receive,        NULL }, /* sn_receive */
2782
  { CODE_FOR_tilepro_sn_send,           NULL }, /* sn_send */
2783
  { CODE_FOR_tilepro_udn0_receive,      NULL }, /* udn0_receive */
2784
  { CODE_FOR_tilepro_udn1_receive,      NULL }, /* udn1_receive */
2785
  { CODE_FOR_tilepro_udn2_receive,      NULL }, /* udn2_receive */
2786
  { CODE_FOR_tilepro_udn3_receive,      NULL }, /* udn3_receive */
2787
  { CODE_FOR_tilepro_udn_send,          NULL }, /* udn_send */
2788
};
2789
 
2790
 
2791
/* Descriptor for one TILEPro builtin, consumed by
   tilepro_init_builtins when registering the builtin decls.  */
struct tilepro_builtin_def
{
  /* User-visible name of the builtin, e.g. "__insn_add".  */
  const char *name;
  /* Builtin code; indexes tilepro_builtin_info.  */
  enum tilepro_builtin code;
  /* True if the builtin has no side effects; such decls are marked
     TREE_READONLY so the optimizers may CSE calls to them.  */
  bool is_const;
  /* The first character is the return type.  Subsequent characters
     are the argument types. See char_to_type.  */
  const char *type;
};
2800
 
2801
 
2802
/* Table of all TILEPro builtins.  Note that several mnemonics that
   differ only in taking an immediate instead of a register operand
   (e.g. add/addi/addli, or/ori) intentionally share the same builtin
   code; tilepro_init_builtins records only the first decl seen for
   each code.  */
static const struct tilepro_builtin_def tilepro_builtins[] = {
  { "__insn_add",             TILEPRO_INSN_ADD,         true,  "lll"   },
  { "__insn_addb",            TILEPRO_INSN_ADDB,        true,  "lll"   },
  { "__insn_addbs_u",         TILEPRO_INSN_ADDBS_U,     false, "lll"   },
  { "__insn_addh",            TILEPRO_INSN_ADDH,        true,  "lll"   },
  { "__insn_addhs",           TILEPRO_INSN_ADDHS,       false, "lll"   },
  { "__insn_addi",            TILEPRO_INSN_ADD,         true,  "lll"   },
  { "__insn_addib",           TILEPRO_INSN_ADDIB,       true,  "lll"   },
  { "__insn_addih",           TILEPRO_INSN_ADDIH,       true,  "lll"   },
  { "__insn_addli",           TILEPRO_INSN_ADD,         true,  "lll"   },
  { "__insn_addlis",          TILEPRO_INSN_ADDLIS,      false, "lll"   },
  { "__insn_adds",            TILEPRO_INSN_ADDS,        false, "lll"   },
  { "__insn_adiffb_u",        TILEPRO_INSN_ADIFFB_U,    true,  "lll"   },
  { "__insn_adiffh",          TILEPRO_INSN_ADIFFH,      true,  "lll"   },
  { "__insn_and",             TILEPRO_INSN_AND,         true,  "lll"   },
  { "__insn_andi",            TILEPRO_INSN_AND,         true,  "lll"   },
  { "__insn_auli",            TILEPRO_INSN_AULI,        true,  "lll"   },
  { "__insn_avgb_u",          TILEPRO_INSN_AVGB_U,      true,  "lll"   },
  { "__insn_avgh",            TILEPRO_INSN_AVGH,        true,  "lll"   },
  { "__insn_bitx",            TILEPRO_INSN_BITX,        true,  "ll"    },
  { "__insn_bytex",           TILEPRO_INSN_BYTEX,       true,  "ll"    },
  { "__insn_clz",             TILEPRO_INSN_CLZ,         true,  "ll"    },
  { "__insn_crc32_32",        TILEPRO_INSN_CRC32_32,    true,  "lll"   },
  { "__insn_crc32_8",         TILEPRO_INSN_CRC32_8,     true,  "lll"   },
  { "__insn_ctz",             TILEPRO_INSN_CTZ,         true,  "ll"    },
  { "__insn_drain",           TILEPRO_INSN_DRAIN,       false, "v"     },
  { "__insn_dtlbpr",          TILEPRO_INSN_DTLBPR,      false, "vl"    },
  { "__insn_dword_align",     TILEPRO_INSN_DWORD_ALIGN, true,  "lllk"  },
  { "__insn_finv",            TILEPRO_INSN_FINV,        false, "vk"    },
  { "__insn_flush",           TILEPRO_INSN_FLUSH,       false, "vk"    },
  { "__insn_fnop",            TILEPRO_INSN_FNOP,        false, "v"     },
  { "__insn_icoh",            TILEPRO_INSN_ICOH,        false, "vk"    },
  { "__insn_ill",             TILEPRO_INSN_ILL,         false, "v"     },
  { "__insn_info",            TILEPRO_INSN_INFO,        false, "vl"    },
  { "__insn_infol",           TILEPRO_INSN_INFOL,       false, "vl"    },
  { "__insn_inthb",           TILEPRO_INSN_INTHB,       true,  "lll"   },
  { "__insn_inthh",           TILEPRO_INSN_INTHH,       true,  "lll"   },
  { "__insn_intlb",           TILEPRO_INSN_INTLB,       true,  "lll"   },
  { "__insn_intlh",           TILEPRO_INSN_INTLH,       true,  "lll"   },
  { "__insn_inv",             TILEPRO_INSN_INV,         false, "vp"    },
  { "__insn_lb",              TILEPRO_INSN_LB,          false, "lk"    },
  { "__insn_lb_u",            TILEPRO_INSN_LB_U,        false, "lk"    },
  { "__insn_lh",              TILEPRO_INSN_LH,          false, "lk"    },
  { "__insn_lh_u",            TILEPRO_INSN_LH_U,        false, "lk"    },
  { "__insn_lnk",             TILEPRO_INSN_LNK,         true,  "l"     },
  { "__insn_lw",              TILEPRO_INSN_LW,          false, "lk"    },
  { "__insn_lw_na",           TILEPRO_INSN_LW_NA,       false, "lk"    },
  { "__insn_lb_L2",           TILEPRO_INSN_LB_L2,       false, "lk"    },
  { "__insn_lb_u_L2",         TILEPRO_INSN_LB_U_L2,     false, "lk"    },
  { "__insn_lh_L2",           TILEPRO_INSN_LH_L2,       false, "lk"    },
  { "__insn_lh_u_L2",         TILEPRO_INSN_LH_U_L2,     false, "lk"    },
  { "__insn_lw_L2",           TILEPRO_INSN_LW_L2,       false, "lk"    },
  { "__insn_lw_na_L2",        TILEPRO_INSN_LW_NA_L2,    false, "lk"    },
  { "__insn_lb_miss",         TILEPRO_INSN_LB_MISS,     false, "lk"    },
  { "__insn_lb_u_miss",       TILEPRO_INSN_LB_U_MISS,   false, "lk"    },
  { "__insn_lh_miss",         TILEPRO_INSN_LH_MISS,     false, "lk"    },
  { "__insn_lh_u_miss",       TILEPRO_INSN_LH_U_MISS,   false, "lk"    },
  { "__insn_lw_miss",         TILEPRO_INSN_LW_MISS,     false, "lk"    },
  { "__insn_lw_na_miss",      TILEPRO_INSN_LW_NA_MISS,  false, "lk"    },
  { "__insn_maxb_u",          TILEPRO_INSN_MAXB_U,      true,  "lll"   },
  { "__insn_maxh",            TILEPRO_INSN_MAXH,        true,  "lll"   },
  { "__insn_maxib_u",         TILEPRO_INSN_MAXIB_U,     true,  "lll"   },
  { "__insn_maxih",           TILEPRO_INSN_MAXIH,       true,  "lll"   },
  { "__insn_mf",              TILEPRO_INSN_MF,          false, "v"     },
  { "__insn_mfspr",           TILEPRO_INSN_MFSPR,       false, "ll"    },
  { "__insn_minb_u",          TILEPRO_INSN_MINB_U,      true,  "lll"   },
  { "__insn_minh",            TILEPRO_INSN_MINH,        true,  "lll"   },
  { "__insn_minib_u",         TILEPRO_INSN_MINIB_U,     true,  "lll"   },
  { "__insn_minih",           TILEPRO_INSN_MINIH,       true,  "lll"   },
  { "__insn_mm",              TILEPRO_INSN_MM,          true,  "lllll" },
  { "__insn_mnz",             TILEPRO_INSN_MNZ,         true,  "lll"   },
  { "__insn_mnzb",            TILEPRO_INSN_MNZB,        true,  "lll"   },
  { "__insn_mnzh",            TILEPRO_INSN_MNZH,        true,  "lll"   },
  { "__insn_move",            TILEPRO_INSN_MOVE,        true,  "ll"    },
  { "__insn_movei",           TILEPRO_INSN_MOVE,        true,  "ll"    },
  { "__insn_moveli",          TILEPRO_INSN_MOVE,        true,  "ll"    },
  { "__insn_movelis",         TILEPRO_INSN_MOVELIS,     false, "ll"    },
  { "__insn_mtspr",           TILEPRO_INSN_MTSPR,       false, "vll"   },
  { "__insn_mulhh_ss",        TILEPRO_INSN_MULHH_SS,    true,  "lll"   },
  { "__insn_mulhh_su",        TILEPRO_INSN_MULHH_SU,    true,  "lll"   },
  { "__insn_mulhh_uu",        TILEPRO_INSN_MULHH_UU,    true,  "lll"   },
  { "__insn_mulhha_ss",       TILEPRO_INSN_MULHHA_SS,   true,  "llll"  },
  { "__insn_mulhha_su",       TILEPRO_INSN_MULHHA_SU,   true,  "llll"  },
  { "__insn_mulhha_uu",       TILEPRO_INSN_MULHHA_UU,   true,  "llll"  },
  { "__insn_mulhhsa_uu",      TILEPRO_INSN_MULHHSA_UU,  true,  "llll"  },
  { "__insn_mulhl_ss",        TILEPRO_INSN_MULHL_SS,    true,  "lll"   },
  { "__insn_mulhl_su",        TILEPRO_INSN_MULHL_SU,    true,  "lll"   },
  { "__insn_mulhl_us",        TILEPRO_INSN_MULHL_US,    true,  "lll"   },
  { "__insn_mulhl_uu",        TILEPRO_INSN_MULHL_UU,    true,  "lll"   },
  { "__insn_mulhla_ss",       TILEPRO_INSN_MULHLA_SS,   true,  "llll"  },
  { "__insn_mulhla_su",       TILEPRO_INSN_MULHLA_SU,   true,  "llll"  },
  { "__insn_mulhla_us",       TILEPRO_INSN_MULHLA_US,   true,  "llll"  },
  { "__insn_mulhla_uu",       TILEPRO_INSN_MULHLA_UU,   true,  "llll"  },
  { "__insn_mulhlsa_uu",      TILEPRO_INSN_MULHLSA_UU,  true,  "llll"  },
  { "__insn_mulll_ss",        TILEPRO_INSN_MULLL_SS,    true,  "lll"   },
  { "__insn_mulll_su",        TILEPRO_INSN_MULLL_SU,    true,  "lll"   },
  { "__insn_mulll_uu",        TILEPRO_INSN_MULLL_UU,    true,  "lll"   },
  { "__insn_mullla_ss",       TILEPRO_INSN_MULLLA_SS,   true,  "llll"  },
  { "__insn_mullla_su",       TILEPRO_INSN_MULLLA_SU,   true,  "llll"  },
  { "__insn_mullla_uu",       TILEPRO_INSN_MULLLA_UU,   true,  "llll"  },
  { "__insn_mulllsa_uu",      TILEPRO_INSN_MULLLSA_UU,  true,  "llll"  },
  { "__insn_mvnz",            TILEPRO_INSN_MVNZ,        true,  "llll"  },
  { "__insn_mvz",             TILEPRO_INSN_MVZ,         true,  "llll"  },
  { "__insn_mz",              TILEPRO_INSN_MZ,          true,  "lll"   },
  { "__insn_mzb",             TILEPRO_INSN_MZB,         true,  "lll"   },
  { "__insn_mzh",             TILEPRO_INSN_MZH,         true,  "lll"   },
  { "__insn_nap",             TILEPRO_INSN_NAP,         false, "v"     },
  { "__insn_nop",             TILEPRO_INSN_NOP,         true,  "v"     },
  { "__insn_nor",             TILEPRO_INSN_NOR,         true,  "lll"   },
  { "__insn_or",              TILEPRO_INSN_OR,          true,  "lll"   },
  { "__insn_ori",             TILEPRO_INSN_OR,          true,  "lll"   },
  { "__insn_packbs_u",        TILEPRO_INSN_PACKBS_U,    false, "lll"   },
  { "__insn_packhb",          TILEPRO_INSN_PACKHB,      true,  "lll"   },
  { "__insn_packhs",          TILEPRO_INSN_PACKHS,      false, "lll"   },
  { "__insn_packlb",          TILEPRO_INSN_PACKLB,      true,  "lll"   },
  { "__insn_pcnt",            TILEPRO_INSN_PCNT,        true,  "ll"    },
  { "__insn_prefetch",        TILEPRO_INSN_PREFETCH,    false, "vk"    },
  { "__insn_prefetch_L1",     TILEPRO_INSN_PREFETCH_L1, false, "vk"    },
  { "__insn_rl",              TILEPRO_INSN_RL,          true,  "lll"   },
  { "__insn_rli",             TILEPRO_INSN_RL,          true,  "lll"   },
  { "__insn_s1a",             TILEPRO_INSN_S1A,         true,  "lll"   },
  { "__insn_s2a",             TILEPRO_INSN_S2A,         true,  "lll"   },
  { "__insn_s3a",             TILEPRO_INSN_S3A,         true,  "lll"   },
  { "__insn_sadab_u",         TILEPRO_INSN_SADAB_U,     true,  "llll"  },
  { "__insn_sadah",           TILEPRO_INSN_SADAH,       true,  "llll"  },
  { "__insn_sadah_u",         TILEPRO_INSN_SADAH_U,     true,  "llll"  },
  { "__insn_sadb_u",          TILEPRO_INSN_SADB_U,      true,  "lll"   },
  { "__insn_sadh",            TILEPRO_INSN_SADH,        true,  "lll"   },
  { "__insn_sadh_u",          TILEPRO_INSN_SADH_U,      true,  "lll"   },
  { "__insn_sb",              TILEPRO_INSN_SB,          false, "vpl"   },
  { "__insn_seq",             TILEPRO_INSN_SEQ,         true,  "lll"   },
  { "__insn_seqb",            TILEPRO_INSN_SEQB,        true,  "lll"   },
  { "__insn_seqh",            TILEPRO_INSN_SEQH,        true,  "lll"   },
  { "__insn_seqi",            TILEPRO_INSN_SEQ,         true,  "lll"   },
  { "__insn_seqib",           TILEPRO_INSN_SEQIB,       true,  "lll"   },
  { "__insn_seqih",           TILEPRO_INSN_SEQIH,       true,  "lll"   },
  { "__insn_sh",              TILEPRO_INSN_SH,          false, "vpl"   },
  { "__insn_shl",             TILEPRO_INSN_SHL,         true,  "lll"   },
  { "__insn_shlb",            TILEPRO_INSN_SHLB,        true,  "lll"   },
  { "__insn_shlh",            TILEPRO_INSN_SHLH,        true,  "lll"   },
  { "__insn_shli",            TILEPRO_INSN_SHL,         true,  "lll"   },
  { "__insn_shlib",           TILEPRO_INSN_SHLIB,       true,  "lll"   },
  { "__insn_shlih",           TILEPRO_INSN_SHLIH,       true,  "lll"   },
  { "__insn_shr",             TILEPRO_INSN_SHR,         true,  "lll"   },
  { "__insn_shrb",            TILEPRO_INSN_SHRB,        true,  "lll"   },
  { "__insn_shrh",            TILEPRO_INSN_SHRH,        true,  "lll"   },
  { "__insn_shri",            TILEPRO_INSN_SHR,         true,  "lll"   },
  { "__insn_shrib",           TILEPRO_INSN_SHRIB,       true,  "lll"   },
  { "__insn_shrih",           TILEPRO_INSN_SHRIH,       true,  "lll"   },
  { "__insn_slt",             TILEPRO_INSN_SLT,         true,  "lll"   },
  { "__insn_slt_u",           TILEPRO_INSN_SLT_U,       true,  "lll"   },
  { "__insn_sltb",            TILEPRO_INSN_SLTB,        true,  "lll"   },
  { "__insn_sltb_u",          TILEPRO_INSN_SLTB_U,      true,  "lll"   },
  { "__insn_slte",            TILEPRO_INSN_SLTE,        true,  "lll"   },
  { "__insn_slte_u",          TILEPRO_INSN_SLTE_U,      true,  "lll"   },
  { "__insn_slteb",           TILEPRO_INSN_SLTEB,       true,  "lll"   },
  { "__insn_slteb_u",         TILEPRO_INSN_SLTEB_U,     true,  "lll"   },
  { "__insn_slteh",           TILEPRO_INSN_SLTEH,       true,  "lll"   },
  { "__insn_slteh_u",         TILEPRO_INSN_SLTEH_U,     true,  "lll"   },
  { "__insn_slth",            TILEPRO_INSN_SLTH,        true,  "lll"   },
  { "__insn_slth_u",          TILEPRO_INSN_SLTH_U,      true,  "lll"   },
  { "__insn_slti",            TILEPRO_INSN_SLT,         true,  "lll"   },
  { "__insn_slti_u",          TILEPRO_INSN_SLT_U,       true,  "lll"   },
  { "__insn_sltib",           TILEPRO_INSN_SLTIB,       true,  "lll"   },
  { "__insn_sltib_u",         TILEPRO_INSN_SLTIB_U,     true,  "lll"   },
  { "__insn_sltih",           TILEPRO_INSN_SLTIH,       true,  "lll"   },
  { "__insn_sltih_u",         TILEPRO_INSN_SLTIH_U,     true,  "lll"   },
  { "__insn_sne",             TILEPRO_INSN_SNE,         true,  "lll"   },
  { "__insn_sneb",            TILEPRO_INSN_SNEB,        true,  "lll"   },
  { "__insn_sneh",            TILEPRO_INSN_SNEH,        true,  "lll"   },
  { "__insn_sra",             TILEPRO_INSN_SRA,         true,  "lll"   },
  { "__insn_srab",            TILEPRO_INSN_SRAB,        true,  "lll"   },
  { "__insn_srah",            TILEPRO_INSN_SRAH,        true,  "lll"   },
  { "__insn_srai",            TILEPRO_INSN_SRA,         true,  "lll"   },
  { "__insn_sraib",           TILEPRO_INSN_SRAIB,       true,  "lll"   },
  { "__insn_sraih",           TILEPRO_INSN_SRAIH,       true,  "lll"   },
  { "__insn_sub",             TILEPRO_INSN_SUB,         true,  "lll"   },
  { "__insn_subb",            TILEPRO_INSN_SUBB,        true,  "lll"   },
  { "__insn_subbs_u",         TILEPRO_INSN_SUBBS_U,     false, "lll"   },
  { "__insn_subh",            TILEPRO_INSN_SUBH,        true,  "lll"   },
  { "__insn_subhs",           TILEPRO_INSN_SUBHS,       false, "lll"   },
  { "__insn_subs",            TILEPRO_INSN_SUBS,        false, "lll"   },
  { "__insn_sw",              TILEPRO_INSN_SW,          false, "vpl"   },
  { "__insn_tblidxb0",        TILEPRO_INSN_TBLIDXB0,    true,  "lll"   },
  { "__insn_tblidxb1",        TILEPRO_INSN_TBLIDXB1,    true,  "lll"   },
  { "__insn_tblidxb2",        TILEPRO_INSN_TBLIDXB2,    true,  "lll"   },
  { "__insn_tblidxb3",        TILEPRO_INSN_TBLIDXB3,    true,  "lll"   },
  { "__insn_tns",             TILEPRO_INSN_TNS,         false, "lp"    },
  { "__insn_wh64",            TILEPRO_INSN_WH64,        false, "vp"    },
  { "__insn_xor",             TILEPRO_INSN_XOR,         true,  "lll"   },
  { "__insn_xori",            TILEPRO_INSN_XOR,         true,  "lll"   },
  { "__tile_network_barrier", TILEPRO_NETWORK_BARRIER,  false, "v"     },
  { "__tile_idn0_receive",    TILEPRO_IDN0_RECEIVE,     false, "l"     },
  { "__tile_idn1_receive",    TILEPRO_IDN1_RECEIVE,     false, "l"     },
  { "__tile_idn_send",        TILEPRO_IDN_SEND,         false, "vl"    },
  { "__tile_sn_receive",      TILEPRO_SN_RECEIVE,       false, "l"     },
  { "__tile_sn_send",         TILEPRO_SN_SEND,          false, "vl"    },
  { "__tile_udn0_receive",    TILEPRO_UDN0_RECEIVE,     false, "l"     },
  { "__tile_udn1_receive",    TILEPRO_UDN1_RECEIVE,     false, "l"     },
  { "__tile_udn2_receive",    TILEPRO_UDN2_RECEIVE,     false, "l"     },
  { "__tile_udn3_receive",    TILEPRO_UDN3_RECEIVE,     false, "l"     },
  { "__tile_udn_send",        TILEPRO_UDN_SEND,         false, "vl"    },
};
3005
 
3006
 
3007
/* Convert a character in a builtin type string to a tree type.  */
3008
static tree
3009
char_to_type (char c)
3010
{
3011
  static tree volatile_ptr_type_node = NULL;
3012
  static tree volatile_const_ptr_type_node = NULL;
3013
 
3014
  if (volatile_ptr_type_node == NULL)
3015
    {
3016
      volatile_ptr_type_node =
3017
        build_pointer_type (build_qualified_type (void_type_node,
3018
                                                  TYPE_QUAL_VOLATILE));
3019
      volatile_const_ptr_type_node =
3020
        build_pointer_type (build_qualified_type (void_type_node,
3021
                                                  TYPE_QUAL_CONST
3022
                                                  | TYPE_QUAL_VOLATILE));
3023
    }
3024
 
3025
  switch (c)
3026
    {
3027
    case 'v':
3028
      return void_type_node;
3029
    case 'l':
3030
      return long_unsigned_type_node;
3031
    case 'p':
3032
      return volatile_ptr_type_node;
3033
    case 'k':
3034
      return volatile_const_ptr_type_node;
3035
    default:
3036
      gcc_unreachable ();
3037
    }
3038
}
3039
 
3040
 
3041
/* Implement TARGET_INIT_BUILTINS.  Register every entry of
   tilepro_builtins as a machine-specific builtin function decl.  */
static void
tilepro_init_builtins (void)
{
  size_t i;

  for (i = 0; i < ARRAY_SIZE (tilepro_builtins); i++)
    {
      const struct tilepro_builtin_def *p = &tilepro_builtins[i];
      tree ftype, ret_type, arg_type_list = void_list_node;
      tree decl;
      int j;

      /* Build the argument type list from the type string.  Walk
         backwards so the cons list ends up in declaration order.
         Index 0 is the return type, so stop before it.  */
      for (j = strlen (p->type) - 1; j > 0; j--)
        {
          arg_type_list =
            tree_cons (NULL_TREE, char_to_type (p->type[j]), arg_type_list);
        }

      ret_type = char_to_type (p->type[0]);

      ftype = build_function_type (ret_type, arg_type_list);

      decl = add_builtin_function (p->name, ftype, p->code, BUILT_IN_MD,
                                   NULL, NULL);

      /* Side-effect-free builtins may be CSEd by the optimizers.  */
      if (p->is_const)
        TREE_READONLY (decl) = 1;
      TREE_NOTHROW (decl) = 1;

      /* Several builtin names can share one code (e.g. add/addi);
         record only the first decl seen for each code.  */
      if (tilepro_builtin_info[p->code].fndecl == NULL)
        tilepro_builtin_info[p->code].fndecl = decl;
    }
}
3075
 
3076
 
3077
/* Implement TARGET_EXPAND_BUILTIN.  Expand the call EXP to a TILEPro
   builtin into RTL, emitting the matching insn pattern.  Returns the
   result rtx for a value-returning builtin, const0_rtx for a void
   builtin or a recoverable operand error, or NULL_RTX on failure.  */
static rtx
tilepro_expand_builtin (tree exp,
                        rtx target,
                        rtx subtarget ATTRIBUTE_UNUSED,
                        enum machine_mode mode ATTRIBUTE_UNUSED,
                        int ignore ATTRIBUTE_UNUSED)
{
#define MAX_BUILTIN_ARGS 4

  tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
  unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
  tree arg;
  call_expr_arg_iterator iter;
  enum insn_code icode;
  /* op[0] is the destination (if any); op[1..] hold the arguments.  */
  rtx op[MAX_BUILTIN_ARGS + 1], pat;
  int opnum;
  bool nonvoid;
  insn_gen_fn fn;

  if (fcode >= TILEPRO_BUILTIN_max)
    internal_error ("bad builtin fcode");
  icode = tilepro_builtin_info[fcode].icode;
  if (icode == 0)
    internal_error ("bad builtin icode");

  nonvoid = TREE_TYPE (TREE_TYPE (fndecl)) != void_type_node;

  /* For a non-void builtin, operand 0 is the destination, so the
     first argument goes into operand 1.  */
  opnum = nonvoid;
  FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
  {
    const struct insn_operand_data *insn_op;

    if (arg == error_mark_node)
      return NULL_RTX;
    if (opnum > MAX_BUILTIN_ARGS)
      return NULL_RTX;

    insn_op = &insn_data[icode].operand[opnum];

    op[opnum] = expand_expr (arg, NULL_RTX, insn_op->mode, EXPAND_NORMAL);

    /* If the expanded argument does not satisfy the operand's
       predicate, try once more after forcing it into a register.  */
    if (!(*insn_op->predicate) (op[opnum], insn_op->mode))
      op[opnum] = copy_to_mode_reg (insn_op->mode, op[opnum]);

    if (!(*insn_op->predicate) (op[opnum], insn_op->mode))
      {
        /* We still failed to meet the predicate even after moving
           into a register. Assume we needed an immediate.  */
        error_at (EXPR_LOCATION (exp),
                  "operand must be an immediate of the right size");
        return const0_rtx;
      }

    opnum++;
  }

  if (nonvoid)
    {
      /* Reuse TARGET if it has the right mode and satisfies the
         destination predicate; otherwise allocate a fresh pseudo.  */
      enum machine_mode tmode = insn_data[icode].operand[0].mode;
      if (!target
          || GET_MODE (target) != tmode
          || !(*insn_data[icode].operand[0].predicate) (target, tmode))
        target = gen_reg_rtx (tmode);
      op[0] = target;
    }

  /* Dispatch on the total operand count (result + arguments).  */
  fn = GEN_FCN (icode);
  switch (opnum)
    {
    case 0:
      pat = fn (NULL_RTX);
      break;
    case 1:
      pat = fn (op[0]);
      break;
    case 2:
      pat = fn (op[0], op[1]);
      break;
    case 3:
      pat = fn (op[0], op[1], op[2]);
      break;
    case 4:
      pat = fn (op[0], op[1], op[2], op[3]);
      break;
    case 5:
      pat = fn (op[0], op[1], op[2], op[3], op[4]);
      break;
    default:
      gcc_unreachable ();
    }
  if (!pat)
    return NULL_RTX;
  emit_insn (pat);

  if (nonvoid)
    return target;
  else
    return const0_rtx;
}
3177
 
3178
 
3179
/* Implement TARGET_BUILTIN_DECL.  */
3180
static tree
3181
tilepro_builtin_decl (unsigned code, bool initialize_p ATTRIBUTE_UNUSED)
3182
{
3183
  if (code >= TILEPRO_BUILTIN_max)
3184
    return error_mark_node;
3185
 
3186
  return tilepro_builtin_info[code].fndecl;
3187
}
3188
 
3189
 
3190
 
3191
/* Stack frames  */
3192
 
3193
/* Return whether REGNO needs to be saved in the stack frame.  */
3194
static bool
3195
need_to_save_reg (unsigned int regno)
3196
{
3197
  if (!fixed_regs[regno] && !call_used_regs[regno]
3198
      && df_regs_ever_live_p (regno))
3199
    return true;
3200
 
3201
  if (flag_pic
3202
      && (regno == PIC_OFFSET_TABLE_REGNUM
3203
          || regno == TILEPRO_PIC_TEXT_LABEL_REGNUM)
3204
      && (crtl->uses_pic_offset_table || crtl->saves_all_registers))
3205
    return true;
3206
 
3207
  if (crtl->calls_eh_return)
3208
    {
3209
      unsigned i;
3210
      for (i = 0; EH_RETURN_DATA_REGNO (i) != INVALID_REGNUM; i++)
3211
        {
3212
          if (regno == EH_RETURN_DATA_REGNO (i))
3213
            return true;
3214
        }
3215
    }
3216
 
3217
  return false;
3218
}
3219
 
3220
 
3221
/* Return the size of the register savev area.  This function is only
3222
   correct starting with local register allocation */
3223
static int
3224
tilepro_saved_regs_size (void)
3225
{
3226
  int reg_save_size = 0;
3227
  int regno;
3228
  int offset_to_frame;
3229
  int align_mask;
3230
 
3231
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
3232
    if (need_to_save_reg (regno))
3233
      reg_save_size += UNITS_PER_WORD;
3234
 
3235
  /* Pad out the register save area if necessary to make
3236
     frame_pointer_rtx be as aligned as the stack pointer.  */
3237
  offset_to_frame = crtl->args.pretend_args_size + reg_save_size;
3238
  align_mask = (STACK_BOUNDARY / BITS_PER_UNIT) - 1;
3239
  reg_save_size += (-offset_to_frame) & align_mask;
3240
 
3241
  return reg_save_size;
3242
}
3243
 
3244
 
3245
/* Round up frame size SIZE.  */
3246
static int
3247
round_frame_size (int size)
3248
{
3249
  return ((size + STACK_BOUNDARY / BITS_PER_UNIT - 1)
3250
          & -STACK_BOUNDARY / BITS_PER_UNIT);
3251
}
3252
 
3253
 
3254
/* Emit a store in the stack frame to save REGNO at address ADDR, and
   emit the corresponding REG_CFA_OFFSET note described by CFA and
   CFA_OFFSET.  REGNO_NOTE is the register reported in the unwind
   note, which may differ from REGNO when the saved value was first
   copied into a temporary.  Return the emitted insn.  */
static rtx
frame_emit_store (int regno, int regno_note, rtx addr, rtx cfa,
                  int cfa_offset)
{
  rtx reg = gen_rtx_REG (Pmode, regno);
  rtx mem = gen_frame_mem (Pmode, addr);
  rtx mov = gen_movsi (mem, reg);

  /* Describe what just happened in a way that dwarf understands.  We
     use temporary registers to hold the address to make scheduling
     easier, and use the REG_CFA_OFFSET to describe the address as an
     offset from the CFA.  */
  rtx reg_note = gen_rtx_REG (Pmode, regno_note);
  rtx cfa_relative_addr = gen_rtx_PLUS (Pmode, cfa, gen_int_si (cfa_offset));
  rtx cfa_relative_mem = gen_frame_mem (Pmode, cfa_relative_addr);
  rtx real = gen_rtx_SET (VOIDmode, cfa_relative_mem, reg_note);
  add_reg_note (mov, REG_CFA_OFFSET, real);

  return emit_insn (mov);
}
3277
 
3278
 
3279
/* Emit a load in the stack frame to load REGNO from address ADDR.
3280
   Add a REG_CFA_RESTORE note to CFA_RESTORES if CFA_RESTORES is
3281
   non-null.  Return the emitted insn.  */
3282
static rtx
3283
frame_emit_load (int regno, rtx addr, rtx *cfa_restores)
3284
{
3285
  rtx reg = gen_rtx_REG (Pmode, regno);
3286
  rtx mem = gen_frame_mem (Pmode, addr);
3287
  if (cfa_restores)
3288
    *cfa_restores = alloc_reg_note (REG_CFA_RESTORE, reg, *cfa_restores);
3289
  return emit_insn (gen_movsi (reg, mem));
3290
}
3291
 
3292
 
3293
/* Helper function to set RTX_FRAME_RELATED_P on instructions,
   including sequences.  Closes the sequence opened by the FRP macro
   below, marks everything it produced frame-related, re-emits it
   into the main insn stream, and returns the result (NULL_RTX for an
   empty sequence).  */
static rtx
set_frame_related_p (void)
{
  rtx seq = get_insns ();
  rtx insn;

  end_sequence ();

  if (!seq)
    return NULL_RTX;

  if (INSN_P (seq))
    {
      /* The sequence already contains real insns: walk the chain and
         mark each one before re-emitting it.  */
      insn = seq;
      while (insn != NULL_RTX)
        {
          RTX_FRAME_RELATED_P (insn) = 1;
          insn = NEXT_INSN (insn);
        }
      seq = emit_insn (seq);
    }
  else
    {
      /* A bare pattern: emit it first so we have an insn whose
         frame-related bit can be set.  */
      seq = emit_insn (seq);
      RTX_FRAME_RELATED_P (seq) = 1;
    }
  return seq;
}
3323
 
3324
 
3325
#define FRP(exp)  (start_sequence (), exp, set_frame_related_p ())
3326
 
3327
/* This emits code for 'sp += offset'.

   The ABI only allows us to modify 'sp' in a single 'addi' or
   'addli', so the backtracer understands it. Larger amounts cannot
   use those instructions, so are added by placing the offset into a
   large register and using 'add'.

   This happens after reload, so we need to expand it ourselves.

   OFFSET is the (signed) byte adjustment.  NEXT_SCRATCH_REGNO points
   at the next free scratch register number and is post-decremented if
   a scratch is consumed.  If FRAME_RELATED, attach a
   REG_CFA_ADJUST_CFA note describing the adjustment for dwarf.
   REG_NOTES is an existing note list to install on the insn.
   Returns the emitted sp-adjusting insn.  */
static rtx
emit_sp_adjust (int offset, int *next_scratch_regno, bool frame_related,
                rtx reg_notes)
{
  rtx to_add;
  rtx imm_rtx = gen_int_si (offset);

  rtx insn;
  if (satisfies_constraint_J (imm_rtx))
    {
      /* We can add this using a single addi or addli.  */
      to_add = imm_rtx;
    }
  else
    {
      /* Offset too large for an immediate: materialize it in a
         scratch register first.  */
      rtx tmp = gen_rtx_REG (Pmode, (*next_scratch_regno)--);
      tilepro_expand_set_const32 (tmp, imm_rtx);
      to_add = tmp;
    }

  /* Actually adjust the stack pointer.  */
  insn = emit_insn (gen_sp_adjust (stack_pointer_rtx, stack_pointer_rtx,
                                   to_add));
  REG_NOTES (insn) = reg_notes;

  /* Describe what just happened in a way that dwarf understands.  */
  if (frame_related)
    {
      rtx real = gen_rtx_SET (VOIDmode, stack_pointer_rtx,
                              gen_rtx_PLUS (Pmode, stack_pointer_rtx,
                                            imm_rtx));
      RTX_FRAME_RELATED_P (insn) = 1;
      add_reg_note (insn, REG_CFA_ADJUST_CFA, real);
    }

  return insn;
}
3372
 
3373
 
3374
/* Return whether the current function is leaf.  This takes into
3375
   account whether the function calls tls_get_addr.  */
3376
static bool
3377
tilepro_current_function_is_leaf (void)
3378
{
3379
  return current_function_is_leaf && !cfun->machine->calls_tls_get_addr;
3380
}
3381
 
3382
 
3383
/* Return the total frame size in bytes, rounded to the stack
   alignment: local variables, saved registers, outgoing argument
   space, pretend args, and (for non-leaf or alloca functions) the
   callee save area at the bottom of the frame.  */
static int
compute_total_frame_size (void)
{
  int total_size = (get_frame_size () + tilepro_saved_regs_size ()
                    + crtl->outgoing_args_size
                    + crtl->args.pretend_args_size);

  if (!tilepro_current_function_is_leaf () || cfun->calls_alloca)
    {
      /* Make room for save area in callee.  */
      total_size += STACK_POINTER_OFFSET;
    }

  return round_frame_size (total_size);
}
3399
 
3400
 
3401
/* Return nonzero if this function is known to have a null epilogue.
   This allows the optimizer to omit jumps to jumps if no stack was
   created.  True only after reload, for leaf functions with no
   frame, no static chain, no profiling, and a link register that is
   never live.  */
bool
tilepro_can_use_return_insn_p (void)
{
  return (reload_completed
          && cfun->static_chain_decl == 0
          && compute_total_frame_size () == 0
          && tilepro_current_function_is_leaf ()
          && !crtl->profile && !df_regs_ever_live_p (TILEPRO_LINK_REGNUM));
}
3413
 
3414
 
3415
/* Returns an rtx for a stack slot at 'FP + offset_from_fp'.  If there
   is a frame pointer, it computes the value relative to
   that. Otherwise it uses the stack pointer.  May consume a scratch
   register (via NEXT_SCRATCH_REGNO, post-decremented) to hold the
   computed address; when the offset is zero the base register itself
   is returned and no scratch is used.  */
static rtx
compute_frame_addr (int offset_from_fp, int *next_scratch_regno)
{
  rtx base_reg_rtx, tmp_reg_rtx, offset_rtx;
  int offset_from_base;

  if (frame_pointer_needed)
    {
      base_reg_rtx = hard_frame_pointer_rtx;
      offset_from_base = offset_from_fp;
    }
  else
    {
      /* No frame pointer: rebase the offset onto the (already
         adjusted) stack pointer.  */
      int offset_from_sp = compute_total_frame_size () + offset_from_fp;
      base_reg_rtx = stack_pointer_rtx;
      offset_from_base = offset_from_sp;
    }

  if (offset_from_base == 0)
    return base_reg_rtx;

  /* Compute the new value of the stack pointer.  */
  tmp_reg_rtx = gen_rtx_REG (Pmode, (*next_scratch_regno)--);
  offset_rtx = gen_int_si (offset_from_base);

  if (!tilepro_expand_addsi (tmp_reg_rtx, base_reg_rtx, offset_rtx))
    {
      emit_insn (gen_rtx_SET (VOIDmode, tmp_reg_rtx,
                              gen_rtx_PLUS (Pmode, base_reg_rtx,
                                            offset_rtx)));
    }

  return tmp_reg_rtx;
}
3452
 
3453
 
3454
/* The stack frame looks like this:
3455
         +-------------+
3456
         |    ...      |
3457
         |  incoming   |
3458
         | stack args  |
3459
   AP -> +-------------+
3460
         | caller's HFP|
3461
         +-------------+
3462
         | lr save     |
3463
  HFP -> +-------------+
3464
         |  var args   |
3465
         |  reg save   | crtl->args.pretend_args_size bytes
3466
         +-------------+
3467
         |    ...      |
3468
         | saved regs  | tilepro_saved_regs_size() bytes
3469
   FP -> +-------------+
3470
         |    ...      |
3471
         |   vars      | get_frame_size() bytes
3472
         +-------------+
3473
         |    ...      |
3474
         |  outgoing   |
3475
         |  stack args | crtl->outgoing_args_size bytes
3476
         +-------------+
3477
         | HFP         | 4 bytes (only here if nonleaf / alloca)
3478
         +-------------+
3479
         | callee lr   | 4 bytes (only here if nonleaf / alloca)
3480
         | save        |
3481
   SP -> +-------------+
3482
 
3483
  HFP == incoming SP.
3484
 
3485
  For functions with a frame larger than 32767 bytes, or which use
3486
  alloca (), r52 is used as a frame pointer.  Otherwise there is no
3487
  frame pointer.
3488
 
3489
  FP is saved at SP+4 before calling a subroutine so the
3490
  callee can chain.  */
3491
/* Expand the function prologue: save lr, establish the frame (and
   hard frame pointer if needed), store the backtrace chain pointer
   for non-leaf functions, save all call-saved registers that are
   live, and load the PIC register.  See the stack-frame diagram
   above for the layout being constructed.  */
void
tilepro_expand_prologue (void)
{
#define ROUND_ROBIN_SIZE 4
  /* We round-robin through four scratch registers to hold temporary
     addresses for saving registers, to make instruction scheduling
     easier.  */
  rtx reg_save_addr[ROUND_ROBIN_SIZE] = {
    NULL_RTX, NULL_RTX, NULL_RTX, NULL_RTX
  };
  rtx insn, cfa;
  unsigned int which_scratch;
  int offset, start_offset, regno;

  /* A register that holds a copy of the incoming fp.  */
  int fp_copy_regno = -1;

  /* A register that holds a copy of the incoming sp.  */
  int sp_copy_regno = -1;

  /* Next scratch register number to hand out (postdecrementing).  */
  int next_scratch_regno = 29;

  int total_size = compute_total_frame_size ();

  if (flag_stack_usage_info)
    current_function_static_stack_size = total_size;

  /* Save lr first in its special location because code after this
     might use the link register as a scratch register.  */
  if (df_regs_ever_live_p (TILEPRO_LINK_REGNUM) || crtl->calls_eh_return)
    FRP (frame_emit_store (TILEPRO_LINK_REGNUM, TILEPRO_LINK_REGNUM,
                           stack_pointer_rtx, stack_pointer_rtx, 0));

  if (total_size == 0)
    {
      /* Load the PIC register if needed.  */
      if (flag_pic && crtl->uses_pic_offset_table)
        load_pic_register (false);

      return;
    }

  cfa = stack_pointer_rtx;

  if (frame_pointer_needed)
    {
      fp_copy_regno = next_scratch_regno--;

      /* Copy the old frame pointer aside so we can save it later.  */
      insn = FRP (emit_move_insn (gen_rtx_REG (word_mode, fp_copy_regno),
                                  hard_frame_pointer_rtx));
      add_reg_note (insn, REG_CFA_REGISTER, NULL_RTX);

      /* Set up the frame pointer.  */
      insn = FRP (emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx));
      add_reg_note (insn, REG_CFA_DEF_CFA, hard_frame_pointer_rtx);
      cfa = hard_frame_pointer_rtx;
      REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;

      /* fp holds a copy of the incoming sp, in case we need to store
         it.  */
      sp_copy_regno = HARD_FRAME_POINTER_REGNUM;
    }
  else if (!tilepro_current_function_is_leaf ())
    {
      /* Copy the old stack pointer aside so we can save it later.  */
      sp_copy_regno = next_scratch_regno--;
      insn = FRP (emit_move_insn (gen_rtx_REG (Pmode, sp_copy_regno),
                                  stack_pointer_rtx));
      add_reg_note (insn, REG_CFA_REGISTER, NULL_RTX);
    }

  if (tilepro_current_function_is_leaf ())
    {
      /* No need to store chain pointer to caller's frame.  */
      emit_sp_adjust (-total_size, &next_scratch_regno,
                      !frame_pointer_needed, NULL_RTX);
    }
  else
    {
      /* Save the frame pointer (incoming sp value) to support
         backtracing.  First we need to create an rtx with the store
         address.  */
      rtx chain_addr = gen_rtx_REG (Pmode, next_scratch_regno--);
      rtx size_rtx = gen_int_si (-(total_size - UNITS_PER_WORD));
      int cfa_offset =
        frame_pointer_needed ? UNITS_PER_WORD - total_size : UNITS_PER_WORD;

      if (add_operand (size_rtx, Pmode))
        {
          /* Expose more parallelism by computing this value from the
             original stack pointer, not the one after we have pushed
             the frame.  */
          rtx p = gen_rtx_PLUS (Pmode, stack_pointer_rtx, size_rtx);
          emit_insn (gen_rtx_SET (VOIDmode, chain_addr, p));
          emit_sp_adjust (-total_size, &next_scratch_regno,
                          !frame_pointer_needed, NULL_RTX);
        }
      else
        {
          /* The stack frame is large, so just store the incoming sp
             value at *(new_sp + UNITS_PER_WORD).  */
          rtx p;
          emit_sp_adjust (-total_size, &next_scratch_regno,
                          !frame_pointer_needed, NULL_RTX);
          p = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
                            GEN_INT (UNITS_PER_WORD));
          emit_insn (gen_rtx_SET (VOIDmode, chain_addr, p));
        }

      /* Save our frame pointer for backtrace chaining.  */
      FRP (frame_emit_store (sp_copy_regno, STACK_POINTER_REGNUM,
                             chain_addr, cfa, cfa_offset));
    }

  /* Compute where to start storing registers we need to save.  */
  start_offset = -crtl->args.pretend_args_size - UNITS_PER_WORD;
  offset = start_offset;

  /* Store all registers that need saving.  */
  which_scratch = 0;
  for (regno = FIRST_PSEUDO_REGISTER - 1; regno >= 0; regno--)
    if (need_to_save_reg (regno))
      {
        rtx r = reg_save_addr[which_scratch];
        int from_regno;
        int cfa_offset = frame_pointer_needed ? offset : total_size + offset;

        if (r == NULL_RTX)
          {
            /* First use of this round-robin slot: compute the slot
               address into a fresh scratch register.  */
            rtx p = compute_frame_addr (offset, &next_scratch_regno);
            r = gen_rtx_REG (word_mode, next_scratch_regno--);
            reg_save_addr[which_scratch] = r;

            emit_insn (gen_rtx_SET (VOIDmode, r, p));
          }
        else
          {
            /* Advance to the next stack slot to store this register.  */
            int stride = ROUND_ROBIN_SIZE * -UNITS_PER_WORD;
            rtx p = gen_rtx_PLUS (Pmode, r, GEN_INT (stride));
            emit_insn (gen_rtx_SET (VOIDmode, r, p));
          }

        /* Save this register to the stack (but use the old fp value
           we copied aside if appropriate).  */
        from_regno = (fp_copy_regno >= 0
                      && regno ==
                      HARD_FRAME_POINTER_REGNUM) ? fp_copy_regno : regno;
        FRP (frame_emit_store (from_regno, regno, r, cfa, cfa_offset));

        offset -= UNITS_PER_WORD;
        which_scratch = (which_scratch + 1) % ROUND_ROBIN_SIZE;
      }

  /* If profiling, force that to happen after the frame is set up.  */
  if (crtl->profile)
    emit_insn (gen_blockage ());

  /* Load the PIC register if needed.  */
  if (flag_pic && crtl->uses_pic_offset_table)
    load_pic_register (false);
}
3655
 
3656
 
3657
/* Implement the epilogue and sibcall_epilogue patterns.  SIBCALL_P is
3658
   true for a sibcall_epilogue pattern, and false for an epilogue
3659
   pattern.  */
3660
/* Expand the epilogue: reload lr and the saved registers, pop the
   frame, restore the frame pointer, and emit either a return insn
   (!SIBCALL_P) or a use of lr (SIBCALL_P).  Mirrors
   tilepro_expand_prologue above, including the round-robin scratch
   addressing scheme.  */
void
tilepro_expand_epilogue (bool sibcall_p)
{
  /* We round-robin through four scratch registers to hold temporary
     addresses for saving registers, to make instruction scheduling
     easier.  */
  rtx reg_save_addr[ROUND_ROBIN_SIZE] = {
    NULL_RTX, NULL_RTX, NULL_RTX, NULL_RTX
  };
  rtx last_insn, insn;
  unsigned int which_scratch;
  int offset, start_offset, regno;
  rtx cfa_restores = NULL_RTX;

  /* A register that holds a copy of the incoming fp.  */
  int fp_copy_regno = -1;

  /* Next scratch register number to hand out (postdecrementing).  */
  int next_scratch_regno = 29;

  int total_size = compute_total_frame_size ();

  last_insn = get_last_insn ();

  /* Load lr first since we are going to need it first.  */
  insn = NULL;
  if (df_regs_ever_live_p (TILEPRO_LINK_REGNUM))
    {
      insn = frame_emit_load (TILEPRO_LINK_REGNUM,
                              compute_frame_addr (0, &next_scratch_regno),
                              &cfa_restores);
    }

  if (total_size == 0)
    {
      if (insn)
        {
          RTX_FRAME_RELATED_P (insn) = 1;
          REG_NOTES (insn) = cfa_restores;
        }
      goto done;
    }

  /* Compute where to start restoring registers.  */
  start_offset = -crtl->args.pretend_args_size - UNITS_PER_WORD;
  offset = start_offset;

  if (frame_pointer_needed)
    fp_copy_regno = next_scratch_regno--;

  /* Restore all callee-saved registers.  */
  which_scratch = 0;
  for (regno = FIRST_PSEUDO_REGISTER - 1; regno >= 0; regno--)
    if (need_to_save_reg (regno))
      {
        rtx r = reg_save_addr[which_scratch];
        if (r == NULL_RTX)
          {
            r = compute_frame_addr (offset, &next_scratch_regno);
            reg_save_addr[which_scratch] = r;
          }
        else
          {
            /* Advance to the next stack slot to store this
               register.  */
            int stride = ROUND_ROBIN_SIZE * -UNITS_PER_WORD;
            rtx p = gen_rtx_PLUS (Pmode, r, GEN_INT (stride));
            emit_insn (gen_rtx_SET (VOIDmode, r, p));
          }

        /* The frame pointer is still live as a base register, so
           reload its saved value into a scratch for now.  */
        if (fp_copy_regno >= 0 && regno == HARD_FRAME_POINTER_REGNUM)
          frame_emit_load (fp_copy_regno, r, NULL);
        else
          frame_emit_load (regno, r, &cfa_restores);

        offset -= UNITS_PER_WORD;
        which_scratch = (which_scratch + 1) % ROUND_ROBIN_SIZE;
      }

  if (!tilepro_current_function_is_leaf ())
    cfa_restores =
      alloc_reg_note (REG_CFA_RESTORE, stack_pointer_rtx, cfa_restores);

  emit_insn (gen_blockage ());

  if (crtl->calls_eh_return)
    {
      /* Restore the stack pointer saved by the eh_return machinery.  */
      rtx r = compute_frame_addr (-total_size + UNITS_PER_WORD,
                                  &next_scratch_regno);
      insn = emit_move_insn (gen_rtx_REG (Pmode, STACK_POINTER_REGNUM),
                             gen_frame_mem (Pmode, r));
      RTX_FRAME_RELATED_P (insn) = 1;
      REG_NOTES (insn) = cfa_restores;
    }
  else if (frame_pointer_needed)
    {
      /* Restore the old stack pointer by copying from the frame
         pointer.  */
      insn = emit_insn (gen_sp_restore (stack_pointer_rtx,
                                        hard_frame_pointer_rtx));
      RTX_FRAME_RELATED_P (insn) = 1;
      REG_NOTES (insn) = cfa_restores;
      add_reg_note (insn, REG_CFA_DEF_CFA, stack_pointer_rtx);
    }
  else
    {
      insn = emit_sp_adjust (total_size, &next_scratch_regno, true,
                             cfa_restores);
    }

  /* Restore the old frame pointer.  */
  if (frame_pointer_needed)
    {
      insn = emit_move_insn (hard_frame_pointer_rtx,
                             gen_rtx_REG (Pmode, fp_copy_regno));
      add_reg_note (insn, REG_CFA_RESTORE, hard_frame_pointer_rtx);
    }

  /* Mark the pic registers as live outside of the function.  */
  if (flag_pic)
    {
      emit_use (cfun->machine->text_label_rtx);
      emit_use (cfun->machine->got_rtx);
    }

done:
  if (!sibcall_p)
    {
      /* Emit the actual 'return' instruction.  */
      emit_jump_insn (gen__return ());
    }
  else
    {
      emit_use (gen_rtx_REG (Pmode, TILEPRO_LINK_REGNUM));
    }

  /* Mark all insns we just emitted as frame-related.  */
  for (; last_insn != NULL_RTX; last_insn = next_insn (last_insn))
    RTX_FRAME_RELATED_P (last_insn) = 1;
}
3800
 
3801
#undef ROUND_ROBIN_SIZE
3802
 
3803
 
3804
/* Implement INITIAL_ELIMINATION_OFFSET.  Return the byte offset to
   add when replacing register FROM (soft frame pointer or arg
   pointer) with register TO (stack pointer or hard frame pointer);
   see the stack-frame diagram above for the layout these offsets
   are derived from.  */
int
tilepro_initial_elimination_offset (int from, int to)
{
  int total_size = compute_total_frame_size ();

  if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
    {
      return (total_size - crtl->args.pretend_args_size
              - tilepro_saved_regs_size ());
    }
  else if (from == FRAME_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
    {
      return -(crtl->args.pretend_args_size + tilepro_saved_regs_size ());
    }
  else if (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
    {
      return STACK_POINTER_OFFSET + total_size;
    }
  else if (from == ARG_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
    {
      return STACK_POINTER_OFFSET;
    }
  else
    gcc_unreachable ();
}
3830
 
3831
 
3832
/* Return an RTX indicating where the return address to the
3833
   calling function can be found.  */
3834
rtx
3835
tilepro_return_addr (int count, rtx frame ATTRIBUTE_UNUSED)
3836
{
3837
  if (count != 0)
3838
    return const0_rtx;
3839
 
3840
  return get_hard_reg_initial_val (Pmode, TILEPRO_LINK_REGNUM);
3841
}
3842
 
3843
 
3844
/* Implement EH_RETURN_HANDLER_RTX.  */
3845
rtx
3846
tilepro_eh_return_handler_rtx (void)
3847
{
3848
  /* The MEM needs to be volatile to prevent it from being
3849
     deleted.  */
3850
  rtx tmp = gen_frame_mem (Pmode, hard_frame_pointer_rtx);
3851
  MEM_VOLATILE_P (tmp) = true;
3852
  return tmp;
3853
}
3854
 
3855
 
3856
 
3857
/* Registers  */
3858
 
3859
/* Implement TARGET_CONDITIONAL_REGISTER_USAGE.  */
static void
tilepro_conditional_register_usage (void)
{
  /* The network-order register is globally reserved.  */
  global_regs[TILEPRO_NETORDER_REGNUM] = 1;
  /* TILEPRO_PIC_TEXT_LABEL_REGNUM is conditionally used.  It is a
     member of fixed_regs, and therefore must be member of
     call_used_regs, but it is not a member of call_really_used_regs[]
     because it is not clobbered by a call.  */
  if (TILEPRO_PIC_TEXT_LABEL_REGNUM != INVALID_REGNUM)
    {
      fixed_regs[TILEPRO_PIC_TEXT_LABEL_REGNUM] = 1;
      call_used_regs[TILEPRO_PIC_TEXT_LABEL_REGNUM] = 1;
    }
  if (PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
    {
      fixed_regs[PIC_OFFSET_TABLE_REGNUM] = 1;
      call_used_regs[PIC_OFFSET_TABLE_REGNUM] = 1;
    }
}
3879
 
3880
 
3881
/* Implement TARGET_FRAME_POINTER_REQUIRED.  */
3882
static bool
3883
tilepro_frame_pointer_required (void)
3884
{
3885
  return crtl->calls_eh_return || cfun->calls_alloca;
3886
}
3887
 
3888
 
3889
 
3890
/* Scheduling and reorg  */
3891
 
3892
/* Return the length of INSN.  LENGTH is the initial length computed
   by attributes in the machine-description file.  This is where we
   account for bundles.  The insn's machine mode is (ab)used as a
   bundling marker by tilepro_gen_bundles: SImode means "inside a
   bundle, not the last insn".  */
int
tilepro_adjust_insn_length (rtx insn, int length)
{
  enum machine_mode mode = GET_MODE (insn);

  /* A non-terminating instruction in a bundle has length 0.  */
  if (mode == SImode)
    return 0;

  /* By default, there is no length adjustment.  */
  return length;
}
3907
 
3908
 
3909
/* Implement TARGET_SCHED_ISSUE_RATE.  TILEPro bundles hold up to
   three instructions per cycle.  */
static int
tilepro_issue_rate (void)
{
  return 3;
}
3915
 
3916
 
3917
/* Return the rtx for the jump target of BRANCH, or 0 if BRANCH is
   not a call or its pattern does not have the expected shape.
   Unwraps an optional PARALLEL and SET around the CALL rtx and
   returns the address inside the call's MEM operand.  */
static rtx
get_jump_target (rtx branch)
{
  if (CALL_P (branch))
    {
      rtx call;
      call = PATTERN (branch);

      /* A call with a return value is (parallel [(set ...) ...]).  */
      if (GET_CODE (call) == PARALLEL)
        call = XVECEXP (call, 0, 0);

      if (GET_CODE (call) == SET)
        call = SET_SRC (call);

      if (GET_CODE (call) == CALL)
        return XEXP (XEXP (call, 0), 0);
    }
  return 0;
}
3937
 
3938
/* Implement TARGET_SCHED_ADJUST_COST.  Return the adjusted latency
   of the dependence edge LINK from DEP_INSN to INSN, given default
   latency COST.  */
static int
tilepro_sched_adjust_cost (rtx insn, rtx link, rtx dep_insn, int cost)
{
  /* If we have a true dependence, INSN is a call, and DEP_INSN
     defines a register that is needed by the call (argument or stack
     pointer), set its latency to 0 so that it can be bundled with
     the call.  Explicitly check for and exclude the case when
     DEP_INSN defines the target of the jump.  */
  if (CALL_P (insn) && REG_NOTE_KIND (link) == REG_DEP_TRUE)
    {
      rtx target = get_jump_target (insn);
      if (!REG_P (target) || !set_of (target, dep_insn))
        return 0;
    }

  return cost;
}
3956
 
3957
 
3958
/* Skip over irrelevant NOTEs and such and look for the next insn we
3959
   would consider bundling.  */
3960
static rtx
3961
next_insn_to_bundle (rtx r, rtx end)
3962
{
3963
  for (; r != end; r = NEXT_INSN (r))
3964
    {
3965
      if (NONDEBUG_INSN_P (r)
3966
          && GET_CODE (PATTERN (r)) != USE
3967
          && GET_CODE (PATTERN (r)) != CLOBBER)
3968
        return r;
3969
    }
3970
 
3971
  return NULL_RTX;
3972
}
3973
 
3974
 
3975
/* Go through all insns, and use the information generated during
   scheduling to generate SEQUENCEs to represent bundles of
   instructions issued simultaneously.  The insn's mode encodes its
   bundle position: QImode marks the end of a bundle, SImode marks a
   non-final member (the scheduler sets TImode on each cycle's first
   insn, which is what we test via the NEXT insn).  */
static void
tilepro_gen_bundles (void)
{
  basic_block bb;
  FOR_EACH_BB (bb)
  {
    rtx insn, next;
    rtx end = NEXT_INSN (BB_END (bb));

    for (insn = next_insn_to_bundle (BB_HEAD (bb), end); insn; insn = next)
      {
        next = next_insn_to_bundle (NEXT_INSN (insn), end);

        /* Never wrap {} around inline asm.  */
        if (GET_CODE (PATTERN (insn)) != ASM_INPUT)
          {
            if (next == NULL_RTX || GET_MODE (next) == TImode
                /* NOTE: The scheduler incorrectly believes a call
                   insn can execute in the same cycle as the insn
                   after the call.  This is of course impossible.
                   Really we need to fix the scheduler somehow, so
                   the code after the call gets scheduled
                   optimally.  */
                || CALL_P (insn))
              {
                /* Mark current insn as the end of a bundle.  */
                PUT_MODE (insn, QImode);
              }
            else
              {
                /* Mark it as part of a bundle.  */
                PUT_MODE (insn, SImode);
              }
          }
      }
  }
}
4015
 
4016
 
4017
/* Helper function for tilepro_fixup_pcrel_references.  Replace INSN
   (one half of a pc-relative address computation, with operands
   OPNDS as extracted by replace_addli_pcrel/replace_auli_pcrel)
   with the equivalent GOT-based sequence, then delete INSN.
   FIRST_INSN_P says whether INSN is the first instruction of the
   two-insn pc-relative pair.  See the long comment above
   tilepro_fixup_pcrel_references for the exact sequences emitted
   under -fpic vs. -fPIC.  */
static void
replace_pc_relative_symbol_ref (rtx insn, rtx opnds[4], bool first_insn_p)
{
  rtx new_insns;

  start_sequence ();

  if (flag_pic == 1)
    {
      /* -fpic: the first insn of the pair is simply dropped; the
         second becomes addli + lw through the GOT.  */
      if (!first_insn_p)
        {
          emit_insn (gen_add_got16 (opnds[0], tilepro_got_rtx (),
                                    opnds[2]));
          emit_insn (gen_insn_lw (opnds[0], opnds[0]));
        }
    }
  else
    {
      /* -fPIC: 32-bit GOT offsets need a two-insn high/low pair.  */
      if (first_insn_p)
        {
          emit_insn (gen_addhi_got32 (opnds[0], tilepro_got_rtx (),
                                      opnds[2]));
        }
      else
        {
          emit_insn (gen_addlo_got32 (opnds[0], opnds[1], opnds[2]));
          emit_insn (gen_insn_lw (opnds[0], opnds[0]));
        }
    }

  new_insns = get_insns ();
  end_sequence ();

  if (new_insns)
    emit_insn_before (new_insns, insn);

  delete_insn (insn);
}
4056
 
4057
 
4058
/* Returns whether INSN is a pc-relative addli insn, i.e. a SET whose
   source is (lo_sum reg (const (unspec UNSPEC_PCREL_SYM))).  */
static bool
match_addli_pcrel (rtx insn)
{
  rtx pattern = PATTERN (insn);
  rtx unspec;

  if (GET_CODE (pattern) != SET)
    return false;

  if (GET_CODE (SET_SRC (pattern)) != LO_SUM)
    return false;

  if (GET_CODE (XEXP (SET_SRC (pattern), 1)) != CONST)
    return false;

  unspec = XEXP (XEXP (SET_SRC (pattern), 1), 0);

  return (GET_CODE (unspec) == UNSPEC
          && XINT (unspec, 1) == UNSPEC_PCREL_SYM);
}
4079
 
4080
 
4081
/* Helper function for tilepro_fixup_pcrel_references.  Decompose a
   pc-relative addli INSN (pre-validated by match_addli_pcrel) into
   its operands -- dest, base reg, symbol, text label -- and hand
   them to replace_pc_relative_symbol_ref.  Does nothing when the
   referenced operand is a LABEL_REF rather than a SYMBOL_REF.  */
static void
replace_addli_pcrel (rtx insn)
{
  rtx pattern = PATTERN (insn);
  rtx set_src;
  rtx unspec;
  rtx opnds[4];
  bool first_insn_p;

  gcc_assert (GET_CODE (pattern) == SET);
  opnds[0] = SET_DEST (pattern);

  set_src = SET_SRC (pattern);
  gcc_assert (GET_CODE (set_src) == LO_SUM);
  gcc_assert (GET_CODE (XEXP (set_src, 1)) == CONST);
  opnds[1] = XEXP (set_src, 0);

  unspec = XEXP (XEXP (set_src, 1), 0);
  gcc_assert (GET_CODE (unspec) == UNSPEC);
  gcc_assert (XINT (unspec, 1) == UNSPEC_PCREL_SYM);
  opnds[2] = XVECEXP (unspec, 0, 0);
  opnds[3] = XVECEXP (unspec, 0, 1);

  /* We only need to replace SYMBOL_REFs, not LABEL_REFs.  */
  if (GET_CODE (opnds[2]) != SYMBOL_REF)
    return;

  /* The first insn of the pair adds directly to the text label.  */
  first_insn_p = (opnds[1] == tilepro_text_label_rtx ());

  replace_pc_relative_symbol_ref (insn, opnds, first_insn_p);
}
4113
 
4114
 
4115
/* Returns whether INSN is a pc-relative auli insn, i.e. a SET whose
   source is (plus reg (high (const (unspec UNSPEC_PCREL_SYM)))).  */
static bool
match_auli_pcrel (rtx insn)
{
  rtx pattern = PATTERN (insn);
  rtx high;
  rtx unspec;

  if (GET_CODE (pattern) != SET)
    return false;

  if (GET_CODE (SET_SRC (pattern)) != PLUS)
    return false;

  high = XEXP (SET_SRC (pattern), 1);

  if (GET_CODE (high) != HIGH
      || GET_CODE (XEXP (high, 0)) != CONST)
    return false;

  unspec = XEXP (XEXP (high, 0), 0);

  return (GET_CODE (unspec) == UNSPEC
          && XINT (unspec, 1) == UNSPEC_PCREL_SYM);
}
4140
 
4141
 
4142
/* Helper function for tilepro_fixup_pcrel_references.  Decompose a
   pc-relative auli INSN (pre-validated by match_auli_pcrel) into its
   operands -- dest, base reg, symbol, text label -- and hand them to
   replace_pc_relative_symbol_ref.  Does nothing when the referenced
   operand is a LABEL_REF rather than a SYMBOL_REF.  */
static void
replace_auli_pcrel (rtx insn)
{
  rtx pattern = PATTERN (insn);
  rtx set_src;
  rtx high;
  rtx unspec;
  rtx opnds[4];
  bool first_insn_p;

  gcc_assert (GET_CODE (pattern) == SET);
  opnds[0] = SET_DEST (pattern);

  set_src = SET_SRC (pattern);
  gcc_assert (GET_CODE (set_src) == PLUS);
  opnds[1] = XEXP (set_src, 0);

  high = XEXP (set_src, 1);
  gcc_assert (GET_CODE (high) == HIGH);
  gcc_assert (GET_CODE (XEXP (high, 0)) == CONST);

  unspec = XEXP (XEXP (high, 0), 0);
  gcc_assert (GET_CODE (unspec) == UNSPEC);
  gcc_assert (XINT (unspec, 1) == UNSPEC_PCREL_SYM);
  opnds[2] = XVECEXP (unspec, 0, 0);
  opnds[3] = XVECEXP (unspec, 0, 1);

  /* We only need to replace SYMBOL_REFs, not LABEL_REFs.  */
  if (GET_CODE (opnds[2]) != SYMBOL_REF)
    return;

  /* The first insn of the pair adds directly to the text label.  */
  first_insn_p = (opnds[1] == tilepro_text_label_rtx ());

  replace_pc_relative_symbol_ref (insn, opnds, first_insn_p);
}
4178
 
4179
 
4180
/* We generate PC relative SYMBOL_REFs as an optimization, to avoid
4181
   going through the GOT when the symbol is local to the compilation
4182
   unit.  But such a symbol requires that the common text_label that
4183
   we generate at the beginning of the function be in the same section
4184
   as the reference to the SYMBOL_REF.  This may not be true if we
4185
   generate hot/cold sections.  This function looks for such cases and
4186
   replaces such references with the longer sequence going through the
4187
   GOT.
4188
 
4189
   We expect one of the following two instruction sequences:
4190
   addli tmp1, txt_label_reg, lo16(sym - txt_label)
4191
   auli  tmp2,          tmp1, ha16(sym - txt_label)
4192
 
4193
   auli  tmp1, txt_label_reg, ha16(sym - txt_label)
4194
   addli tmp2,          tmp1, lo16(sym - txt_label)
4195
 
4196
   If we're compiling -fpic, we replace the first instruction with
4197
   nothing, and the second instruction with:
4198
 
4199
   addli tmp2, got_rtx, got(sym)
4200
   lw    tmp2,    tmp2
4201
 
4202
   If we're compiling -fPIC, we replace the first instruction with:
4203
 
4204
   auli  tmp1, got_rtx, got_ha16(sym)
4205
 
4206
   and the second instruction with:
4207
 
4208
   addli tmp2,    tmp1, got_lo16(sym)
4209
   lw    tmp2,    tmp2
4210
 
4211
   Note that we're careful to disturb the instruction sequence as
4212
   little as possible, since it's very late in the compilation
4213
   process.
4214
*/
4215
/* Walk the whole insn chain and rewrite pc-relative SYMBOL_REF
   address computations that landed in a different text section from
   the function entry (see the long comment above for the exact
   instruction sequences involved).  Tracks section changes via
   NOTE_INSN_SWITCH_TEXT_SECTIONS notes; insns in the same section as
   the entry are left untouched.  */
static void
tilepro_fixup_pcrel_references (void)
{
  rtx insn, next_insn;
  bool same_section_as_entry = true;

  for (insn = get_insns (); insn; insn = next_insn)
    {
      /* Capture the successor first: the replace_* helpers may
         delete INSN.  */
      next_insn = NEXT_INSN (insn);

      if (NOTE_P (insn) && NOTE_KIND (insn) == NOTE_INSN_SWITCH_TEXT_SECTIONS)
        {
          same_section_as_entry = !same_section_as_entry;
          continue;
        }

      if (same_section_as_entry)
        continue;

      if (!(INSN_P (insn)
            && GET_CODE (PATTERN (insn)) != USE
            && GET_CODE (PATTERN (insn)) != CLOBBER))
        continue;

      if (match_addli_pcrel (insn))
        replace_addli_pcrel (insn);
      else if (match_auli_pcrel (insn))
        replace_auli_pcrel (insn);
    }
}
4245
 
4246
 
4247
/* Ensure that no var tracking notes are emitted in the middle of a
   three-instruction bundle.  Notes found inside a bundle (between an
   SImode-marked insn and the QImode-marked bundle terminator) are
   unlinked onto a LIFO queue (chained through PREV_INSN) and
   re-inserted immediately after the bundle's last insn.  */
static void
reorder_var_tracking_notes (void)
{
  basic_block bb;
  FOR_EACH_BB (bb)
  {
    rtx insn, next;
    rtx queue = NULL_RTX;
    bool in_bundle = false;

    for (insn = BB_HEAD (bb); insn != BB_END (bb); insn = next)
      {
        next = NEXT_INSN (insn);

        if (INSN_P (insn))
          {
            /* Emit queued up notes at the last instruction of a bundle.  */
            if (GET_MODE (insn) == QImode)
              {
                while (queue)
                  {
                    /* Splice each queued note back in right after
                       the bundle terminator.  */
                    rtx next_queue = PREV_INSN (queue);
                    PREV_INSN (NEXT_INSN (insn)) = queue;
                    NEXT_INSN (queue) = NEXT_INSN (insn);
                    NEXT_INSN (insn) = queue;
                    PREV_INSN (queue) = insn;
                    queue = next_queue;
                  }
                in_bundle = false;
              }
            else if (GET_MODE (insn) == SImode)
              in_bundle = true;
          }
        else if (NOTE_P (insn) && NOTE_KIND (insn) == NOTE_INSN_VAR_LOCATION)
          {
            if (in_bundle)
              {
                /* Unlink the note from the chain and push it onto
                   the queue.  */
                rtx prev = PREV_INSN (insn);
                PREV_INSN (next) = prev;
                NEXT_INSN (prev) = next;

                PREV_INSN (insn) = queue;
                queue = insn;
              }
          }
      }
  }
}
4297
 
4298
 
4299
/* Perform machine dependent operations on the rtl chain INSNS.  */
4300
static void
4301
tilepro_reorg (void)
4302
{
4303
  /* We are freeing block_for_insn in the toplev to keep compatibility
4304
     with old MDEP_REORGS that are not CFG based.  Recompute it
4305
     now.  */
4306
  compute_bb_for_insn ();
4307
 
4308
  if (flag_reorder_blocks_and_partition)
4309
    {
4310
      tilepro_fixup_pcrel_references ();
4311
    }
4312
 
4313
  if (flag_schedule_insns_after_reload)
4314
    {
4315
      split_all_insns ();
4316
 
4317
      timevar_push (TV_SCHED2);
4318
      schedule_insns ();
4319
      timevar_pop (TV_SCHED2);
4320
 
4321
      /* Examine the schedule to group into bundles.  */
4322
      tilepro_gen_bundles ();
4323
    }
4324
 
4325
  df_analyze ();
4326
 
4327
  if (flag_var_tracking)
4328
    {
4329
      timevar_push (TV_VAR_TRACKING);
4330
      variable_tracking_main ();
4331
      reorder_var_tracking_notes ();
4332
      timevar_pop (TV_VAR_TRACKING);
4333
    }
4334
 
4335
  df_finish_pass (false);
4336
}
4337
 
4338
 
4339
 
4340
/* Assembly  */
4341
 
4342
/* Select a format to encode pointers in exception handling data.
4343
   CODE is 0 for data, 1 for code labels, 2 for function pointers.
4344
   GLOBAL is true if the symbol may be affected by dynamic
4345
   relocations.  */
4346
int
4347
tilepro_asm_preferred_eh_data_format (int code ATTRIBUTE_UNUSED, int global)
4348
{
4349
  if (flag_pic)
4350
    return (global ? DW_EH_PE_indirect : 0) | DW_EH_PE_pcrel | DW_EH_PE_sdata4;
4351
  else
4352
    return DW_EH_PE_absptr;
4353
}
4354
 
4355
 
4356
/* Implement TARGET_ASM_OUTPUT_MI_THUNK.  Emit assembly for a thunk
   that adjusts the "this" pointer by DELTA (and, if VCALL_OFFSET is
   nonzero, by an additional offset loaded through the vtable) and
   then tail-calls FUNCTION.  Output goes to FILE.  */
static void
tilepro_asm_output_mi_thunk (FILE *file, tree thunk_fndecl ATTRIBUTE_UNUSED,
                             HOST_WIDE_INT delta, HOST_WIDE_INT vcall_offset,
                             tree function)
{
  rtx this_rtx, insn, funexp;

  /* Pretend to be a post-reload pass while generating rtl.  */
  reload_completed = 1;

  /* Mark the end of the (empty) prologue.  */
  emit_note (NOTE_INSN_PROLOGUE_END);

  /* Find the "this" pointer.  If the function returns a structure,
     the structure return pointer is in $1.  */
  if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function)), function))
    this_rtx = gen_rtx_REG (Pmode, 1);
  else
    this_rtx = gen_rtx_REG (Pmode, 0);

  /* Add DELTA to THIS_RTX.  */
  emit_insn (gen_addsi3 (this_rtx, this_rtx, GEN_INT (delta)));

  /* If needed, add *(*THIS_RTX + VCALL_OFFSET) to THIS_RTX.  */
  if (vcall_offset)
    {
      rtx tmp;

      /* Register 29 is used as a scratch here — presumably a
         call-clobbered temporary in the TILEPro ABI; TODO confirm.  */
      tmp = gen_rtx_REG (Pmode, 29);
      emit_move_insn (tmp, gen_rtx_MEM (Pmode, this_rtx));

      emit_insn (gen_addsi3 (tmp, tmp, GEN_INT (vcall_offset)));

      emit_move_insn (tmp, gen_rtx_MEM (Pmode, tmp));

      emit_insn (gen_addsi3 (this_rtx, this_rtx, tmp));
    }

  /* Generate a tail call to the target function.  */
  if (!TREE_USED (function))
    {
      assemble_external (function);
      TREE_USED (function) = 1;
    }
  funexp = XEXP (DECL_RTL (function), 0);
  funexp = gen_rtx_MEM (FUNCTION_MODE, funexp);
  insn = emit_call_insn (gen_sibcall (funexp, const0_rtx));
  SIBLING_CALL_P (insn) = 1;

  /* Run just enough of rest_of_compilation to get the insns emitted.
     There's not really enough bulk here to make other passes such as
     instruction scheduling worth while.  Note that use_thunk calls
     assemble_start_function and assemble_end_function.

     We don't currently bundle, but the instruction sequence is all
     serial except for the tail call, so we're only wasting one cycle.
   */
  insn = get_insns ();
  insn_locators_alloc ();
  shorten_branches (insn);
  final_start_function (insn, file, 1);
  final (insn, file, 1);
  final_end_function ();

  /* Stop pretending to be a post-reload pass.  */
  reload_completed = 0;
}
4424
 
4425
 
4426
/* Implement TARGET_ASM_TRAMPOLINE_TEMPLATE.  */
4427
static void
4428
tilepro_asm_trampoline_template (FILE *file)
4429
{
4430
  fprintf (file, "\tlnk   r10\n");
4431
  fprintf (file, "\taddi  r10, r10, 32\n");
4432
  fprintf (file, "\tlwadd r11, r10, %d\n", GET_MODE_SIZE (ptr_mode));
4433
  fprintf (file, "\tlw    r10, r10\n");
4434
  fprintf (file, "\tjr    r11\n");
4435
  fprintf (file, "\t.word 0 # <function address>\n");
4436
  fprintf (file, "\t.word 0 # <static chain value>\n");
4437
}
4438
 
4439
 
4440
/* Implement TARGET_TRAMPOLINE_INIT.  Copy the trampoline template
   into the memory block M_TRAMP, patch in the address of FNDECL and
   the STATIC_CHAIN value, and flush the instruction cache over the
   block so the freshly written code is executable.  */
static void
tilepro_trampoline_init (rtx m_tramp, tree fndecl, rtx static_chain)
{
  rtx fnaddr, chaddr;
  rtx mem;
  rtx begin_addr, end_addr;
  int ptr_mode_size = GET_MODE_SIZE (ptr_mode);

  fnaddr = copy_to_reg (XEXP (DECL_RTL (fndecl), 0));
  chaddr = copy_to_reg (static_chain);

  /* Copy the template emitted by tilepro_asm_trampoline_template.  */
  emit_block_move (m_tramp, assemble_trampoline_template (),
                   GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);

  /* The template reserves its final two pointer-sized words for the
     function address and the static chain, in that order.  */
  mem = adjust_address (m_tramp, ptr_mode,
                        TRAMPOLINE_SIZE - 2 * ptr_mode_size);
  emit_move_insn (mem, fnaddr);
  mem = adjust_address (m_tramp, ptr_mode,
                        TRAMPOLINE_SIZE - ptr_mode_size);
  emit_move_insn (mem, chaddr);

  /* Get pointers to the beginning and end of the code block.  */
  begin_addr = force_reg (Pmode, XEXP (m_tramp, 0));
  end_addr = force_reg (Pmode, plus_constant (XEXP (m_tramp, 0),
                                              TRAMPOLINE_SIZE));

  /* Invalidate the icache over [begin_addr, end_addr).  */
  emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__clear_cache"),
                     LCT_NORMAL, VOIDmode, 2, begin_addr, Pmode,
                     end_addr, Pmode);
}
4471
 
4472
 
4473
/* Implement TARGET_PRINT_OPERAND.  */
4474
static void
4475
tilepro_print_operand (FILE *file, rtx x, int code)
4476
{
4477
  switch (code)
4478
    {
4479
    case 'c':
4480
      /* Print the compare operator opcode for conditional moves. */
4481
      switch (GET_CODE (x))
4482
        {
4483
        case EQ:
4484
          fputs ("z", file);
4485
          break;
4486
        case NE:
4487
          fputs ("nz", file);
4488
          break;
4489
        default:
4490
          output_operand_lossage ("invalid %%c operand");
4491
        }
4492
      return;
4493
 
4494
    case 'C':
4495
      /* Print the compare operator opcode for conditional moves. */
4496
      switch (GET_CODE (x))
4497
        {
4498
        case EQ:
4499
          fputs ("nz", file);
4500
          break;
4501
        case NE:
4502
          fputs ("z", file);
4503
          break;
4504
        default:
4505
          output_operand_lossage ("invalid %%C operand");
4506
        }
4507
      return;
4508
 
4509
    case 'h':
4510
      {
4511
        /* Print the high 16 bits of a 32-bit constant.  */
4512
        HOST_WIDE_INT i;
4513
        if (CONST_INT_P (x))
4514
          i = INTVAL (x);
4515
        else if (GET_CODE (x) == CONST_DOUBLE)
4516
          i = CONST_DOUBLE_LOW (x);
4517
        else
4518
          {
4519
            output_operand_lossage ("invalid %%h operand");
4520
            return;
4521
          }
4522
        i = trunc_int_for_mode (i >> 16, HImode);
4523
        fprintf (file, HOST_WIDE_INT_PRINT_DEC, i);
4524
        return;
4525
      }
4526
 
4527
    case 'H':
4528
      {
4529
        rtx addr = NULL;
4530
        const char *opstr = NULL;
4531
        bool pcrel = false;
4532
        if (GET_CODE (x) == CONST
4533
            && GET_CODE (XEXP (x, 0)) == UNSPEC)
4534
          {
4535
            addr = XVECEXP (XEXP (x, 0), 0, 0);
4536
            switch (XINT (XEXP (x, 0), 1))
4537
            {
4538
            case UNSPEC_GOT32_SYM:
4539
              opstr = "got_ha16";
4540
              break;
4541
            case UNSPEC_PCREL_SYM:
4542
              opstr = "ha16";
4543
              pcrel = true;
4544
              break;
4545
            case UNSPEC_TLS_GD:
4546
              opstr = "tls_gd_ha16";
4547
              break;
4548
            case UNSPEC_TLS_IE:
4549
              opstr = "tls_ie_ha16";
4550
              break;
4551
            case UNSPEC_TLS_LE:
4552
              opstr = "tls_le_ha16";
4553
              break;
4554
            default:
4555
              output_operand_lossage ("invalid %%H operand");
4556
            }
4557
          }
4558
        else
4559
          {
4560
            addr = x;
4561
            opstr = "ha16";
4562
          }
4563
 
4564
        fputs (opstr, file);
4565
        fputc ('(', file);
4566
        output_addr_const (file, addr);
4567
 
4568
        if (pcrel)
4569
          {
4570
            rtx addr2 = XVECEXP (XEXP (x, 0), 0, 1);
4571
            fputs (" - " , file);
4572
            output_addr_const (file, addr2);
4573
          }
4574
 
4575
        fputc (')', file);
4576
        return;
4577
      }
4578
 
4579
    case 'I':
4580
      /* Print an auto-inc memory operand.  */
4581
      if (!MEM_P (x))
4582
        {
4583
          output_operand_lossage ("invalid %%I operand");
4584
          return;
4585
        }
4586
 
4587
      output_memory_reference_mode = GET_MODE (x);
4588
      output_memory_autoinc_first = true;
4589
      output_address (XEXP (x, 0));
4590
      output_memory_reference_mode = VOIDmode;
4591
      return;
4592
 
4593
    case 'i':
4594
      /* Print an auto-inc memory operand.  */
4595
      if (!MEM_P (x))
4596
        {
4597
          output_operand_lossage ("invalid %%i operand");
4598
          return;
4599
        }
4600
 
4601
      output_memory_reference_mode = GET_MODE (x);
4602
      output_memory_autoinc_first = false;
4603
      output_address (XEXP (x, 0));
4604
      output_memory_reference_mode = VOIDmode;
4605
      return;
4606
 
4607
    case 'j':
4608
      {
4609
        /* Print the low 8 bits of a constant.  */
4610
        HOST_WIDE_INT i;
4611
        if (CONST_INT_P (x))
4612
          i = INTVAL (x);
4613
        else if (GET_CODE (x) == CONST_DOUBLE)
4614
          i = CONST_DOUBLE_LOW (x);
4615
        else if (GET_CODE (x) == CONST_VECTOR
4616
                 && CONST_INT_P (CONST_VECTOR_ELT (x, 0)))
4617
          i = INTVAL (CONST_VECTOR_ELT (x, 0));
4618
        else
4619
          {
4620
            output_operand_lossage ("invalid %%j operand");
4621
            return;
4622
          }
4623
        i = trunc_int_for_mode (i, QImode);
4624
        fprintf (file, HOST_WIDE_INT_PRINT_DEC, i);
4625
        return;
4626
      }
4627
 
4628
    case 'L':
4629
      {
4630
        rtx addr = NULL;
4631
        const char *opstr = NULL;
4632
        bool pcrel = false;
4633
        if (GET_CODE (x) == CONST
4634
            && GET_CODE (XEXP (x, 0)) == UNSPEC)
4635
          {
4636
            addr = XVECEXP (XEXP (x, 0), 0, 0);
4637
            switch (XINT (XEXP (x, 0), 1))
4638
            {
4639
            case UNSPEC_GOT16_SYM:
4640
              opstr = "got";
4641
              break;
4642
            case UNSPEC_GOT32_SYM:
4643
              opstr = "got_lo16";
4644
              break;
4645
            case UNSPEC_PCREL_SYM:
4646
              opstr = "lo16";
4647
              pcrel = true;
4648
              break;
4649
            case UNSPEC_TLS_GD:
4650
              opstr = "tls_gd_lo16";
4651
              break;
4652
            case UNSPEC_TLS_IE:
4653
              opstr = "tls_ie_lo16";
4654
              break;
4655
            case UNSPEC_TLS_LE:
4656
              opstr = "tls_le_lo16";
4657
              break;
4658
            default:
4659
              output_operand_lossage ("invalid %%L operand");
4660
            }
4661
          }
4662
        else
4663
          {
4664
            addr = x;
4665
            opstr = "lo16";
4666
          }
4667
 
4668
        fputs (opstr, file);
4669
        fputc ('(', file);
4670
        output_addr_const (file, addr);
4671
 
4672
        if (pcrel)
4673
          {
4674
            rtx addr2 = XVECEXP (XEXP (x, 0), 0, 1);
4675
            fputs (" - " , file);
4676
            output_addr_const (file, addr2);
4677
          }
4678
 
4679
        fputc (')', file);
4680
        return;
4681
      }
4682
 
4683
    case 'p':
4684
      if (GET_CODE (x) == SYMBOL_REF)
4685
        {
4686
          if (flag_pic && !SYMBOL_REF_LOCAL_P (x))
4687
            fprintf (file, "plt(");
4688
          output_addr_const (file, x);
4689
          if (flag_pic && !SYMBOL_REF_LOCAL_P (x))
4690
            fprintf (file, ")");
4691
        }
4692
      else
4693
        output_addr_const (file, x);
4694
      return;
4695
 
4696
    case 'P':
4697
      {
4698
        /* Print a 32-bit constant plus one.  */
4699
        HOST_WIDE_INT i;
4700
        if (!CONST_INT_P (x))
4701
          {
4702
            output_operand_lossage ("invalid %%P operand");
4703
            return;
4704
          }
4705
        i = trunc_int_for_mode (INTVAL (x) + 1, SImode);
4706
        fprintf (file, HOST_WIDE_INT_PRINT_DEC, i);
4707
        return;
4708
      }
4709
 
4710
    case 'M':
4711
      {
4712
        /* Print an mm-style bit range.  */
4713
        int first_bit, last_bit;
4714
 
4715
        if (!CONST_INT_P (x)
4716
            || !tilepro_bitfield_operand_p (INTVAL (x), &first_bit,
4717
                                            &last_bit))
4718
          {
4719
            output_operand_lossage ("invalid %%M operand");
4720
            return;
4721
          }
4722
 
4723
        fprintf (file, "%d, %d", first_bit, last_bit);
4724
        return;
4725
      }
4726
 
4727
    case 'N':
4728
      {
4729
        const char *reg = NULL;
4730
 
4731
        /* Print a network register.  */
4732
        if (!CONST_INT_P (x))
4733
          {
4734
            output_operand_lossage ("invalid %%N operand");
4735
            return;
4736
          }
4737
 
4738
        switch (INTVAL (x))
4739
          {
4740
          case TILEPRO_NETREG_IDN0: reg = "idn0"; break;
4741
          case TILEPRO_NETREG_IDN1: reg = "idn1"; break;
4742
          case TILEPRO_NETREG_SN:   reg = "sn";   break;
4743
          case TILEPRO_NETREG_UDN0: reg = "udn0"; break;
4744
          case TILEPRO_NETREG_UDN1: reg = "udn1"; break;
4745
          case TILEPRO_NETREG_UDN2: reg = "udn2"; break;
4746
          case TILEPRO_NETREG_UDN3: reg = "udn3"; break;
4747
          default: gcc_unreachable ();
4748
          }
4749
 
4750
        fprintf (file, reg);
4751
        return;
4752
      }
4753
 
4754
    case 't':
4755
      {
4756
        /* Log base 2 of a power of two.  */
4757
        HOST_WIDE_INT i;
4758
        HOST_WIDE_INT n;
4759
 
4760
        if (!CONST_INT_P (x))
4761
          {
4762
            output_operand_lossage ("invalid %%t operand");
4763
            return;
4764
          }
4765
        n = trunc_int_for_mode (INTVAL (x), SImode);
4766
        i = exact_log2 (n);
4767
        if (i < 0)
4768
          {
4769
            output_operand_lossage ("invalid %%t operand '"
4770
                                    HOST_WIDE_INT_PRINT_DEC "'", n);
4771
            return;
4772
          }
4773
 
4774
        fprintf (file, HOST_WIDE_INT_PRINT_DEC, i);
4775
        return;
4776
      }
4777
      break;
4778
 
4779
    case 'r':
4780
      /* In this case we need a register.  Use 'zero' if the
4781
         operand is const0_rtx.  */
4782
      if (x == const0_rtx
4783
          || (GET_MODE (x) != VOIDmode && x == CONST0_RTX (GET_MODE (x))))
4784
        {
4785
          fputs ("zero", file);
4786
          return;
4787
        }
4788
      else if (!REG_P (x))
4789
        {
4790
          output_operand_lossage ("invalid %%r operand");
4791
          return;
4792
        }
4793
      /* FALLTHRU */
4794
 
4795
    case 0:
4796
      if (REG_P (x))
4797
        {
4798
          fprintf (file, "%s", reg_names[REGNO (x)]);
4799
          return;
4800
        }
4801
      else if (MEM_P (x))
4802
        {
4803
          output_memory_reference_mode = VOIDmode;
4804
          output_address (XEXP (x, 0));
4805
          return;
4806
        }
4807
      else
4808
        {
4809
          output_addr_const (file, x);
4810
          return;
4811
        }
4812
      break;
4813
    }
4814
 
4815
  debug_rtx (x);
4816
  output_operand_lossage ("unable to print out operand yet; code == %d (%c)",
4817
                          code, code);
4818
}
4819
 
4820
 
4821
/* Implement TARGET_PRINT_OPERAND_ADDRESS.  */
4822
static void
4823
tilepro_print_operand_address (FILE *file, rtx addr)
4824
{
4825
  if (GET_CODE (addr) == POST_DEC
4826
      || GET_CODE (addr) == POST_INC)
4827
    {
4828
      int offset = GET_MODE_SIZE (output_memory_reference_mode);
4829
 
4830
      gcc_assert (output_memory_reference_mode != VOIDmode);
4831
 
4832
      if (output_memory_autoinc_first)
4833
        fprintf (file, "%s", reg_names[REGNO (XEXP (addr, 0))]);
4834
      else
4835
        fprintf (file, "%d",
4836
                 GET_CODE (addr) == POST_DEC ? -offset : offset);
4837
    }
4838
  else if (GET_CODE (addr) == POST_MODIFY)
4839
    {
4840
      gcc_assert (output_memory_reference_mode != VOIDmode);
4841
 
4842
      gcc_assert (GET_CODE (XEXP (addr, 1)) == PLUS);
4843
 
4844
      if (output_memory_autoinc_first)
4845
        fprintf (file, "%s", reg_names[REGNO (XEXP (addr, 0))]);
4846
      else
4847
        fprintf (file, HOST_WIDE_INT_PRINT_DEC,
4848
                 INTVAL (XEXP (XEXP (addr, 1), 1)));
4849
    }
4850
  else
4851
    tilepro_print_operand (file, addr, 'r');
4852
}
4853
 
4854
 
4855
/* Machine mode of current insn, for determining curly brace
4856
   placement.  */
4857
static enum machine_mode insn_mode;
4858
 
4859
 
4860
/* Implement FINAL_PRESCAN_INSN.  This is used to emit bundles.  */
void
tilepro_final_prescan_insn (rtx insn)
{
  /* Record this for tilepro_asm_output_opcode to examine; the mode
     encodes the insn's position within a bundle (SImode = first,
     QImode = last).  */
  insn_mode = GET_MODE (insn);
}
4867
 
4868
 
4869
/* While emitting asm, are we currently inside '{' for a bundle? */
4870
static bool tilepro_in_bundle = false;
4871
 
4872
/* Implement ASM_OUTPUT_OPCODE.  Prepend/append curly braces as
4873
   appropriate given the bundling information recorded by
4874
   tilepro_gen_bundles.  */
4875
const char *
4876
tilepro_asm_output_opcode (FILE *stream, const char *code)
4877
{
4878
  bool pseudo = !strcmp (code, "pseudo");
4879
 
4880
  if (!tilepro_in_bundle && insn_mode == SImode)
4881
    {
4882
      /* Start a new bundle.  */
4883
      fprintf (stream, "{\n\t");
4884
      tilepro_in_bundle = true;
4885
    }
4886
 
4887
  if (tilepro_in_bundle && insn_mode == QImode)
4888
    {
4889
      /* Close an existing bundle.  */
4890
      static char buf[100];
4891
 
4892
      gcc_assert (strlen (code) + 3 + 1 < sizeof (buf));
4893
 
4894
      strcpy (buf, pseudo ? "" : code);
4895
      strcat (buf, "\n\t}");
4896
      tilepro_in_bundle = false;
4897
 
4898
      return buf;
4899
    }
4900
  else
4901
    {
4902
      return pseudo ? "" : code;
4903
    }
4904
}
4905
 
4906
 
4907
/* Output assembler code to FILE to increment profiler label # LABELNO
4908
   for profiling a function entry.  */
4909
void
4910
tilepro_function_profiler (FILE *file, int labelno ATTRIBUTE_UNUSED)
4911
{
4912
  if (tilepro_in_bundle)
4913
    {
4914
      fprintf (file, "\t}\n");
4915
    }
4916
 
4917
  if (flag_pic)
4918
    {
4919
      fprintf (file,
4920
               "\t{\n"
4921
               "\tmove\tr10, lr\n"
4922
               "\tjal\t%s@plt\n"
4923
               "\t}\n", MCOUNT_NAME);
4924
    }
4925
  else
4926
    {
4927
      fprintf (file,
4928
               "\t{\n"
4929
               "\tmove\tr10, lr\n"
4930
               "\tjal\t%s\n"
4931
               "\t}\n", MCOUNT_NAME);
4932
    }
4933
 
4934
  tilepro_in_bundle = false;
4935
}
4936
 
4937
 
4938
/* Implement TARGET_ASM_FILE_END.  Emit the ".note.GNU-stack" marker
   when the target requires an explicit executable-stack indication.  */
static void
tilepro_file_end (void)
{
  if (NEED_INDICATE_EXEC_STACK)
    file_end_indicate_exec_stack ();
}
4945
 
4946
 
4947
#undef  TARGET_HAVE_TLS
4948
#define TARGET_HAVE_TLS HAVE_AS_TLS
4949
 
4950
#undef  TARGET_OPTION_OVERRIDE
4951
#define TARGET_OPTION_OVERRIDE tilepro_option_override
4952
 
4953
#undef  TARGET_SCALAR_MODE_SUPPORTED_P
4954
#define TARGET_SCALAR_MODE_SUPPORTED_P tilepro_scalar_mode_supported_p
4955
 
4956
#undef  TARGET_VECTOR_MODE_SUPPORTED_P
4957
#define TARGET_VECTOR_MODE_SUPPORTED_P tile_vector_mode_supported_p
4958
 
4959
#undef  TARGET_CANNOT_FORCE_CONST_MEM
4960
#define TARGET_CANNOT_FORCE_CONST_MEM tilepro_cannot_force_const_mem
4961
 
4962
#undef  TARGET_FUNCTION_OK_FOR_SIBCALL
4963
#define TARGET_FUNCTION_OK_FOR_SIBCALL tilepro_function_ok_for_sibcall
4964
 
4965
#undef  TARGET_PASS_BY_REFERENCE
4966
#define TARGET_PASS_BY_REFERENCE tilepro_pass_by_reference
4967
 
4968
#undef  TARGET_RETURN_IN_MEMORY
4969
#define TARGET_RETURN_IN_MEMORY tilepro_return_in_memory
4970
 
4971
#undef  TARGET_FUNCTION_ARG_BOUNDARY
4972
#define TARGET_FUNCTION_ARG_BOUNDARY tilepro_function_arg_boundary
4973
 
4974
#undef  TARGET_FUNCTION_ARG
4975
#define TARGET_FUNCTION_ARG tilepro_function_arg
4976
 
4977
#undef  TARGET_FUNCTION_ARG_ADVANCE
4978
#define TARGET_FUNCTION_ARG_ADVANCE tilepro_function_arg_advance
4979
 
4980
#undef  TARGET_FUNCTION_VALUE
4981
#define TARGET_FUNCTION_VALUE tilepro_function_value
4982
 
4983
#undef  TARGET_LIBCALL_VALUE
4984
#define TARGET_LIBCALL_VALUE tilepro_libcall_value
4985
 
4986
#undef  TARGET_FUNCTION_VALUE_REGNO_P
4987
#define TARGET_FUNCTION_VALUE_REGNO_P tilepro_function_value_regno_p
4988
 
4989
#undef  TARGET_PROMOTE_FUNCTION_MODE
4990
#define TARGET_PROMOTE_FUNCTION_MODE \
4991
  default_promote_function_mode_always_promote
4992
 
4993
#undef  TARGET_PROMOTE_PROTOTYPES
4994
#define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_false
4995
 
4996
#undef  TARGET_BUILD_BUILTIN_VA_LIST
4997
#define TARGET_BUILD_BUILTIN_VA_LIST tilepro_build_builtin_va_list
4998
 
4999
#undef  TARGET_EXPAND_BUILTIN_VA_START
5000
#define TARGET_EXPAND_BUILTIN_VA_START tilepro_va_start
5001
 
5002
#undef  TARGET_SETUP_INCOMING_VARARGS
5003
#define TARGET_SETUP_INCOMING_VARARGS tilepro_setup_incoming_varargs
5004
 
5005
#undef  TARGET_GIMPLIFY_VA_ARG_EXPR
5006
#define TARGET_GIMPLIFY_VA_ARG_EXPR tilepro_gimplify_va_arg_expr
5007
 
5008
#undef  TARGET_RTX_COSTS
5009
#define TARGET_RTX_COSTS tilepro_rtx_costs
5010
 
5011
/* Limit to what we can reach in one addli.  */
5012
#undef  TARGET_MIN_ANCHOR_OFFSET
5013
#define TARGET_MIN_ANCHOR_OFFSET -32768
5014
#undef  TARGET_MAX_ANCHOR_OFFSET
5015
#define TARGET_MAX_ANCHOR_OFFSET 32767
5016
 
5017
#undef  TARGET_LEGITIMATE_CONSTANT_P
5018
#define TARGET_LEGITIMATE_CONSTANT_P tilepro_legitimate_constant_p
5019
 
5020
#undef  TARGET_LEGITIMATE_ADDRESS_P
5021
#define TARGET_LEGITIMATE_ADDRESS_P tilepro_legitimate_address_p
5022
 
5023
#undef  TARGET_LEGITIMIZE_ADDRESS
5024
#define TARGET_LEGITIMIZE_ADDRESS tilepro_legitimize_address
5025
 
5026
#undef  TARGET_DELEGITIMIZE_ADDRESS
5027
#define TARGET_DELEGITIMIZE_ADDRESS tilepro_delegitimize_address
5028
 
5029
#undef  TARGET_INIT_BUILTINS
5030
#define TARGET_INIT_BUILTINS  tilepro_init_builtins
5031
 
5032
#undef  TARGET_BUILTIN_DECL
5033
#define TARGET_BUILTIN_DECL tilepro_builtin_decl
5034
 
5035
#undef  TARGET_EXPAND_BUILTIN
5036
#define TARGET_EXPAND_BUILTIN tilepro_expand_builtin
5037
 
5038
#undef  TARGET_CONDITIONAL_REGISTER_USAGE
5039
#define TARGET_CONDITIONAL_REGISTER_USAGE tilepro_conditional_register_usage
5040
 
5041
#undef  TARGET_FRAME_POINTER_REQUIRED
5042
#define TARGET_FRAME_POINTER_REQUIRED tilepro_frame_pointer_required
5043
 
5044
#undef  TARGET_DELAY_SCHED2
5045
#define TARGET_DELAY_SCHED2 true
5046
 
5047
#undef  TARGET_DELAY_VARTRACK
5048
#define TARGET_DELAY_VARTRACK true
5049
 
5050
#undef  TARGET_SCHED_ISSUE_RATE
5051
#define TARGET_SCHED_ISSUE_RATE tilepro_issue_rate
5052
 
5053
#undef  TARGET_SCHED_ADJUST_COST
5054
#define TARGET_SCHED_ADJUST_COST tilepro_sched_adjust_cost
5055
 
5056
#undef  TARGET_MACHINE_DEPENDENT_REORG
5057
#define TARGET_MACHINE_DEPENDENT_REORG tilepro_reorg
5058
 
5059
#undef  TARGET_ASM_CAN_OUTPUT_MI_THUNK
5060
#define TARGET_ASM_CAN_OUTPUT_MI_THUNK \
5061
  hook_bool_const_tree_hwi_hwi_const_tree_true
5062
 
5063
#undef  TARGET_ASM_OUTPUT_MI_THUNK
5064
#define TARGET_ASM_OUTPUT_MI_THUNK tilepro_asm_output_mi_thunk
5065
 
5066
#undef  TARGET_ASM_TRAMPOLINE_TEMPLATE
5067
#define TARGET_ASM_TRAMPOLINE_TEMPLATE tilepro_asm_trampoline_template
5068
 
5069
#undef  TARGET_TRAMPOLINE_INIT
5070
#define TARGET_TRAMPOLINE_INIT tilepro_trampoline_init
5071
 
5072
#undef  TARGET_PRINT_OPERAND
5073
#define TARGET_PRINT_OPERAND tilepro_print_operand
5074
 
5075
#undef  TARGET_PRINT_OPERAND_ADDRESS
5076
#define TARGET_PRINT_OPERAND_ADDRESS tilepro_print_operand_address
5077
 
5078
#undef  TARGET_ASM_FILE_END
5079
#define TARGET_ASM_FILE_END tilepro_file_end
5080
 
5081
 
5082
struct gcc_target targetm = TARGET_INITIALIZER;
5083
 
5084
#include "gt-tilepro.h"

powered by: WebSVN 2.1.0

© copyright 1999-2024 OpenCores.org, equivalent to Oliscience, all rights reserved. OpenCores®, registered trademark.