/* openrisc/trunk/gnu-old/gcc-4.2.2/gcc/targhooks.c  */

/* Default target hook functions.
   Copyright (C) 2003, 2004, 2005, 2007 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* The migration of target macros to target hooks works as follows:

   1. Create a target hook that uses the existing target macros to
      implement the same functionality.

   2. Convert all the MI files to use the hook instead of the macro.

   3. Repeat for a majority of the remaining target macros.  This will
      take some time.

   4. Tell target maintainers to start migrating.

   5. Eventually convert the backends to override the hook instead of
      defining the macros.  This will take some time too.

   6. TBD when, poison the macros.  Unmigrated targets will break at
      this point.

   Note that we expect steps 1-3 to be done by the people that
   understand what the MI does with each macro, and step 5 to be done
   by the target maintainers for their respective targets.

   Note that steps 1 and 2 don't have to be done together, but no
   target can override the new hook until step 2 is complete for it.

   Once the macros are poisoned, we will revert to the old migration
   rules - migrate the macro, callers, and targets all at once.  This
   comment can thus be removed at that point.  */
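
/* As an illustration of step 5 (a sketch, not part of the original sources):
   a backend overrides a hook from its machine-specific source file by
   redefining the corresponding TARGET_* macro before building the target
   vector, e.g.

     #undef  TARGET_RETURN_IN_MEMORY
     #define TARGET_RETURN_IN_MEMORY my_cpu_return_in_memory

     struct gcc_target targetm = TARGET_INITIALIZER;

   where my_cpu_return_in_memory is a hypothetical function with the same
   signature as default_return_in_memory below.  */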
 
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "machmode.h"
#include "rtl.h"
#include "tree.h"
#include "expr.h"
#include "output.h"
#include "toplev.h"
#include "function.h"
#include "target.h"
#include "tm_p.h"
#include "target-def.h"
#include "ggc.h"
#include "hard-reg-set.h"
#include "reload.h"
#include "optabs.h"
#include "recog.h"

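/* The default implementation of TARGET_ASM_EXTERNAL_LIBCALL: declare FUN to
   the assembler if the target defines ASM_OUTPUT_EXTERNAL_LIBCALL.  */
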
void
default_external_libcall (rtx fun ATTRIBUTE_UNUSED)
{
#ifdef ASM_OUTPUT_EXTERNAL_LIBCALL
  ASM_OUTPUT_EXTERNAL_LIBCALL(asm_out_file, fun);
#endif
}

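/* The default implementation of TARGET_CC_MODES_COMPATIBLE: two condition
   code modes are compatible only if they are identical.  */
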
enum machine_mode
default_cc_modes_compatible (enum machine_mode m1, enum machine_mode m2)
{
  if (m1 == m2)
    return m1;
  return VOIDmode;
}

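/* The default implementation of TARGET_RETURN_IN_MEMORY, expressed in terms
   of the older RETURN_IN_MEMORY macro where the target defines it.  */
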
bool
default_return_in_memory (tree type,
                          tree fntype ATTRIBUTE_UNUSED)
{
#ifndef RETURN_IN_MEMORY
  return (TYPE_MODE (type) == BLKmode);
#else
  return RETURN_IN_MEMORY (type);
#endif
}

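/* The default implementation of TARGET_EXPAND_BUILTIN_SAVEREGS: report that
   __builtin_saveregs is not supported.  */
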
rtx
default_expand_builtin_saveregs (void)
{
  error ("__builtin_saveregs not supported by this target");
  return const0_rtx;
}

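/* The default implementation of TARGET_SETUP_INCOMING_VARARGS: do nothing.  */
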
void
default_setup_incoming_varargs (CUMULATIVE_ARGS *ca ATTRIBUTE_UNUSED,
                                enum machine_mode mode ATTRIBUTE_UNUSED,
                                tree type ATTRIBUTE_UNUSED,
                                int *pretend_arg_size ATTRIBUTE_UNUSED,
                                int second_time ATTRIBUTE_UNUSED)
{
}

/* The default implementation of TARGET_BUILTIN_SETJMP_FRAME_VALUE.  */

rtx
default_builtin_setjmp_frame_value (void)
{
  return virtual_stack_vars_rtx;
}

/* Generic hook that takes a CUMULATIVE_ARGS pointer and returns false.  */

bool
hook_bool_CUMULATIVE_ARGS_false (CUMULATIVE_ARGS *ca ATTRIBUTE_UNUSED)
{
  return false;
}

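/* The default implementation of TARGET_PRETEND_OUTGOING_VARARGS_NAMED: return
   true only if the target provides its own setup_incoming_varargs hook.  */
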
bool
default_pretend_outgoing_varargs_named (CUMULATIVE_ARGS *ca ATTRIBUTE_UNUSED)
{
  return (targetm.calls.setup_incoming_varargs
          != default_setup_incoming_varargs);
}

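/* The default implementation of TARGET_EH_RETURN_FILTER_MODE: use word_mode.  */
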
enum machine_mode
default_eh_return_filter_mode (void)
{
  return word_mode;
}

/* The default implementation of TARGET_SHIFT_TRUNCATION_MASK.  */

unsigned HOST_WIDE_INT
default_shift_truncation_mask (enum machine_mode mode)
{
  return SHIFT_COUNT_TRUNCATED ? GET_MODE_BITSIZE (mode) - 1 : 0;
}

/* The default implementation of TARGET_MIN_DIVISIONS_FOR_RECIP_MUL.  */

unsigned int
default_min_divisions_for_recip_mul (enum machine_mode mode ATTRIBUTE_UNUSED)
{
  return have_insn_for (DIV, mode) ? 3 : 2;
}

/* The default implementation of TARGET_MODE_REP_EXTENDED.  */

int
default_mode_rep_extended (enum machine_mode mode ATTRIBUTE_UNUSED,
                           enum machine_mode mode_rep ATTRIBUTE_UNUSED)
{
  return UNKNOWN;
}

/* Generic hook that takes a CUMULATIVE_ARGS pointer and returns true.  */

bool
hook_bool_CUMULATIVE_ARGS_true (CUMULATIVE_ARGS * a ATTRIBUTE_UNUSED)
{
  return true;
}


/* The generic C++ ABI specifies this is a 64-bit value.  */
tree
default_cxx_guard_type (void)
{
  return long_long_integer_type_node;
}


/* Returns the size of the cookie to use when allocating an array
   whose elements have the indicated TYPE.  Assumes that it is already
   known that a cookie is needed.  */

tree
default_cxx_get_cookie_size (tree type)
{
  tree cookie_size;

  /* We need to allocate an additional max (sizeof (size_t), alignof
     (true_type)) bytes.  */
  tree sizetype_size;
  tree type_align;

  sizetype_size = size_in_bytes (sizetype);
  type_align = size_int (TYPE_ALIGN_UNIT (type));
  if (INT_CST_LT_UNSIGNED (type_align, sizetype_size))
    cookie_size = sizetype_size;
  else
    cookie_size = type_align;

  return cookie_size;
}

/* Return true if a parameter must be passed by reference.  This version
   of the TARGET_PASS_BY_REFERENCE hook uses just MUST_PASS_IN_STACK.  */

bool
hook_pass_by_reference_must_pass_in_stack (CUMULATIVE_ARGS *c ATTRIBUTE_UNUSED,
        enum machine_mode mode ATTRIBUTE_UNUSED, tree type ATTRIBUTE_UNUSED,
        bool named_arg ATTRIBUTE_UNUSED)
{
  return targetm.calls.must_pass_in_stack (mode, type);
}

/* Return true if a parameter follows callee copies conventions.  This
   version of the hook is true for all named arguments.  */

bool
hook_callee_copies_named (CUMULATIVE_ARGS *ca ATTRIBUTE_UNUSED,
                          enum machine_mode mode ATTRIBUTE_UNUSED,
                          tree type ATTRIBUTE_UNUSED, bool named)
{
  return named;
}

/* Emit any directives required to unwind this instruction.  */

void
default_unwind_emit (FILE * stream ATTRIBUTE_UNUSED,
                     rtx insn ATTRIBUTE_UNUSED)
{
  /* Should never happen.  */
  gcc_unreachable ();
}

/* True if MODE is valid for the target.  By "valid", we mean able to
   be manipulated in non-trivial ways.  In particular, this means all
   the arithmetic is supported.

   By default we guess this means that any C type is supported.  If
   we can't map the mode back to a type that would be available in C,
   then reject it.  Special case, here, is the double-word arithmetic
   supported by optabs.c.  */

bool
default_scalar_mode_supported_p (enum machine_mode mode)
{
  int precision = GET_MODE_PRECISION (mode);

  switch (GET_MODE_CLASS (mode))
    {
    case MODE_PARTIAL_INT:
    case MODE_INT:
      if (precision == CHAR_TYPE_SIZE)
        return true;
      if (precision == SHORT_TYPE_SIZE)
        return true;
      if (precision == INT_TYPE_SIZE)
        return true;
      if (precision == LONG_TYPE_SIZE)
        return true;
      if (precision == LONG_LONG_TYPE_SIZE)
        return true;
      if (precision == 2 * BITS_PER_WORD)
        return true;
      return false;

    case MODE_FLOAT:
      if (precision == FLOAT_TYPE_SIZE)
        return true;
      if (precision == DOUBLE_TYPE_SIZE)
        return true;
      if (precision == LONG_DOUBLE_TYPE_SIZE)
        return true;
      return false;

    case MODE_DECIMAL_FLOAT:
      return false;

    default:
      gcc_unreachable ();
    }
}

/* True if the target supports decimal floating point.  */

bool
default_decimal_float_supported_p (void)
{
  return ENABLE_DECIMAL_FLOAT;
}

/* Return NULL if INSN is valid within a low-overhead loop, otherwise return
   an error message.

   This function checks whether a given INSN is valid within a low-overhead
   loop.  If INSN is invalid it returns the reason for that, otherwise it
   returns NULL.  A called function may clobber any special registers required
   for low-overhead looping.  Additionally, some targets (e.g., PPC) use the
   count register for branch on table instructions.  We reject the doloop
   pattern in these cases.  */

const char *
default_invalid_within_doloop (rtx insn)
{
  if (CALL_P (insn))
    return "Function call in loop.";

  if (JUMP_P (insn)
      && (GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC
          || GET_CODE (PATTERN (insn)) == ADDR_VEC))
    return "Computed branch in the loop.";

  return NULL;
}

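/* Generic hook that takes a CUMULATIVE_ARGS pointer, a machine mode, a tree
   and a bool, and returns false.  */
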
bool
hook_bool_CUMULATIVE_ARGS_mode_tree_bool_false (
        CUMULATIVE_ARGS *ca ATTRIBUTE_UNUSED,
        enum machine_mode mode ATTRIBUTE_UNUSED,
        tree type ATTRIBUTE_UNUSED, bool named ATTRIBUTE_UNUSED)
{
  return false;
}

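/* Generic hook that takes a CUMULATIVE_ARGS pointer, a machine mode, a tree
   and a bool, and returns true.  */
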
bool
hook_bool_CUMULATIVE_ARGS_mode_tree_bool_true (
        CUMULATIVE_ARGS *ca ATTRIBUTE_UNUSED,
        enum machine_mode mode ATTRIBUTE_UNUSED,
        tree type ATTRIBUTE_UNUSED, bool named ATTRIBUTE_UNUSED)
{
  return true;
}

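/* Generic hook that takes a CUMULATIVE_ARGS pointer, a machine mode, a tree
   and a bool, and returns 0.  */
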
int
hook_int_CUMULATIVE_ARGS_mode_tree_bool_0 (
        CUMULATIVE_ARGS *ca ATTRIBUTE_UNUSED,
        enum machine_mode mode ATTRIBUTE_UNUSED,
        tree type ATTRIBUTE_UNUSED, bool named ATTRIBUTE_UNUSED)
{
  return 0;
}

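/* Generic hook that takes a bitmap and does nothing with it.  */
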
void
hook_void_bitmap (bitmap regs ATTRIBUTE_UNUSED)
{
}

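/* Generic hook used to validate an argument VAL passed to an unprototyped
   function FUNCDECL: accept anything by returning NULL rather than an error
   message.  */
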
const char *
hook_invalid_arg_for_unprototyped_fn (
        tree typelist ATTRIBUTE_UNUSED,
        tree funcdecl ATTRIBUTE_UNUSED,
        tree val ATTRIBUTE_UNUSED)
{
  return NULL;
}

/* Initialize the stack protection decls.  */

/* Stack protection related decls living in libgcc.  */
static GTY(()) tree stack_chk_guard_decl;

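/* The default implementation of TARGET_STACK_PROTECT_GUARD: return a decl for
   the external __stack_chk_guard variable, creating it on first use.  */
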
tree
default_stack_protect_guard (void)
{
  tree t = stack_chk_guard_decl;

  if (t == NULL)
    {
      t = build_decl (VAR_DECL, get_identifier ("__stack_chk_guard"),
                      ptr_type_node);
      TREE_STATIC (t) = 1;
      TREE_PUBLIC (t) = 1;
      DECL_EXTERNAL (t) = 1;
      TREE_USED (t) = 1;
      TREE_THIS_VOLATILE (t) = 1;
      DECL_ARTIFICIAL (t) = 1;
      DECL_IGNORED_P (t) = 1;

      stack_chk_guard_decl = t;
    }

  return t;
}

static GTY(()) tree stack_chk_fail_decl;

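/* The default implementation of TARGET_STACK_PROTECT_FAIL: build a call to
   the external function __stack_chk_fail.  */
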
tree
default_external_stack_protect_fail (void)
{
  tree t = stack_chk_fail_decl;

  if (t == NULL_TREE)
    {
      t = build_function_type_list (void_type_node, NULL_TREE);
      t = build_decl (FUNCTION_DECL, get_identifier ("__stack_chk_fail"), t);
      TREE_STATIC (t) = 1;
      TREE_PUBLIC (t) = 1;
      DECL_EXTERNAL (t) = 1;
      TREE_USED (t) = 1;
      TREE_THIS_VOLATILE (t) = 1;
      TREE_NOTHROW (t) = 1;
      DECL_ARTIFICIAL (t) = 1;
      DECL_IGNORED_P (t) = 1;
      DECL_VISIBILITY (t) = VISIBILITY_DEFAULT;
      DECL_VISIBILITY_SPECIFIED (t) = 1;

      stack_chk_fail_decl = t;
    }

  return build_function_call_expr (t, NULL_TREE);
}

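/* Variant of the stack-protector failure hook that, when the assembler
   supports hidden visibility and we are generating PIC, calls the hidden
   local stub __stack_chk_fail_local instead of the external function.  */
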
tree
default_hidden_stack_protect_fail (void)
{
#ifndef HAVE_GAS_HIDDEN
  return default_external_stack_protect_fail ();
#else
  tree t = stack_chk_fail_decl;

  if (!flag_pic)
    return default_external_stack_protect_fail ();

  if (t == NULL_TREE)
    {
      t = build_function_type_list (void_type_node, NULL_TREE);
      t = build_decl (FUNCTION_DECL,
                      get_identifier ("__stack_chk_fail_local"), t);
      TREE_STATIC (t) = 1;
      TREE_PUBLIC (t) = 1;
      DECL_EXTERNAL (t) = 1;
      TREE_USED (t) = 1;
      TREE_THIS_VOLATILE (t) = 1;
      TREE_NOTHROW (t) = 1;
      DECL_ARTIFICIAL (t) = 1;
      DECL_IGNORED_P (t) = 1;
      DECL_VISIBILITY_SPECIFIED (t) = 1;
      DECL_VISIBILITY (t) = VISIBILITY_HIDDEN;

      stack_chk_fail_decl = t;
    }

  return build_function_call_expr (t, NULL_TREE);
#endif
}

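/* Generic hook that takes an rtx and returns true if it is a commutative
   operation, as determined by COMMUTATIVE_P.  */
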
bool
hook_bool_rtx_commutative_p (rtx x, int outer_code ATTRIBUTE_UNUSED)
{
  return COMMUTATIVE_P (x);
}

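/* Default hook for computing the rtx in which a function's return value is
   passed, expressed in terms of the older FUNCTION_VALUE and
   FUNCTION_OUTGOING_VALUE macros.  */
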
rtx
default_function_value (tree ret_type ATTRIBUTE_UNUSED,
                        tree fn_decl_or_type,
                        bool outgoing ATTRIBUTE_UNUSED)
{
  /* The old interface doesn't handle receiving the function type.  */
  if (fn_decl_or_type
      && !DECL_P (fn_decl_or_type))
    fn_decl_or_type = NULL;

#ifdef FUNCTION_OUTGOING_VALUE
  if (outgoing)
    return FUNCTION_OUTGOING_VALUE (ret_type, fn_decl_or_type);
#endif

#ifdef FUNCTION_VALUE
  return FUNCTION_VALUE (ret_type, fn_decl_or_type);
#else
  return NULL_RTX;
#endif
}

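/* Default hook for choosing the rtx used internally to address incoming
   arguments: copy the virtual incoming-args pointer into a pseudo when the
   argument pointer is not a fixed register (or is the stack pointer),
   otherwise use it directly.  */
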
rtx
default_internal_arg_pointer (void)
{
  /* If the reg that the virtual arg pointer will be translated into is
     not a fixed reg or is the stack pointer, make a copy of the virtual
     arg pointer, and address parms via the copy.  The frame pointer is
     considered fixed even though it is not marked as such.  */
  if ((ARG_POINTER_REGNUM == STACK_POINTER_REGNUM
       || ! (fixed_regs[ARG_POINTER_REGNUM]
             || ARG_POINTER_REGNUM == FRAME_POINTER_REGNUM)))
    return copy_to_reg (virtual_incoming_args_rtx);
  else
    return virtual_incoming_args_rtx;
}

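/* The default implementation of TARGET_SECONDARY_RELOAD, expressed in terms
   of the older SECONDARY_INPUT_RELOAD_CLASS and SECONDARY_OUTPUT_RELOAD_CLASS
   macros and the reload_in/reload_out optabs.  */
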
enum reg_class
default_secondary_reload (bool in_p ATTRIBUTE_UNUSED, rtx x ATTRIBUTE_UNUSED,
                          enum reg_class reload_class ATTRIBUTE_UNUSED,
                          enum machine_mode reload_mode ATTRIBUTE_UNUSED,
                          secondary_reload_info *sri)
{
  enum reg_class class = NO_REGS;

  if (sri->prev_sri && sri->prev_sri->t_icode != CODE_FOR_nothing)
    {
      sri->icode = sri->prev_sri->t_icode;
      return NO_REGS;
    }
#ifdef SECONDARY_INPUT_RELOAD_CLASS
  if (in_p)
    class = SECONDARY_INPUT_RELOAD_CLASS (reload_class, reload_mode, x);
#endif
#ifdef SECONDARY_OUTPUT_RELOAD_CLASS
  if (! in_p)
    class = SECONDARY_OUTPUT_RELOAD_CLASS (reload_class, reload_mode, x);
#endif
  if (class != NO_REGS)
    {
      enum insn_code icode = (in_p ? reload_in_optab[(int) reload_mode]
                              : reload_out_optab[(int) reload_mode]);

      if (icode != CODE_FOR_nothing
          && insn_data[(int) icode].operand[in_p].predicate
          && ! insn_data[(int) icode].operand[in_p].predicate (x, reload_mode))
        icode = CODE_FOR_nothing;
      else if (icode != CODE_FOR_nothing)
        {
          const char *insn_constraint, *scratch_constraint;
          char insn_letter, scratch_letter;
          enum reg_class insn_class, scratch_class;

          gcc_assert (insn_data[(int) icode].n_operands == 3);
          insn_constraint = insn_data[(int) icode].operand[!in_p].constraint;
          if (!*insn_constraint)
            insn_class = ALL_REGS;
          else
            {
              if (in_p)
                {
                  gcc_assert (*insn_constraint == '=');
                  insn_constraint++;
                }
              insn_letter = *insn_constraint;
              insn_class
                = (insn_letter == 'r' ? GENERAL_REGS
                   : REG_CLASS_FROM_CONSTRAINT ((unsigned char) insn_letter,
                                                insn_constraint));
              gcc_assert (insn_class != NO_REGS);
            }

          scratch_constraint = insn_data[(int) icode].operand[2].constraint;
          /* The scratch register's constraint must start with "=&",
             except for an input reload, where only "=" is necessary,
             and where it might be beneficial to re-use registers from
             the input.  */
          gcc_assert (scratch_constraint[0] == '='
                      && (in_p || scratch_constraint[1] == '&'));
          scratch_constraint++;
          if (*scratch_constraint == '&')
            scratch_constraint++;
          scratch_letter = *scratch_constraint;
          scratch_class
            = (scratch_letter == 'r' ? GENERAL_REGS
               : REG_CLASS_FROM_CONSTRAINT ((unsigned char) scratch_letter,
                                            scratch_constraint));

          if (reg_class_subset_p (reload_class, insn_class))
            {
              gcc_assert (scratch_class == class);
              class = NO_REGS;
            }
          else
            class = insn_class;

        }
      if (class == NO_REGS)
        sri->icode = icode;
      else
        sri->t_icode = icode;
    }
  return class;
}


/* If STRICT_ALIGNMENT is true we use the container type for accessing
   volatile bitfields.  This is generally the preferred behavior for memory
   mapped peripherals on RISC architectures.
   If STRICT_ALIGNMENT is false we use the narrowest type possible.  This
   is typically used to avoid spurious page faults and extra memory accesses
   due to unaligned accesses on CISC architectures.  */

bool
default_narrow_bitfield (void)
{
  return !STRICT_ALIGNMENT;
}

/* By default, if flag_pic is true, then neither local nor global relocs
   should be placed in readonly memory.  */

int
default_reloc_rw_mask (void)
{
  return flag_pic ? 3 : 0;
}

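/* Return true if the vectorizer may assume that natural vector alignment is
   reachable for the given TYPE (for instance by peeling a few loop
   iterations); packed types and types wider than a pointer are assumed not
   to be naturally aligned.  */
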
bool
default_builtin_vector_alignment_reachable (tree type, bool is_packed)
{
  if (is_packed)
    return false;

  /* Assuming that types whose size is > pointer-size are not guaranteed to be
     naturally aligned.  */
  if (tree_int_cst_compare (TYPE_SIZE (type), bitsize_int (POINTER_SIZE)) > 0)
    return false;

  /* Assuming that types whose size is <= pointer-size
     are naturally aligned.  */
  return true;
}

#include "gt-targhooks.h"
