OpenCores — Subversion repository "openrisc"
https://opencores.org/ocsvn/openrisc/openrisc/trunk
openrisc/trunk/gnu-dev/or1k-gcc/gcc/config/pa/pa.c (rev 753; all lines from rev 709, author jeremybenn)
/* Subroutines for insn-output.c for HPPA.
   Copyright (C) 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001,
   2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011
   Free Software Foundation, Inc.
   Contributed by Tim Moore (moore@cs.utah.edu), based on sparc.c

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "conditions.h"
#include "insn-attr.h"
#include "flags.h"
#include "tree.h"
#include "output.h"
#include "except.h"
#include "expr.h"
#include "optabs.h"
#include "reload.h"
#include "integrate.h"
#include "function.h"
#include "diagnostic-core.h"
#include "ggc.h"
#include "recog.h"
#include "predict.h"
#include "tm_p.h"
#include "target.h"
#include "common/common-target.h"
#include "target-def.h"
#include "langhooks.h"
#include "df.h"
#include "opts.h"

/* Return nonzero if there is a bypass for the output of
   OUT_INSN and the fp store IN_INSN.  */
int
pa_fpstore_bypass_p (rtx out_insn, rtx in_insn)
{
  enum machine_mode store_mode;
  enum machine_mode other_mode;
  rtx set;

  if (recog_memoized (in_insn) < 0
      || (get_attr_type (in_insn) != TYPE_FPSTORE
          && get_attr_type (in_insn) != TYPE_FPSTORE_LOAD)
      || recog_memoized (out_insn) < 0)
    return 0;

  store_mode = GET_MODE (SET_SRC (PATTERN (in_insn)));

  set = single_set (out_insn);
  if (!set)
    return 0;

  other_mode = GET_MODE (SET_SRC (set));

  return (GET_MODE_SIZE (store_mode) == GET_MODE_SIZE (other_mode));
}
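
/* For example (illustrative, not from the original source): the test
   only compares the sizes of the two SET_SRC modes, so a producer
   computing an SFmode value that feeds an SFmode fp store qualifies
   for the bypass, while a DFmode result feeding an SFmode store does
   not, e.g.:

     bypass:    (set (reg:SF f4) (mult:SF ...))
                -> (set (mem:SF ...) (reg:SF f4))
     no bypass: (set (reg:DF f4) (mult:DF ...))
                -> (set (mem:SF ...) ...)  */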

#ifndef DO_FRAME_NOTES
#ifdef INCOMING_RETURN_ADDR_RTX
#define DO_FRAME_NOTES 1
#else
#define DO_FRAME_NOTES 0
#endif
#endif

static void pa_option_override (void);
static void copy_reg_pointer (rtx, rtx);
static void fix_range (const char *);
static int hppa_register_move_cost (enum machine_mode mode, reg_class_t,
                                    reg_class_t);
static int hppa_address_cost (rtx, bool);
static bool hppa_rtx_costs (rtx, int, int, int, int *, bool);
static inline rtx force_mode (enum machine_mode, rtx);
static void pa_reorg (void);
static void pa_combine_instructions (void);
static int pa_can_combine_p (rtx, rtx, rtx, int, rtx, rtx, rtx);
static bool forward_branch_p (rtx);
static void compute_zdepwi_operands (unsigned HOST_WIDE_INT, unsigned *);
static void compute_zdepdi_operands (unsigned HOST_WIDE_INT, unsigned *);
static int compute_movmem_length (rtx);
static int compute_clrmem_length (rtx);
static bool pa_assemble_integer (rtx, unsigned int, int);
static void remove_useless_addtr_insns (int);
static void store_reg (int, HOST_WIDE_INT, int);
static void store_reg_modify (int, int, HOST_WIDE_INT);
static void load_reg (int, HOST_WIDE_INT, int);
static void set_reg_plus_d (int, int, HOST_WIDE_INT, int);
static rtx pa_function_value (const_tree, const_tree, bool);
static rtx pa_libcall_value (enum machine_mode, const_rtx);
static bool pa_function_value_regno_p (const unsigned int);
static void pa_output_function_prologue (FILE *, HOST_WIDE_INT);
static void update_total_code_bytes (unsigned int);
static void pa_output_function_epilogue (FILE *, HOST_WIDE_INT);
static int pa_adjust_cost (rtx, rtx, rtx, int);
static int pa_adjust_priority (rtx, int);
static int pa_issue_rate (void);
static void pa_som_asm_init_sections (void) ATTRIBUTE_UNUSED;
static section *pa_som_tm_clone_table_section (void) ATTRIBUTE_UNUSED;
static section *pa_select_section (tree, int, unsigned HOST_WIDE_INT)
     ATTRIBUTE_UNUSED;
static void pa_encode_section_info (tree, rtx, int);
static const char *pa_strip_name_encoding (const char *);
static bool pa_function_ok_for_sibcall (tree, tree);
static void pa_globalize_label (FILE *, const char *)
     ATTRIBUTE_UNUSED;
static void pa_asm_output_mi_thunk (FILE *, tree, HOST_WIDE_INT,
                                    HOST_WIDE_INT, tree);
#if !defined(USE_COLLECT2)
static void pa_asm_out_constructor (rtx, int);
static void pa_asm_out_destructor (rtx, int);
#endif
static void pa_init_builtins (void);
static rtx pa_expand_builtin (tree, rtx, rtx, enum machine_mode mode, int);
static rtx hppa_builtin_saveregs (void);
static void hppa_va_start (tree, rtx);
static tree hppa_gimplify_va_arg_expr (tree, tree, gimple_seq *, gimple_seq *);
static bool pa_scalar_mode_supported_p (enum machine_mode);
static bool pa_commutative_p (const_rtx x, int outer_code);
static void copy_fp_args (rtx) ATTRIBUTE_UNUSED;
static int length_fp_args (rtx) ATTRIBUTE_UNUSED;
static rtx hppa_legitimize_address (rtx, rtx, enum machine_mode);
static inline void pa_file_start_level (void) ATTRIBUTE_UNUSED;
static inline void pa_file_start_space (int) ATTRIBUTE_UNUSED;
static inline void pa_file_start_file (int) ATTRIBUTE_UNUSED;
static inline void pa_file_start_mcount (const char*) ATTRIBUTE_UNUSED;
static void pa_elf_file_start (void) ATTRIBUTE_UNUSED;
static void pa_som_file_start (void) ATTRIBUTE_UNUSED;
static void pa_linux_file_start (void) ATTRIBUTE_UNUSED;
static void pa_hpux64_gas_file_start (void) ATTRIBUTE_UNUSED;
static void pa_hpux64_hpas_file_start (void) ATTRIBUTE_UNUSED;
static void output_deferred_plabels (void);
static void output_deferred_profile_counters (void) ATTRIBUTE_UNUSED;
#ifdef ASM_OUTPUT_EXTERNAL_REAL
static void pa_hpux_file_end (void);
#endif
static void pa_init_libfuncs (void);
static rtx pa_struct_value_rtx (tree, int);
static bool pa_pass_by_reference (cumulative_args_t, enum machine_mode,
                                  const_tree, bool);
static int pa_arg_partial_bytes (cumulative_args_t, enum machine_mode,
                                 tree, bool);
static void pa_function_arg_advance (cumulative_args_t, enum machine_mode,
                                     const_tree, bool);
static rtx pa_function_arg (cumulative_args_t, enum machine_mode,
                            const_tree, bool);
static unsigned int pa_function_arg_boundary (enum machine_mode, const_tree);
static struct machine_function * pa_init_machine_status (void);
static reg_class_t pa_secondary_reload (bool, rtx, reg_class_t,
                                        enum machine_mode,
                                        secondary_reload_info *);
static void pa_extra_live_on_entry (bitmap);
static enum machine_mode pa_promote_function_mode (const_tree,
                                                   enum machine_mode, int *,
                                                   const_tree, int);

static void pa_asm_trampoline_template (FILE *);
static void pa_trampoline_init (rtx, tree, rtx);
static rtx pa_trampoline_adjust_address (rtx);
static rtx pa_delegitimize_address (rtx);
static bool pa_print_operand_punct_valid_p (unsigned char);
static rtx pa_internal_arg_pointer (void);
static bool pa_can_eliminate (const int, const int);
static void pa_conditional_register_usage (void);
static enum machine_mode pa_c_mode_for_suffix (char);
static section *pa_function_section (tree, enum node_frequency, bool, bool);
static bool pa_cannot_force_const_mem (enum machine_mode, rtx);
static bool pa_legitimate_constant_p (enum machine_mode, rtx);

/* The following extra sections are only used for SOM.  */
static GTY(()) section *som_readonly_data_section;
static GTY(()) section *som_one_only_readonly_data_section;
static GTY(()) section *som_one_only_data_section;
static GTY(()) section *som_tm_clone_table_section;

/* Counts for the number of callee-saved general and floating point
   registers which were saved by the current function's prologue.  */
static int gr_saved, fr_saved;

/* Boolean indicating whether the return pointer was saved by the
   current function's prologue.  */
static bool rp_saved;

static rtx find_addr_reg (rtx);

/* Keep track of the number of bytes we have output in the CODE subspace
   during this compilation so we'll know when to emit inline long-calls.  */
unsigned long total_code_bytes;

/* The last address of the previous function plus the number of bytes in
   associated thunks that have been output.  This is used to determine if
   a thunk can use an IA-relative branch to reach its target function.  */
static unsigned int last_address;

/* Variables to handle plabels that we discover are necessary at assembly
   output time.  They are output after the current function.  */
struct GTY(()) deferred_plabel
{
  rtx internal_label;
  rtx symbol;
};
static GTY((length ("n_deferred_plabels"))) struct deferred_plabel *
  deferred_plabels;
static size_t n_deferred_plabels = 0;

/* Initialize the GCC target structure.  */

#undef TARGET_OPTION_OVERRIDE
#define TARGET_OPTION_OVERRIDE pa_option_override

#undef TARGET_ASM_ALIGNED_HI_OP
#define TARGET_ASM_ALIGNED_HI_OP "\t.half\t"
#undef TARGET_ASM_ALIGNED_SI_OP
#define TARGET_ASM_ALIGNED_SI_OP "\t.word\t"
#undef TARGET_ASM_ALIGNED_DI_OP
#define TARGET_ASM_ALIGNED_DI_OP "\t.dword\t"
#undef TARGET_ASM_UNALIGNED_HI_OP
#define TARGET_ASM_UNALIGNED_HI_OP TARGET_ASM_ALIGNED_HI_OP
#undef TARGET_ASM_UNALIGNED_SI_OP
#define TARGET_ASM_UNALIGNED_SI_OP TARGET_ASM_ALIGNED_SI_OP
#undef TARGET_ASM_UNALIGNED_DI_OP
#define TARGET_ASM_UNALIGNED_DI_OP TARGET_ASM_ALIGNED_DI_OP
#undef TARGET_ASM_INTEGER
#define TARGET_ASM_INTEGER pa_assemble_integer

#undef TARGET_ASM_FUNCTION_PROLOGUE
#define TARGET_ASM_FUNCTION_PROLOGUE pa_output_function_prologue
#undef TARGET_ASM_FUNCTION_EPILOGUE
#define TARGET_ASM_FUNCTION_EPILOGUE pa_output_function_epilogue

#undef TARGET_FUNCTION_VALUE
#define TARGET_FUNCTION_VALUE pa_function_value
#undef TARGET_LIBCALL_VALUE
#define TARGET_LIBCALL_VALUE pa_libcall_value
#undef TARGET_FUNCTION_VALUE_REGNO_P
#define TARGET_FUNCTION_VALUE_REGNO_P pa_function_value_regno_p

#undef TARGET_LEGITIMIZE_ADDRESS
#define TARGET_LEGITIMIZE_ADDRESS hppa_legitimize_address

#undef TARGET_SCHED_ADJUST_COST
#define TARGET_SCHED_ADJUST_COST pa_adjust_cost
#undef TARGET_SCHED_ADJUST_PRIORITY
#define TARGET_SCHED_ADJUST_PRIORITY pa_adjust_priority
#undef TARGET_SCHED_ISSUE_RATE
#define TARGET_SCHED_ISSUE_RATE pa_issue_rate

#undef TARGET_ENCODE_SECTION_INFO
#define TARGET_ENCODE_SECTION_INFO pa_encode_section_info
#undef TARGET_STRIP_NAME_ENCODING
#define TARGET_STRIP_NAME_ENCODING pa_strip_name_encoding

#undef TARGET_FUNCTION_OK_FOR_SIBCALL
#define TARGET_FUNCTION_OK_FOR_SIBCALL pa_function_ok_for_sibcall

#undef TARGET_COMMUTATIVE_P
#define TARGET_COMMUTATIVE_P pa_commutative_p

#undef TARGET_ASM_OUTPUT_MI_THUNK
#define TARGET_ASM_OUTPUT_MI_THUNK pa_asm_output_mi_thunk
#undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
#define TARGET_ASM_CAN_OUTPUT_MI_THUNK default_can_output_mi_thunk_no_vcall

#undef TARGET_ASM_FILE_END
#ifdef ASM_OUTPUT_EXTERNAL_REAL
#define TARGET_ASM_FILE_END pa_hpux_file_end
#else
#define TARGET_ASM_FILE_END output_deferred_plabels
#endif

#undef TARGET_PRINT_OPERAND_PUNCT_VALID_P
#define TARGET_PRINT_OPERAND_PUNCT_VALID_P pa_print_operand_punct_valid_p

#if !defined(USE_COLLECT2)
#undef TARGET_ASM_CONSTRUCTOR
#define TARGET_ASM_CONSTRUCTOR pa_asm_out_constructor
#undef TARGET_ASM_DESTRUCTOR
#define TARGET_ASM_DESTRUCTOR pa_asm_out_destructor
#endif

#undef TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS pa_init_builtins

#undef TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN pa_expand_builtin

#undef TARGET_REGISTER_MOVE_COST
#define TARGET_REGISTER_MOVE_COST hppa_register_move_cost
#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS hppa_rtx_costs
#undef TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST hppa_address_cost

#undef TARGET_MACHINE_DEPENDENT_REORG
#define TARGET_MACHINE_DEPENDENT_REORG pa_reorg

#undef TARGET_INIT_LIBFUNCS
#define TARGET_INIT_LIBFUNCS pa_init_libfuncs

#undef TARGET_PROMOTE_FUNCTION_MODE
#define TARGET_PROMOTE_FUNCTION_MODE pa_promote_function_mode
#undef TARGET_PROMOTE_PROTOTYPES
#define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true

#undef TARGET_STRUCT_VALUE_RTX
#define TARGET_STRUCT_VALUE_RTX pa_struct_value_rtx
#undef TARGET_RETURN_IN_MEMORY
#define TARGET_RETURN_IN_MEMORY pa_return_in_memory
#undef TARGET_MUST_PASS_IN_STACK
#define TARGET_MUST_PASS_IN_STACK must_pass_in_stack_var_size
#undef TARGET_PASS_BY_REFERENCE
#define TARGET_PASS_BY_REFERENCE pa_pass_by_reference
#undef TARGET_CALLEE_COPIES
#define TARGET_CALLEE_COPIES hook_bool_CUMULATIVE_ARGS_mode_tree_bool_true
#undef TARGET_ARG_PARTIAL_BYTES
#define TARGET_ARG_PARTIAL_BYTES pa_arg_partial_bytes
#undef TARGET_FUNCTION_ARG
#define TARGET_FUNCTION_ARG pa_function_arg
#undef TARGET_FUNCTION_ARG_ADVANCE
#define TARGET_FUNCTION_ARG_ADVANCE pa_function_arg_advance
#undef TARGET_FUNCTION_ARG_BOUNDARY
#define TARGET_FUNCTION_ARG_BOUNDARY pa_function_arg_boundary

#undef TARGET_EXPAND_BUILTIN_SAVEREGS
#define TARGET_EXPAND_BUILTIN_SAVEREGS hppa_builtin_saveregs
#undef TARGET_EXPAND_BUILTIN_VA_START
#define TARGET_EXPAND_BUILTIN_VA_START hppa_va_start
#undef TARGET_GIMPLIFY_VA_ARG_EXPR
#define TARGET_GIMPLIFY_VA_ARG_EXPR hppa_gimplify_va_arg_expr

#undef TARGET_SCALAR_MODE_SUPPORTED_P
#define TARGET_SCALAR_MODE_SUPPORTED_P pa_scalar_mode_supported_p

#undef TARGET_CANNOT_FORCE_CONST_MEM
#define TARGET_CANNOT_FORCE_CONST_MEM pa_cannot_force_const_mem

#undef TARGET_SECONDARY_RELOAD
#define TARGET_SECONDARY_RELOAD pa_secondary_reload

#undef TARGET_EXTRA_LIVE_ON_ENTRY
#define TARGET_EXTRA_LIVE_ON_ENTRY pa_extra_live_on_entry

#undef TARGET_ASM_TRAMPOLINE_TEMPLATE
#define TARGET_ASM_TRAMPOLINE_TEMPLATE pa_asm_trampoline_template
#undef TARGET_TRAMPOLINE_INIT
#define TARGET_TRAMPOLINE_INIT pa_trampoline_init
#undef TARGET_TRAMPOLINE_ADJUST_ADDRESS
#define TARGET_TRAMPOLINE_ADJUST_ADDRESS pa_trampoline_adjust_address
#undef TARGET_DELEGITIMIZE_ADDRESS
#define TARGET_DELEGITIMIZE_ADDRESS pa_delegitimize_address
#undef TARGET_INTERNAL_ARG_POINTER
#define TARGET_INTERNAL_ARG_POINTER pa_internal_arg_pointer
#undef TARGET_CAN_ELIMINATE
#define TARGET_CAN_ELIMINATE pa_can_eliminate
#undef TARGET_CONDITIONAL_REGISTER_USAGE
#define TARGET_CONDITIONAL_REGISTER_USAGE pa_conditional_register_usage
#undef TARGET_C_MODE_FOR_SUFFIX
#define TARGET_C_MODE_FOR_SUFFIX pa_c_mode_for_suffix
#undef TARGET_ASM_FUNCTION_SECTION
#define TARGET_ASM_FUNCTION_SECTION pa_function_section

#undef TARGET_LEGITIMATE_CONSTANT_P
#define TARGET_LEGITIMATE_CONSTANT_P pa_legitimate_constant_p

struct gcc_target targetm = TARGET_INITIALIZER;

/* Parse the -mfixed-range= option string.  */

static void
fix_range (const char *const_str)
{
  int i, first, last;
  char *str, *dash, *comma;

  /* str must be of the form REG1'-'REG2{,REG1'-'REG2} where REG1 and
     REG2 are either register names or register numbers.  The effect
     of this option is to mark the registers in the range from REG1 to
     REG2 as ``fixed'' so they won't be used by the compiler.  This is
     used, e.g., to ensure that kernel mode code doesn't use fr4-fr31.  */

  i = strlen (const_str);
  str = (char *) alloca (i + 1);
  memcpy (str, const_str, i + 1);

  while (1)
    {
      dash = strchr (str, '-');
      if (!dash)
        {
          warning (0, "value of -mfixed-range must have form REG1-REG2");
          return;
        }
      *dash = '\0';

      comma = strchr (dash + 1, ',');
      if (comma)
        *comma = '\0';

      first = decode_reg_name (str);
      if (first < 0)
        {
          warning (0, "unknown register name: %s", str);
          return;
        }

      last = decode_reg_name (dash + 1);
      if (last < 0)
        {
          warning (0, "unknown register name: %s", dash + 1);
          return;
        }

      *dash = '-';

      if (first > last)
        {
          warning (0, "%s-%s is an empty range", str, dash + 1);
          return;
        }

      for (i = first; i <= last; ++i)
        fixed_regs[i] = call_used_regs[i] = 1;

      if (!comma)
        break;

      *comma = ',';
      str = comma + 1;
    }

  /* Check if all floating point registers have been fixed.  */
  for (i = FP_REG_FIRST; i <= FP_REG_LAST; i++)
    if (!fixed_regs[i])
      break;

  if (i > FP_REG_LAST)
    target_flags |= MASK_DISABLE_FPREGS;
}
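
/* For example (illustrative, not from the original source): with
   -mfixed-range=fr4-fr31 the loop runs once, decodes the two register
   names, and marks every register from fr4 through fr31 in both
   fixed_regs[] and call_used_regs[]; if the ranges given end up fixing
   all of FP_REG_FIRST..FP_REG_LAST, the final scan also sets
   MASK_DISABLE_FPREGS.  Multiple ranges are comma-separated, e.g.
   -mfixed-range=fr4-fr15,fr20-fr31.  */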

/* Implement the TARGET_OPTION_OVERRIDE hook.  */

static void
pa_option_override (void)
{
  unsigned int i;
  cl_deferred_option *opt;
  VEC(cl_deferred_option,heap) *vec
    = (VEC(cl_deferred_option,heap) *) pa_deferred_options;

  FOR_EACH_VEC_ELT (cl_deferred_option, vec, i, opt)
    {
      switch (opt->opt_index)
        {
        case OPT_mfixed_range_:
          fix_range (opt->arg);
          break;

        default:
          gcc_unreachable ();
        }
    }

  /* Unconditional branches in the delay slot are not compatible with dwarf2
     call frame information.  There is no benefit in using this optimization
     on PA8000 and later processors.  */
  if (pa_cpu >= PROCESSOR_8000
      || (targetm_common.except_unwind_info (&global_options) == UI_DWARF2
          && flag_exceptions)
      || flag_unwind_tables)
    target_flags &= ~MASK_JUMP_IN_DELAY;

  if (flag_pic && TARGET_PORTABLE_RUNTIME)
    {
      warning (0, "PIC code generation is not supported in the portable runtime model");
    }

  if (flag_pic && TARGET_FAST_INDIRECT_CALLS)
    {
      warning (0, "PIC code generation is not compatible with fast indirect calls");
    }

  if (! TARGET_GAS && write_symbols != NO_DEBUG)
    {
      warning (0, "-g is only supported when using GAS on this processor,");
      warning (0, "-g option disabled");
      write_symbols = NO_DEBUG;
    }

  /* We only support the "big PIC" model now.  And we always generate PIC
     code when in 64bit mode.  */
  if (flag_pic == 1 || TARGET_64BIT)
    flag_pic = 2;

  /* Disable -freorder-blocks-and-partition as we don't support hot and
     cold partitioning.  */
  if (flag_reorder_blocks_and_partition)
    {
      inform (input_location,
              "-freorder-blocks-and-partition does not work "
              "on this architecture");
      flag_reorder_blocks_and_partition = 0;
      flag_reorder_blocks = 1;
    }

  /* We can't guarantee that .dword is available for 32-bit targets.  */
  if (UNITS_PER_WORD == 4)
    targetm.asm_out.aligned_op.di = NULL;

  /* The unaligned ops are only available when using GAS.  */
  if (!TARGET_GAS)
    {
      targetm.asm_out.unaligned_op.hi = NULL;
      targetm.asm_out.unaligned_op.si = NULL;
      targetm.asm_out.unaligned_op.di = NULL;
    }

  init_machine_status = pa_init_machine_status;
}

enum pa_builtins
{
  PA_BUILTIN_COPYSIGNQ,
  PA_BUILTIN_FABSQ,
  PA_BUILTIN_INFQ,
  PA_BUILTIN_HUGE_VALQ,
  PA_BUILTIN_max
};

static GTY(()) tree pa_builtins[(int) PA_BUILTIN_max];

static void
pa_init_builtins (void)
{
#ifdef DONT_HAVE_FPUTC_UNLOCKED
  {
    tree decl = builtin_decl_explicit (BUILT_IN_PUTC_UNLOCKED);
    set_builtin_decl (BUILT_IN_FPUTC_UNLOCKED, decl,
                      builtin_decl_implicit_p (BUILT_IN_PUTC_UNLOCKED));
  }
#endif
#if TARGET_HPUX_11
  {
    tree decl;

    if ((decl = builtin_decl_explicit (BUILT_IN_FINITE)) != NULL_TREE)
      set_user_assembler_name (decl, "_Isfinite");
    if ((decl = builtin_decl_explicit (BUILT_IN_FINITEF)) != NULL_TREE)
      set_user_assembler_name (decl, "_Isfinitef");
  }
#endif

  if (HPUX_LONG_DOUBLE_LIBRARY)
    {
      tree decl, ftype;

      /* Under HPUX, the __float128 type is a synonym for "long double".  */
      (*lang_hooks.types.register_builtin_type) (long_double_type_node,
                                                 "__float128");

      /* TFmode support builtins.  */
      ftype = build_function_type_list (long_double_type_node,
                                        long_double_type_node,
                                        NULL_TREE);
      decl = add_builtin_function ("__builtin_fabsq", ftype,
                                   PA_BUILTIN_FABSQ, BUILT_IN_MD,
                                   "_U_Qfabs", NULL_TREE);
      TREE_READONLY (decl) = 1;
      pa_builtins[PA_BUILTIN_FABSQ] = decl;

      ftype = build_function_type_list (long_double_type_node,
                                        long_double_type_node,
                                        long_double_type_node,
                                        NULL_TREE);
      decl = add_builtin_function ("__builtin_copysignq", ftype,
                                   PA_BUILTIN_COPYSIGNQ, BUILT_IN_MD,
                                   "_U_Qfcopysign", NULL_TREE);
      TREE_READONLY (decl) = 1;
      pa_builtins[PA_BUILTIN_COPYSIGNQ] = decl;

      ftype = build_function_type_list (long_double_type_node, NULL_TREE);
      decl = add_builtin_function ("__builtin_infq", ftype,
                                   PA_BUILTIN_INFQ, BUILT_IN_MD,
                                   NULL, NULL_TREE);
      pa_builtins[PA_BUILTIN_INFQ] = decl;

      decl = add_builtin_function ("__builtin_huge_valq", ftype,
                                   PA_BUILTIN_HUGE_VALQ, BUILT_IN_MD,
                                   NULL, NULL_TREE);
      pa_builtins[PA_BUILTIN_HUGE_VALQ] = decl;
    }
}

static rtx
pa_expand_builtin (tree exp, rtx target, rtx subtarget ATTRIBUTE_UNUSED,
                   enum machine_mode mode ATTRIBUTE_UNUSED,
                   int ignore ATTRIBUTE_UNUSED)
{
  tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
  unsigned int fcode = DECL_FUNCTION_CODE (fndecl);

  switch (fcode)
    {
    case PA_BUILTIN_FABSQ:
    case PA_BUILTIN_COPYSIGNQ:
      return expand_call (exp, target, ignore);

    case PA_BUILTIN_INFQ:
    case PA_BUILTIN_HUGE_VALQ:
      {
        enum machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
        REAL_VALUE_TYPE inf;
        rtx tmp;

        real_inf (&inf);
        tmp = CONST_DOUBLE_FROM_REAL_VALUE (inf, target_mode);

        tmp = validize_mem (force_const_mem (target_mode, tmp));

        if (target == 0)
          target = gen_reg_rtx (target_mode);

        emit_move_insn (target, tmp);
        return target;
      }

    default:
      gcc_unreachable ();
    }

  return NULL_RTX;
}

/* Function to init struct machine_function.
   This will be called, via a pointer variable,
   from push_function_context.  */

static struct machine_function *
pa_init_machine_status (void)
{
  return ggc_alloc_cleared_machine_function ();
}

/* If FROM is a probable pointer register, mark TO as a probable
   pointer register with the same pointer alignment as FROM.  */

static void
copy_reg_pointer (rtx to, rtx from)
{
  if (REG_POINTER (from))
    mark_reg_pointer (to, REGNO_POINTER_ALIGN (REGNO (from)));
}

/* Return 1 if X contains a symbolic expression.  We know these
   expressions will have one of a few well defined forms, so
   we need only check those forms.  */
int
pa_symbolic_expression_p (rtx x)
{
  /* Strip off any HIGH.  */
  if (GET_CODE (x) == HIGH)
    x = XEXP (x, 0);

  return (symbolic_operand (x, VOIDmode));
}

/* Accept any constant that can be moved in one instruction into a
   general register.  */
int
pa_cint_ok_for_move (HOST_WIDE_INT ival)
{
  /* OK if ldo, ldil, or zdepi, can be used.  */
  return (VAL_14_BITS_P (ival)
          || pa_ldil_cint_p (ival)
          || pa_zdepi_cint_p (ival));
}
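
/* For example (illustrative): 5000 fits in 14 signed bits (ldo),
   0x12345000 has its low 11 bits clear (ldil), and 0x00ff0000 is a
   single run of ones that zdepi can deposit; 0x12345678 passes none
   of the three tests and needs a multi-insn sequence.  */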

/* True iff ldil can be used to load this CONST_INT.  The least
   significant 11 bits of the value must be zero and the value must
   not change sign when extended from 32 to 64 bits.  */
int
pa_ldil_cint_p (HOST_WIDE_INT ival)
{
  HOST_WIDE_INT x = ival & (((HOST_WIDE_INT) -1 << 31) | 0x7ff);

  return x == 0 || x == ((HOST_WIDE_INT) -1 << 31);
}
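
/* Worked example (illustrative): for ival = 0x12345000, the mask
   ((HOST_WIDE_INT) -1 << 31) | 0x7ff keeps bit 31 upward plus the low
   11 bits; all of those bits are zero, so x == 0 and ldil qualifies.
   For ival = 0x12345400 the low 11 bits are 0x400, so x != 0 and a
   second instruction (ldo) would be needed.  */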

/* True iff zdepi can be used to generate this CONST_INT.
   zdepi first sign extends a 5-bit signed number to a given field
   length, then places this field anywhere in a zero.  */
int
pa_zdepi_cint_p (unsigned HOST_WIDE_INT x)
{
  unsigned HOST_WIDE_INT lsb_mask, t;

  /* This might not be obvious, but it's at least fast.
     This function is critical; we don't have the time loops would take.  */
  lsb_mask = x & -x;
  t = ((x >> 4) + lsb_mask) & ~(lsb_mask - 1);
  /* Return true iff t is a power of two.  */
  return ((t & (t - 1)) == 0);
}
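
/* Worked example (illustrative): for x = 0xf0, lsb_mask = 0x10 and
   t = ((0xf0 >> 4) + 0x10) & ~0xf = 0x10, a power of two, so zdepi
   can generate the value.  For x = 0x21, whose set bits span six bit
   positions, t = 0x3, which has two bits set, so the test fails.  */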

/* True iff depi or extru can be used to compute (reg & mask).
   Accept bit patterns like these:
   0....01....1
   1....10....0
   1..10..01..1  */
int
pa_and_mask_p (unsigned HOST_WIDE_INT mask)
{
  mask = ~mask;
  mask += mask & -mask;
  return (mask & (mask - 1)) == 0;
}
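
/* Worked example (illustrative): for mask = ...fff0 (all ones except
   the low four bits), ~mask = 0xf; adding its lowest set bit gives
   0x10, a power of two, so the mask is accepted (one contiguous run
   of zeros).  For mask = 0x5 the complement ...fffa is not a single
   run of ones, the sum is not a power of two, and it is rejected.  */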

/* True iff depi can be used to compute (reg | MASK).  */
int
pa_ior_mask_p (unsigned HOST_WIDE_INT mask)
{
  mask += mask & -mask;
  return (mask & (mask - 1)) == 0;
}
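
/* Worked example (illustrative): mask = 0xf0 (one run of ones)
   becomes 0xf0 + 0x10 = 0x100, a power of two, so depi can set the
   whole run in one insn; mask = 0x90 (two separate runs) becomes
   0x90 + 0x10 = 0xa0, not a power of two, and is rejected.  */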

/* Legitimize PIC addresses.  If the address is already
   position-independent, we return ORIG.  Newly generated
   position-independent addresses go to REG.  If we need more
   than one register, we lose.  */

static rtx
legitimize_pic_address (rtx orig, enum machine_mode mode, rtx reg)
{
  rtx pic_ref = orig;

  gcc_assert (!PA_SYMBOL_REF_TLS_P (orig));

  /* Labels need special handling.  */
  if (pic_label_operand (orig, mode))
    {
      rtx insn;

      /* We do not want to go through the movXX expanders here since that
         would create recursion.

         Nor do we really want to call a generator for a named pattern
         since that requires multiple patterns if we want to support
         multiple word sizes.

         So instead we just emit the raw set, which avoids the movXX
         expanders completely.  */
      mark_reg_pointer (reg, BITS_PER_UNIT);
      insn = emit_insn (gen_rtx_SET (VOIDmode, reg, orig));

      /* Put a REG_EQUAL note on this insn, so that it can be optimized.  */
      add_reg_note (insn, REG_EQUAL, orig);

      /* During and after reload, we need to generate a REG_LABEL_OPERAND note
         and update LABEL_NUSES because this is not done automatically.  */
      if (reload_in_progress || reload_completed)
        {
          /* Extract LABEL_REF.  */
          if (GET_CODE (orig) == CONST)
            orig = XEXP (XEXP (orig, 0), 0);
          /* Extract CODE_LABEL.  */
          orig = XEXP (orig, 0);
          add_reg_note (insn, REG_LABEL_OPERAND, orig);
          LABEL_NUSES (orig)++;
        }
      crtl->uses_pic_offset_table = 1;
      return reg;
    }
  if (GET_CODE (orig) == SYMBOL_REF)
    {
      rtx insn, tmp_reg;

      gcc_assert (reg);

      /* Before reload, allocate a temporary register for the intermediate
         result.  This allows the sequence to be deleted when the final
         result is unused and the insns are trivially dead.  */
      tmp_reg = ((reload_in_progress || reload_completed)
                 ? reg : gen_reg_rtx (Pmode));

      if (function_label_operand (orig, VOIDmode))
        {
          /* Force function label into memory in word mode.  */
          orig = XEXP (force_const_mem (word_mode, orig), 0);
          /* Load plabel address from DLT.  */
          emit_move_insn (tmp_reg,
                          gen_rtx_PLUS (word_mode, pic_offset_table_rtx,
                                        gen_rtx_HIGH (word_mode, orig)));
          pic_ref
            = gen_const_mem (Pmode,
                             gen_rtx_LO_SUM (Pmode, tmp_reg,
                                             gen_rtx_UNSPEC (Pmode,
                                                         gen_rtvec (1, orig),
                                                         UNSPEC_DLTIND14R)));
          emit_move_insn (reg, pic_ref);
          /* Now load address of function descriptor.  */
          pic_ref = gen_rtx_MEM (Pmode, reg);
        }
      else
        {
          /* Load symbol reference from DLT.  */
          emit_move_insn (tmp_reg,
                          gen_rtx_PLUS (word_mode, pic_offset_table_rtx,
                                        gen_rtx_HIGH (word_mode, orig)));
          pic_ref
            = gen_const_mem (Pmode,
                             gen_rtx_LO_SUM (Pmode, tmp_reg,
                                             gen_rtx_UNSPEC (Pmode,
                                                         gen_rtvec (1, orig),
                                                         UNSPEC_DLTIND14R)));
        }

      crtl->uses_pic_offset_table = 1;
      mark_reg_pointer (reg, BITS_PER_UNIT);
      insn = emit_move_insn (reg, pic_ref);

      /* Put a REG_EQUAL note on this insn, so that it can be optimized.  */
      set_unique_reg_note (insn, REG_EQUAL, orig);

      return reg;
    }
  else if (GET_CODE (orig) == CONST)
    {
      rtx base;

      if (GET_CODE (XEXP (orig, 0)) == PLUS
          && XEXP (XEXP (orig, 0), 0) == pic_offset_table_rtx)
        return orig;

      gcc_assert (reg);
      gcc_assert (GET_CODE (XEXP (orig, 0)) == PLUS);

      base = legitimize_pic_address (XEXP (XEXP (orig, 0), 0), Pmode, reg);
      orig = legitimize_pic_address (XEXP (XEXP (orig, 0), 1), Pmode,
                                     base == reg ? 0 : reg);

      if (GET_CODE (orig) == CONST_INT)
        {
          if (INT_14_BITS (orig))
            return plus_constant (base, INTVAL (orig));
          orig = force_reg (Pmode, orig);
        }
      pic_ref = gen_rtx_PLUS (Pmode, base, orig);
      /* Likewise, should we set special REG_NOTEs here?  */
    }

  return pic_ref;
}

static GTY(()) rtx gen_tls_tga;

static rtx
gen_tls_get_addr (void)
{
  if (!gen_tls_tga)
    gen_tls_tga = init_one_libfunc ("__tls_get_addr");
  return gen_tls_tga;
}

static rtx
hppa_tls_call (rtx arg)
{
  rtx ret;

  ret = gen_reg_rtx (Pmode);
  emit_library_call_value (gen_tls_get_addr (), ret,
                           LCT_CONST, Pmode, 1, arg, Pmode);

  return ret;
}

static rtx
legitimize_tls_address (rtx addr)
{
  rtx ret, insn, tmp, t1, t2, tp;
  enum tls_model model = SYMBOL_REF_TLS_MODEL (addr);

  switch (model)
    {
      case TLS_MODEL_GLOBAL_DYNAMIC:
        tmp = gen_reg_rtx (Pmode);
        if (flag_pic)
          emit_insn (gen_tgd_load_pic (tmp, addr));
        else
          emit_insn (gen_tgd_load (tmp, addr));
        ret = hppa_tls_call (tmp);
        break;

      case TLS_MODEL_LOCAL_DYNAMIC:
        ret = gen_reg_rtx (Pmode);
        tmp = gen_reg_rtx (Pmode);
        start_sequence ();
        if (flag_pic)
          emit_insn (gen_tld_load_pic (tmp, addr));
        else
          emit_insn (gen_tld_load (tmp, addr));
        t1 = hppa_tls_call (tmp);
        insn = get_insns ();
        end_sequence ();
        t2 = gen_reg_rtx (Pmode);
        emit_libcall_block (insn, t2, t1,
                            gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx),
                                            UNSPEC_TLSLDBASE));
        emit_insn (gen_tld_offset_load (ret, addr, t2));
        break;

      case TLS_MODEL_INITIAL_EXEC:
        tp = gen_reg_rtx (Pmode);
        tmp = gen_reg_rtx (Pmode);
        ret = gen_reg_rtx (Pmode);
        emit_insn (gen_tp_load (tp));
        if (flag_pic)
          emit_insn (gen_tie_load_pic (tmp, addr));
        else
          emit_insn (gen_tie_load (tmp, addr));
        emit_move_insn (ret, gen_rtx_PLUS (Pmode, tp, tmp));
        break;

      case TLS_MODEL_LOCAL_EXEC:
        tp = gen_reg_rtx (Pmode);
        ret = gen_reg_rtx (Pmode);
        emit_insn (gen_tp_load (tp));
        emit_insn (gen_tle_load (ret, addr, tp));
        break;

      default:
        gcc_unreachable ();
    }

  return ret;
}
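
/* Summary sketch (illustrative, not from the original source): the
   two dynamic models obtain the address through a __tls_get_addr
   libcall on an argument loaded by gen_tgd_load/gen_tld_load, while
   the two exec models avoid the call entirely and compute the result
   as thread pointer plus offset, with the thread pointer read by
   gen_tp_load.  */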

/* Try machine-dependent ways of modifying an illegitimate address
   to be legitimate.  If we find one, return the new, valid address.
   This macro is used in only one place: `memory_address' in explow.c.

   OLDX is the address as it was before break_out_memory_refs was called.
   In some cases it is useful to look at this to decide what needs to be done.

   It is always safe for this macro to do nothing.  It exists to recognize
   opportunities to optimize the output.

   For the PA, transform:

        memory(X + <large int>)

   into:

        if (<large int> & mask) >= (mask + 1) / 2
          Y = (<large int> & ~mask) + mask + 1  Round up.
        else
          Y = (<large int> & ~mask)             Round down.
        Z = X + Y
        memory (Z + (<large int> - Y));

   This is for CSE to find several similar references, and only use one Z.

   X can either be a SYMBOL_REF or REG, but because combine cannot
   perform a 4->2 combination we do nothing for SYMBOL_REF + D where
   D will not fit in 14 bits.

   MODE_FLOAT references allow displacements which fit in 5 bits, so use
   0x1f as the mask.

   MODE_INT references allow displacements which fit in 14 bits, so use
   0x3fff as the mask.

   This relies on the fact that most mode MODE_FLOAT references will use FP
   registers and most mode MODE_INT references will use integer registers.
   (In the rare case of an FP register used in an integer MODE, we depend
   on secondary reloads to clean things up.)

   It is also beneficial to handle (plus (mult (X) (Y)) (Z)) in a special
   manner if Y is 2, 4, or 8.  (allows more shadd insns and shifted indexed
   addressing modes to be used).

   Put X and Z into registers.  Then put the entire expression into
   a register.  */
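
/* Worked example (illustrative): with the MODE_INT mask 0x3fff and
   <large int> = 0x4321, the low bits 0x0321 are below 0x2000, so we
   round down: Y = 0x4000, Z = X + 0x4000, and the reference becomes
   memory (Z + 0x321), whose displacement fits in 14 bits.  Other
   references near X + 0x4000 can then share the same Z.  */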

rtx
hppa_legitimize_address (rtx x, rtx oldx ATTRIBUTE_UNUSED,
                         enum machine_mode mode)
{
  rtx orig = x;

  /* We need to canonicalize the order of operands in unscaled indexed
     addresses since the code that checks if an address is valid doesn't
     always try both orders.  */
  if (!TARGET_NO_SPACE_REGS
      && GET_CODE (x) == PLUS
      && GET_MODE (x) == Pmode
      && REG_P (XEXP (x, 0))
      && REG_P (XEXP (x, 1))
      && REG_POINTER (XEXP (x, 0))
      && !REG_POINTER (XEXP (x, 1)))
    return gen_rtx_PLUS (Pmode, XEXP (x, 1), XEXP (x, 0));

  if (PA_SYMBOL_REF_TLS_P (x))
    return legitimize_tls_address (x);
  else if (flag_pic)
    return legitimize_pic_address (x, mode, gen_reg_rtx (Pmode));

  /* Strip off CONST.  */
  if (GET_CODE (x) == CONST)
    x = XEXP (x, 0);

  /* Special case.  Get the SYMBOL_REF into a register and use indexing.
     That should always be safe.  */
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == REG
      && GET_CODE (XEXP (x, 1)) == SYMBOL_REF)
    {
      rtx reg = force_reg (Pmode, XEXP (x, 1));
      return force_reg (Pmode, gen_rtx_PLUS (Pmode, reg, XEXP (x, 0)));
    }

  /* Note we must reject symbols which represent function addresses
     since the assembler/linker can't handle arithmetic on plabels.  */
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 1)) == CONST_INT
      && ((GET_CODE (XEXP (x, 0)) == SYMBOL_REF
           && !FUNCTION_NAME_P (XSTR (XEXP (x, 0), 0)))
          || GET_CODE (XEXP (x, 0)) == REG))
    {
      rtx int_part, ptr_reg;
      int newoffset;
      int offset = INTVAL (XEXP (x, 1));
      int mask;

      mask = (GET_MODE_CLASS (mode) == MODE_FLOAT
              ? (INT14_OK_STRICT ? 0x3fff : 0x1f) : 0x3fff);

      /* Choose which way to round the offset.  Round up if we
         are >= halfway to the next boundary.  */
      if ((offset & mask) >= ((mask + 1) / 2))
        newoffset = (offset & ~ mask) + mask + 1;
      else
        newoffset = (offset & ~ mask);

      /* If the newoffset will not fit in 14 bits (ldo), then
         handling this would take 4 or 5 instructions (2 to load
         the SYMBOL_REF + 1 or 2 to load the newoffset + 1 to
         add the new offset and the SYMBOL_REF.)  Combine can
         not handle 4->2 or 5->2 combinations, so do not create
         them.  */
      if (! VAL_14_BITS_P (newoffset)
          && GET_CODE (XEXP (x, 0)) == SYMBOL_REF)
        {
          rtx const_part = plus_constant (XEXP (x, 0), newoffset);
          rtx tmp_reg
            = force_reg (Pmode,
                         gen_rtx_HIGH (Pmode, const_part));
          ptr_reg
            = force_reg (Pmode,
                         gen_rtx_LO_SUM (Pmode,
                                         tmp_reg, const_part));
        }
      else
        {
          if (! VAL_14_BITS_P (newoffset))
            int_part = force_reg (Pmode, GEN_INT (newoffset));
          else
            int_part = GEN_INT (newoffset);

          ptr_reg = force_reg (Pmode,
                               gen_rtx_PLUS (Pmode,
                                             force_reg (Pmode, XEXP (x, 0)),
                                             int_part));
        }
      return plus_constant (ptr_reg, offset - newoffset);
    }

  /* Handle (plus (mult (a) (shadd_constant)) (b)).  */

  if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 0)) == MULT
      && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
      && pa_shadd_constant_p (INTVAL (XEXP (XEXP (x, 0), 1)))
      && (OBJECT_P (XEXP (x, 1))
          || GET_CODE (XEXP (x, 1)) == SUBREG)
      && GET_CODE (XEXP (x, 1)) != CONST)
    {
      int val = INTVAL (XEXP (XEXP (x, 0), 1));
      rtx reg1, reg2;

      reg1 = XEXP (x, 1);
      if (GET_CODE (reg1) != REG)
        reg1 = force_reg (Pmode, force_operand (reg1, 0));

      reg2 = XEXP (XEXP (x, 0), 0);
      if (GET_CODE (reg2) != REG)
        reg2 = force_reg (Pmode, force_operand (reg2, 0));

      return force_reg (Pmode, gen_rtx_PLUS (Pmode,
                                             gen_rtx_MULT (Pmode,
                                                           reg2,
                                                           GEN_INT (val)),
                                             reg1));
    }

  /* Similarly for (plus (plus (mult (a) (shadd_constant)) (b)) (c)).

     Only do so for floating point modes since this is more speculative
     and we lose if it's an integer store.  */
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == PLUS
      && GET_CODE (XEXP (XEXP (x, 0), 0)) == MULT
      && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)) == CONST_INT
      && pa_shadd_constant_p (INTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1)))
      && (mode == SFmode || mode == DFmode))
    {
      /* First, try and figure out what to use as a base register.  */
      rtx reg1, reg2, base, idx;

      reg1 = XEXP (XEXP (x, 0), 1);
      reg2 = XEXP (x, 1);
      base = NULL_RTX;
      idx = NULL_RTX;

      /* Make sure they're both regs.  If one was a SYMBOL_REF [+ const],
         then pa_emit_move_sequence will turn on REG_POINTER so we'll know
         it's a base register below.  */
      if (GET_CODE (reg1) != REG)
        reg1 = force_reg (Pmode, force_operand (reg1, 0));

      if (GET_CODE (reg2) != REG)
        reg2 = force_reg (Pmode, force_operand (reg2, 0));

      /* Figure out what the base and index are.  */

      if (GET_CODE (reg1) == REG
          && REG_POINTER (reg1))
        {
          base = reg1;
          idx = gen_rtx_PLUS (Pmode,
                              gen_rtx_MULT (Pmode,
                                            XEXP (XEXP (XEXP (x, 0), 0), 0),
                                            XEXP (XEXP (XEXP (x, 0), 0), 1)),
                              XEXP (x, 1));
        }
      else if (GET_CODE (reg2) == REG
               && REG_POINTER (reg2))
        {
          base = reg2;
          idx = XEXP (x, 0);
        }

      if (base == 0)
        return orig;

      /* If the index adds a large constant, try to scale the
         constant so that it can be loaded with only one insn.  */
      if (GET_CODE (XEXP (idx, 1)) == CONST_INT
          && VAL_14_BITS_P (INTVAL (XEXP (idx, 1))
                            / INTVAL (XEXP (XEXP (idx, 0), 1)))
          && INTVAL (XEXP (idx, 1)) % INTVAL (XEXP (XEXP (idx, 0), 1)) == 0)
        {
          /* Divide the CONST_INT by the scale factor, then add it to A.  */
          int val = INTVAL (XEXP (idx, 1));

          val /= INTVAL (XEXP (XEXP (idx, 0), 1));
          reg1 = XEXP (XEXP (idx, 0), 0);
          if (GET_CODE (reg1) != REG)
            reg1 = force_reg (Pmode, force_operand (reg1, 0));

          reg1 = force_reg (Pmode, gen_rtx_PLUS (Pmode, reg1, GEN_INT (val)));

          /* We can now generate a simple scaled indexed address.  */
          return
            force_reg
              (Pmode, gen_rtx_PLUS (Pmode,
                                    gen_rtx_MULT (Pmode, reg1,
                                                  XEXP (XEXP (idx, 0), 1)),
                                    base));
        }

      /* If B + C is still a valid base register, then add them.  */
      if (GET_CODE (XEXP (idx, 1)) == CONST_INT
          && INTVAL (XEXP (idx, 1)) <= 4096
          && INTVAL (XEXP (idx, 1)) >= -4096)
        {
          int val = INTVAL (XEXP (XEXP (idx, 0), 1));
          rtx reg1, reg2;

          reg1 = force_reg (Pmode, gen_rtx_PLUS (Pmode, base, XEXP (idx, 1)));

          reg2 = XEXP (XEXP (idx, 0), 0);
          if (GET_CODE (reg2) != CONST_INT)
            reg2 = force_reg (Pmode, force_operand (reg2, 0));

          return force_reg (Pmode, gen_rtx_PLUS (Pmode,
                                                 gen_rtx_MULT (Pmode,
                                                               reg2,
                                                               GEN_INT (val)),
                                                 reg1));
        }

      /* Get the index into a register, then add the base + index and
         return a register holding the result.  */

      /* First get A into a register.  */
      reg1 = XEXP (XEXP (idx, 0), 0);
      if (GET_CODE (reg1) != REG)
        reg1 = force_reg (Pmode, force_operand (reg1, 0));

      /* And get B into a register.  */
      reg2 = XEXP (idx, 1);
      if (GET_CODE (reg2) != REG)
        reg2 = force_reg (Pmode, force_operand (reg2, 0));

      reg1 = force_reg (Pmode,
                        gen_rtx_PLUS (Pmode,
                                      gen_rtx_MULT (Pmode, reg1,
                                                    XEXP (XEXP (idx, 0), 1)),
                                      reg2));

      /* Add the result to our base register and return.  */
      return force_reg (Pmode, gen_rtx_PLUS (Pmode, base, reg1));
    }

  /* Uh-oh.  We might have an address for x[n-100000].  This needs
     special handling to avoid creating an indexed memory address
     with x-100000 as the base.

     If the constant part is small enough, then it's still safe because
     there is a guard page at the beginning and end of the data segment.

     Scaled references are common enough that we want to try and rearrange
     the terms so that we can use indexing for these addresses too.  Only
     do the optimization for floating point modes.  */

  if (GET_CODE (x) == PLUS
      && pa_symbolic_expression_p (XEXP (x, 1)))
    {
      /* Ugly.  We modify things here so that the address offset specified
         by the index expression is computed first, then added to x to form
         the entire address.  */

      rtx regx1, regx2, regy1, regy2, y;

      /* Strip off any CONST.  */
      y = XEXP (x, 1);
      if (GET_CODE (y) == CONST)
        y = XEXP (y, 0);

      if (GET_CODE (y) == PLUS || GET_CODE (y) == MINUS)
        {
          /* See if this looks like
                (plus (mult (reg) (shadd_const))
                      (const (plus (symbol_ref) (const_int))))

             Where const_int is small.  In that case the const
             expression is a valid pointer for indexing.

             If const_int is big but divides evenly by shadd_const,
             it can be divided by shadd_const and added to (reg).
             This allows more scaled indexed addresses.  */
          if (GET_CODE (XEXP (y, 0)) == SYMBOL_REF
              && GET_CODE (XEXP (x, 0)) == MULT
              && GET_CODE (XEXP (y, 1)) == CONST_INT
              && INTVAL (XEXP (y, 1)) >= -4096
              && INTVAL (XEXP (y, 1)) <= 4095
              && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
              && pa_shadd_constant_p (INTVAL (XEXP (XEXP (x, 0), 1))))
            {
              int val = INTVAL (XEXP (XEXP (x, 0), 1));
              rtx reg1, reg2;

              reg1 = XEXP (x, 1);
              if (GET_CODE (reg1) != REG)
                reg1 = force_reg (Pmode, force_operand (reg1, 0));

              reg2 = XEXP (XEXP (x, 0), 0);
              if (GET_CODE (reg2) != REG)
                reg2 = force_reg (Pmode, force_operand (reg2, 0));

              return force_reg (Pmode,
                                gen_rtx_PLUS (Pmode,
                                              gen_rtx_MULT (Pmode,
                                                            reg2,
                                                            GEN_INT (val)),
                                              reg1));
            }
          else if ((mode == DFmode || mode == SFmode)
                   && GET_CODE (XEXP (y, 0)) == SYMBOL_REF
                   && GET_CODE (XEXP (x, 0)) == MULT
                   && GET_CODE (XEXP (y, 1)) == CONST_INT
                   && INTVAL (XEXP (y, 1)) % INTVAL (XEXP (XEXP (x, 0), 1)) == 0
                   && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
                   && pa_shadd_constant_p (INTVAL (XEXP (XEXP (x, 0), 1))))
            {
              regx1
                = force_reg (Pmode, GEN_INT (INTVAL (XEXP (y, 1))
                                             / INTVAL (XEXP (XEXP (x, 0), 1))));
              regx2 = XEXP (XEXP (x, 0), 0);
              if (GET_CODE (regx2) != REG)
                regx2 = force_reg (Pmode, force_operand (regx2, 0));
              regx2 = force_reg (Pmode, gen_rtx_fmt_ee (GET_CODE (y), Pmode,
                                                        regx2, regx1));
              return
                force_reg (Pmode,
                           gen_rtx_PLUS (Pmode,
                                         gen_rtx_MULT (Pmode, regx2,
                                                       XEXP (XEXP (x, 0), 1)),
                                         force_reg (Pmode, XEXP (y, 0))));
            }
          else if (GET_CODE (XEXP (y, 1)) == CONST_INT
                   && INTVAL (XEXP (y, 1)) >= -4096
                   && INTVAL (XEXP (y, 1)) <= 4095)
            {
              /* This is safe because of the guard page at the
                 beginning and end of the data space.  Just
                 return the original address.  */
              return orig;
            }
          else
            {
              /* Doesn't look like one we can optimize.  */
              regx1 = force_reg (Pmode, force_operand (XEXP (x, 0), 0));
              regy1 = force_reg (Pmode, force_operand (XEXP (y, 0), 0));
              regy2 = force_reg (Pmode, force_operand (XEXP (y, 1), 0));
              regx1 = force_reg (Pmode,
                                 gen_rtx_fmt_ee (GET_CODE (y), Pmode,
                                                 regx1, regy2));
              return force_reg (Pmode, gen_rtx_PLUS (Pmode, regx1, regy1));
            }
        }
    }

  return orig;
}

/* Implement the TARGET_REGISTER_MOVE_COST hook.

   Compute extra cost of moving data between one register class
   and another.

   Make moves from SAR so expensive they should never happen.  We used to
   have 0xffff here, but that generates overflow in rare cases.

   Copies involving a FP register and a non-FP register are relatively
   expensive because they must go through memory.

   Other copies are reasonably cheap.  */

static int
hppa_register_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED,
                         reg_class_t from, reg_class_t to)
{
  if (from == SHIFT_REGS)
    return 0x100;
  else if (to == SHIFT_REGS && FP_REG_CLASS_P (from))
    return 18;
  else if ((FP_REG_CLASS_P (from) && ! FP_REG_CLASS_P (to))
           || (FP_REG_CLASS_P (to) && ! FP_REG_CLASS_P (from)))
    return 16;
  else
    return 2;
}

/* For the HPPA, REG and REG+CONST is cost 0
   and addresses involving symbolic constants are cost 2.

   PIC addresses are very expensive.

   It is no coincidence that this has the same structure
   as GO_IF_LEGITIMATE_ADDRESS.  */

static int
hppa_address_cost (rtx X,
                   bool speed ATTRIBUTE_UNUSED)
{
  switch (GET_CODE (X))
    {
    case REG:
    case PLUS:
    case LO_SUM:
      return 1;
    case HIGH:
      return 2;
    default:
      return 4;
    }
}
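
/* For example (illustrative): (reg r4), (plus r4 (const_int 40)) and
   (lo_sum r4 sym) all cost 1, (high sym) costs 2, and anything more
   exotic falls through to the default cost of 4.  */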
1412
 
1413
/* Compute a (partial) cost for rtx X.  Return true if the complete
   cost has been computed, and false if subexpressions should be
   scanned.  In either case, *TOTAL contains the cost result.  */

static bool
hppa_rtx_costs (rtx x, int code, int outer_code, int opno ATTRIBUTE_UNUSED,
                int *total, bool speed ATTRIBUTE_UNUSED)
{
  switch (code)
    {
    case CONST_INT:
      if (INTVAL (x) == 0)
        *total = 0;
      else if (INT_14_BITS (x))
        *total = 1;
      else
        *total = 2;
      return true;

    case HIGH:
      *total = 2;
      return true;

    case CONST:
    case LABEL_REF:
    case SYMBOL_REF:
      *total = 4;
      return true;

    case CONST_DOUBLE:
      if ((x == CONST0_RTX (DFmode) || x == CONST0_RTX (SFmode))
          && outer_code != SET)
        *total = 0;
      else
        *total = 8;
      return true;

    case MULT:
      if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
        *total = COSTS_N_INSNS (3);
      else if (TARGET_PA_11 && !TARGET_DISABLE_FPREGS && !TARGET_SOFT_FLOAT)
        *total = COSTS_N_INSNS (8);
      else
        *total = COSTS_N_INSNS (20);
      return true;

    case DIV:
      if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
        {
          *total = COSTS_N_INSNS (14);
          return true;
        }
      /* FALLTHRU */

    case UDIV:
    case MOD:
    case UMOD:
      *total = COSTS_N_INSNS (60);
      return true;

    case PLUS: /* this includes shNadd insns */
    case MINUS:
      if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
        *total = COSTS_N_INSNS (3);
      else
        *total = COSTS_N_INSNS (1);
      return true;

    case ASHIFT:
    case ASHIFTRT:
    case LSHIFTRT:
      *total = COSTS_N_INSNS (1);
      return true;

    default:
      return false;
    }
}
 
/* Ensure mode of ORIG, a REG rtx, is MODE.  Returns either ORIG or a
   new rtx with the correct mode.  */
static inline rtx
force_mode (enum machine_mode mode, rtx orig)
{
  if (mode == GET_MODE (orig))
    return orig;

  gcc_assert (REGNO (orig) < FIRST_PSEUDO_REGISTER);

  return gen_rtx_REG (mode, REGNO (orig));
}
 
/* Return 1 if *X is a thread-local symbol.  */

static int
pa_tls_symbol_ref_1 (rtx *x, void *data ATTRIBUTE_UNUSED)
{
  return PA_SYMBOL_REF_TLS_P (*x);
}

/* Return 1 if X contains a thread-local symbol.  */

bool
pa_tls_referenced_p (rtx x)
{
  if (!TARGET_HAVE_TLS)
    return false;

  return for_each_rtx (&x, &pa_tls_symbol_ref_1, 0);
}

/* Implement TARGET_CANNOT_FORCE_CONST_MEM.  */

static bool
pa_cannot_force_const_mem (enum machine_mode mode ATTRIBUTE_UNUSED, rtx x)
{
  return pa_tls_referenced_p (x);
}
 
/* Emit insns to move operands[1] into operands[0].

   Return 1 if we have written out everything that needs to be done to
   do the move.  Otherwise, return 0 and the caller will emit the move
   normally.

   Note SCRATCH_REG may not be in the proper mode depending on how it
   will be used.  This routine is responsible for creating a new copy
   of SCRATCH_REG in the proper mode.  */

int
pa_emit_move_sequence (rtx *operands, enum machine_mode mode, rtx scratch_reg)
{
  register rtx operand0 = operands[0];
  register rtx operand1 = operands[1];
  register rtx tem;

  /* We can only handle indexed addresses in the destination operand
     of floating point stores.  Thus, we need to break out indexed
     addresses from the destination operand.  */
  if (GET_CODE (operand0) == MEM && IS_INDEX_ADDR_P (XEXP (operand0, 0)))
    {
      gcc_assert (can_create_pseudo_p ());

      tem = copy_to_mode_reg (Pmode, XEXP (operand0, 0));
      operand0 = replace_equiv_address (operand0, tem);
    }

  /* On targets with non-equivalent space registers, break out unscaled
     indexed addresses from the source operand before the final CSE.
     We have to do this because the REG_POINTER flag is not correctly
     carried through various optimization passes and CSE may substitute
     a pseudo without the pointer set for one with the pointer set.  As
     a result, we lose various opportunities to create insns with
     unscaled indexed addresses.  */
  if (!TARGET_NO_SPACE_REGS
      && !cse_not_expected
      && GET_CODE (operand1) == MEM
      && GET_CODE (XEXP (operand1, 0)) == PLUS
      && REG_P (XEXP (XEXP (operand1, 0), 0))
      && REG_P (XEXP (XEXP (operand1, 0), 1)))
    operand1
      = replace_equiv_address (operand1,
                               copy_to_mode_reg (Pmode, XEXP (operand1, 0)));

  if (scratch_reg
      && reload_in_progress && GET_CODE (operand0) == REG
      && REGNO (operand0) >= FIRST_PSEUDO_REGISTER)
    operand0 = reg_equiv_mem (REGNO (operand0));
  else if (scratch_reg
           && reload_in_progress && GET_CODE (operand0) == SUBREG
           && GET_CODE (SUBREG_REG (operand0)) == REG
           && REGNO (SUBREG_REG (operand0)) >= FIRST_PSEUDO_REGISTER)
    {
      /* We must not alter SUBREG_BYTE (operand0) since that would confuse
         the code which tracks sets/uses for delete_output_reload.  */
      rtx temp = gen_rtx_SUBREG (GET_MODE (operand0),
                                 reg_equiv_mem (REGNO (SUBREG_REG (operand0))),
                                 SUBREG_BYTE (operand0));
      operand0 = alter_subreg (&temp);
    }

  if (scratch_reg
      && reload_in_progress && GET_CODE (operand1) == REG
      && REGNO (operand1) >= FIRST_PSEUDO_REGISTER)
    operand1 = reg_equiv_mem (REGNO (operand1));
  else if (scratch_reg
           && reload_in_progress && GET_CODE (operand1) == SUBREG
           && GET_CODE (SUBREG_REG (operand1)) == REG
           && REGNO (SUBREG_REG (operand1)) >= FIRST_PSEUDO_REGISTER)
    {
      /* We must not alter SUBREG_BYTE (operand1) since that would confuse
         the code which tracks sets/uses for delete_output_reload.  */
      rtx temp = gen_rtx_SUBREG (GET_MODE (operand1),
                                 reg_equiv_mem (REGNO (SUBREG_REG (operand1))),
                                 SUBREG_BYTE (operand1));
      operand1 = alter_subreg (&temp);
    }
 
  if (scratch_reg && reload_in_progress && GET_CODE (operand0) == MEM
      && ((tem = find_replacement (&XEXP (operand0, 0)))
          != XEXP (operand0, 0)))
    operand0 = replace_equiv_address (operand0, tem);

  if (scratch_reg && reload_in_progress && GET_CODE (operand1) == MEM
      && ((tem = find_replacement (&XEXP (operand1, 0)))
          != XEXP (operand1, 0)))
    operand1 = replace_equiv_address (operand1, tem);

  /* Handle secondary reloads for loads/stores of FP registers from
     REG+D addresses where D does not fit in 5 or 14 bits, including
     (subreg (mem (addr))) cases.  */
  if (scratch_reg
      && fp_reg_operand (operand0, mode)
      && ((GET_CODE (operand1) == MEM
           && !memory_address_p ((GET_MODE_SIZE (mode) == 4 ? SFmode : DFmode),
                                 XEXP (operand1, 0)))
          || ((GET_CODE (operand1) == SUBREG
               && GET_CODE (XEXP (operand1, 0)) == MEM
               && !memory_address_p ((GET_MODE_SIZE (mode) == 4
                                      ? SFmode : DFmode),
                                     XEXP (XEXP (operand1, 0), 0))))))
    {
      if (GET_CODE (operand1) == SUBREG)
        operand1 = XEXP (operand1, 0);

      /* SCRATCH_REG will hold an address and maybe the actual data.  We want
         it in WORD_MODE regardless of what mode it was originally given
         to us.  */
      scratch_reg = force_mode (word_mode, scratch_reg);

      /* D might not fit in 14 bits either; for such cases load D into
         scratch reg.  */
      if (!memory_address_p (Pmode, XEXP (operand1, 0)))
        {
          emit_move_insn (scratch_reg, XEXP (XEXP (operand1, 0), 1));
          emit_move_insn (scratch_reg,
                          gen_rtx_fmt_ee (GET_CODE (XEXP (operand1, 0)),
                                          Pmode,
                                          XEXP (XEXP (operand1, 0), 0),
                                          scratch_reg));
        }
      else
        emit_move_insn (scratch_reg, XEXP (operand1, 0));
      emit_insn (gen_rtx_SET (VOIDmode, operand0,
                              replace_equiv_address (operand1, scratch_reg)));
      return 1;
    }
  else if (scratch_reg
           && fp_reg_operand (operand1, mode)
           && ((GET_CODE (operand0) == MEM
                && !memory_address_p ((GET_MODE_SIZE (mode) == 4
                                        ? SFmode : DFmode),
                                       XEXP (operand0, 0)))
               || ((GET_CODE (operand0) == SUBREG)
                   && GET_CODE (XEXP (operand0, 0)) == MEM
                   && !memory_address_p ((GET_MODE_SIZE (mode) == 4
                                          ? SFmode : DFmode),
                                         XEXP (XEXP (operand0, 0), 0)))))
    {
      if (GET_CODE (operand0) == SUBREG)
        operand0 = XEXP (operand0, 0);

      /* SCRATCH_REG will hold an address and maybe the actual data.  We want
         it in WORD_MODE regardless of what mode it was originally given
         to us.  */
      scratch_reg = force_mode (word_mode, scratch_reg);

      /* D might not fit in 14 bits either; for such cases load D into
         scratch reg.  */
      if (!memory_address_p (Pmode, XEXP (operand0, 0)))
        {
          emit_move_insn (scratch_reg, XEXP (XEXP (operand0, 0), 1));
          emit_move_insn (scratch_reg, gen_rtx_fmt_ee (GET_CODE (XEXP (operand0,
                                                                        0)),
                                                       Pmode,
                                                       XEXP (XEXP (operand0, 0),
                                                                   0),
                                                       scratch_reg));
        }
      else
        emit_move_insn (scratch_reg, XEXP (operand0, 0));
      emit_insn (gen_rtx_SET (VOIDmode,
                              replace_equiv_address (operand0, scratch_reg),
                              operand1));
      return 1;
    }
  /* Handle secondary reloads for loads of FP registers from constant
     expressions by forcing the constant into memory.

     Use scratch_reg to hold the address of the memory location.

     The proper fix is to change TARGET_PREFERRED_RELOAD_CLASS to return
     NO_REGS when presented with a const_int and a register class
     containing only FP registers.  Doing so unfortunately creates
     more problems than it solves.  Fix this for 2.5.  */
  else if (scratch_reg
           && CONSTANT_P (operand1)
           && fp_reg_operand (operand0, mode))
    {
      rtx const_mem, xoperands[2];

      /* SCRATCH_REG will hold an address and maybe the actual data.  We want
         it in WORD_MODE regardless of what mode it was originally given
         to us.  */
      scratch_reg = force_mode (word_mode, scratch_reg);

      /* Force the constant into memory and put the address of the
         memory location into scratch_reg.  */
      const_mem = force_const_mem (mode, operand1);
      xoperands[0] = scratch_reg;
      xoperands[1] = XEXP (const_mem, 0);
      pa_emit_move_sequence (xoperands, Pmode, 0);

      /* Now load the destination register.  */
      emit_insn (gen_rtx_SET (mode, operand0,
                              replace_equiv_address (const_mem, scratch_reg)));
      return 1;
    }
  /* Handle secondary reloads for SAR.  These occur when trying to load
     the SAR from memory or a constant.  */
  else if (scratch_reg
           && GET_CODE (operand0) == REG
           && REGNO (operand0) < FIRST_PSEUDO_REGISTER
           && REGNO_REG_CLASS (REGNO (operand0)) == SHIFT_REGS
           && (GET_CODE (operand1) == MEM || GET_CODE (operand1) == CONST_INT))
    {
      /* D might not fit in 14 bits either; for such cases load D into
         scratch reg.  */
      if (GET_CODE (operand1) == MEM
          && !memory_address_p (GET_MODE (operand0), XEXP (operand1, 0)))
        {
          /* We are reloading the address into the scratch register, so we
             want to make sure the scratch register is a full register.  */
          scratch_reg = force_mode (word_mode, scratch_reg);

          emit_move_insn (scratch_reg, XEXP (XEXP (operand1, 0), 1));
          emit_move_insn (scratch_reg, gen_rtx_fmt_ee (GET_CODE (XEXP (operand1,
                                                                        0)),
                                                       Pmode,
                                                       XEXP (XEXP (operand1, 0),
                                                       0),
                                                       scratch_reg));

          /* Now we are going to load the scratch register from memory,
             we want to load it in the same width as the original MEM,
             which must be the same as the width of the ultimate destination,
             OPERAND0.  */
          scratch_reg = force_mode (GET_MODE (operand0), scratch_reg);

          emit_move_insn (scratch_reg,
                          replace_equiv_address (operand1, scratch_reg));
        }
      else
        {
          /* We want to load the scratch register using the same mode as
             the ultimate destination.  */
          scratch_reg = force_mode (GET_MODE (operand0), scratch_reg);

          emit_move_insn (scratch_reg, operand1);
        }

      /* And emit the insn to set the ultimate destination.  We know that
         the scratch register has the same mode as the destination at this
         point.  */
      emit_move_insn (operand0, scratch_reg);
      return 1;
    }
  /* Handle the most common case: storing into a register.  */
  else if (register_operand (operand0, mode))
    {
      /* Legitimize TLS symbol references.  This happens for references
         that aren't a legitimate constant.  */
      if (PA_SYMBOL_REF_TLS_P (operand1))
        operand1 = legitimize_tls_address (operand1);

      if (register_operand (operand1, mode)
          || (GET_CODE (operand1) == CONST_INT
              && pa_cint_ok_for_move (INTVAL (operand1)))
          || (operand1 == CONST0_RTX (mode))
          || (GET_CODE (operand1) == HIGH
              && !symbolic_operand (XEXP (operand1, 0), VOIDmode))
          /* Only `general_operands' can come here, so MEM is ok.  */
          || GET_CODE (operand1) == MEM)
        {
          /* Various sets are created during RTL generation which don't
             have the REG_POINTER flag correctly set.  After the CSE pass,
             instruction recognition can fail if we don't consistently
             set this flag when performing register copies.  This should
             also improve the opportunities for creating insns that use
             unscaled indexing.  */
          if (REG_P (operand0) && REG_P (operand1))
            {
              if (REG_POINTER (operand1)
                  && !REG_POINTER (operand0)
                  && !HARD_REGISTER_P (operand0))
                copy_reg_pointer (operand0, operand1);
            }

          /* When MEMs are broken out, the REG_POINTER flag doesn't
             get set.  In some cases, we can set the REG_POINTER flag
             from the declaration for the MEM.  */
          if (REG_P (operand0)
              && GET_CODE (operand1) == MEM
              && !REG_POINTER (operand0))
            {
              tree decl = MEM_EXPR (operand1);

              /* Set the register pointer flag and register alignment
                 if the declaration for this memory reference is a
                 pointer type.  */
              if (decl)
                {
                  tree type;

                  /* If this is a COMPONENT_REF, use the FIELD_DECL from
                     tree operand 1.  */
                  if (TREE_CODE (decl) == COMPONENT_REF)
                    decl = TREE_OPERAND (decl, 1);

                  type = TREE_TYPE (decl);
                  type = strip_array_types (type);

                  if (POINTER_TYPE_P (type))
                    {
                      int align;

                      type = TREE_TYPE (type);
                      /* Using TYPE_ALIGN_OK is rather conservative as
                         only the ada frontend actually sets it.  */
                      align = (TYPE_ALIGN_OK (type) ? TYPE_ALIGN (type)
                               : BITS_PER_UNIT);
                      mark_reg_pointer (operand0, align);
                    }
                }
            }

          emit_insn (gen_rtx_SET (VOIDmode, operand0, operand1));
          return 1;
        }
    }
  else if (GET_CODE (operand0) == MEM)
    {
      if (mode == DFmode && operand1 == CONST0_RTX (mode)
          && !(reload_in_progress || reload_completed))
        {
          rtx temp = gen_reg_rtx (DFmode);

          emit_insn (gen_rtx_SET (VOIDmode, temp, operand1));
          emit_insn (gen_rtx_SET (VOIDmode, operand0, temp));
          return 1;
        }
      if (register_operand (operand1, mode) || operand1 == CONST0_RTX (mode))
        {
          /* Run this case quickly.  */
          emit_insn (gen_rtx_SET (VOIDmode, operand0, operand1));
          return 1;
        }
      if (! (reload_in_progress || reload_completed))
        {
          operands[0] = validize_mem (operand0);
          operands[1] = operand1 = force_reg (mode, operand1);
        }
    }
 
  /* Simplify the source if we need to.
     Note we do have to handle function labels here, even though we do
     not consider them legitimate constants.  Loop optimizations can
     call the emit_move_xxx with one as a source.  */
  if ((GET_CODE (operand1) != HIGH && immediate_operand (operand1, mode))
      || function_label_operand (operand1, VOIDmode)
      || (GET_CODE (operand1) == HIGH
          && symbolic_operand (XEXP (operand1, 0), mode)))
    {
      int ishighonly = 0;

      if (GET_CODE (operand1) == HIGH)
        {
          ishighonly = 1;
          operand1 = XEXP (operand1, 0);
        }
      if (symbolic_operand (operand1, mode))
        {
          /* Argh.  The assembler and linker can't handle arithmetic
             involving plabels.

             So we force the plabel into memory, load operand0 from
             the memory location, then add in the constant part.  */
          if ((GET_CODE (operand1) == CONST
               && GET_CODE (XEXP (operand1, 0)) == PLUS
               && function_label_operand (XEXP (XEXP (operand1, 0), 0),
                                          VOIDmode))
              || function_label_operand (operand1, VOIDmode))
            {
              rtx temp, const_part;

              /* Figure out what (if any) scratch register to use.  */
              if (reload_in_progress || reload_completed)
                {
                  scratch_reg = scratch_reg ? scratch_reg : operand0;
                  /* SCRATCH_REG will hold an address and maybe the actual
                     data.  We want it in WORD_MODE regardless of what mode it
                     was originally given to us.  */
                  scratch_reg = force_mode (word_mode, scratch_reg);
                }
              else if (flag_pic)
                scratch_reg = gen_reg_rtx (Pmode);

              if (GET_CODE (operand1) == CONST)
                {
                  /* Save away the constant part of the expression.  */
                  const_part = XEXP (XEXP (operand1, 0), 1);
                  gcc_assert (GET_CODE (const_part) == CONST_INT);

                  /* Force the function label into memory.  */
                  temp = force_const_mem (mode, XEXP (XEXP (operand1, 0), 0));
                }
              else
                {
                  /* No constant part.  */
                  const_part = NULL_RTX;

                  /* Force the function label into memory.  */
                  temp = force_const_mem (mode, operand1);
                }

              /* Get the address of the memory location.  PIC-ify it if
                 necessary.  */
              temp = XEXP (temp, 0);
              if (flag_pic)
                temp = legitimize_pic_address (temp, mode, scratch_reg);

              /* Put the address of the memory location into our destination
                 register.  */
              operands[1] = temp;
              pa_emit_move_sequence (operands, mode, scratch_reg);

              /* Now load from the memory location into our destination
                 register.  */
              operands[1] = gen_rtx_MEM (Pmode, operands[0]);
              pa_emit_move_sequence (operands, mode, scratch_reg);

              /* And add back in the constant part.  */
              if (const_part != NULL_RTX)
                expand_inc (operand0, const_part);

              return 1;
            }

          if (flag_pic)
            {
              rtx temp;

              if (reload_in_progress || reload_completed)
                {
                  temp = scratch_reg ? scratch_reg : operand0;
                  /* TEMP will hold an address and maybe the actual
                     data.  We want it in WORD_MODE regardless of what mode it
                     was originally given to us.  */
                  temp = force_mode (word_mode, temp);
                }
              else
                temp = gen_reg_rtx (Pmode);

              /* (const (plus (symbol) (const_int))) must be forced to
                 memory during/after reload if the const_int will not fit
                 in 14 bits.  */
              if (GET_CODE (operand1) == CONST
                  && GET_CODE (XEXP (operand1, 0)) == PLUS
                  && GET_CODE (XEXP (XEXP (operand1, 0), 1)) == CONST_INT
                  && !INT_14_BITS (XEXP (XEXP (operand1, 0), 1))
                  && (reload_completed || reload_in_progress)
                  && flag_pic)
                {
                  rtx const_mem = force_const_mem (mode, operand1);
                  operands[1] = legitimize_pic_address (XEXP (const_mem, 0),
                                                        mode, temp);
                  operands[1] = replace_equiv_address (const_mem, operands[1]);
                  pa_emit_move_sequence (operands, mode, temp);
                }
              else
                {
                  operands[1] = legitimize_pic_address (operand1, mode, temp);
                  if (REG_P (operand0) && REG_P (operands[1]))
                    copy_reg_pointer (operand0, operands[1]);
                  emit_insn (gen_rtx_SET (VOIDmode, operand0, operands[1]));
                }
            }
          /* On the HPPA, references to data space are supposed to use dp,
             register 27, but showing it in the RTL inhibits various cse
             and loop optimizations.  */
          else
            {
              rtx temp, set;

              if (reload_in_progress || reload_completed)
                {
                  temp = scratch_reg ? scratch_reg : operand0;
                  /* TEMP will hold an address and maybe the actual
                     data.  We want it in WORD_MODE regardless of what mode it
                     was originally given to us.  */
                  temp = force_mode (word_mode, temp);
                }
              else
                temp = gen_reg_rtx (mode);

              /* Loading a SYMBOL_REF into a register makes that register
                 safe to be used as the base in an indexed address.

                 Don't mark hard registers though.  That loses.  */
              if (GET_CODE (operand0) == REG
                  && REGNO (operand0) >= FIRST_PSEUDO_REGISTER)
                mark_reg_pointer (operand0, BITS_PER_UNIT);
              if (REGNO (temp) >= FIRST_PSEUDO_REGISTER)
                mark_reg_pointer (temp, BITS_PER_UNIT);

              if (ishighonly)
                set = gen_rtx_SET (mode, operand0, temp);
              else
                set = gen_rtx_SET (VOIDmode,
                                   operand0,
                                   gen_rtx_LO_SUM (mode, temp, operand1));

              emit_insn (gen_rtx_SET (VOIDmode,
                                      temp,
                                      gen_rtx_HIGH (mode, operand1)));
              emit_insn (set);

            }
          return 1;
        }
      else if (pa_tls_referenced_p (operand1))
        {
          rtx tmp = operand1;
          rtx addend = NULL;

          if (GET_CODE (tmp) == CONST && GET_CODE (XEXP (tmp, 0)) == PLUS)
            {
              addend = XEXP (XEXP (tmp, 0), 1);
              tmp = XEXP (XEXP (tmp, 0), 0);
            }

          gcc_assert (GET_CODE (tmp) == SYMBOL_REF);
          tmp = legitimize_tls_address (tmp);
          if (addend)
            {
              tmp = gen_rtx_PLUS (mode, tmp, addend);
              tmp = force_operand (tmp, operands[0]);
            }
          operands[1] = tmp;
        }
      else if (GET_CODE (operand1) != CONST_INT
               || !pa_cint_ok_for_move (INTVAL (operand1)))
        {
          rtx insn, temp;
          rtx op1 = operand1;
          HOST_WIDE_INT value = 0;
          HOST_WIDE_INT insv = 0;
          int insert = 0;

          if (GET_CODE (operand1) == CONST_INT)
            value = INTVAL (operand1);

          if (TARGET_64BIT
              && GET_CODE (operand1) == CONST_INT
              && HOST_BITS_PER_WIDE_INT > 32
              && GET_MODE_BITSIZE (GET_MODE (operand0)) > 32)
            {
              HOST_WIDE_INT nval;

              /* Extract the low order 32 bits of the value and sign extend.
                 If the new value is the same as the original value, we can
                 use the original value as-is.  If the new value is
                 different, we use it and insert the most-significant 32-bits
                 of the original value into the final result.  */
              nval = ((value & (((HOST_WIDE_INT) 2 << 31) - 1))
                      ^ ((HOST_WIDE_INT) 1 << 31)) - ((HOST_WIDE_INT) 1 << 31);
              if (value != nval)
                {
#if HOST_BITS_PER_WIDE_INT > 32
                  insv = value >= 0 ? value >> 32 : ~(~value >> 32);
#endif
                  insert = 1;
                  value = nval;
                  operand1 = GEN_INT (nval);
                }
            }
 
          if (reload_in_progress || reload_completed)
            temp = scratch_reg ? scratch_reg : operand0;
          else
            temp = gen_reg_rtx (mode);

          /* We don't directly split DImode constants on 32-bit targets
             because PLUS uses an 11-bit immediate and the insn sequence
             generated is not as efficient as the one using HIGH/LO_SUM.  */
          if (GET_CODE (operand1) == CONST_INT
              && GET_MODE_BITSIZE (mode) <= BITS_PER_WORD
              && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
              && !insert)
            {
              /* Directly break constant into high and low parts.  This
                 provides better optimization opportunities because various
                 passes recognize constants split with PLUS but not LO_SUM.
                 We use a 14-bit signed low part except when the addition
                 of 0x4000 to the high part might change the sign of the
                 high part.  */
              HOST_WIDE_INT low = value & 0x3fff;
              HOST_WIDE_INT high = value & ~ 0x3fff;

              if (low >= 0x2000)
                {
                  if (high == 0x7fffc000 || (mode == HImode && high == 0x4000))
                    high += 0x2000;
                  else
                    high += 0x4000;
                }

              low = value - high;

              emit_insn (gen_rtx_SET (VOIDmode, temp, GEN_INT (high)));
              operands[1] = gen_rtx_PLUS (mode, temp, GEN_INT (low));
            }
          else
            {
              emit_insn (gen_rtx_SET (VOIDmode, temp,
                                      gen_rtx_HIGH (mode, operand1)));
              operands[1] = gen_rtx_LO_SUM (mode, temp, operand1);
            }

          insn = emit_move_insn (operands[0], operands[1]);

          /* Now insert the most significant 32 bits of the value
             into the register.  When we don't have a second register
             available, it could take up to nine instructions to load
             a 64-bit integer constant.  Prior to reload, we force
             constants that would take more than three instructions
             to load to the constant pool.  During and after reload,
             we have to handle all possible values.  */
          if (insert)
            {
              /* Use a HIGH/LO_SUM/INSV sequence if we have a second
                 register and the value to be inserted is outside the
                 range that can be loaded with three depdi instructions.  */
              if (temp != operand0 && (insv >= 16384 || insv < -16384))
                {
                  operand1 = GEN_INT (insv);

                  emit_insn (gen_rtx_SET (VOIDmode, temp,
                                          gen_rtx_HIGH (mode, operand1)));
                  emit_move_insn (temp, gen_rtx_LO_SUM (mode, temp, operand1));
                  emit_insn (gen_insv (operand0, GEN_INT (32),
                                       const0_rtx, temp));
                }
              else
                {
                  int len = 5, pos = 27;

                  /* Insert the bits using the depdi instruction.  */
                  while (pos >= 0)
                    {
                      HOST_WIDE_INT v5 = ((insv & 31) ^ 16) - 16;
                      HOST_WIDE_INT sign = v5 < 0;

                      /* Left extend the insertion.  */
                      insv = (insv >= 0 ? insv >> len : ~(~insv >> len));
                      while (pos > 0 && (insv & 1) == sign)
                        {
                          insv = (insv >= 0 ? insv >> 1 : ~(~insv >> 1));
                          len += 1;
                          pos -= 1;
                        }

                      emit_insn (gen_insv (operand0, GEN_INT (len),
                                           GEN_INT (pos), GEN_INT (v5)));

                      len = pos > 0 && pos < 5 ? pos : 5;
                      pos -= len;
                    }
                }
            }

          set_unique_reg_note (insn, REG_EQUAL, op1);

          return 1;
        }
    }
  /* Now have insn-emit do whatever it normally does.  */
  return 0;
}
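
/* Editor's note: a standalone sketch (not part of pa.c) of the 14-bit
   high/low constant split used just above, so the overflow adjustment is
   easy to check numerically.  The corner cases for 0x7fffc000 and HImode
   are omitted; this only demonstrates the common path.  */
#include <assert.h>
#include <stdint.h>
#include <stdio.h>

static void split14 (int32_t value, int32_t *high, int32_t *low)
{
  *low = value & 0x3fff;
  *high = value & ~0x3fff;

  /* If the low part would be negative as a signed 14-bit field,
     bump the high part so the signed low part still reaches VALUE.  */
  if (*low >= 0x2000)
    *high += 0x4000;

  *low = value - *high;
}

int main (void)
{
  int32_t v = 0x12345, high, low;

  split14 (v, &high, &low);
  assert (high + low == v);                 /* ldil L'high ; ldo low(reg) */
  assert (low >= -0x2000 && low < 0x2000);  /* fits the 14-bit field */
  assert ((high & 0x7ff) == 0);             /* ldil can materialize HIGH */
  printf ("high=%#x low=%d\n", (unsigned) high, low);
  return 0;
}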
 
/* Examine EXP and return nonzero if it contains an ADDR_EXPR (meaning
   it will need a link/runtime reloc).  */

int
pa_reloc_needed (tree exp)
{
  int reloc = 0;

  switch (TREE_CODE (exp))
    {
    case ADDR_EXPR:
      return 1;

    case POINTER_PLUS_EXPR:
    case PLUS_EXPR:
    case MINUS_EXPR:
      reloc = pa_reloc_needed (TREE_OPERAND (exp, 0));
      reloc |= pa_reloc_needed (TREE_OPERAND (exp, 1));
      break;

    CASE_CONVERT:
    case NON_LVALUE_EXPR:
      reloc = pa_reloc_needed (TREE_OPERAND (exp, 0));
      break;

    case CONSTRUCTOR:
      {
        tree value;
        unsigned HOST_WIDE_INT ix;

        FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), ix, value)
          if (value)
            reloc |= pa_reloc_needed (value);
      }
      break;

    case ERROR_MARK:
      break;

    default:
      break;
    }
  return reloc;
}
 
/* Return the best assembler insn template
   for moving operands[1] into operands[0] as a fullword.  */
const char *
pa_singlemove_string (rtx *operands)
{
  HOST_WIDE_INT intval;

  if (GET_CODE (operands[0]) == MEM)
    return "stw %r1,%0";
  if (GET_CODE (operands[1]) == MEM)
    return "ldw %1,%0";
  if (GET_CODE (operands[1]) == CONST_DOUBLE)
    {
      long i;
      REAL_VALUE_TYPE d;

      gcc_assert (GET_MODE (operands[1]) == SFmode);

      /* Translate the CONST_DOUBLE to a CONST_INT with the same target
         bit pattern.  */
      REAL_VALUE_FROM_CONST_DOUBLE (d, operands[1]);
      REAL_VALUE_TO_TARGET_SINGLE (d, i);

      operands[1] = GEN_INT (i);
      /* Fall through to CONST_INT case.  */
    }
  if (GET_CODE (operands[1]) == CONST_INT)
    {
      intval = INTVAL (operands[1]);

      if (VAL_14_BITS_P (intval))
        return "ldi %1,%0";
      else if ((intval & 0x7ff) == 0)
        return "ldil L'%1,%0";
      else if (pa_zdepi_cint_p (intval))
        return "{zdepi %Z1,%0|depwi,z %Z1,%0}";
      else
        return "ldil L'%1,%0\n\tldo R'%1(%0),%0";
    }
  return "copy %1,%0";
}
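
/* Editor's note: a standalone sketch (not part of pa.c) of the immediate
   classification above.  fits_14_bits mirrors VAL_14_BITS_P; zdepi_ok is a
   deliberately conservative stand-in for pa_zdepi_cint_p (it only accepts
   a single contiguous run of 1 bits), which is enough to show the order in
   which the templates are tried.  */
#include <stdint.h>
#include <stdio.h>

static int fits_14_bits (int32_t x)      /* ldi: signed 14-bit immediate */
{
  return x >= -0x2000 && x < 0x2000;
}

static int zdepi_ok (uint32_t x)         /* conservative pa_zdepi_cint_p */
{
  if (x == 0)
    return 0;
  while ((x & 1) == 0)                   /* strip trailing zeros */
    x >>= 1;
  return (x & (x + 1)) == 0;             /* remaining bits contiguous? */
}

static const char *pick_template (int32_t x)
{
  if (fits_14_bits (x))
    return "ldi";                        /* one insn, immediate load */
  else if ((x & 0x7ff) == 0)
    return "ldil";                       /* one insn, left-immediate load */
  else if (zdepi_ok ((uint32_t) x))
    return "zdepi/depwi,z";              /* one insn, deposit a bit run */
  else
    return "ldil + ldo";                 /* two-insn high/low sequence */
}

int main (void)
{
  printf ("%d -> %s\n", 42, pick_template (42));                  /* ldi */
  printf ("%#x -> %s\n", 0x12345800, pick_template (0x12345800)); /* ldil */
  printf ("%#x -> %s\n", 0x3c00, pick_template (0x3c00));         /* zdepi */
  printf ("%#x -> %s\n", 0x12345, pick_template (0x12345));  /* ldil+ldo */
  return 0;
}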
 
/* Compute position (in OP[1]) and width (in OP[2])
   useful for copying IMM to a register using the zdepi
   instructions.  Store the immediate value to insert in OP[0].  */
static void
compute_zdepwi_operands (unsigned HOST_WIDE_INT imm, unsigned *op)
{
  int lsb, len;

  /* Find the least significant set bit in IMM.  */
  for (lsb = 0; lsb < 32; lsb++)
    {
      if ((imm & 1) != 0)
        break;
      imm >>= 1;
    }

  /* Choose variants based on *sign* of the 5-bit field.  */
  if ((imm & 0x10) == 0)
    len = (lsb <= 28) ? 4 : 32 - lsb;
  else
    {
      /* Find the width of the bitstring in IMM.  */
      for (len = 5; len < 32 - lsb; len++)
        {
          if ((imm & ((unsigned HOST_WIDE_INT) 1 << len)) == 0)
            break;
        }

      /* Sign extend IMM as a 5-bit value.  */
      imm = (imm & 0xf) - 0x10;
    }

  op[0] = imm;
  op[1] = 31 - lsb;
  op[2] = len;
}
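
/* Editor's note: a standalone sketch (not part of pa.c) that mirrors
   compute_zdepwi_operands and reconstructs the deposited value, to make
   the {immediate, position, length} encoding concrete.  The
   reconstruction models depwi,z: keep the low LEN bits of the
   sign-extended 5-bit immediate and shift them up by LSB (= 31 - OP[1]).  */
#include <assert.h>
#include <stdio.h>

static void zdepwi_operands (unsigned imm, unsigned op[3])
{
  int lsb, len;

  for (lsb = 0; lsb < 32; lsb++)        /* least significant set bit */
    {
      if ((imm & 1) != 0)
        break;
      imm >>= 1;
    }

  if ((imm & 0x10) == 0)                /* 5-bit field is non-negative */
    len = (lsb <= 28) ? 4 : 32 - lsb;
  else
    {
      for (len = 5; len < 32 - lsb; len++)  /* width of the bit run */
        if ((imm & (1u << len)) == 0)
          break;
      imm = (imm & 0xf) - 0x10;         /* sign extend as 5-bit value */
    }

  op[0] = imm;
  op[1] = 31 - lsb;
  op[2] = len;
}

int main (void)
{
  unsigned op[3];
  unsigned imm = 0x3f0;                 /* 0b1111110000: run of six 1s */

  zdepwi_operands (imm, op);
  /* For this input: op[0] == (unsigned) -1, op[1] == 27, op[2] == 6.  */
  assert (((op[0] & ((1u << op[2]) - 1)) << (31 - op[1])) == imm);
  printf ("imm=%#x -> v5=%d pos=%u len=%u\n",
          imm, (int) op[0], op[1], op[2]);
  return 0;
}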
 
/* Compute position (in OP[1]) and width (in OP[2])
   useful for copying IMM to a register using the depdi,z
   instructions.  Store the immediate value to insert in OP[0].  */

static void
compute_zdepdi_operands (unsigned HOST_WIDE_INT imm, unsigned *op)
{
  int lsb, len, maxlen;

  maxlen = MIN (HOST_BITS_PER_WIDE_INT, 64);

  /* Find the least significant set bit in IMM.  */
  for (lsb = 0; lsb < maxlen; lsb++)
    {
      if ((imm & 1) != 0)
        break;
      imm >>= 1;
    }

  /* Choose variants based on *sign* of the 5-bit field.  */
  if ((imm & 0x10) == 0)
    len = (lsb <= maxlen - 4) ? 4 : maxlen - lsb;
  else
    {
      /* Find the width of the bitstring in IMM.  */
      for (len = 5; len < maxlen - lsb; len++)
        {
          if ((imm & ((unsigned HOST_WIDE_INT) 1 << len)) == 0)
            break;
        }

      /* Extend length if host is narrow and IMM is negative.  */
      if (HOST_BITS_PER_WIDE_INT == 32 && len == maxlen - lsb)
        len += 32;

      /* Sign extend IMM as a 5-bit value.  */
      imm = (imm & 0xf) - 0x10;
    }

  op[0] = imm;
  op[1] = 63 - lsb;
  op[2] = len;
}
 
/* Output assembler code to perform a doubleword move insn
   with operands OPERANDS.  */

const char *
pa_output_move_double (rtx *operands)
{
  enum { REGOP, OFFSOP, MEMOP, CNSTOP, RNDOP } optype0, optype1;
  rtx latehalf[2];
  rtx addreg0 = 0, addreg1 = 0;

  /* First classify both operands.  */

  if (REG_P (operands[0]))
    optype0 = REGOP;
  else if (offsettable_memref_p (operands[0]))
    optype0 = OFFSOP;
  else if (GET_CODE (operands[0]) == MEM)
    optype0 = MEMOP;
  else
    optype0 = RNDOP;

  if (REG_P (operands[1]))
    optype1 = REGOP;
  else if (CONSTANT_P (operands[1]))
    optype1 = CNSTOP;
  else if (offsettable_memref_p (operands[1]))
    optype1 = OFFSOP;
  else if (GET_CODE (operands[1]) == MEM)
    optype1 = MEMOP;
  else
    optype1 = RNDOP;

  /* Check for the cases that the operand constraints are not
     supposed to allow to happen.  */
  gcc_assert (optype0 == REGOP || optype1 == REGOP);

  /* Handle copies between general and floating registers.  */

  if (optype0 == REGOP && optype1 == REGOP
      && FP_REG_P (operands[0]) ^ FP_REG_P (operands[1]))
    {
      if (FP_REG_P (operands[0]))
        {
          output_asm_insn ("{stws|stw} %1,-16(%%sp)", operands);
          output_asm_insn ("{stws|stw} %R1,-12(%%sp)", operands);
          return "{fldds|fldd} -16(%%sp),%0";
        }
      else
        {
          output_asm_insn ("{fstds|fstd} %1,-16(%%sp)", operands);
          output_asm_insn ("{ldws|ldw} -16(%%sp),%0", operands);
          return "{ldws|ldw} -12(%%sp),%R0";
        }
    }

  /* Handle auto decrementing and incrementing loads and stores
     specifically, since the structure of the function doesn't work
     for them without major modification.  Do it better when we teach
     this port about the general inc/dec addressing of the PA.
     (This was written by tege.  Chide him if it doesn't work.)  */
 
  if (optype0 == MEMOP)
    {
      /* We have to output the address syntax ourselves, since print_operand
         doesn't deal with the addresses we want to use.  Fix this later.  */

      rtx addr = XEXP (operands[0], 0);
      if (GET_CODE (addr) == POST_INC || GET_CODE (addr) == POST_DEC)
        {
          rtx high_reg = gen_rtx_SUBREG (SImode, operands[1], 0);

          operands[0] = XEXP (addr, 0);
          gcc_assert (GET_CODE (operands[1]) == REG
                      && GET_CODE (operands[0]) == REG);

          gcc_assert (!reg_overlap_mentioned_p (high_reg, addr));

          /* No overlap between high target register and address
             register.  (We do this in a non-obvious way to
             save a register file writeback)  */
          if (GET_CODE (addr) == POST_INC)
            return "{stws|stw},ma %1,8(%0)\n\tstw %R1,-4(%0)";
          return "{stws|stw},ma %1,-8(%0)\n\tstw %R1,12(%0)";
        }
      else if (GET_CODE (addr) == PRE_INC || GET_CODE (addr) == PRE_DEC)
        {
          rtx high_reg = gen_rtx_SUBREG (SImode, operands[1], 0);

          operands[0] = XEXP (addr, 0);
          gcc_assert (GET_CODE (operands[1]) == REG
                      && GET_CODE (operands[0]) == REG);

          gcc_assert (!reg_overlap_mentioned_p (high_reg, addr));
          /* No overlap between high target register and address
             register.  (We do this in a non-obvious way to save a
             register file writeback)  */
          if (GET_CODE (addr) == PRE_INC)
            return "{stws|stw},mb %1,8(%0)\n\tstw %R1,4(%0)";
          return "{stws|stw},mb %1,-8(%0)\n\tstw %R1,4(%0)";
        }
    }
  if (optype1 == MEMOP)
    {
      /* We have to output the address syntax ourselves, since print_operand
         doesn't deal with the addresses we want to use.  Fix this later.  */

      rtx addr = XEXP (operands[1], 0);
      if (GET_CODE (addr) == POST_INC || GET_CODE (addr) == POST_DEC)
        {
          rtx high_reg = gen_rtx_SUBREG (SImode, operands[0], 0);

          operands[1] = XEXP (addr, 0);
          gcc_assert (GET_CODE (operands[0]) == REG
                      && GET_CODE (operands[1]) == REG);

          if (!reg_overlap_mentioned_p (high_reg, addr))
            {
              /* No overlap between high target register and address
                 register.  (We do this in a non-obvious way to
                 save a register file writeback)  */
              if (GET_CODE (addr) == POST_INC)
                return "{ldws|ldw},ma 8(%1),%0\n\tldw -4(%1),%R0";
              return "{ldws|ldw},ma -8(%1),%0\n\tldw 12(%1),%R0";
            }
          else
            {
              /* This is an undefined situation.  We should load into the
                 address register *and* update that register.  Probably
                 we don't need to handle this at all.  */
              if (GET_CODE (addr) == POST_INC)
                return "ldw 4(%1),%R0\n\t{ldws|ldw},ma 8(%1),%0";
              return "ldw 4(%1),%R0\n\t{ldws|ldw},ma -8(%1),%0";
            }
        }
      else if (GET_CODE (addr) == PRE_INC || GET_CODE (addr) == PRE_DEC)
        {
          rtx high_reg = gen_rtx_SUBREG (SImode, operands[0], 0);

          operands[1] = XEXP (addr, 0);
          gcc_assert (GET_CODE (operands[0]) == REG
                      && GET_CODE (operands[1]) == REG);

          if (!reg_overlap_mentioned_p (high_reg, addr))
            {
              /* No overlap between high target register and address
                 register.  (We do this in a non-obvious way to
                 save a register file writeback)  */
              if (GET_CODE (addr) == PRE_INC)
                return "{ldws|ldw},mb 8(%1),%0\n\tldw 4(%1),%R0";
              return "{ldws|ldw},mb -8(%1),%0\n\tldw 4(%1),%R0";
            }
          else
            {
              /* This is an undefined situation.  We should load into the
                 address register *and* update that register.  Probably
                 we don't need to handle this at all.  */
              if (GET_CODE (addr) == PRE_INC)
                return "ldw 12(%1),%R0\n\t{ldws|ldw},mb 8(%1),%0";
              return "ldw -4(%1),%R0\n\t{ldws|ldw},mb -8(%1),%0";
            }
        }
      else if (GET_CODE (addr) == PLUS
               && GET_CODE (XEXP (addr, 0)) == MULT)
        {
          rtx xoperands[4];
          rtx high_reg = gen_rtx_SUBREG (SImode, operands[0], 0);

          if (!reg_overlap_mentioned_p (high_reg, addr))
            {
              xoperands[0] = high_reg;
              xoperands[1] = XEXP (addr, 1);
              xoperands[2] = XEXP (XEXP (addr, 0), 0);
              xoperands[3] = XEXP (XEXP (addr, 0), 1);
              output_asm_insn ("{sh%O3addl %2,%1,%0|shladd,l %2,%O3,%1,%0}",
                               xoperands);
              return "ldw 4(%0),%R0\n\tldw 0(%0),%0";
            }
          else
            {
              xoperands[0] = high_reg;
              xoperands[1] = XEXP (addr, 1);
              xoperands[2] = XEXP (XEXP (addr, 0), 0);
              xoperands[3] = XEXP (XEXP (addr, 0), 1);
              output_asm_insn ("{sh%O3addl %2,%1,%R0|shladd,l %2,%O3,%1,%R0}",
                               xoperands);
              return "ldw 0(%R0),%0\n\tldw 4(%R0),%R0";
            }
        }
    }
 
  /* If an operand is an unoffsettable memory ref, find a register
     we can increment temporarily to make it refer to the second word.  */

  if (optype0 == MEMOP)
    addreg0 = find_addr_reg (XEXP (operands[0], 0));

  if (optype1 == MEMOP)
    addreg1 = find_addr_reg (XEXP (operands[1], 0));

  /* Ok, we can do one word at a time.
     Normally we do the low-numbered word first.

     In either case, set up in LATEHALF the operands to use
     for the high-numbered word and in some cases alter the
     operands in OPERANDS to be suitable for the low-numbered word.  */

  if (optype0 == REGOP)
    latehalf[0] = gen_rtx_REG (SImode, REGNO (operands[0]) + 1);
  else if (optype0 == OFFSOP)
    latehalf[0] = adjust_address (operands[0], SImode, 4);
  else
    latehalf[0] = operands[0];

  if (optype1 == REGOP)
    latehalf[1] = gen_rtx_REG (SImode, REGNO (operands[1]) + 1);
  else if (optype1 == OFFSOP)
    latehalf[1] = adjust_address (operands[1], SImode, 4);
  else if (optype1 == CNSTOP)
    split_double (operands[1], &operands[1], &latehalf[1]);
  else
    latehalf[1] = operands[1];

  /* If the first move would clobber the source of the second one,
     do them in the other order.

     This can happen in two cases:

        mem -> register where the first half of the destination register
        is the same register used in the memory's address.  Reload
        can create such insns.

        mem in this case will be either register indirect or register
        indirect plus a valid offset.

        register -> register move where REGNO(dst) == REGNO(src + 1)
        someone (Tim/Tege?) claimed this can happen for parameter loads.

     Handle mem -> register case first.  */
  if (optype0 == REGOP
      && (optype1 == MEMOP || optype1 == OFFSOP)
      && refers_to_regno_p (REGNO (operands[0]), REGNO (operands[0]) + 1,
                            operands[1], 0))
    {
      /* Do the late half first.  */
      if (addreg1)
        output_asm_insn ("ldo 4(%0),%0", &addreg1);
      output_asm_insn (pa_singlemove_string (latehalf), latehalf);

      /* Then clobber.  */
      if (addreg1)
        output_asm_insn ("ldo -4(%0),%0", &addreg1);
      return pa_singlemove_string (operands);
    }

  /* Now handle register -> register case.  */
  if (optype0 == REGOP && optype1 == REGOP
      && REGNO (operands[0]) == REGNO (operands[1]) + 1)
    {
      output_asm_insn (pa_singlemove_string (latehalf), latehalf);
      return pa_singlemove_string (operands);
    }

  /* Normal case: do the two words, low-numbered first.  */

  output_asm_insn (pa_singlemove_string (operands), operands);

  /* Make any unoffsettable addresses point at high-numbered word.  */
  if (addreg0)
    output_asm_insn ("ldo 4(%0),%0", &addreg0);
  if (addreg1)
    output_asm_insn ("ldo 4(%0),%0", &addreg1);

  /* Do that word.  */
  output_asm_insn (pa_singlemove_string (latehalf), latehalf);

  /* Undo the adds we just did.  */
  if (addreg0)
    output_asm_insn ("ldo -4(%0),%0", &addreg0);
  if (addreg1)
    output_asm_insn ("ldo -4(%0),%0", &addreg1);

  return "";
}
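
/* Editor's note: a standalone sketch (not part of pa.c) of the overlap
   rule above.  It simulates a register file as an array and shows why a
   doubleword register copy with REGNO(dst) == REGNO(src) + 1 must move
   the late (high-numbered) half first.  */
#include <assert.h>

int main (void)
{
  int regs[8] = { 0, 11, 22, 0, 0, 0, 0, 0 };

  /* Copy the pair {r1,r2} to {r2,r3}: dst == src + 1, so the halves
     overlap in r2.  */

  /* Late half first: r3 = r2, then r2 = r1.  r2 is read before it is
     overwritten, so the pair arrives intact.  */
  regs[3] = regs[2];
  regs[2] = regs[1];
  assert (regs[2] == 11 && regs[3] == 22);

  /* The naive low-first order would have done r2 = r1 first, clobbering
     the source of the second move and yielding {11, 11}.  */
  return 0;
}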
 
const char *
pa_output_fp_move_double (rtx *operands)
{
  if (FP_REG_P (operands[0]))
    {
      if (FP_REG_P (operands[1])
          || operands[1] == CONST0_RTX (GET_MODE (operands[0])))
        output_asm_insn ("fcpy,dbl %f1,%0", operands);
      else
        output_asm_insn ("fldd%F1 %1,%0", operands);
    }
  else if (FP_REG_P (operands[1]))
    {
      output_asm_insn ("fstd%F0 %1,%0", operands);
    }
  else
    {
      rtx xoperands[2];

      gcc_assert (operands[1] == CONST0_RTX (GET_MODE (operands[0])));

      /* This is a pain.  You have to be prepared to deal with an
         arbitrary address here including pre/post increment/decrement,
         so avoid this in the MD.  */
      gcc_assert (GET_CODE (operands[0]) == REG);

      xoperands[1] = gen_rtx_REG (SImode, REGNO (operands[0]) + 1);
      xoperands[0] = operands[0];
      output_asm_insn ("copy %%r0,%0\n\tcopy %%r0,%1", xoperands);
    }
  return "";
}
 
/* Return a REG that occurs in ADDR with coefficient 1.
   ADDR can be effectively incremented by incrementing REG.  */

static rtx
find_addr_reg (rtx addr)
{
  while (GET_CODE (addr) == PLUS)
    {
      if (GET_CODE (XEXP (addr, 0)) == REG)
        addr = XEXP (addr, 0);
      else if (GET_CODE (XEXP (addr, 1)) == REG)
        addr = XEXP (addr, 1);
      else if (CONSTANT_P (XEXP (addr, 0)))
        addr = XEXP (addr, 1);
      else if (CONSTANT_P (XEXP (addr, 1)))
        addr = XEXP (addr, 0);
      else
        gcc_unreachable ();
    }
  gcc_assert (GET_CODE (addr) == REG);
  return addr;
}
 
/* Emit code to perform a block move.

   OPERANDS[0] is the destination pointer as a REG, clobbered.
   OPERANDS[1] is the source pointer as a REG, clobbered.
   OPERANDS[2] is a register for temporary storage.
   OPERANDS[3] is a register for temporary storage.
   OPERANDS[4] is the size as a CONST_INT.
   OPERANDS[5] is the alignment safe to use, as a CONST_INT.
   OPERANDS[6] is another temporary register.  */

const char *
pa_output_block_move (rtx *operands, int size_is_constant ATTRIBUTE_UNUSED)
{
  int align = INTVAL (operands[5]);
  unsigned long n_bytes = INTVAL (operands[4]);

  /* We can't move more than a word at a time because the PA
     has no integer move insns wider than a word.  (Could use
     fp mem ops?)  */
  if (align > (TARGET_64BIT ? 8 : 4))
    align = (TARGET_64BIT ? 8 : 4);

  /* Note that we know each loop below will execute at least twice
     (else we would have open-coded the copy).  */
  switch (align)
    {
      case 8:
        /* Pre-adjust the loop counter.  */
        operands[4] = GEN_INT (n_bytes - 16);
        output_asm_insn ("ldi %4,%2", operands);

        /* Copying loop.  */
        output_asm_insn ("ldd,ma 8(%1),%3", operands);
        output_asm_insn ("ldd,ma 8(%1),%6", operands);
        output_asm_insn ("std,ma %3,8(%0)", operands);
        output_asm_insn ("addib,>= -16,%2,.-12", operands);
        output_asm_insn ("std,ma %6,8(%0)", operands);

        /* Handle the residual.  There could be up to 7 bytes of
           residual to copy!  */
        if (n_bytes % 16 != 0)
          {
            operands[4] = GEN_INT (n_bytes % 8);
            if (n_bytes % 16 >= 8)
              output_asm_insn ("ldd,ma 8(%1),%3", operands);
            if (n_bytes % 8 != 0)
              output_asm_insn ("ldd 0(%1),%6", operands);
            if (n_bytes % 16 >= 8)
              output_asm_insn ("std,ma %3,8(%0)", operands);
            if (n_bytes % 8 != 0)
              output_asm_insn ("stdby,e %6,%4(%0)", operands);
          }
        return "";

      case 4:
        /* Pre-adjust the loop counter.  */
        operands[4] = GEN_INT (n_bytes - 8);
        output_asm_insn ("ldi %4,%2", operands);

        /* Copying loop.  */
        output_asm_insn ("{ldws|ldw},ma 4(%1),%3", operands);
        output_asm_insn ("{ldws|ldw},ma 4(%1),%6", operands);
        output_asm_insn ("{stws|stw},ma %3,4(%0)", operands);
        output_asm_insn ("addib,>= -8,%2,.-12", operands);
        output_asm_insn ("{stws|stw},ma %6,4(%0)", operands);

        /* Handle the residual.  There could be up to 7 bytes of
           residual to copy!  */
        if (n_bytes % 8 != 0)
          {
            operands[4] = GEN_INT (n_bytes % 4);
            if (n_bytes % 8 >= 4)
              output_asm_insn ("{ldws|ldw},ma 4(%1),%3", operands);
            if (n_bytes % 4 != 0)
              output_asm_insn ("ldw 0(%1),%6", operands);
            if (n_bytes % 8 >= 4)
              output_asm_insn ("{stws|stw},ma %3,4(%0)", operands);
            if (n_bytes % 4 != 0)
              output_asm_insn ("{stbys|stby},e %6,%4(%0)", operands);
          }
        return "";

      case 2:
        /* Pre-adjust the loop counter.  */
        operands[4] = GEN_INT (n_bytes - 4);
        output_asm_insn ("ldi %4,%2", operands);

        /* Copying loop.  */
        output_asm_insn ("{ldhs|ldh},ma 2(%1),%3", operands);
        output_asm_insn ("{ldhs|ldh},ma 2(%1),%6", operands);
        output_asm_insn ("{sths|sth},ma %3,2(%0)", operands);
        output_asm_insn ("addib,>= -4,%2,.-12", operands);
        output_asm_insn ("{sths|sth},ma %6,2(%0)", operands);

        /* Handle the residual.  */
        if (n_bytes % 4 != 0)
          {
            if (n_bytes % 4 >= 2)
              output_asm_insn ("{ldhs|ldh},ma 2(%1),%3", operands);
            if (n_bytes % 2 != 0)
              output_asm_insn ("ldb 0(%1),%6", operands);
            if (n_bytes % 4 >= 2)
              output_asm_insn ("{sths|sth},ma %3,2(%0)", operands);
            if (n_bytes % 2 != 0)
              output_asm_insn ("stb %6,0(%0)", operands);
          }
        return "";

      case 1:
        /* Pre-adjust the loop counter.  */
        operands[4] = GEN_INT (n_bytes - 2);
        output_asm_insn ("ldi %4,%2", operands);

        /* Copying loop.  */
        output_asm_insn ("{ldbs|ldb},ma 1(%1),%3", operands);
        output_asm_insn ("{ldbs|ldb},ma 1(%1),%6", operands);
        output_asm_insn ("{stbs|stb},ma %3,1(%0)", operands);
        output_asm_insn ("addib,>= -2,%2,.-12", operands);
        output_asm_insn ("{stbs|stb},ma %6,1(%0)", operands);

        /* Handle the residual.  */
        if (n_bytes % 2 != 0)
          {
            output_asm_insn ("ldb 0(%1),%3", operands);
            output_asm_insn ("stb %3,0(%0)", operands);
          }
        return "";

      default:
        gcc_unreachable ();
    }
}
 
2847
/* Count the number of insns necessary to handle this block move.
2848
 
2849
   Basic structure is the same as pa_output_block_move, except that we
2850
   count insns rather than emit them.  */
2851
 
2852
static int
2853
compute_movmem_length (rtx insn)
2854
{
2855
  rtx pat = PATTERN (insn);
2856
  unsigned int align = INTVAL (XEXP (XVECEXP (pat, 0, 7), 0));
2857
  unsigned long n_bytes = INTVAL (XEXP (XVECEXP (pat, 0, 6), 0));
2858
  unsigned int n_insns = 0;
2859
 
2860
  /* We can't move more than a word at a time because the PA
2861
     has no integer move insns wider than a word.  (Could use fp mem ops?)  */
2862
  if (align > (TARGET_64BIT ? 8 : 4))
2863
    align = (TARGET_64BIT ? 8 : 4);
2864
 
2865
  /* The basic copying loop.  */
2866
  n_insns = 6;
2867
 
2868
  /* Residuals.  */
2869
  if (n_bytes % (2 * align) != 0)
2870
    {
2871
      if ((n_bytes % (2 * align)) >= align)
2872
        n_insns += 2;
2873
 
2874
      if ((n_bytes % align) != 0)
2875
        n_insns += 2;
2876
    }
2877
 
2878
  /* Lengths are expressed in bytes now; each insn is 4 bytes.  */
2879
  return n_insns * 4;
2880
}
2881
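/* A worked example of the computation above: for n_bytes = 12 and
   align = 4, the copying loop costs 6 insns; 12 % 8 = 4 is >= align,
   so the residual adds 2 more, and 12 % 4 == 0 adds nothing.  The
   pattern is therefore 8 insns, i.e. 32 bytes, long.  */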
 
2882
/* Emit code to perform a block clear.
2883
 
2884
   OPERANDS[0] is the destination pointer as a REG, clobbered.
2885
   OPERANDS[1] is a register for temporary storage.
2886
   OPERANDS[2] is the size as a CONST_INT
2887
   OPERANDS[3] is the alignment safe to use, as a CONST_INT.  */
2888
 
2889
const char *
2890
pa_output_block_clear (rtx *operands, int size_is_constant ATTRIBUTE_UNUSED)
2891
{
2892
  int align = INTVAL (operands[3]);
2893
  unsigned long n_bytes = INTVAL (operands[2]);
2894
 
2895
  /* We can't clear more than a word at a time because the PA
2896
     has no integer store insns wider than a word.  */
2897
  if (align > (TARGET_64BIT ? 8 : 4))
2898
    align = (TARGET_64BIT ? 8 : 4);
2899
 
2900
  /* Note that we know each loop below will execute at least twice
2901
     (else we would have open-coded the copy).  */
2902
  switch (align)
2903
    {
2904
      case 8:
2905
        /* Pre-adjust the loop counter.  */
2906
        operands[2] = GEN_INT (n_bytes - 16);
2907
        output_asm_insn ("ldi %2,%1", operands);
2908
 
2909
        /* Loop.  */
2910
        output_asm_insn ("std,ma %%r0,8(%0)", operands);
2911
        output_asm_insn ("addib,>= -16,%1,.-4", operands);
2912
        output_asm_insn ("std,ma %%r0,8(%0)", operands);
2913
 
2914
        /* Handle the residual.  There could be up to 15 bytes of
2915
           residual to clear!  */
2916
        if (n_bytes % 16 != 0)
2917
          {
2918
            operands[2] = GEN_INT (n_bytes % 8);
2919
            if (n_bytes % 16 >= 8)
2920
              output_asm_insn ("std,ma %%r0,8(%0)", operands);
2921
            if (n_bytes % 8 != 0)
2922
              output_asm_insn ("stdby,e %%r0,%2(%0)", operands);
2923
          }
2924
        return "";
2925
 
2926
      case 4:
2927
        /* Pre-adjust the loop counter.  */
2928
        operands[2] = GEN_INT (n_bytes - 8);
2929
        output_asm_insn ("ldi %2,%1", operands);
2930
 
2931
        /* Loop.  */
2932
        output_asm_insn ("{stws|stw},ma %%r0,4(%0)", operands);
2933
        output_asm_insn ("addib,>= -8,%1,.-4", operands);
2934
        output_asm_insn ("{stws|stw},ma %%r0,4(%0)", operands);
2935
 
2936
        /* Handle the residual.  There could be up to 7 bytes of
2937
           residual to clear!  */
2938
        if (n_bytes % 8 != 0)
2939
          {
2940
            operands[2] = GEN_INT (n_bytes % 4);
2941
            if (n_bytes % 8 >= 4)
2942
              output_asm_insn ("{stws|stw},ma %%r0,4(%0)", operands);
2943
            if (n_bytes % 4 != 0)
2944
              output_asm_insn ("{stbys|stby},e %%r0,%2(%0)", operands);
2945
          }
2946
        return "";
2947
 
2948
      case 2:
2949
        /* Pre-adjust the loop counter.  */
2950
        operands[2] = GEN_INT (n_bytes - 4);
2951
        output_asm_insn ("ldi %2,%1", operands);
2952
 
2953
        /* Loop.  */
2954
        output_asm_insn ("{sths|sth},ma %%r0,2(%0)", operands);
2955
        output_asm_insn ("addib,>= -4,%1,.-4", operands);
2956
        output_asm_insn ("{sths|sth},ma %%r0,2(%0)", operands);
2957
 
2958
        /* Handle the residual.  */
2959
        if (n_bytes % 4 != 0)
2960
          {
2961
            if (n_bytes % 4 >= 2)
2962
              output_asm_insn ("{sths|sth},ma %%r0,2(%0)", operands);
2963
            if (n_bytes % 2 != 0)
2964
              output_asm_insn ("stb %%r0,0(%0)", operands);
2965
          }
2966
        return "";
2967
 
2968
      case 1:
2969
        /* Pre-adjust the loop counter.  */
2970
        operands[2] = GEN_INT (n_bytes - 2);
2971
        output_asm_insn ("ldi %2,%1", operands);
2972
 
2973
        /* Loop.  */
2974
        output_asm_insn ("{stbs|stb},ma %%r0,1(%0)", operands);
2975
        output_asm_insn ("addib,>= -2,%1,.-4", operands);
2976
        output_asm_insn ("{stbs|stb},ma %%r0,1(%0)", operands);
2977
 
2978
        /* Handle the residual.  */
2979
        if (n_bytes % 2 != 0)
2980
          output_asm_insn ("stb %%r0,0(%0)", operands);
2981
 
2982
        return "";
2983
 
2984
      default:
2985
        gcc_unreachable ();
2986
    }
2987
}
2988
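/* A sketch (not verified compiler output) of the sequence emitted above
   for align = 4 and n_bytes = 12, using the GAS spellings:

       ldi 4,%1                ; loop counter = n_bytes - 8
       stw,ma %r0,4(%0)        ; clear a word and post-increment %0
       addib,>= -8,%1,.-4      ; loop while the counter stays >= 0
       stw,ma %r0,4(%0)        ; second word per iteration
       stw,ma %r0,4(%0)        ; residual word, since 12 % 8 >= 4

   which clears 12 bytes in all.  */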
 
2989
/* Count the number of insns necessary to handle this block clear.
2990
 
2991
   Basic structure is the same as pa_output_block_clear, except that we
2992
   count insns rather than emit them.  */
2993
 
2994
static int
2995
compute_clrmem_length (rtx insn)
2996
{
2997
  rtx pat = PATTERN (insn);
2998
  unsigned int align = INTVAL (XEXP (XVECEXP (pat, 0, 4), 0));
2999
  unsigned long n_bytes = INTVAL (XEXP (XVECEXP (pat, 0, 3), 0));
3000
  unsigned int n_insns = 0;
3001
 
3002
  /* We can't clear more than a word at a time because the PA
3003
     has no longer integer move insns.  */
3004
  if (align > (TARGET_64BIT ? 8 : 4))
3005
    align = (TARGET_64BIT ? 8 : 4);
3006
 
3007
  /* The basic loop.  */
3008
  n_insns = 4;
3009
 
3010
  /* Residuals.  */
3011
  if (n_bytes % (2 * align) != 0)
3012
    {
3013
      if ((n_bytes % (2 * align)) >= align)
3014
        n_insns++;
3015
 
3016
      if ((n_bytes % align) != 0)
3017
        n_insns++;
3018
    }
3019
 
3020
  /* Lengths are expressed in bytes now; each insn is 4 bytes.  */
3021
  return n_insns * 4;
3022
}
3023
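/* A worked example of the computation above: for n_bytes = 10 and
   align = 2, the loop costs 4 insns; 10 % 4 = 2 is >= align, adding
   one insn, and 10 % 2 == 0 adds nothing, so the pattern is 5 insns,
   i.e. 20 bytes, long.  */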
 
3024
 
3025
const char *
3026
pa_output_and (rtx *operands)
3027
{
3028
  if (GET_CODE (operands[2]) == CONST_INT && INTVAL (operands[2]) != 0)
3029
    {
3030
      unsigned HOST_WIDE_INT mask = INTVAL (operands[2]);
3031
      int ls0, ls1, ms0, p, len;
3032
 
3033
      for (ls0 = 0; ls0 < 32; ls0++)
3034
        if ((mask & (1 << ls0)) == 0)
3035
          break;
3036
 
3037
      for (ls1 = ls0; ls1 < 32; ls1++)
3038
        if ((mask & (1 << ls1)) != 0)
3039
          break;
3040
 
3041
      for (ms0 = ls1; ms0 < 32; ms0++)
3042
        if ((mask & (1 << ms0)) == 0)
3043
          break;
3044
 
3045
      gcc_assert (ms0 == 32);
3046
 
3047
      if (ls1 == 32)
3048
        {
3049
          len = ls0;
3050
 
3051
          gcc_assert (len);
3052
 
3053
          operands[2] = GEN_INT (len);
3054
          return "{extru|extrw,u} %1,31,%2,%0";
3055
        }
3056
      else
3057
        {
3058
          /* We could use this `depi' for the case above as well, but `depi'
3059
             requires one more register file access than an `extru'.  */
3060
 
3061
          p = 31 - ls0;
3062
          len = ls1 - ls0;
3063
 
3064
          operands[2] = GEN_INT (p);
3065
          operands[3] = GEN_INT (len);
3066
          return "{depi|depwi} 0,%2,%3,%0";
3067
        }
3068
    }
3069
  else
3070
    return "and %1,%2,%0";
3071
}
3072
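/* Two worked examples for the routine above.  With operands[2] equal to
   0x0000ffff the zeros are all in the high half (ls1 == 32), so the mask
   is done with an extract: "extru %1,31,16,%0".  With 0xffff00ff the
   zeros form an interior 8-bit field starting at bit 8, so a deposit of
   zeros is used instead: p = 31 - 8 = 23, len = 16 - 8 = 8, giving
   "depwi 0,23,8,%0".  */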
 
3073
/* Return a string to perform a bitwise-and of operands[1] with operands[2]
3074
   storing the result in operands[0].  */
3075
const char *
3076
pa_output_64bit_and (rtx *operands)
3077
{
3078
  if (GET_CODE (operands[2]) == CONST_INT && INTVAL (operands[2]) != 0)
3079
    {
3080
      unsigned HOST_WIDE_INT mask = INTVAL (operands[2]);
3081
      int ls0, ls1, ms0, p, len;
3082
 
3083
      for (ls0 = 0; ls0 < HOST_BITS_PER_WIDE_INT; ls0++)
3084
        if ((mask & ((unsigned HOST_WIDE_INT) 1 << ls0)) == 0)
3085
          break;
3086
 
3087
      for (ls1 = ls0; ls1 < HOST_BITS_PER_WIDE_INT; ls1++)
3088
        if ((mask & ((unsigned HOST_WIDE_INT) 1 << ls1)) != 0)
3089
          break;
3090
 
3091
      for (ms0 = ls1; ms0 < HOST_BITS_PER_WIDE_INT; ms0++)
3092
        if ((mask & ((unsigned HOST_WIDE_INT) 1 << ms0)) == 0)
3093
          break;
3094
 
3095
      gcc_assert (ms0 == HOST_BITS_PER_WIDE_INT);
3096
 
3097
      if (ls1 == HOST_BITS_PER_WIDE_INT)
3098
        {
3099
          len = ls0;
3100
 
3101
          gcc_assert (len);
3102
 
3103
          operands[2] = GEN_INT (len);
3104
          return "extrd,u %1,63,%2,%0";
3105
        }
3106
      else
3107
        {
3108
          /* We could use this `depdi' for the case above as well, but `depdi'
3109
             requires one more register file access than an `extrd,u'.  */
3110
 
3111
          p = 63 - ls0;
3112
          len = ls1 - ls0;
3113
 
3114
          operands[2] = GEN_INT (p);
3115
          operands[3] = GEN_INT (len);
3116
          return "depdi 0,%2,%3,%0";
3117
        }
3118
    }
3119
  else
3120
    return "and %1,%2,%0";
3121
}
3122
 
3123
const char *
3124
pa_output_ior (rtx *operands)
3125
{
3126
  unsigned HOST_WIDE_INT mask = INTVAL (operands[2]);
3127
  int bs0, bs1, p, len;
3128
 
3129
  if (INTVAL (operands[2]) == 0)
3130
    return "copy %1,%0";
3131
 
3132
  for (bs0 = 0; bs0 < 32; bs0++)
3133
    if ((mask & (1 << bs0)) != 0)
3134
      break;
3135
 
3136
  for (bs1 = bs0; bs1 < 32; bs1++)
3137
    if ((mask & (1 << bs1)) == 0)
3138
      break;
3139
 
3140
  gcc_assert (bs1 == 32 || ((unsigned HOST_WIDE_INT) 1 << bs1) > mask);
3141
 
3142
  p = 31 - bs0;
3143
  len = bs1 - bs0;
3144
 
3145
  operands[2] = GEN_INT (p);
3146
  operands[3] = GEN_INT (len);
3147
  return "{depi|depwi} -1,%2,%3,%0";
3148
}
3149
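/* A worked example for the routine above: the mask must be one contiguous
   run of ones.  With operands[2] equal to 0x00ff0000, bs0 = 16 and
   bs1 = 24, so p = 15 and len = 8, and the routine returns
   "depi -1,15,8,%0", depositing eight ones into that field.  */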
 
3150
/* Return a string to perform a bitwise-or of operands[1] with operands[2]
3151
   storing the result in operands[0].  */
3152
const char *
3153
pa_output_64bit_ior (rtx *operands)
3154
{
3155
  unsigned HOST_WIDE_INT mask = INTVAL (operands[2]);
3156
  int bs0, bs1, p, len;
3157
 
3158
  if (INTVAL (operands[2]) == 0)
3159
    return "copy %1,%0";
3160
 
3161
  for (bs0 = 0; bs0 < HOST_BITS_PER_WIDE_INT; bs0++)
3162
    if ((mask & ((unsigned HOST_WIDE_INT) 1 << bs0)) != 0)
3163
      break;
3164
 
3165
  for (bs1 = bs0; bs1 < HOST_BITS_PER_WIDE_INT; bs1++)
3166
    if ((mask & ((unsigned HOST_WIDE_INT) 1 << bs1)) == 0)
3167
      break;
3168
 
3169
  gcc_assert (bs1 == HOST_BITS_PER_WIDE_INT
3170
              || ((unsigned HOST_WIDE_INT) 1 << bs1) > mask);
3171
 
3172
  p = 63 - bs0;
3173
  len = bs1 - bs0;
3174
 
3175
  operands[2] = GEN_INT (p);
3176
  operands[3] = GEN_INT (len);
3177
  return "depdi -1,%2,%3,%0";
3178
}
3179
 
3180
/* Target hook for assembling integer objects.  This code handles
3181
   aligned SI and DI integers specially since function references
3182
   must be preceded by P%.  */
3183
 
3184
static bool
3185
pa_assemble_integer (rtx x, unsigned int size, int aligned_p)
3186
{
3187
  if (size == UNITS_PER_WORD
3188
      && aligned_p
3189
      && function_label_operand (x, VOIDmode))
3190
    {
3191
      fputs (size == 8? "\t.dword\tP%" : "\t.word\tP%", asm_out_file);
3192
      output_addr_const (asm_out_file, x);
3193
      fputc ('\n', asm_out_file);
3194
      return true;
3195
    }
3196
  return default_assemble_integer (x, size, aligned_p);
3197
}
3198
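/* For example, a word-sized, aligned reference to a function `foo'
   (a hypothetical symbol) is emitted by the hook above as

       .word   P%foo

   (".dword" when the word size is 8), the P% prefix requesting a
   procedure label (plabel) rather than a plain code address.  */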
 
3199
/* Output an ascii string.  */
3200
void
3201
pa_output_ascii (FILE *file, const char *p, int size)
3202
{
3203
  int i;
3204
  int chars_output;
3205
  unsigned char partial_output[16];     /* Max space 4 chars can occupy.  */
3206
 
3207
  /* The HP assembler can only take strings of 256 characters at one
3208
     time.  This is a limitation on input line length, *not* the
3209
     length of the string.  Sigh.  Even worse, it seems that the
3210
     restriction is in number of input characters (see \xnn &
3211
     \whatever).  So we have to do this very carefully.  */
3212
 
3213
  fputs ("\t.STRING \"", file);
3214
 
3215
  chars_output = 0;
3216
  for (i = 0; i < size; i += 4)
3217
    {
3218
      int co = 0;
3219
      int io = 0;
3220
      for (io = 0, co = 0; io < MIN (4, size - i); io++)
3221
        {
3222
          register unsigned int c = (unsigned char) p[i + io];
3223
 
3224
          if (c == '\"' || c == '\\')
3225
            partial_output[co++] = '\\';
3226
          if (c >= ' ' && c < 0177)
3227
            partial_output[co++] = c;
3228
          else
3229
            {
3230
              unsigned int hexd;
3231
              partial_output[co++] = '\\';
3232
              partial_output[co++] = 'x';
3233
              hexd = c / 16 + '0';
3234
              if (hexd > '9')
3235
                hexd -= '9' - 'a' + 1;
3236
              partial_output[co++] = hexd;
3237
              hexd = c % 16 + '0';
3238
              if (hexd > '9')
3239
                hexd -= '9' - 'a' + 1;
3240
              partial_output[co++] = hexd;
3241
            }
3242
        }
3243
      if (chars_output + co > 243)
3244
        {
3245
          fputs ("\"\n\t.STRING \"", file);
3246
          chars_output = 0;
3247
        }
3248
      fwrite (partial_output, 1, (size_t) co, file);
3249
      chars_output += co;
3250
      co = 0;
3251
    }
3252
  fputs ("\"\n", file);
3253
}
3254
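/* For example, the 3-byte input "Hi\n" is emitted by the routine above as

       .STRING "Hi\x0a"

   printable characters pass through unchanged, quote and backslash are
   escaped, and everything else becomes a two-digit \x escape.  */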
 
3255
/* Try to rewrite floating point comparisons & branches to avoid
3256
   useless add,tr insns.
3257
 
3258
   CHECK_NOTES is nonzero if we should examine REG_DEAD notes
3259
   to see if FPCC is dead.  CHECK_NOTES is nonzero for the
3260
   first attempt to remove useless add,tr insns.  It is zero
3261
   for the second pass as reorg sometimes leaves bogus REG_DEAD
3262
   notes lying around.
3263
 
3264
   When CHECK_NOTES is zero we can only eliminate add,tr insns
3265
   when there's a 1:1 correspondence between fcmp and ftest/fbranch
3266
   instructions.  */
3267
static void
3268
remove_useless_addtr_insns (int check_notes)
3269
{
3270
  rtx insn;
3271
  static int pass = 0;
3272
 
3273
  /* This is fairly cheap, so always run it when optimizing.  */
3274
  if (optimize > 0)
3275
    {
3276
      int fcmp_count = 0;
3277
      int fbranch_count = 0;
3278
 
3279
      /* Walk all the insns in this function looking for fcmp & fbranch
3280
         instructions.  Keep track of how many of each we find.  */
3281
      for (insn = get_insns (); insn; insn = next_insn (insn))
3282
        {
3283
          rtx tmp;
3284
 
3285
          /* Ignore anything that isn't an INSN or a JUMP_INSN.  */
3286
          if (GET_CODE (insn) != INSN && GET_CODE (insn) != JUMP_INSN)
3287
            continue;
3288
 
3289
          tmp = PATTERN (insn);
3290
 
3291
          /* It must be a set.  */
3292
          if (GET_CODE (tmp) != SET)
3293
            continue;
3294
 
3295
          /* If the destination is CCFP, then we've found an fcmp insn.  */
3296
          tmp = SET_DEST (tmp);
3297
          if (GET_CODE (tmp) == REG && REGNO (tmp) == 0)
3298
            {
3299
              fcmp_count++;
3300
              continue;
3301
            }
3302
 
3303
          tmp = PATTERN (insn);
3304
          /* If this is an fbranch instruction, bump the fbranch counter.  */
3305
          if (GET_CODE (tmp) == SET
3306
              && SET_DEST (tmp) == pc_rtx
3307
              && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
3308
              && GET_CODE (XEXP (SET_SRC (tmp), 0)) == NE
3309
              && GET_CODE (XEXP (XEXP (SET_SRC (tmp), 0), 0)) == REG
3310
              && REGNO (XEXP (XEXP (SET_SRC (tmp), 0), 0)) == 0)
3311
            {
3312
              fbranch_count++;
3313
              continue;
3314
            }
3315
        }
3316
 
3317
 
3318
      /* Find all floating point compare + branch insns.  If possible,
3319
         reverse the comparison & the branch to avoid add,tr insns.  */
3320
      for (insn = get_insns (); insn; insn = next_insn (insn))
3321
        {
3322
          rtx tmp, next;
3323
 
3324
          /* Ignore anything that isn't an INSN.  */
3325
          if (GET_CODE (insn) != INSN)
3326
            continue;
3327
 
3328
          tmp = PATTERN (insn);
3329
 
3330
          /* It must be a set.  */
3331
          if (GET_CODE (tmp) != SET)
3332
            continue;
3333
 
3334
          /* The destination must be CCFP, which is register zero.  */
3335
          tmp = SET_DEST (tmp);
3336
          if (GET_CODE (tmp) != REG || REGNO (tmp) != 0)
3337
            continue;
3338
 
3339
          /* INSN should be a set of CCFP.
3340
 
3341
             See if the result of this insn is used in a reversed FP
3342
             conditional branch.  If so, reverse our condition and
3343
             the branch.  Doing so avoids useless add,tr insns.  */
3344
          next = next_insn (insn);
3345
          while (next)
3346
            {
3347
              /* Jumps, calls and labels stop our search.  */
3348
              if (GET_CODE (next) == JUMP_INSN
3349
                  || GET_CODE (next) == CALL_INSN
3350
                  || GET_CODE (next) == CODE_LABEL)
3351
                break;
3352
 
3353
              /* As does another fcmp insn.  */
3354
              if (GET_CODE (next) == INSN
3355
                  && GET_CODE (PATTERN (next)) == SET
3356
                  && GET_CODE (SET_DEST (PATTERN (next))) == REG
3357
                  && REGNO (SET_DEST (PATTERN (next))) == 0)
3358
                break;
3359
 
3360
              next = next_insn (next);
3361
            }
3362
 
3363
          /* Is NEXT_INSN a branch?  */
3364
          if (next
3365
              && GET_CODE (next) == JUMP_INSN)
3366
            {
3367
              rtx pattern = PATTERN (next);
3368
 
3369
              /* If it is a reversed fp conditional branch (e.g. uses add,tr)
3370
                 and CCFP dies, then reverse our conditional and the branch
3371
                 to avoid the add,tr.  */
3372
              if (GET_CODE (pattern) == SET
3373
                  && SET_DEST (pattern) == pc_rtx
3374
                  && GET_CODE (SET_SRC (pattern)) == IF_THEN_ELSE
3375
                  && GET_CODE (XEXP (SET_SRC (pattern), 0)) == NE
3376
                  && GET_CODE (XEXP (XEXP (SET_SRC (pattern), 0), 0)) == REG
3377
                  && REGNO (XEXP (XEXP (SET_SRC (pattern), 0), 0)) == 0
3378
                  && GET_CODE (XEXP (SET_SRC (pattern), 1)) == PC
3379
                  && (fcmp_count == fbranch_count
3380
                      || (check_notes
3381
                          && find_regno_note (next, REG_DEAD, 0))))
3382
                {
3383
                  /* Reverse the branch.  */
3384
                  tmp = XEXP (SET_SRC (pattern), 1);
3385
                  XEXP (SET_SRC (pattern), 1) = XEXP (SET_SRC (pattern), 2);
3386
                  XEXP (SET_SRC (pattern), 2) = tmp;
3387
                  INSN_CODE (next) = -1;
3388
 
3389
                  /* Reverse our condition.  */
3390
                  tmp = PATTERN (insn);
3391
                  PUT_CODE (XEXP (tmp, 1),
3392
                            (reverse_condition_maybe_unordered
3393
                             (GET_CODE (XEXP (tmp, 1)))));
3394
                }
3395
            }
3396
        }
3397
    }
3398
 
3399
  pass = !pass;
3400
 
3401
}
3402
 
3403
/* You may have trouble believing this, but this is the 32 bit HP-PA
3404
   stack layout.  Wow.
3405
 
3406
   Offset               Contents
3407
 
3408
   Variable arguments   (optional; any number may be allocated)
3409
 
3410
   SP-(4*(N+9))         arg word N
3411
        :                   :
3412
      SP-56             arg word 5
3413
      SP-52             arg word 4
3414
 
3415
   Fixed arguments      (must be allocated; may remain unused)
3416
 
3417
      SP-48             arg word 3
3418
      SP-44             arg word 2
3419
      SP-40             arg word 1
3420
      SP-36             arg word 0
3421
 
3422
   Frame Marker
3423
 
3424
      SP-32             External Data Pointer (DP)
3425
      SP-28             External sr4
3426
      SP-24             External/stub RP (RP')
3427
      SP-20             Current RP
3428
      SP-16             Static Link
3429
      SP-12             Clean up
3430
      SP-8              Calling Stub RP (RP'')
3431
      SP-4              Previous SP
3432
 
3433
   Top of Frame
3434
 
3435
      SP-0              Stack Pointer (points to next available address)
3436
 
3437
*/
3438
 
3439
/* This function saves registers as follows.  Registers marked with ' are
3440
   this function's registers (as opposed to the previous function's).
3441
   If a frame_pointer isn't needed, r4 is saved as a general register;
3442
   the space for the frame pointer is still allocated, though, to keep
3443
   things simple.
3444
 
3445
 
3446
   Top of Frame
3447
 
3448
       SP (FP')         Previous FP
3449
       SP + 4           Alignment filler (sigh)
3450
       SP + 8           Space for locals reserved here.
3451
       .
3452
       .
3453
       .
3454
       SP + n           All call saved registers used.
3455
       .
3456
       .
3457
       .
3458
       SP + o           All call saved fp registers used.
3459
       .
3460
       .
3461
       .
3462
       SP + p (SP')     points to next available address.
3463
 
3464
*/
3465
 
3466
/* Global variables set by pa_expand_prologue().  */
3467
/* Size of frame.  Need to know this to emit return insns from
3468
   leaf procedures.  */
3469
static HOST_WIDE_INT actual_fsize, local_fsize;
3470
static int save_fregs;
3471
 
3472
/* Emit RTL to store REG at the memory location specified by BASE+DISP.
3473
   Handle case where DISP > 8k by using the add_high_const patterns.
3474
 
3475
   Note that in the DISP > 8k case, we will leave the high part of the address
3476
   in %r1.  There is code in pa_expand_{prologue,epilogue} that knows this.  */
3477
 
3478
static void
3479
store_reg (int reg, HOST_WIDE_INT disp, int base)
3480
{
3481
  rtx insn, dest, src, basereg;
3482
 
3483
  src = gen_rtx_REG (word_mode, reg);
3484
  basereg = gen_rtx_REG (Pmode, base);
3485
  if (VAL_14_BITS_P (disp))
3486
    {
3487
      dest = gen_rtx_MEM (word_mode, plus_constant (basereg, disp));
3488
      insn = emit_move_insn (dest, src);
3489
    }
3490
  else if (TARGET_64BIT && !VAL_32_BITS_P (disp))
3491
    {
3492
      rtx delta = GEN_INT (disp);
3493
      rtx tmpreg = gen_rtx_REG (Pmode, 1);
3494
 
3495
      emit_move_insn (tmpreg, delta);
3496
      insn = emit_move_insn (tmpreg, gen_rtx_PLUS (Pmode, tmpreg, basereg));
3497
      if (DO_FRAME_NOTES)
3498
        {
3499
          add_reg_note (insn, REG_FRAME_RELATED_EXPR,
3500
                        gen_rtx_SET (VOIDmode, tmpreg,
3501
                                     gen_rtx_PLUS (Pmode, basereg, delta)));
3502
          RTX_FRAME_RELATED_P (insn) = 1;
3503
        }
3504
      dest = gen_rtx_MEM (word_mode, tmpreg);
3505
      insn = emit_move_insn (dest, src);
3506
    }
3507
  else
3508
    {
3509
      rtx delta = GEN_INT (disp);
3510
      rtx high = gen_rtx_PLUS (Pmode, basereg, gen_rtx_HIGH (Pmode, delta));
3511
      rtx tmpreg = gen_rtx_REG (Pmode, 1);
3512
 
3513
      emit_move_insn (tmpreg, high);
3514
      dest = gen_rtx_MEM (word_mode, gen_rtx_LO_SUM (Pmode, tmpreg, delta));
3515
      insn = emit_move_insn (dest, src);
3516
      if (DO_FRAME_NOTES)
3517
        add_reg_note (insn, REG_FRAME_RELATED_EXPR,
3518
                      gen_rtx_SET (VOIDmode,
3519
                                   gen_rtx_MEM (word_mode,
3520
                                                gen_rtx_PLUS (word_mode,
3521
                                                              basereg,
3522
                                                              delta)),
3523
                                   src));
3524
    }
3525
 
3526
  if (DO_FRAME_NOTES)
3527
    RTX_FRAME_RELATED_P (insn) = 1;
3528
}
3529
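/* As a rough sketch (not verified compiler output), the RTL emitted above
   assembles to a single "stw %rREG,DISP(%rBASE)" when DISP fits in 14
   bits, and for a larger 32-bit displacement to something like
   "addil L'DISP,%rBASE" followed by "stw %rREG,R'DISP(%r1)", the high
   part of the address being left in %r1 as the comment notes.  */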
 
3530
/* Emit RTL to store REG at the memory location specified by BASE and then
3531
   add MOD to BASE.  MOD must be <= 8k.  */
3532
 
3533
static void
3534
store_reg_modify (int base, int reg, HOST_WIDE_INT mod)
3535
{
3536
  rtx insn, basereg, srcreg, delta;
3537
 
3538
  gcc_assert (VAL_14_BITS_P (mod));
3539
 
3540
  basereg = gen_rtx_REG (Pmode, base);
3541
  srcreg = gen_rtx_REG (word_mode, reg);
3542
  delta = GEN_INT (mod);
3543
 
3544
  insn = emit_insn (gen_post_store (basereg, srcreg, delta));
3545
  if (DO_FRAME_NOTES)
3546
    {
3547
      RTX_FRAME_RELATED_P (insn) = 1;
3548
 
3549
      /* RTX_FRAME_RELATED_P must be set on each frame related set
3550
         in a parallel with more than one element.  */
3551
      RTX_FRAME_RELATED_P (XVECEXP (PATTERN (insn), 0, 0)) = 1;
3552
      RTX_FRAME_RELATED_P (XVECEXP (PATTERN (insn), 0, 1)) = 1;
3553
    }
3554
}
3555
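/* The post-store emitted above corresponds to a single store-with-modify
   instruction, e.g. "stwm %r3,64(%r30)" ("stw,ma" in PA 2.0 GAS syntax),
   which stores the register and then adds MOD to the base register in
   one insn.  */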
 
3556
/* Emit RTL to set REG to the value specified by BASE+DISP.  Handle case
3557
   where DISP > 8k by using the add_high_const patterns.  NOTE indicates
3558
   whether to add a frame note or not.
3559
 
3560
   In the DISP > 8k case, we leave the high part of the address in %r1.
3561
   There is code in pa_expand_{prologue,epilogue} that knows about this.  */
3562
 
3563
static void
3564
set_reg_plus_d (int reg, int base, HOST_WIDE_INT disp, int note)
3565
{
3566
  rtx insn;
3567
 
3568
  if (VAL_14_BITS_P (disp))
3569
    {
3570
      insn = emit_move_insn (gen_rtx_REG (Pmode, reg),
3571
                             plus_constant (gen_rtx_REG (Pmode, base), disp));
3572
    }
3573
  else if (TARGET_64BIT && !VAL_32_BITS_P (disp))
3574
    {
3575
      rtx basereg = gen_rtx_REG (Pmode, base);
3576
      rtx delta = GEN_INT (disp);
3577
      rtx tmpreg = gen_rtx_REG (Pmode, 1);
3578
 
3579
      emit_move_insn (tmpreg, delta);
3580
      insn = emit_move_insn (gen_rtx_REG (Pmode, reg),
3581
                             gen_rtx_PLUS (Pmode, tmpreg, basereg));
3582
      if (DO_FRAME_NOTES)
3583
        add_reg_note (insn, REG_FRAME_RELATED_EXPR,
3584
                      gen_rtx_SET (VOIDmode, tmpreg,
3585
                                   gen_rtx_PLUS (Pmode, basereg, delta)));
3586
    }
3587
  else
3588
    {
3589
      rtx basereg = gen_rtx_REG (Pmode, base);
3590
      rtx delta = GEN_INT (disp);
3591
      rtx tmpreg = gen_rtx_REG (Pmode, 1);
3592
 
3593
      emit_move_insn (tmpreg,
3594
                      gen_rtx_PLUS (Pmode, basereg,
3595
                                    gen_rtx_HIGH (Pmode, delta)));
3596
      insn = emit_move_insn (gen_rtx_REG (Pmode, reg),
3597
                             gen_rtx_LO_SUM (Pmode, tmpreg, delta));
3598
    }
3599
 
3600
  if (DO_FRAME_NOTES && note)
3601
    RTX_FRAME_RELATED_P (insn) = 1;
3602
}
3603
 
3604
HOST_WIDE_INT
3605
pa_compute_frame_size (HOST_WIDE_INT size, int *fregs_live)
3606
{
3607
  int freg_saved = 0;
3608
  int i, j;
3609
 
3610
  /* The code in pa_expand_prologue and pa_expand_epilogue must
3611
     be consistent with the rounding and size calculation done here.
3612
     Change them at the same time.  */
3613
 
3614
  /* We do our own stack alignment.  First, round the size of the
3615
     stack locals up to a word boundary.  */
3616
  size = (size + UNITS_PER_WORD - 1) & ~(UNITS_PER_WORD - 1);
3617
 
3618
  /* Space for previous frame pointer + filler.  If any frame is
3619
     allocated, we need to add in the STARTING_FRAME_OFFSET.  We
3620
     waste some space here for the sake of HP compatibility.  The
3621
     first slot is only used when the frame pointer is needed.  */
3622
  if (size || frame_pointer_needed)
3623
    size += STARTING_FRAME_OFFSET;
3624
 
3625
  /* If the current function calls __builtin_eh_return, then we need
3626
     to allocate stack space for registers that will hold data for
3627
     the exception handler.  */
3628
  if (DO_FRAME_NOTES && crtl->calls_eh_return)
3629
    {
3630
      unsigned int i;
3631
 
3632
      for (i = 0; EH_RETURN_DATA_REGNO (i) != INVALID_REGNUM; ++i)
3633
        continue;
3634
      size += i * UNITS_PER_WORD;
3635
    }
3636
 
3637
  /* Account for space used by the callee general register saves.  */
3638
  for (i = 18, j = frame_pointer_needed ? 4 : 3; i >= j; i--)
3639
    if (df_regs_ever_live_p (i))
3640
      size += UNITS_PER_WORD;
3641
 
3642
  /* Account for space used by the callee floating point register saves.  */
3643
  for (i = FP_SAVED_REG_LAST; i >= FP_SAVED_REG_FIRST; i -= FP_REG_STEP)
3644
    if (df_regs_ever_live_p (i)
3645
        || (!TARGET_64BIT && df_regs_ever_live_p (i + 1)))
3646
      {
3647
        freg_saved = 1;
3648
 
3649
        /* We always save both halves of the FP register, so always
3650
           increment the frame size by 8 bytes.  */
3651
        size += 8;
3652
      }
3653
 
3654
  /* If any of the floating registers are saved, account for the
3655
     alignment needed for the floating point register save block.  */
3656
  if (freg_saved)
3657
    {
3658
      size = (size + 7) & ~7;
3659
      if (fregs_live)
3660
        *fregs_live = 1;
3661
    }
3662
 
3663
  /* The various ABIs include space for the outgoing parameters in the
3664
     size of the current function's stack frame.  We don't need to align
3665
     for the outgoing arguments as their alignment is set by the final
3666
     rounding for the frame as a whole.  */
3667
  size += crtl->outgoing_args_size;
3668
 
3669
  /* Allocate space for the fixed frame marker.  This space must be
3670
     allocated for any function that makes calls or allocates
3671
     stack space.  */
3672
  if (!current_function_is_leaf || size)
3673
    size += TARGET_64BIT ? 48 : 32;
3674
 
3675
  /* Finally, round to the preferred stack boundary.  */
3676
  return ((size + PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT - 1)
3677
          & ~(PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT - 1));
3678
}
3679
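/* A hedged 32-bit example of the computation above, assuming
   STARTING_FRAME_OFFSET is 8 and a 64-byte preferred stack boundary:
   40 bytes of locals stay 40 after word rounding; the frame pointer
   slot and filler add 8 (48); three callee GR saves add 12 (60);
   16 bytes of outgoing arguments make 76; the 32-byte frame marker for
   a non-leaf function makes 108; and the final rounding yields 128.  */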
 
3680
/* Generate the assembly code for function entry.  FILE is a stdio
3681
   stream to output the code to.  SIZE is an int: how many units of
3682
   temporary storage to allocate.
3683
 
3684
   Refer to the array `regs_ever_live' to determine which registers to
3685
   save; `regs_ever_live[I]' is nonzero if register number I is ever
3686
   used in the function.  This function is responsible for knowing
3687
   which registers should not be saved even if used.  */
3688
 
3689
/* On HP-PA, move-double insns between fpu and cpu need an 8-byte block
3690
   of memory.  If any fpu reg is used in the function, we allocate
3691
   such a block here, at the bottom of the frame, just in case it's needed.
3692
 
3693
   If this function is a leaf procedure, then we may choose not
3694
   to do a "save" insn.  The decision about whether or not
3695
   to do this is made in regclass.c.  */
3696
 
3697
static void
3698
pa_output_function_prologue (FILE *file, HOST_WIDE_INT size ATTRIBUTE_UNUSED)
3699
{
3700
  /* The function's label and associated .PROC must never be
3701
     separated and must be output *after* any profiling declarations
3702
     to avoid changing spaces/subspaces within a procedure.  */
3703
  ASM_OUTPUT_LABEL (file, XSTR (XEXP (DECL_RTL (current_function_decl), 0), 0));
3704
  fputs ("\t.PROC\n", file);
3705
 
3706
  /* pa_expand_prologue does the dirty work now.  We just need
3707
     to output the assembler directives which denote the start
3708
     of a function.  */
3709
  fprintf (file, "\t.CALLINFO FRAME=" HOST_WIDE_INT_PRINT_DEC, actual_fsize);
3710
  if (current_function_is_leaf)
3711
    fputs (",NO_CALLS", file);
3712
  else
3713
    fputs (",CALLS", file);
3714
  if (rp_saved)
3715
    fputs (",SAVE_RP", file);
3716
 
3717
  /* The SAVE_SP flag is used to indicate that register %r3 is stored
3718
     at the beginning of the frame and that it is used as the frame
3719
     pointer for the frame.  We do this because our current frame
3720
     layout doesn't conform to that specified in the HP runtime
3721
     documentation and we need a way to indicate to programs such as
3722
     GDB where %r3 is saved.  The SAVE_SP flag was chosen because it
3723
     isn't used by HP compilers but is supported by the assembler.
3724
     However, SAVE_SP is supposed to indicate that the previous stack
3725
     pointer has been saved in the frame marker.  */
3726
  if (frame_pointer_needed)
3727
    fputs (",SAVE_SP", file);
3728
 
3729
  /* Pass on information about the number of callee register saves
3730
     performed in the prologue.
3731
 
3732
     The compiler is supposed to pass the highest register number
3733
     saved, the assembler then has to adjust that number before
3734
     entering it into the unwind descriptor (to account for any
3735
     caller saved registers with lower register numbers than the
3736
     first callee saved register).  */
3737
  if (gr_saved)
3738
    fprintf (file, ",ENTRY_GR=%d", gr_saved + 2);
3739
 
3740
  if (fr_saved)
3741
    fprintf (file, ",ENTRY_FR=%d", fr_saved + 11);
3742
 
3743
  fputs ("\n\t.ENTRY\n", file);
3744
 
3745
  remove_useless_addtr_insns (0);
3746
}
3747
 
3748
void
3749
pa_expand_prologue (void)
3750
{
3751
  int merge_sp_adjust_with_store = 0;
3752
  HOST_WIDE_INT size = get_frame_size ();
3753
  HOST_WIDE_INT offset;
3754
  int i;
3755
  rtx insn, tmpreg;
3756
 
3757
  gr_saved = 0;
3758
  fr_saved = 0;
3759
  save_fregs = 0;
3760
 
3761
  /* Compute total size for frame pointer, filler, locals and rounding to
3762
     the next word boundary.  Similar code appears in pa_compute_frame_size
3763
     and must be changed in tandem with this code.  */
3764
  local_fsize = (size + UNITS_PER_WORD - 1) & ~(UNITS_PER_WORD - 1);
3765
  if (local_fsize || frame_pointer_needed)
3766
    local_fsize += STARTING_FRAME_OFFSET;
3767
 
3768
  actual_fsize = pa_compute_frame_size (size, &save_fregs);
3769
  if (flag_stack_usage_info)
3770
    current_function_static_stack_size = actual_fsize;
3771
 
3772
  /* Compute a few things we will use often.  */
3773
  tmpreg = gen_rtx_REG (word_mode, 1);
3774
 
3775
  /* Save RP first.  The calling conventions manual states RP will
3776
     always be stored into the caller's frame at sp - 20 or sp - 16
3777
     depending on which ABI is in use.  */
3778
  if (df_regs_ever_live_p (2) || crtl->calls_eh_return)
3779
    {
3780
      store_reg (2, TARGET_64BIT ? -16 : -20, STACK_POINTER_REGNUM);
3781
      rp_saved = true;
3782
    }
3783
  else
3784
    rp_saved = false;
3785
 
3786
  /* Allocate the local frame and set up the frame pointer if needed.  */
3787
  if (actual_fsize != 0)
3788
    {
3789
      if (frame_pointer_needed)
3790
        {
3791
          /* Copy the old frame pointer temporarily into %r1.  Set up the
3792
             new stack pointer, then store away the saved old frame pointer
3793
             into the stack at sp and at the same time update the stack
3794
             pointer by actual_fsize bytes.  Two versions, first
3795
             handles small (<8k) frames.  The second handles large (>=8k)
3796
             frames.  */
3797
          insn = emit_move_insn (tmpreg, hard_frame_pointer_rtx);
3798
          if (DO_FRAME_NOTES)
3799
            RTX_FRAME_RELATED_P (insn) = 1;
3800
 
3801
          insn = emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx);
3802
          if (DO_FRAME_NOTES)
3803
            RTX_FRAME_RELATED_P (insn) = 1;
3804
 
3805
          if (VAL_14_BITS_P (actual_fsize))
3806
            store_reg_modify (STACK_POINTER_REGNUM, 1, actual_fsize);
3807
          else
3808
            {
3809
              /* It is incorrect to store the saved frame pointer at *sp,
3810
                 then increment sp (writes beyond the current stack boundary).
3811
 
3812
                 So instead use stwm to store at *sp and post-increment the
3813
                 stack pointer as an atomic operation.  Then increment sp to
3814
                 finish allocating the new frame.  */
3815
              HOST_WIDE_INT adjust1 = 8192 - 64;
3816
              HOST_WIDE_INT adjust2 = actual_fsize - adjust1;
3817
 
3818
              store_reg_modify (STACK_POINTER_REGNUM, 1, adjust1);
3819
              set_reg_plus_d (STACK_POINTER_REGNUM, STACK_POINTER_REGNUM,
3820
                              adjust2, 1);
3821
            }
3822
 
3823
          /* We set SAVE_SP in frames that need a frame pointer.  Thus,
3824
             we need to store the previous stack pointer (frame pointer)
3825
             into the frame marker on targets that use the HP unwind
3826
             library.  This allows the HP unwind library to be used to
3827
             unwind GCC frames.  However, we are not fully compatible
3828
             with the HP library because our frame layout differs from
3829
             that specified in the HP runtime specification.
3830
 
3831
             We don't want a frame note on this instruction as the frame
3832
             marker moves during dynamic stack allocation.
3833
 
3834
             This instruction also serves as a blockage to prevent
3835
             register spills from being scheduled before the stack
3836
             pointer is raised.  This is necessary as we store
3837
             registers using the frame pointer as a base register,
3838
             and the frame pointer is set before sp is raised.  */
3839
          if (TARGET_HPUX_UNWIND_LIBRARY)
3840
            {
3841
              rtx addr = gen_rtx_PLUS (word_mode, stack_pointer_rtx,
3842
                                       GEN_INT (TARGET_64BIT ? -8 : -4));
3843
 
3844
              emit_move_insn (gen_rtx_MEM (word_mode, addr),
3845
                              hard_frame_pointer_rtx);
3846
            }
3847
          else
3848
            emit_insn (gen_blockage ());
3849
        }
3850
      /* No frame pointer needed.  */
3851
      else
3852
        {
3853
          /* In some cases we can perform the first callee register save
3854
             and allocate the stack frame at the same time.  If so, just
3855
             make a note of it and defer allocating the frame until saving
3856
             the callee registers.  */
3857
          if (VAL_14_BITS_P (actual_fsize) && local_fsize == 0)
3858
            merge_sp_adjust_with_store = 1;
3859
          /* Cannot optimize.  Adjust the stack frame by actual_fsize
3860
             bytes.  */
3861
          else
3862
            set_reg_plus_d (STACK_POINTER_REGNUM, STACK_POINTER_REGNUM,
3863
                            actual_fsize, 1);
3864
        }
3865
    }
3866
 
3867
  /* Normal register save.
3868
 
3869
     Do not save the frame pointer in the frame_pointer_needed case.  It
3870
     was done earlier.  */
3871
  if (frame_pointer_needed)
3872
    {
3873
      offset = local_fsize;
3874
 
3875
      /* Saving the EH return data registers in the frame is the simplest
3876
         way to get the frame unwind information emitted.  We put them
3877
         just before the general registers.  */
3878
      if (DO_FRAME_NOTES && crtl->calls_eh_return)
3879
        {
3880
          unsigned int i, regno;
3881
 
3882
          for (i = 0; ; ++i)
3883
            {
3884
              regno = EH_RETURN_DATA_REGNO (i);
3885
              if (regno == INVALID_REGNUM)
3886
                break;
3887
 
3888
              store_reg (regno, offset, HARD_FRAME_POINTER_REGNUM);
3889
              offset += UNITS_PER_WORD;
3890
            }
3891
        }
3892
 
3893
      for (i = 18; i >= 4; i--)
3894
        if (df_regs_ever_live_p (i) && ! call_used_regs[i])
3895
          {
3896
            store_reg (i, offset, HARD_FRAME_POINTER_REGNUM);
3897
            offset += UNITS_PER_WORD;
3898
            gr_saved++;
3899
          }
3900
      /* Account for %r3 which is saved in a special place.  */
3901
      gr_saved++;
3902
    }
3903
  /* No frame pointer needed.  */
3904
  else
3905
    {
3906
      offset = local_fsize - actual_fsize;
3907
 
3908
      /* Saving the EH return data registers in the frame is the simplest
3909
         way to get the frame unwind information emitted.  */
3910
      if (DO_FRAME_NOTES && crtl->calls_eh_return)
3911
        {
3912
          unsigned int i, regno;
3913
 
3914
          for (i = 0; ; ++i)
3915
            {
3916
              regno = EH_RETURN_DATA_REGNO (i);
3917
              if (regno == INVALID_REGNUM)
3918
                break;
3919
 
3920
              /* If merge_sp_adjust_with_store is nonzero, then we can
3921
                 optimize the first save.  */
3922
              if (merge_sp_adjust_with_store)
3923
                {
3924
                  store_reg_modify (STACK_POINTER_REGNUM, regno, -offset);
3925
                  merge_sp_adjust_with_store = 0;
3926
                }
3927
              else
3928
                store_reg (regno, offset, STACK_POINTER_REGNUM);
3929
              offset += UNITS_PER_WORD;
3930
            }
3931
        }
3932
 
3933
      for (i = 18; i >= 3; i--)
3934
        if (df_regs_ever_live_p (i) && ! call_used_regs[i])
3935
          {
3936
            /* If merge_sp_adjust_with_store is nonzero, then we can
3937
               optimize the first GR save.  */
3938
            if (merge_sp_adjust_with_store)
3939
              {
3940
                store_reg_modify (STACK_POINTER_REGNUM, i, -offset);
3941
                merge_sp_adjust_with_store = 0;
3942
              }
3943
            else
3944
              store_reg (i, offset, STACK_POINTER_REGNUM);
3945
            offset += UNITS_PER_WORD;
3946
            gr_saved++;
3947
          }
3948
 
3949
      /* If we wanted to merge the SP adjustment with a GR save, but we never
3950
         did any GR saves, then just emit the adjustment here.  */
3951
      if (merge_sp_adjust_with_store)
3952
        set_reg_plus_d (STACK_POINTER_REGNUM, STACK_POINTER_REGNUM,
3953
                        actual_fsize, 1);
3954
    }
3955
 
3956
  /* The hppa calling conventions say that %r19, the pic offset
3957
     register, is saved at sp - 32 (in this function's frame)
3958
     when generating PIC code.  FIXME:  What is the correct thing
3959
     to do for functions which make no calls and allocate no
3960
     frame?  Do we need to allocate a frame, or can we just omit
3961
     the save?   For now we'll just omit the save.
3962
 
3963
     We don't want a note on this insn as the frame marker can
3964
     move if there is a dynamic stack allocation.  */
3965
  if (flag_pic && actual_fsize != 0 && !TARGET_64BIT)
3966
    {
3967
      rtx addr = gen_rtx_PLUS (word_mode, stack_pointer_rtx, GEN_INT (-32));
3968
 
3969
      emit_move_insn (gen_rtx_MEM (word_mode, addr), pic_offset_table_rtx);
3970
 
3971
    }
3972
 
3973
  /* Align pointer properly (doubleword boundary).  */
3974
  offset = (offset + 7) & ~7;
3975
 
3976
  /* Floating point register store.  */
3977
  if (save_fregs)
3978
    {
3979
      rtx base;
3980
 
3981
      /* First get the frame or stack pointer to the start of the FP register
3982
         save area.  */
3983
      if (frame_pointer_needed)
3984
        {
3985
          set_reg_plus_d (1, HARD_FRAME_POINTER_REGNUM, offset, 0);
3986
          base = hard_frame_pointer_rtx;
3987
        }
3988
      else
3989
        {
3990
          set_reg_plus_d (1, STACK_POINTER_REGNUM, offset, 0);
3991
          base = stack_pointer_rtx;
3992
        }
3993
 
3994
      /* Now actually save the FP registers.  */
3995
      for (i = FP_SAVED_REG_LAST; i >= FP_SAVED_REG_FIRST; i -= FP_REG_STEP)
3996
        {
3997
          if (df_regs_ever_live_p (i)
3998
              || (! TARGET_64BIT && df_regs_ever_live_p (i + 1)))
3999
            {
4000
              rtx addr, insn, reg;
4001
              addr = gen_rtx_MEM (DFmode, gen_rtx_POST_INC (DFmode, tmpreg));
4002
              reg = gen_rtx_REG (DFmode, i);
4003
              insn = emit_move_insn (addr, reg);
4004
              if (DO_FRAME_NOTES)
4005
                {
4006
                  RTX_FRAME_RELATED_P (insn) = 1;
4007
                  if (TARGET_64BIT)
4008
                    {
4009
                      rtx mem = gen_rtx_MEM (DFmode,
4010
                                             plus_constant (base, offset));
4011
                      add_reg_note (insn, REG_FRAME_RELATED_EXPR,
4012
                                    gen_rtx_SET (VOIDmode, mem, reg));
4013
                    }
4014
                  else
4015
                    {
4016
                      rtx meml = gen_rtx_MEM (SFmode,
4017
                                              plus_constant (base, offset));
4018
                      rtx memr = gen_rtx_MEM (SFmode,
4019
                                              plus_constant (base, offset + 4));
4020
                      rtx regl = gen_rtx_REG (SFmode, i);
4021
                      rtx regr = gen_rtx_REG (SFmode, i + 1);
4022
                      rtx setl = gen_rtx_SET (VOIDmode, meml, regl);
4023
                      rtx setr = gen_rtx_SET (VOIDmode, memr, regr);
4024
                      rtvec vec;
4025
 
4026
                      RTX_FRAME_RELATED_P (setl) = 1;
4027
                      RTX_FRAME_RELATED_P (setr) = 1;
4028
                      vec = gen_rtvec (2, setl, setr);
4029
                      add_reg_note (insn, REG_FRAME_RELATED_EXPR,
4030
                                    gen_rtx_SEQUENCE (VOIDmode, vec));
4031
                    }
4032
                }
4033
              offset += GET_MODE_SIZE (DFmode);
4034
              fr_saved++;
4035
            }
4036
        }
4037
    }
4038
}
4039
 
4040
/* Emit RTL to load REG from the memory location specified by BASE+DISP.
4041
   Handle case where DISP > 8k by using the add_high_const patterns.  */
4042
 
4043
static void
4044
load_reg (int reg, HOST_WIDE_INT disp, int base)
4045
{
4046
  rtx dest = gen_rtx_REG (word_mode, reg);
4047
  rtx basereg = gen_rtx_REG (Pmode, base);
4048
  rtx src;
4049
 
4050
  if (VAL_14_BITS_P (disp))
4051
    src = gen_rtx_MEM (word_mode, plus_constant (basereg, disp));
4052
  else if (TARGET_64BIT && !VAL_32_BITS_P (disp))
4053
    {
4054
      rtx delta = GEN_INT (disp);
4055
      rtx tmpreg = gen_rtx_REG (Pmode, 1);
4056
 
4057
      emit_move_insn (tmpreg, delta);
4058
      if (TARGET_DISABLE_INDEXING)
4059
        {
4060
          emit_move_insn (tmpreg, gen_rtx_PLUS (Pmode, tmpreg, basereg));
4061
          src = gen_rtx_MEM (word_mode, tmpreg);
4062
        }
4063
      else
4064
        src = gen_rtx_MEM (word_mode, gen_rtx_PLUS (Pmode, tmpreg, basereg));
4065
    }
4066
  else
4067
    {
4068
      rtx delta = GEN_INT (disp);
4069
      rtx high = gen_rtx_PLUS (Pmode, basereg, gen_rtx_HIGH (Pmode, delta));
4070
      rtx tmpreg = gen_rtx_REG (Pmode, 1);
4071
 
4072
      emit_move_insn (tmpreg, high);
4073
      src = gen_rtx_MEM (word_mode, gen_rtx_LO_SUM (Pmode, tmpreg, delta));
4074
    }
4075
 
4076
  emit_move_insn (dest, src);
4077
}
4078
 
4079
/* Update the total code bytes output to the text section.  */
4080
 
4081
static void
4082
update_total_code_bytes (unsigned int nbytes)
4083
{
4084
  if ((TARGET_PORTABLE_RUNTIME || !TARGET_GAS || !TARGET_SOM)
4085
      && !IN_NAMED_SECTION_P (cfun->decl))
4086
    {
4087
      unsigned int old_total = total_code_bytes;
4088
 
4089
      total_code_bytes += nbytes;
4090
 
4091
      /* Be prepared to handle overflows.  */
4092
      if (old_total > total_code_bytes)
4093
        total_code_bytes = UINT_MAX;
4094
    }
4095
}
4096
 
4097
/* This function generates the assembly code for function exit.
4098
   Args are as for output_function_prologue ().
4099
 
4100
   The function epilogue should not depend on the current stack
4101
   pointer!  It should use the frame pointer only.  This is mandatory
4102
   because of alloca; we also take advantage of it to omit stack
4103
   adjustments before returning.  */
4104
 
4105
static void
4106
pa_output_function_epilogue (FILE *file, HOST_WIDE_INT size ATTRIBUTE_UNUSED)
4107
{
4108
  rtx insn = get_last_insn ();
4109
 
4110
  last_address = 0;
4111
 
4112
  /* pa_expand_epilogue does the dirty work now.  We just need
4113
     to output the assembler directives which denote the end
4114
     of a function.
4115
 
4116
     To make debuggers happy, emit a nop if the epilogue was completely
4117
     eliminated due to a volatile call as the last insn in the
4118
     current function.  That way the return address (in %r2) will
4119
     always point to a valid instruction in the current function.  */
4120
 
4121
  /* Get the last real insn.  */
4122
  if (GET_CODE (insn) == NOTE)
4123
    insn = prev_real_insn (insn);
4124
 
4125
  /* If it is a sequence, then look inside.  */
4126
  if (insn && GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
4127
    insn = XVECEXP (PATTERN (insn), 0, 0);
4128
 
4129
  /* If insn is a CALL_INSN, then it must be a call to a volatile
4130
     function (otherwise there would be epilogue insns).  */
4131
  if (insn && GET_CODE (insn) == CALL_INSN)
4132
    {
4133
      fputs ("\tnop\n", file);
4134
      last_address += 4;
4135
    }
4136
 
4137
  fputs ("\t.EXIT\n\t.PROCEND\n", file);
4138
 
4139
  if (TARGET_SOM && TARGET_GAS)
4140
    {
4141
      /* We are done with this subspace except possibly for some additional
4142
         debug information.  Forget that we are in this subspace to ensure
4143
         that the next function is output in its own subspace.  */
4144
      in_section = NULL;
4145
      cfun->machine->in_nsubspa = 2;
4146
    }
4147
 
4148
  if (INSN_ADDRESSES_SET_P ())
4149
    {
4150
      insn = get_last_nonnote_insn ();
4151
      last_address += INSN_ADDRESSES (INSN_UID (insn));
4152
      if (INSN_P (insn))
4153
        last_address += insn_default_length (insn);
4154
      last_address = ((last_address + FUNCTION_BOUNDARY / BITS_PER_UNIT - 1)
4155
                      & ~(FUNCTION_BOUNDARY / BITS_PER_UNIT - 1));
4156
    }
4157
  else
4158
    last_address = UINT_MAX;
4159
 
4160
  /* Finally, update the total number of code bytes output so far.  */
4161
  update_total_code_bytes (last_address);
4162
}
4163
 
4164
void
4165
pa_expand_epilogue (void)
4166
{
4167
  rtx tmpreg;
4168
  HOST_WIDE_INT offset;
4169
  HOST_WIDE_INT ret_off = 0;
4170
  int i;
4171
  int merge_sp_adjust_with_load = 0;
4172
 
4173
  /* We will use this often.  */
4174
  tmpreg = gen_rtx_REG (word_mode, 1);
4175
 
4176
  /* Try to restore RP early to avoid load/use interlocks when
4177
     RP gets used in the return (bv) instruction.  This appears to still
4178
     be necessary even when we schedule the prologue and epilogue.  */
4179
  if (rp_saved)
4180
    {
4181
      ret_off = TARGET_64BIT ? -16 : -20;
4182
      if (frame_pointer_needed)
4183
        {
4184
          load_reg (2, ret_off, HARD_FRAME_POINTER_REGNUM);
4185
          ret_off = 0;
4186
        }
4187
      else
4188
        {
4189
          /* No frame pointer, and stack is smaller than 8k.  */
4190
          if (VAL_14_BITS_P (ret_off - actual_fsize))
4191
            {
4192
              load_reg (2, ret_off - actual_fsize, STACK_POINTER_REGNUM);
4193
              ret_off = 0;
4194
            }
4195
        }
4196
    }
4197
 
4198
  /* General register restores.  */
4199
  if (frame_pointer_needed)
4200
    {
4201
      offset = local_fsize;
4202
 
4203
      /* If the current function calls __builtin_eh_return, then we need
4204
         to restore the saved EH data registers.  */
4205
      if (DO_FRAME_NOTES && crtl->calls_eh_return)
4206
        {
4207
          unsigned int i, regno;
4208
 
4209
          for (i = 0; ; ++i)
4210
            {
4211
              regno = EH_RETURN_DATA_REGNO (i);
4212
              if (regno == INVALID_REGNUM)
4213
                break;
4214
 
4215
              load_reg (regno, offset, HARD_FRAME_POINTER_REGNUM);
4216
              offset += UNITS_PER_WORD;
4217
            }
4218
        }
4219
 
4220
      for (i = 18; i >= 4; i--)
4221
        if (df_regs_ever_live_p (i) && ! call_used_regs[i])
4222
          {
4223
            load_reg (i, offset, HARD_FRAME_POINTER_REGNUM);
4224
            offset += UNITS_PER_WORD;
4225
          }
4226
    }
4227
  else
4228
    {
4229
      offset = local_fsize - actual_fsize;
4230
 
4231
      /* If the current function calls __builtin_eh_return, then we need
4232
         to restore the saved EH data registers.  */
4233
      if (DO_FRAME_NOTES && crtl->calls_eh_return)
4234
        {
4235
          unsigned int i, regno;
4236
 
4237
          for (i = 0; ; ++i)
4238
            {
4239
              regno = EH_RETURN_DATA_REGNO (i);
4240
              if (regno == INVALID_REGNUM)
4241
                break;
4242
 
4243
              /* Only for the first load.
4244
                 merge_sp_adjust_with_load holds the number of the register
4245
                 whose load we will merge with the sp adjustment.  */
4246
              if (merge_sp_adjust_with_load == 0
4247
                  && local_fsize == 0
4248
                  && VAL_14_BITS_P (-actual_fsize))
4249
                merge_sp_adjust_with_load = regno;
4250
              else
4251
                load_reg (regno, offset, STACK_POINTER_REGNUM);
4252
              offset += UNITS_PER_WORD;
4253
            }
4254
        }
4255
 
4256
      for (i = 18; i >= 3; i--)
4257
        {
4258
          if (df_regs_ever_live_p (i) && ! call_used_regs[i])
4259
            {
4260
              /* Only for the first load.
4261
                 merge_sp_adjust_with_load holds the number of the register
4262
                 whose load we will merge with the sp adjustment.  */
4263
              if (merge_sp_adjust_with_load == 0
4264
                  && local_fsize == 0
4265
                  && VAL_14_BITS_P (-actual_fsize))
4266
                merge_sp_adjust_with_load = i;
4267
              else
4268
                load_reg (i, offset, STACK_POINTER_REGNUM);
4269
              offset += UNITS_PER_WORD;
4270
            }
4271
        }
4272
    }
4273
 
4274
  /* Align pointer properly (doubleword boundary).  */
4275
  offset = (offset + 7) & ~7;
4276
 
4277
  /* FP register restores.  */
4278
  if (save_fregs)
4279
    {
4280
      /* Adjust the register to index off of.  */
4281
      if (frame_pointer_needed)
4282
        set_reg_plus_d (1, HARD_FRAME_POINTER_REGNUM, offset, 0);
4283
      else
4284
        set_reg_plus_d (1, STACK_POINTER_REGNUM, offset, 0);
4285
 
4286
      /* Actually do the restores now.  */
4287
      for (i = FP_SAVED_REG_LAST; i >= FP_SAVED_REG_FIRST; i -= FP_REG_STEP)
4288
        if (df_regs_ever_live_p (i)
4289
            || (! TARGET_64BIT && df_regs_ever_live_p (i + 1)))
4290
          {
4291
            rtx src = gen_rtx_MEM (DFmode, gen_rtx_POST_INC (DFmode, tmpreg));
            rtx dest = gen_rtx_REG (DFmode, i);
            emit_move_insn (dest, src);
          }
    }

  /* Emit a blockage insn here to keep these insns from being moved to
     an earlier spot in the epilogue, or into the main instruction stream.

     This is necessary as we must not cut the stack back before all the
     restores are finished.  */
  emit_insn (gen_blockage ());

  /* Reset stack pointer (and possibly frame pointer).  The stack
     pointer is initially set to fp + 64 to avoid a race condition.  */
  if (frame_pointer_needed)
    {
      rtx delta = GEN_INT (-64);

      set_reg_plus_d (STACK_POINTER_REGNUM, HARD_FRAME_POINTER_REGNUM, 64, 0);
      emit_insn (gen_pre_load (hard_frame_pointer_rtx,
                               stack_pointer_rtx, delta));
    }
  /* If we were deferring a callee register restore, do it now.  */
  else if (merge_sp_adjust_with_load)
    {
      rtx delta = GEN_INT (-actual_fsize);
      rtx dest = gen_rtx_REG (word_mode, merge_sp_adjust_with_load);

      emit_insn (gen_pre_load (dest, stack_pointer_rtx, delta));
    }
  else if (actual_fsize != 0)
    set_reg_plus_d (STACK_POINTER_REGNUM, STACK_POINTER_REGNUM,
                    - actual_fsize, 0);

  /* If we haven't restored %r2 yet (no frame pointer, and a stack
     frame greater than 8k), do so now.  */
  if (ret_off != 0)
    load_reg (2, ret_off, STACK_POINTER_REGNUM);

  if (DO_FRAME_NOTES && crtl->calls_eh_return)
    {
      rtx sa = EH_RETURN_STACKADJ_RTX;

      emit_insn (gen_blockage ());
      emit_insn (TARGET_64BIT
                 ? gen_subdi3 (stack_pointer_rtx, stack_pointer_rtx, sa)
                 : gen_subsi3 (stack_pointer_rtx, stack_pointer_rtx, sa));
    }
}

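/* Return true if the current function can use a simple return insn.
   This requires that the frame layout be finalized, that no frame
   pointer is needed, that %r2 is never live (so the return pointer is
   still valid), that profiling is off, and that the computed frame
   size is zero.  */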
bool
pa_can_use_return_insn (void)
{
  if (!reload_completed)
    return false;

  if (frame_pointer_needed)
    return false;

  if (df_regs_ever_live_p (2))
    return false;

  if (crtl->profile)
    return false;

  return pa_compute_frame_size (get_frame_size (), 0) == 0;
}

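/* Return the RTX for the value the PIC offset table register had on
   entry to the current function.  */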
rtx
hppa_pic_save_rtx (void)
{
  return get_hard_reg_initial_val (word_mode, PIC_OFFSET_TABLE_REGNUM);
}

#ifndef NO_DEFERRED_PROFILE_COUNTERS
#define NO_DEFERRED_PROFILE_COUNTERS 0
#endif


/* Vector of funcdef numbers.  */
static VEC(int,heap) *funcdef_nos;

/* Output deferred profile counters.  */
static void
output_deferred_profile_counters (void)
{
  unsigned int i;
  int align, n;

  if (VEC_empty (int, funcdef_nos))
    return;

  switch_to_section (data_section);
  align = MIN (BIGGEST_ALIGNMENT, LONG_TYPE_SIZE);
  ASM_OUTPUT_ALIGN (asm_out_file, floor_log2 (align / BITS_PER_UNIT));

  for (i = 0; VEC_iterate (int, funcdef_nos, i, n); i++)
    {
      targetm.asm_out.internal_label (asm_out_file, "LP", n);
      assemble_integer (const0_rtx, LONG_TYPE_SIZE / BITS_PER_UNIT, align, 1);
    }

  VEC_free (int, heap, funcdef_nos);
}

void
hppa_profile_hook (int label_no)
{
  /* We use SImode for the address of the function in both 32 and
     64-bit code to avoid having to provide DImode versions of the
     lcla2 and load_offset_label_address insn patterns.  */
  rtx reg = gen_reg_rtx (SImode);
  rtx label_rtx = gen_label_rtx ();
  rtx begin_label_rtx, call_insn;
  char begin_label_name[16];

  ASM_GENERATE_INTERNAL_LABEL (begin_label_name, FUNC_BEGIN_PROLOG_LABEL,
                               label_no);
  begin_label_rtx = gen_rtx_SYMBOL_REF (SImode, ggc_strdup (begin_label_name));

  if (TARGET_64BIT)
    emit_move_insn (arg_pointer_rtx,
                    gen_rtx_PLUS (word_mode, virtual_outgoing_args_rtx,
                                  GEN_INT (64)));

  emit_move_insn (gen_rtx_REG (word_mode, 26), gen_rtx_REG (word_mode, 2));

  /* The address of the function is loaded into %r25 with an instruction-
     relative sequence that avoids the use of relocations.  The sequence
     is split so that the load_offset_label_address instruction can
     occupy the delay slot of the call to _mcount.  */
  if (TARGET_PA_20)
    emit_insn (gen_lcla2 (reg, label_rtx));
  else
    emit_insn (gen_lcla1 (reg, label_rtx));

  emit_insn (gen_load_offset_label_address (gen_rtx_REG (SImode, 25),
                                            reg, begin_label_rtx, label_rtx));

#if !NO_DEFERRED_PROFILE_COUNTERS
  {
    rtx count_label_rtx, addr, r24;
    char count_label_name[16];

    VEC_safe_push (int, heap, funcdef_nos, label_no);
    ASM_GENERATE_INTERNAL_LABEL (count_label_name, "LP", label_no);
    count_label_rtx = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (count_label_name));

    addr = force_reg (Pmode, count_label_rtx);
    r24 = gen_rtx_REG (Pmode, 24);
    emit_move_insn (r24, addr);

    call_insn =
      emit_call_insn (gen_call (gen_rtx_MEM (Pmode,
                                             gen_rtx_SYMBOL_REF (Pmode,
                                                                 "_mcount")),
                                GEN_INT (TARGET_64BIT ? 24 : 12)));

    use_reg (&CALL_INSN_FUNCTION_USAGE (call_insn), r24);
  }
#else

  call_insn =
    emit_call_insn (gen_call (gen_rtx_MEM (Pmode,
                                           gen_rtx_SYMBOL_REF (Pmode,
                                                               "_mcount")),
                              GEN_INT (TARGET_64BIT ? 16 : 8)));

#endif

  use_reg (&CALL_INSN_FUNCTION_USAGE (call_insn), gen_rtx_REG (SImode, 25));
  use_reg (&CALL_INSN_FUNCTION_USAGE (call_insn), gen_rtx_REG (SImode, 26));

  /* Indicate the _mcount call cannot throw, nor will it execute a
     non-local goto.  */
  make_reg_eh_region_note_nothrow_nononlocal (call_insn);
}

/* Fetch the return address for the frame COUNT steps up from
   the current frame, after the prologue.  FRAMEADDR is the
   frame pointer of the COUNT frame.

   We want to ignore any export stub remnants here.  To handle this,
   we examine the code at the return address, and if it is an export
   stub, we return a memory rtx for the stub return address stored
   at frame-24.

   The value returned is used in two different ways:

        1. To find a function's caller.

        2. To change the return address for a function.

   This function handles most instances of case 1; however, it will
   fail if there are two levels of stubs to execute on the return
   path.  The only way I believe that can happen is if the return value
   needs a parameter relocation, which never happens for C code.

   This function handles most instances of case 2; however, it will
   fail if we did not originally have stub code on the return path
   but will need stub code on the new return path.  This can happen if
   the caller & callee are both in the main program, but the new
   return location is in a shared library.  */

rtx
pa_return_addr_rtx (int count, rtx frameaddr)
{
  rtx label;
  rtx rp;
  rtx saved_rp;
  rtx ins;

  /* The instruction stream at the return address of a PA1.X export stub is:

        0x4bc23fd1 | stub+8:   ldw -18(sr0,sp),rp
        0x004010a1 | stub+12:  ldsid (sr0,rp),r1
        0x00011820 | stub+16:  mtsp r1,sr0
        0xe0400002 | stub+20:  be,n 0(sr0,rp)

     0xe0400002 must be specified as -532676606 so that it won't be
     rejected as an invalid immediate operand on 64-bit hosts.

     The instruction stream at the return address of a PA2.0 export stub is:

        0x4bc23fd1 | stub+8:   ldw -18(sr0,sp),rp
        0xe840d002 | stub+12:  bve,n (rp)
  */

  HOST_WIDE_INT insns[4];
  int i, len;

  if (count != 0)
    return NULL_RTX;

  rp = get_hard_reg_initial_val (Pmode, 2);

  if (TARGET_64BIT || TARGET_NO_SPACE_REGS)
    return rp;

  /* If there is no export stub then just use the value saved from
     the return pointer register.  */

  saved_rp = gen_reg_rtx (Pmode);
  emit_move_insn (saved_rp, rp);

  /* Get pointer to the instruction stream.  We have to mask out the
     privilege level from the two low order bits of the return address
     pointer here so that ins will point to the start of the first
     instruction that would have been executed if we returned.  */
  ins = copy_to_reg (gen_rtx_AND (Pmode, rp, MASK_RETURN_ADDR));
  label = gen_label_rtx ();

  if (TARGET_PA_20)
    {
      insns[0] = 0x4bc23fd1;
      insns[1] = -398405630;
      len = 2;
    }
  else
    {
      insns[0] = 0x4bc23fd1;
      insns[1] = 0x004010a1;
      insns[2] = 0x00011820;
      insns[3] = -532676606;
      len = 4;
    }
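  /* As in the PA1.X case above, the PA2.0 bve,n word 0xe840d002 is
     written as the signed 32-bit value -398405630 so that it isn't
     rejected on 64-bit hosts.  */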

  /* Check the instruction stream at the normal return address for the
     export stub.  If it is an export stub, then our return address is
     really in -24[frameaddr].  */

  for (i = 0; i < len; i++)
    {
      rtx op0 = gen_rtx_MEM (SImode, plus_constant (ins, i * 4));
      rtx op1 = GEN_INT (insns[i]);
      emit_cmp_and_jump_insns (op0, op1, NE, NULL, SImode, 0, label);
    }

  /* Here we know that our return address points to an export
     stub.  We don't want to return the address of the export stub,
     but rather the return address of the export stub.  That return
     address is stored at -24[frameaddr].  */

  emit_move_insn (saved_rp,
                  gen_rtx_MEM (Pmode,
                               memory_address (Pmode,
                                               plus_constant (frameaddr,
                                                              -24))));

  emit_label (label);

  return saved_rp;
}

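/* Emit a floating-point comparison and a conditional branch on its
   result.  OPERANDS[0] holds the comparison code, OPERANDS[1] and
   OPERANDS[2] the values compared, and OPERANDS[3] the branch target.  */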
void
pa_emit_bcond_fp (rtx operands[])
{
  enum rtx_code code = GET_CODE (operands[0]);
  rtx operand0 = operands[1];
  rtx operand1 = operands[2];
  rtx label = operands[3];

  emit_insn (gen_rtx_SET (VOIDmode, gen_rtx_REG (CCFPmode, 0),
                          gen_rtx_fmt_ee (code, CCFPmode, operand0, operand1)));

  emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx,
                               gen_rtx_IF_THEN_ELSE (VOIDmode,
                                                     gen_rtx_fmt_ee (NE,
                                                              VOIDmode,
                                                              gen_rtx_REG (CCFPmode, 0),
                                                              const0_rtx),
                                                     gen_rtx_LABEL_REF (VOIDmode, label),
                                                     pc_rtx)));

}

/* Adjust the cost of a scheduling dependency.  Return the new cost of
   a dependency LINK or INSN on DEP_INSN.  COST is the current cost.  */

static int
pa_adjust_cost (rtx insn, rtx link, rtx dep_insn, int cost)
{
  enum attr_type attr_type;

  /* Don't adjust costs for a pa8000 chip, also do not adjust any
     true dependencies as they are described with bypasses now.  */
  if (pa_cpu >= PROCESSOR_8000 || REG_NOTE_KIND (link) == 0)
    return cost;

  if (! recog_memoized (insn))
    return 0;

  attr_type = get_attr_type (insn);

  switch (REG_NOTE_KIND (link))
    {
    case REG_DEP_ANTI:
      /* Anti dependency; DEP_INSN reads a register that INSN writes some
         cycles later.  */

      if (attr_type == TYPE_FPLOAD)
        {
          rtx pat = PATTERN (insn);
          rtx dep_pat = PATTERN (dep_insn);
          if (GET_CODE (pat) == PARALLEL)
            {
              /* This happens for the fldXs,mb patterns.  */
              pat = XVECEXP (pat, 0, 0);
            }
          if (GET_CODE (pat) != SET || GET_CODE (dep_pat) != SET)
            /* If this happens, we have to extend this to schedule
               optimally.  Return 0 for now.  */
            return 0;

          if (reg_mentioned_p (SET_DEST (pat), SET_SRC (dep_pat)))
            {
              if (! recog_memoized (dep_insn))
                return 0;
              switch (get_attr_type (dep_insn))
                {
                case TYPE_FPALU:
                case TYPE_FPMULSGL:
                case TYPE_FPMULDBL:
                case TYPE_FPDIVSGL:
                case TYPE_FPDIVDBL:
                case TYPE_FPSQRTSGL:
                case TYPE_FPSQRTDBL:
                  /* A fpload can't be issued until one cycle before a
                     preceding arithmetic operation has finished if
                     the target of the fpload is any of the sources
                     (or destination) of the arithmetic operation.  */
                  return insn_default_latency (dep_insn) - 1;

                default:
                  return 0;
                }
            }
        }
      else if (attr_type == TYPE_FPALU)
        {
          rtx pat = PATTERN (insn);
          rtx dep_pat = PATTERN (dep_insn);
          if (GET_CODE (pat) == PARALLEL)
            {
              /* This happens for the fldXs,mb patterns.  */
              pat = XVECEXP (pat, 0, 0);
            }
          if (GET_CODE (pat) != SET || GET_CODE (dep_pat) != SET)
            /* If this happens, we have to extend this to schedule
               optimally.  Return 0 for now.  */
            return 0;

          if (reg_mentioned_p (SET_DEST (pat), SET_SRC (dep_pat)))
            {
              if (! recog_memoized (dep_insn))
                return 0;
              switch (get_attr_type (dep_insn))
                {
                case TYPE_FPDIVSGL:
                case TYPE_FPDIVDBL:
                case TYPE_FPSQRTSGL:
                case TYPE_FPSQRTDBL:
                  /* An ALU flop can't be issued until two cycles before a
                     preceding divide or sqrt operation has finished if
                     the target of the ALU flop is any of the sources
                     (or destination) of the divide or sqrt operation.  */
                  return insn_default_latency (dep_insn) - 2;

                default:
                  return 0;
                }
            }
        }

      /* For other anti dependencies, the cost is 0.  */
      return 0;

    case REG_DEP_OUTPUT:
      /* Output dependency; DEP_INSN writes a register that INSN writes some
         cycles later.  */
      if (attr_type == TYPE_FPLOAD)
        {
          rtx pat = PATTERN (insn);
          rtx dep_pat = PATTERN (dep_insn);
          if (GET_CODE (pat) == PARALLEL)
            {
              /* This happens for the fldXs,mb patterns.  */
              pat = XVECEXP (pat, 0, 0);
            }
          if (GET_CODE (pat) != SET || GET_CODE (dep_pat) != SET)
            /* If this happens, we have to extend this to schedule
               optimally.  Return 0 for now.  */
            return 0;

          if (reg_mentioned_p (SET_DEST (pat), SET_DEST (dep_pat)))
            {
              if (! recog_memoized (dep_insn))
                return 0;
              switch (get_attr_type (dep_insn))
                {
                case TYPE_FPALU:
                case TYPE_FPMULSGL:
                case TYPE_FPMULDBL:
                case TYPE_FPDIVSGL:
                case TYPE_FPDIVDBL:
                case TYPE_FPSQRTSGL:
                case TYPE_FPSQRTDBL:
                  /* A fpload can't be issued until one cycle before a
                     preceding arithmetic operation has finished if
                     the target of the fpload is the destination of the
                     arithmetic operation.

                     Exception: For PA7100LC, PA7200 and PA7300, the cost
                     is 3 cycles, unless they bundle together.  We also
                     pay the penalty if the second insn is a fpload.  */
                  return insn_default_latency (dep_insn) - 1;

                default:
                  return 0;
                }
            }
        }
      else if (attr_type == TYPE_FPALU)
        {
          rtx pat = PATTERN (insn);
          rtx dep_pat = PATTERN (dep_insn);
          if (GET_CODE (pat) == PARALLEL)
            {
              /* This happens for the fldXs,mb patterns.  */
              pat = XVECEXP (pat, 0, 0);
            }
          if (GET_CODE (pat) != SET || GET_CODE (dep_pat) != SET)
            /* If this happens, we have to extend this to schedule
               optimally.  Return 0 for now.  */
            return 0;

          if (reg_mentioned_p (SET_DEST (pat), SET_DEST (dep_pat)))
            {
              if (! recog_memoized (dep_insn))
                return 0;
              switch (get_attr_type (dep_insn))
                {
                case TYPE_FPDIVSGL:
                case TYPE_FPDIVDBL:
                case TYPE_FPSQRTSGL:
                case TYPE_FPSQRTDBL:
                  /* An ALU flop can't be issued until two cycles before a
                     preceding divide or sqrt operation has finished if
                     the target of the ALU flop is also the target of
                     the divide or sqrt operation.  */
                  return insn_default_latency (dep_insn) - 2;

                default:
                  return 0;
                }
            }
        }

      /* For other output dependencies, the cost is 0.  */
      return 0;

    default:
      gcc_unreachable ();
    }
}

/* Adjust scheduling priorities.  We use this to try and keep addil
   and the next use of %r1 close together.  */
static int
pa_adjust_priority (rtx insn, int priority)
{
  rtx set = single_set (insn);
  rtx src, dest;
  if (set)
    {
      src = SET_SRC (set);
      dest = SET_DEST (set);
      if (GET_CODE (src) == LO_SUM
          && symbolic_operand (XEXP (src, 1), VOIDmode)
          && ! read_only_operand (XEXP (src, 1), VOIDmode))
        priority >>= 3;

      else if (GET_CODE (src) == MEM
               && GET_CODE (XEXP (src, 0)) == LO_SUM
               && symbolic_operand (XEXP (XEXP (src, 0), 1), VOIDmode)
               && ! read_only_operand (XEXP (XEXP (src, 0), 1), VOIDmode))
        priority >>= 1;

      else if (GET_CODE (dest) == MEM
               && GET_CODE (XEXP (dest, 0)) == LO_SUM
               && symbolic_operand (XEXP (XEXP (dest, 0), 1), VOIDmode)
               && ! read_only_operand (XEXP (XEXP (dest, 0), 1), VOIDmode))
        priority >>= 3;
    }
  return priority;
}

/* The 700 can only issue a single insn at a time.
   The 7XXX processors can issue two insns at a time.
   The 8000 can issue 4 insns at a time.  */
static int
pa_issue_rate (void)
{
  switch (pa_cpu)
    {
    case PROCESSOR_700:         return 1;
    case PROCESSOR_7100:        return 2;
    case PROCESSOR_7100LC:      return 2;
    case PROCESSOR_7200:        return 2;
    case PROCESSOR_7300:        return 2;
    case PROCESSOR_8000:        return 4;

    default:
      gcc_unreachable ();
    }
}



/* Return any length adjustment needed by INSN which already has its length
   computed as LENGTH.  Return zero if no adjustment is necessary.

   For the PA: function calls, millicode calls, and backwards short
   conditional branches with unfilled delay slots need an adjustment by +1
   (to account for the NOP which will be inserted into the instruction stream).

   Also compute the length of an inline block move here as it is too
   complicated to express as a length attribute in pa.md.  */
int
pa_adjust_insn_length (rtx insn, int length)
{
  rtx pat = PATTERN (insn);

  /* Jumps inside switch tables which have unfilled delay slots need
     adjustment.  */
  if (GET_CODE (insn) == JUMP_INSN
      && GET_CODE (pat) == PARALLEL
      && get_attr_type (insn) == TYPE_BTABLE_BRANCH)
    return 4;
  /* Millicode insn with an unfilled delay slot.  */
  else if (GET_CODE (insn) == INSN
           && GET_CODE (pat) != SEQUENCE
           && GET_CODE (pat) != USE
           && GET_CODE (pat) != CLOBBER
           && get_attr_type (insn) == TYPE_MILLI)
    return 4;
  /* Block move pattern.  */
  else if (GET_CODE (insn) == INSN
           && GET_CODE (pat) == PARALLEL
           && GET_CODE (XVECEXP (pat, 0, 0)) == SET
           && GET_CODE (XEXP (XVECEXP (pat, 0, 0), 0)) == MEM
           && GET_CODE (XEXP (XVECEXP (pat, 0, 0), 1)) == MEM
           && GET_MODE (XEXP (XVECEXP (pat, 0, 0), 0)) == BLKmode
           && GET_MODE (XEXP (XVECEXP (pat, 0, 0), 1)) == BLKmode)
    return compute_movmem_length (insn) - 4;
  /* Block clear pattern.  */
  else if (GET_CODE (insn) == INSN
           && GET_CODE (pat) == PARALLEL
           && GET_CODE (XVECEXP (pat, 0, 0)) == SET
           && GET_CODE (XEXP (XVECEXP (pat, 0, 0), 0)) == MEM
           && XEXP (XVECEXP (pat, 0, 0), 1) == const0_rtx
           && GET_MODE (XEXP (XVECEXP (pat, 0, 0), 0)) == BLKmode)
    return compute_clrmem_length (insn) - 4;
  /* Conditional branch with an unfilled delay slot.  */
  else if (GET_CODE (insn) == JUMP_INSN && ! simplejump_p (insn))
    {
      /* Adjust a short backwards conditional with an unfilled delay slot.  */
      if (GET_CODE (pat) == SET
          && length == 4
          && JUMP_LABEL (insn) != NULL_RTX
          && ! forward_branch_p (insn))
        return 4;
      else if (GET_CODE (pat) == PARALLEL
               && get_attr_type (insn) == TYPE_PARALLEL_BRANCH
               && length == 4)
        return 4;
      /* Adjust dbra insn with short backwards conditional branch with
         unfilled delay slot -- only for case where counter is in a
         general register.  */
      else if (GET_CODE (pat) == PARALLEL
               && GET_CODE (XVECEXP (pat, 0, 1)) == SET
               && GET_CODE (XEXP (XVECEXP (pat, 0, 1), 0)) == REG
               && ! FP_REG_P (XEXP (XVECEXP (pat, 0, 1), 0))
               && length == 4
               && ! forward_branch_p (insn))
        return 4;
      else
        return 0;
    }
  return 0;
}

/* Implement the TARGET_PRINT_OPERAND_PUNCT_VALID_P hook.  */

static bool
pa_print_operand_punct_valid_p (unsigned char code)
{
  if (code == '@'
      || code == '#'
      || code == '*'
      || code == '^')
    return true;

  return false;
}

/* Print operand X (an rtx) in assembler syntax to file FILE.
   CODE is a letter or dot (`z' in `%z0') or 0 if no letter was specified.
   For `%' followed by punctuation, CODE is the punctuation and X is null.  */

void
pa_print_operand (FILE *file, rtx x, int code)
{
  switch (code)
    {
    case '#':
      /* Output a 'nop' if there's nothing for the delay slot.  */
      if (dbr_sequence_length () == 0)
        fputs ("\n\tnop", file);
      return;
    case '*':
      /* Output a nullification completer if there's nothing for the
         delay slot or nullification is requested.  */
      if (dbr_sequence_length () == 0 ||
          (final_sequence &&
           INSN_ANNULLED_BRANCH_P (XVECEXP (final_sequence, 0, 0))))
        fputs (",n", file);
      return;
    case 'R':
      /* Print out the second register name of a register pair.
         I.e., R (6) => 7.  */
      fputs (reg_names[REGNO (x) + 1], file);
      return;
    case 'r':
      /* A register or zero.  */
      if (x == const0_rtx
          || (x == CONST0_RTX (DFmode))
          || (x == CONST0_RTX (SFmode)))
        {
          fputs ("%r0", file);
          return;
        }
      else
        break;
    case 'f':
      /* A register or zero (floating point).  */
      if (x == const0_rtx
          || (x == CONST0_RTX (DFmode))
          || (x == CONST0_RTX (SFmode)))
        {
          fputs ("%fr0", file);
          return;
        }
      else
        break;
    case 'A':
      {
        rtx xoperands[2];

        xoperands[0] = XEXP (XEXP (x, 0), 0);
        xoperands[1] = XVECEXP (XEXP (XEXP (x, 0), 1), 0, 0);
        pa_output_global_address (file, xoperands[1], 0);
        fprintf (file, "(%s)", reg_names [REGNO (xoperands[0])]);
        return;
      }

    case 'C':                   /* Plain (C)ondition */
    case 'X':
      switch (GET_CODE (x))
        {
        case EQ:
          fputs ("=", file);  break;
        case NE:
          fputs ("<>", file);  break;
        case GT:
          fputs (">", file);  break;
        case GE:
          fputs (">=", file);  break;
        case GEU:
          fputs (">>=", file);  break;
        case GTU:
          fputs (">>", file);  break;
        case LT:
          fputs ("<", file);  break;
        case LE:
          fputs ("<=", file);  break;
        case LEU:
          fputs ("<<=", file);  break;
        case LTU:
          fputs ("<<", file);  break;
        default:
          gcc_unreachable ();
        }
      return;
    case 'N':                   /* Condition, (N)egated */
      switch (GET_CODE (x))
        {
        case EQ:
          fputs ("<>", file);  break;
        case NE:
          fputs ("=", file);  break;
        case GT:
          fputs ("<=", file);  break;
        case GE:
          fputs ("<", file);  break;
        case GEU:
          fputs ("<<", file);  break;
        case GTU:
          fputs ("<<=", file);  break;
        case LT:
          fputs (">=", file);  break;
        case LE:
          fputs (">", file);  break;
        case LEU:
          fputs (">>", file);  break;
        case LTU:
          fputs (">>=", file);  break;
        default:
          gcc_unreachable ();
        }
      return;
    /* For floating point comparisons.  Note that the output
       predicates are the complement of the desired mode.  The
       conditions for GT, GE, LT, LE and LTGT cause an invalid
       operation exception if the result is unordered and this
       exception is enabled in the floating-point status register.  */
    case 'Y':
      switch (GET_CODE (x))
        {
        case EQ:
          fputs ("!=", file);  break;
        case NE:
          fputs ("=", file);  break;
        case GT:
          fputs ("!>", file);  break;
        case GE:
          fputs ("!>=", file);  break;
        case LT:
          fputs ("!<", file);  break;
        case LE:
          fputs ("!<=", file);  break;
        case LTGT:
          fputs ("!<>", file);  break;
        case UNLE:
          fputs ("!?<=", file);  break;
        case UNLT:
          fputs ("!?<", file);  break;
        case UNGE:
          fputs ("!?>=", file);  break;
        case UNGT:
          fputs ("!?>", file);  break;
        case UNEQ:
          fputs ("!?=", file);  break;
        case UNORDERED:
          fputs ("!?", file);  break;
        case ORDERED:
          fputs ("?", file);  break;
        default:
          gcc_unreachable ();
        }
      return;
    case 'S':                   /* Condition, operands are (S)wapped.  */
      switch (GET_CODE (x))
        {
        case EQ:
          fputs ("=", file);  break;
        case NE:
          fputs ("<>", file);  break;
        case GT:
          fputs ("<", file);  break;
        case GE:
          fputs ("<=", file);  break;
        case GEU:
          fputs ("<<=", file);  break;
        case GTU:
          fputs ("<<", file);  break;
        case LT:
          fputs (">", file);  break;
        case LE:
          fputs (">=", file);  break;
        case LEU:
          fputs (">>=", file);  break;
        case LTU:
          fputs (">>", file);  break;
        default:
          gcc_unreachable ();
        }
      return;
    case 'B':                   /* Condition, (B)oth swapped and negate.  */
      switch (GET_CODE (x))
        {
        case EQ:
          fputs ("<>", file);  break;
        case NE:
          fputs ("=", file);  break;
        case GT:
          fputs (">=", file);  break;
        case GE:
          fputs (">", file);  break;
        case GEU:
          fputs (">>", file);  break;
        case GTU:
          fputs (">>=", file);  break;
        case LT:
          fputs ("<=", file);  break;
        case LE:
          fputs ("<", file);  break;
        case LEU:
          fputs ("<<", file);  break;
        case LTU:
          fputs ("<<=", file);  break;
        default:
          gcc_unreachable ();
        }
      return;
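    /* For example, given a GT comparison, %C prints ">", %N prints
       "<=", %S prints "<" and %B prints ">=".  */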
    case 'k':
      gcc_assert (GET_CODE (x) == CONST_INT);
      fprintf (file, HOST_WIDE_INT_PRINT_DEC, ~INTVAL (x));
      return;
    case 'Q':
      gcc_assert (GET_CODE (x) == CONST_INT);
      fprintf (file, HOST_WIDE_INT_PRINT_DEC, 64 - (INTVAL (x) & 63));
      return;
    case 'L':
      gcc_assert (GET_CODE (x) == CONST_INT);
      fprintf (file, HOST_WIDE_INT_PRINT_DEC, 32 - (INTVAL (x) & 31));
      return;
    case 'O':
      gcc_assert (GET_CODE (x) == CONST_INT && exact_log2 (INTVAL (x)) >= 0);
      fprintf (file, "%d", exact_log2 (INTVAL (x)));
      return;
    case 'p':
      gcc_assert (GET_CODE (x) == CONST_INT);
      fprintf (file, HOST_WIDE_INT_PRINT_DEC, 63 - (INTVAL (x) & 63));
      return;
    case 'P':
      gcc_assert (GET_CODE (x) == CONST_INT);
      fprintf (file, HOST_WIDE_INT_PRINT_DEC, 31 - (INTVAL (x) & 31));
      return;
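    /* For example, with a constant of 8, %L prints 24 (32 - 8),
       %P prints 23 (31 - 8) and %O prints 3 (log2 of 8).  */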
    case 'I':
      if (GET_CODE (x) == CONST_INT)
        fputs ("i", file);
      return;
    case 'M':
    case 'F':
      switch (GET_CODE (XEXP (x, 0)))
        {
        case PRE_DEC:
        case PRE_INC:
          if (ASSEMBLER_DIALECT == 0)
            fputs ("s,mb", file);
          else
            fputs (",mb", file);
          break;
        case POST_DEC:
        case POST_INC:
          if (ASSEMBLER_DIALECT == 0)
            fputs ("s,ma", file);
          else
            fputs (",ma", file);
          break;
        case PLUS:
          if (GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
              && GET_CODE (XEXP (XEXP (x, 0), 1)) == REG)
            {
              if (ASSEMBLER_DIALECT == 0)
                fputs ("x", file);
            }
          else if (GET_CODE (XEXP (XEXP (x, 0), 0)) == MULT
                   || GET_CODE (XEXP (XEXP (x, 0), 1)) == MULT)
            {
              if (ASSEMBLER_DIALECT == 0)
                fputs ("x,s", file);
              else
                fputs (",s", file);
            }
          else if (code == 'F' && ASSEMBLER_DIALECT == 0)
            fputs ("s", file);
          break;
        default:
          if (code == 'F' && ASSEMBLER_DIALECT == 0)
            fputs ("s", file);
          break;
        }
      return;
    case 'G':
      pa_output_global_address (file, x, 0);
      return;
    case 'H':
      pa_output_global_address (file, x, 1);
      return;
    case 0:                     /* Don't do anything special */
      break;
    case 'Z':
      {
        unsigned op[3];
        compute_zdepwi_operands (INTVAL (x), op);
        fprintf (file, "%d,%d,%d", op[0], op[1], op[2]);
        return;
      }
    case 'z':
      {
        unsigned op[3];
        compute_zdepdi_operands (INTVAL (x), op);
        fprintf (file, "%d,%d,%d", op[0], op[1], op[2]);
        return;
      }
    case 'c':
      /* We can get here from a .vtable_inherit due to our
         CONSTANT_ADDRESS_P rejecting perfectly good constant
         addresses.  */
      break;
    default:
      gcc_unreachable ();
    }
  if (GET_CODE (x) == REG)
    {
      fputs (reg_names [REGNO (x)], file);
      if (TARGET_64BIT && FP_REG_P (x) && GET_MODE_SIZE (GET_MODE (x)) <= 4)
        {
          fputs ("R", file);
          return;
        }
      if (FP_REG_P (x)
          && GET_MODE_SIZE (GET_MODE (x)) <= 4
          && (REGNO (x) & 1) == 0)
        fputs ("L", file);
    }
  else if (GET_CODE (x) == MEM)
    {
      int size = GET_MODE_SIZE (GET_MODE (x));
      rtx base = NULL_RTX;
      switch (GET_CODE (XEXP (x, 0)))
        {
        case PRE_DEC:
        case POST_DEC:
          base = XEXP (XEXP (x, 0), 0);
          fprintf (file, "-%d(%s)", size, reg_names [REGNO (base)]);
          break;
        case PRE_INC:
        case POST_INC:
          base = XEXP (XEXP (x, 0), 0);
          fprintf (file, "%d(%s)", size, reg_names [REGNO (base)]);
          break;
        case PLUS:
          if (GET_CODE (XEXP (XEXP (x, 0), 0)) == MULT)
            fprintf (file, "%s(%s)",
                     reg_names [REGNO (XEXP (XEXP (XEXP (x, 0), 0), 0))],
                     reg_names [REGNO (XEXP (XEXP (x, 0), 1))]);
          else if (GET_CODE (XEXP (XEXP (x, 0), 1)) == MULT)
            fprintf (file, "%s(%s)",
                     reg_names [REGNO (XEXP (XEXP (XEXP (x, 0), 1), 0))],
                     reg_names [REGNO (XEXP (XEXP (x, 0), 0))]);
          else if (GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
                   && GET_CODE (XEXP (XEXP (x, 0), 1)) == REG)
            {
              /* Because the REG_POINTER flag can get lost during reload,
                 GO_IF_LEGITIMATE_ADDRESS canonicalizes the order of the
                 index and base registers in the combined move patterns.  */
              rtx base = XEXP (XEXP (x, 0), 1);
              rtx index = XEXP (XEXP (x, 0), 0);

              fprintf (file, "%s(%s)",
                       reg_names [REGNO (index)], reg_names [REGNO (base)]);
            }
          else
            output_address (XEXP (x, 0));
          break;
        default:
          output_address (XEXP (x, 0));
          break;
        }
    }
  else
    output_addr_const (file, x);
}

/* Output a SYMBOL_REF or a CONST expression involving a SYMBOL_REF.  */

void
pa_output_global_address (FILE *file, rtx x, int round_constant)
{

  /* Imagine  (high (const (plus ...))).  */
  if (GET_CODE (x) == HIGH)
    x = XEXP (x, 0);

  if (GET_CODE (x) == SYMBOL_REF && read_only_operand (x, VOIDmode))
    output_addr_const (file, x);
  else if (GET_CODE (x) == SYMBOL_REF && !flag_pic)
    {
      output_addr_const (file, x);
      fputs ("-$global$", file);
    }
  else if (GET_CODE (x) == CONST)
    {
      const char *sep = "";
      int offset = 0;           /* assembler wants -$global$ at end */
      rtx base = NULL_RTX;

      switch (GET_CODE (XEXP (XEXP (x, 0), 0)))
        {
        case SYMBOL_REF:
          base = XEXP (XEXP (x, 0), 0);
          output_addr_const (file, base);
          break;
        case CONST_INT:
          offset = INTVAL (XEXP (XEXP (x, 0), 0));
          break;
        default:
          gcc_unreachable ();
        }

      switch (GET_CODE (XEXP (XEXP (x, 0), 1)))
        {
        case SYMBOL_REF:
          base = XEXP (XEXP (x, 0), 1);
          output_addr_const (file, base);
          break;
        case CONST_INT:
          offset = INTVAL (XEXP (XEXP (x, 0), 1));
          break;
        default:
          gcc_unreachable ();
        }

      /* How bogus.  The compiler is apparently responsible for
         rounding the constant if it uses an LR field selector.

         The linker and/or assembler seem a better place since
         they have to do this kind of thing already.

         If we fail to do this, HP's optimizing linker may eliminate
         an addil, but not update the ldw/stw/ldo instruction that
         uses the result of the addil.  */
      if (round_constant)
        offset = ((offset + 0x1000) & ~0x1fff);
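      /* For example, an offset of 0x1234 rounds to 0x2000 and 0x0800
         rounds to 0: adding 0x1000 rounds to the nearest 8k boundary
         and the mask then clears the low 13 bits.  */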

      switch (GET_CODE (XEXP (x, 0)))
        {
        case PLUS:
          if (offset < 0)
            {
              offset = -offset;
              sep = "-";
            }
          else
            sep = "+";
          break;

        case MINUS:
          gcc_assert (GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF);
          sep = "-";
          break;

        default:
          gcc_unreachable ();
        }

      if (!read_only_operand (base, VOIDmode) && !flag_pic)
        fputs ("-$global$", file);
      if (offset)
        fprintf (file, "%s%d", sep, offset);
    }
  else
    output_addr_const (file, x);
}

/* Output boilerplate text to appear at the beginning of the file.
   There are several possible versions.  */
#define aputs(x) fputs(x, asm_out_file)
static inline void
pa_file_start_level (void)
{
  if (TARGET_64BIT)
    aputs ("\t.LEVEL 2.0w\n");
  else if (TARGET_PA_20)
    aputs ("\t.LEVEL 2.0\n");
  else if (TARGET_PA_11)
    aputs ("\t.LEVEL 1.1\n");
  else
    aputs ("\t.LEVEL 1.0\n");
}

static inline void
pa_file_start_space (int sortspace)
{
  aputs ("\t.SPACE $PRIVATE$");
  if (sortspace)
    aputs (",SORT=16");
  aputs ("\n\t.SUBSPA $DATA$,QUAD=1,ALIGN=8,ACCESS=31");
  if (flag_tm)
    aputs ("\n\t.SUBSPA $TM_CLONE_TABLE$,QUAD=1,ALIGN=8,ACCESS=31");
  aputs ("\n\t.SUBSPA $BSS$,QUAD=1,ALIGN=8,ACCESS=31,ZERO,SORT=82"
         "\n\t.SPACE $TEXT$");
  if (sortspace)
    aputs (",SORT=8");
  aputs ("\n\t.SUBSPA $LIT$,QUAD=0,ALIGN=8,ACCESS=44"
         "\n\t.SUBSPA $CODE$,QUAD=0,ALIGN=8,ACCESS=44,CODE_ONLY\n");
}

static inline void
pa_file_start_file (int want_version)
{
  if (write_symbols != NO_DEBUG)
    {
      output_file_directive (asm_out_file, main_input_filename);
      if (want_version)
        aputs ("\t.version\t\"01.01\"\n");
    }
}

static inline void
pa_file_start_mcount (const char *aswhat)
{
  if (profile_flag)
    fprintf (asm_out_file, "\t.IMPORT _mcount,%s\n", aswhat);
}

static void
pa_elf_file_start (void)
{
  pa_file_start_level ();
  pa_file_start_mcount ("ENTRY");
  pa_file_start_file (0);
}

static void
pa_som_file_start (void)
{
  pa_file_start_level ();
  pa_file_start_space (0);
  aputs ("\t.IMPORT $global$,DATA\n"
         "\t.IMPORT $$dyncall,MILLICODE\n");
  pa_file_start_mcount ("CODE");
  pa_file_start_file (0);
}

static void
pa_linux_file_start (void)
{
  pa_file_start_file (1);
  pa_file_start_level ();
  pa_file_start_mcount ("CODE");
}

static void
pa_hpux64_gas_file_start (void)
{
  pa_file_start_level ();
#ifdef ASM_OUTPUT_TYPE_DIRECTIVE
  if (profile_flag)
    ASM_OUTPUT_TYPE_DIRECTIVE (asm_out_file, "_mcount", "function");
#endif
  pa_file_start_file (1);
}

static void
pa_hpux64_hpas_file_start (void)
{
  pa_file_start_level ();
  pa_file_start_space (1);
  pa_file_start_mcount ("CODE");
  pa_file_start_file (0);
}
#undef aputs

/* Search the deferred plabel list for SYMBOL and return its internal
   label.  If an entry for SYMBOL is not found, a new entry is created.  */

rtx
pa_get_deferred_plabel (rtx symbol)
{
  const char *fname = XSTR (symbol, 0);
  size_t i;

  /* See if we have already put this function on the list of deferred
     plabels.  This list is generally small, so a linear search is not
     too ugly.  If it proves too slow replace it with something faster.  */
  for (i = 0; i < n_deferred_plabels; i++)
    if (strcmp (fname, XSTR (deferred_plabels[i].symbol, 0)) == 0)
      break;

  /* If the deferred plabel list is empty, or this entry was not found
     on the list, create a new entry on the list.  */
  if (deferred_plabels == NULL || i == n_deferred_plabels)
    {
      tree id;

      if (deferred_plabels == 0)
        deferred_plabels = ggc_alloc_deferred_plabel ();
      else
        deferred_plabels = GGC_RESIZEVEC (struct deferred_plabel,
                                          deferred_plabels,
                                          n_deferred_plabels + 1);

      i = n_deferred_plabels++;
      deferred_plabels[i].internal_label = gen_label_rtx ();
      deferred_plabels[i].symbol = symbol;

      /* Gross.  We have just implicitly taken the address of this
         function.  Mark it in the same manner as assemble_name.  */
      id = maybe_get_identifier (targetm.strip_name_encoding (fname));
      if (id)
        mark_referenced (id);
    }

  return deferred_plabels[i].internal_label;
}

static void
output_deferred_plabels (void)
{
  size_t i;

  /* If we have some deferred plabels, then we need to switch into the
     data or readonly data section, and align it to a 4 byte boundary
     before outputting the deferred plabels.  */
  if (n_deferred_plabels)
    {
      switch_to_section (flag_pic ? data_section : readonly_data_section);
      ASM_OUTPUT_ALIGN (asm_out_file, TARGET_64BIT ? 3 : 2);
    }

  /* Now output the deferred plabels.  */
  for (i = 0; i < n_deferred_plabels; i++)
    {
      targetm.asm_out.internal_label (asm_out_file, "L",
                 CODE_LABEL_NUMBER (deferred_plabels[i].internal_label));
      assemble_integer (deferred_plabels[i].symbol,
                        TARGET_64BIT ? 8 : 4, TARGET_64BIT ? 64 : 32, 1);
    }
}

/* Initialize optabs to point to emulation routines.  */

static void
pa_init_libfuncs (void)
{
  if (HPUX_LONG_DOUBLE_LIBRARY)
    {
      set_optab_libfunc (add_optab, TFmode, "_U_Qfadd");
      set_optab_libfunc (sub_optab, TFmode, "_U_Qfsub");
      set_optab_libfunc (smul_optab, TFmode, "_U_Qfmpy");
      set_optab_libfunc (sdiv_optab, TFmode, "_U_Qfdiv");
      set_optab_libfunc (smin_optab, TFmode, "_U_Qmin");
      set_optab_libfunc (smax_optab, TFmode, "_U_Qfmax");
      set_optab_libfunc (sqrt_optab, TFmode, "_U_Qfsqrt");
      set_optab_libfunc (abs_optab, TFmode, "_U_Qfabs");
      set_optab_libfunc (neg_optab, TFmode, "_U_Qfneg");

      set_optab_libfunc (eq_optab, TFmode, "_U_Qfeq");
      set_optab_libfunc (ne_optab, TFmode, "_U_Qfne");
      set_optab_libfunc (gt_optab, TFmode, "_U_Qfgt");
      set_optab_libfunc (ge_optab, TFmode, "_U_Qfge");
      set_optab_libfunc (lt_optab, TFmode, "_U_Qflt");
      set_optab_libfunc (le_optab, TFmode, "_U_Qfle");
      set_optab_libfunc (unord_optab, TFmode, "_U_Qfunord");

      set_conv_libfunc (sext_optab, TFmode, SFmode, "_U_Qfcnvff_sgl_to_quad");
      set_conv_libfunc (sext_optab, TFmode, DFmode, "_U_Qfcnvff_dbl_to_quad");
      set_conv_libfunc (trunc_optab, SFmode, TFmode, "_U_Qfcnvff_quad_to_sgl");
      set_conv_libfunc (trunc_optab, DFmode, TFmode, "_U_Qfcnvff_quad_to_dbl");

      set_conv_libfunc (sfix_optab, SImode, TFmode,
                        TARGET_64BIT ? "__U_Qfcnvfxt_quad_to_sgl"
                                     : "_U_Qfcnvfxt_quad_to_sgl");
      set_conv_libfunc (sfix_optab, DImode, TFmode,
                        "_U_Qfcnvfxt_quad_to_dbl");
      set_conv_libfunc (ufix_optab, SImode, TFmode,
                        "_U_Qfcnvfxt_quad_to_usgl");
      set_conv_libfunc (ufix_optab, DImode, TFmode,
                        "_U_Qfcnvfxt_quad_to_udbl");

      set_conv_libfunc (sfloat_optab, TFmode, SImode,
                        "_U_Qfcnvxf_sgl_to_quad");
      set_conv_libfunc (sfloat_optab, TFmode, DImode,
                        "_U_Qfcnvxf_dbl_to_quad");
      set_conv_libfunc (ufloat_optab, TFmode, SImode,
                        "_U_Qfcnvxf_usgl_to_quad");
      set_conv_libfunc (ufloat_optab, TFmode, DImode,
                        "_U_Qfcnvxf_udbl_to_quad");
    }

  if (TARGET_SYNC_LIBCALL)
    init_sync_libfuncs (UNITS_PER_WORD);
}

/* HP's millicode routines mean something special to the assembler.
   Keep track of which ones we have used.  */

enum millicodes { remI, remU, divI, divU, mulI, end1000 };
static void import_milli (enum millicodes);
static char imported[(int) end1000];
static const char * const milli_names[] = {"remI", "remU", "divI", "divU", "mulI"};
static const char import_string[] = ".IMPORT $$....,MILLICODE";
#define MILLI_START 10
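/* import_milli below splices a millicode name (at most four characters)
   over the dots, so, for example, import_milli (mulI) emits
   ".IMPORT $$mulI,MILLICODE".  */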

static void
import_milli (enum millicodes code)
{
  char str[sizeof (import_string)];

  if (!imported[(int) code])
    {
      imported[(int) code] = 1;
      strcpy (str, import_string);
      strncpy (str + MILLI_START, milli_names[(int) code], 4);
      output_asm_insn (str, 0);
    }
}

/* The register constraints have put the operands and return value in
   the proper registers.  */

const char *
pa_output_mul_insn (int unsignedp ATTRIBUTE_UNUSED, rtx insn)
{
  import_milli (mulI);
  return pa_output_millicode_call (insn, gen_rtx_SYMBOL_REF (Pmode, "$$mulI"));
}

/* Emit the rtl for doing a division by a constant.  */

/* Do magic division millicodes exist for this value? */
const int pa_magic_milli[]= {0, 0, 0, 1, 0, 1, 1, 1, 0, 1, 1, 0, 1, 0, 1, 1};
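/* That is, magic millicode routines exist for the divisors 3, 5, 6, 7,
   9, 10, 12, 14 and 15; the table is indexed by the divisor itself.  */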

/* We'll use an array to keep track of the magic millicodes and
   whether or not we've used them already. [n][0] is signed, [n][1] is
   unsigned.  */

static int div_milli[16][2];
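/* For example, div_milli[3][0] is set once the import for $$divI_3
   has been emitted, and div_milli[3][1] once the import for $$divU_3
   has.  */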

int
pa_emit_hpdiv_const (rtx *operands, int unsignedp)
{
  if (GET_CODE (operands[2]) == CONST_INT
      && INTVAL (operands[2]) > 0
      && INTVAL (operands[2]) < 16
      && pa_magic_milli[INTVAL (operands[2])])
    {
      rtx ret = gen_rtx_REG (SImode, TARGET_64BIT ? 2 : 31);

      emit_move_insn (gen_rtx_REG (SImode, 26), operands[1]);
      emit
        (gen_rtx_PARALLEL
         (VOIDmode,
          gen_rtvec (6, gen_rtx_SET (VOIDmode, gen_rtx_REG (SImode, 29),
                                     gen_rtx_fmt_ee (unsignedp ? UDIV : DIV,
                                                     SImode,
                                                     gen_rtx_REG (SImode, 26),
                                                     operands[2])),
                     gen_rtx_CLOBBER (VOIDmode, operands[4]),
                     gen_rtx_CLOBBER (VOIDmode, operands[3]),
                     gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (SImode, 26)),
                     gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (SImode, 25)),
                     gen_rtx_CLOBBER (VOIDmode, ret))));
      emit_move_insn (operands[0], gen_rtx_REG (SImode, 29));
      return 1;
    }
  return 0;
}

const char *
pa_output_div_insn (rtx *operands, int unsignedp, rtx insn)
{
  int divisor;

  /* If the divisor is a constant, try to use one of the special
     opcodes.  */
  if (GET_CODE (operands[0]) == CONST_INT)
    {
      static char buf[100];
      divisor = INTVAL (operands[0]);
      if (!div_milli[divisor][unsignedp])
        {
          div_milli[divisor][unsignedp] = 1;
          if (unsignedp)
            output_asm_insn (".IMPORT $$divU_%0,MILLICODE", operands);
          else
            output_asm_insn (".IMPORT $$divI_%0,MILLICODE", operands);
        }
      if (unsignedp)
        {
          sprintf (buf, "$$divU_" HOST_WIDE_INT_PRINT_DEC,
                   INTVAL (operands[0]));
          return pa_output_millicode_call (insn,
                                           gen_rtx_SYMBOL_REF (SImode, buf));
        }
      else
        {
          sprintf (buf, "$$divI_" HOST_WIDE_INT_PRINT_DEC,
                   INTVAL (operands[0]));
          return pa_output_millicode_call (insn,
                                           gen_rtx_SYMBOL_REF (SImode, buf));
        }
    }
  /* Divisor isn't a special constant.  */
  else
    {
      if (unsignedp)
        {
          import_milli (divU);
          return pa_output_millicode_call (insn,
                                        gen_rtx_SYMBOL_REF (SImode, "$$divU"));
        }
      else
        {
          import_milli (divI);
          return pa_output_millicode_call (insn,
                                        gen_rtx_SYMBOL_REF (SImode, "$$divI"));
        }
    }
}

/* Output a $$rem millicode to do mod.  */

const char *
pa_output_mod_insn (int unsignedp, rtx insn)
{
  if (unsignedp)
    {
      import_milli (remU);
      return pa_output_millicode_call (insn,
                                       gen_rtx_SYMBOL_REF (SImode, "$$remU"));
    }
  else
    {
      import_milli (remI);
      return pa_output_millicode_call (insn,
                                       gen_rtx_SYMBOL_REF (SImode, "$$remI"));
    }
}

void
pa_output_arg_descriptor (rtx call_insn)
{
  const char *arg_regs[4];
  enum machine_mode arg_mode;
  rtx link;
  int i, output_flag = 0;
  int regno;

  /* We neither need nor want argument location descriptors for the
     64bit runtime environment or the ELF32 environment.  */
  if (TARGET_64BIT || TARGET_ELF32)
    return;

  for (i = 0; i < 4; i++)
    arg_regs[i] = 0;

  /* Specify explicitly that no argument relocations should take place
     if using the portable runtime calling conventions.  */
  if (TARGET_PORTABLE_RUNTIME)
    {
      fputs ("\t.CALL ARGW0=NO,ARGW1=NO,ARGW2=NO,ARGW3=NO,RETVAL=NO\n",
             asm_out_file);
      return;
    }

  gcc_assert (GET_CODE (call_insn) == CALL_INSN);
  for (link = CALL_INSN_FUNCTION_USAGE (call_insn);
       link; link = XEXP (link, 1))
    {
      rtx use = XEXP (link, 0);

      if (! (GET_CODE (use) == USE
             && GET_CODE (XEXP (use, 0)) == REG
             && FUNCTION_ARG_REGNO_P (REGNO (XEXP (use, 0)))))
        continue;

      arg_mode = GET_MODE (XEXP (use, 0));
      regno = REGNO (XEXP (use, 0));
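      /* The general argument registers %r26..%r23 map to descriptor
         words ARGW0..ARGW3 (hence the arg_regs[26 - regno] indexing
         below); a DImode argument also claims the following word.  */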
      if (regno >= 23 && regno <= 26)
        {
          arg_regs[26 - regno] = "GR";
          if (arg_mode == DImode)
            arg_regs[25 - regno] = "GR";
        }
      else if (regno >= 32 && regno <= 39)
        {
          if (arg_mode == SFmode)
            arg_regs[(regno - 32) / 2] = "FR";
          else
            {
#ifndef HP_FP_ARG_DESCRIPTOR_REVERSED
              arg_regs[(regno - 34) / 2] = "FR";
              arg_regs[(regno - 34) / 2 + 1] = "FU";
#else
              arg_regs[(regno - 34) / 2] = "FU";
              arg_regs[(regno - 34) / 2 + 1] = "FR";
#endif
            }
        }
    }
  fputs ("\t.CALL ", asm_out_file);
  for (i = 0; i < 4; i++)
    {
      if (arg_regs[i])
        {
          if (output_flag++)
            fputc (',', asm_out_file);
          fprintf (asm_out_file, "ARGW%d=%s", i, arg_regs[i]);
        }
    }
  fputc ('\n', asm_out_file);
}

/* Inform reload about cases where moving X with a mode MODE to a register in
   RCLASS requires an extra scratch or immediate register.  Return the class
   needed for the immediate register.  */

static reg_class_t
pa_secondary_reload (bool in_p, rtx x, reg_class_t rclass_i,
                     enum machine_mode mode, secondary_reload_info *sri)
{
  int regno;
  enum reg_class rclass = (enum reg_class) rclass_i;

  /* Handle the easy stuff first.  */
  if (rclass == R1_REGS)
    return NO_REGS;

  if (REG_P (x))
    {
      regno = REGNO (x);
      if (rclass == BASE_REG_CLASS && regno < FIRST_PSEUDO_REGISTER)
        return NO_REGS;
    }
  else
    regno = -1;

  /* If we have something like (mem (mem (...)), we can safely assume the
     inner MEM will end up in a general register after reloading, so there's
     no need for a secondary reload.  */
  if (GET_CODE (x) == MEM && GET_CODE (XEXP (x, 0)) == MEM)
    return NO_REGS;

  /* Trying to load a constant into a FP register during PIC code
     generation requires %r1 as a scratch register.  */
  if (flag_pic
      && (mode == SImode || mode == DImode)
      && FP_REG_CLASS_P (rclass)
      && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE))
    {
      sri->icode = (mode == SImode ? CODE_FOR_reload_insi_r1
                    : CODE_FOR_reload_indi_r1);
      return NO_REGS;
    }

  /* Secondary reloads of symbolic operands require %r1 as a scratch
     register when we're generating PIC code and when the operand isn't
     readonly.  */
  if (pa_symbolic_expression_p (x))
    {
      if (GET_CODE (x) == HIGH)
        x = XEXP (x, 0);

      if (flag_pic || !read_only_operand (x, VOIDmode))
        {
          gcc_assert (mode == SImode || mode == DImode);
          sri->icode = (mode == SImode ? CODE_FOR_reload_insi_r1
                        : CODE_FOR_reload_indi_r1);
          return NO_REGS;
        }
    }

  /* Profiling showed the PA port spends about 1.3% of its compilation
     time in true_regnum from calls inside pa_secondary_reload_class.  */
  if (regno >= FIRST_PSEUDO_REGISTER || GET_CODE (x) == SUBREG)
    regno = true_regnum (x);

  /* In order to allow 14-bit displacements in integer loads and stores,
     we need to prevent reload from generating out of range integer mode
     loads and stores to the floating point registers.  Previously, we
     used to call for a secondary reload and have pa_emit_move_sequence()
     fix the instruction sequence.  However, reload occasionally wouldn't
     generate the reload and we would end up with an invalid REG+D memory
     address.  So, now we use an intermediate general register for most
     memory loads and stores.  */
  if ((regno >= FIRST_PSEUDO_REGISTER || regno == -1)
      && GET_MODE_CLASS (mode) == MODE_INT
      && FP_REG_CLASS_P (rclass))
    {
      /* Reload passes (mem:SI (reg/f:DI 30 %r30) when it wants to check
         the secondary reload needed for a pseudo.  It never passes a
         REG+D address.  */
      if (GET_CODE (x) == MEM)
        {
          x = XEXP (x, 0);

          /* We don't need an intermediate for indexed and LO_SUM DLT
             memory addresses.  When INT14_OK_STRICT is true, it might
             appear that we could directly allow register indirect
             memory addresses.  However, this doesn't work because we
             don't support SUBREGs in floating-point register copies
             and reload doesn't tell us when it's going to use a SUBREG.  */
          if (IS_INDEX_ADDR_P (x)
              || IS_LO_SUM_DLT_ADDR_P (x))
            return NO_REGS;

          /* Otherwise, we need an intermediate general register.  */
          return GENERAL_REGS;
        }

5937
      /* Request a secondary reload with a general scratch register
5938
         for everthing else.  ??? Could symbolic operands be handled
5939
         directly when generating non-pic PA 2.0 code?  */
5940
      sri->icode = (in_p
5941
                    ? direct_optab_handler (reload_in_optab, mode)
5942
                    : direct_optab_handler (reload_out_optab, mode));
5943
      return NO_REGS;
5944
    }
5945
 
5946
  /* A SAR<->FP register copy requires an intermediate general register
5947
     and secondary memory.  We need a secondary reload with a general
5948
     scratch register for spills.  */
5949
  if (rclass == SHIFT_REGS)
5950
    {
5951
      /* Handle spill.  */
5952
      if (regno >= FIRST_PSEUDO_REGISTER || regno < 0)
5953
        {
5954
          sri->icode = (in_p
5955
                        ? direct_optab_handler (reload_in_optab, mode)
5956
                        : direct_optab_handler (reload_out_optab, mode));
5957
          return NO_REGS;
5958
        }
5959
 
5960
      /* Handle FP copy.  */
5961
      if (FP_REG_CLASS_P (REGNO_REG_CLASS (regno)))
5962
        return GENERAL_REGS;
5963
    }
5964
 
5965
  if (regno >= 0 && regno < FIRST_PSEUDO_REGISTER
5966
      && REGNO_REG_CLASS (regno) == SHIFT_REGS
5967
      && FP_REG_CLASS_P (rclass))
5968
    return GENERAL_REGS;
5969
 
5970
  return NO_REGS;
5971
}
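
/* Illustrative commentary (not from the original sources): asking to
   reload (mem:SI (reg %r30)) into an FP register class hits the MEM
   case above and returns GENERAL_REGS, so the value is bounced through
   a general register rather than risking an out-of-range REG+D
   floating-point load.  */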

/* Implement TARGET_EXTRA_LIVE_ON_ENTRY.  The argument pointer
   is only marked as live on entry by df-scan when it is a fixed
   register.  It isn't a fixed register in the 64-bit runtime,
   so we need to mark it here.  */

static void
pa_extra_live_on_entry (bitmap regs)
{
  if (TARGET_64BIT)
    bitmap_set_bit (regs, ARG_POINTER_REGNUM);
}

/* Implement EH_RETURN_HANDLER_RTX.  The MEM needs to be volatile
   to prevent it from being deleted.  */

rtx
pa_eh_return_handler_rtx (void)
{
  rtx tmp;

  tmp = gen_rtx_PLUS (word_mode, hard_frame_pointer_rtx,
                      TARGET_64BIT ? GEN_INT (-16) : GEN_INT (-20));
  tmp = gen_rtx_MEM (word_mode, tmp);
  tmp->volatil = 1;
  return tmp;
}

/* In the 32-bit runtime, arguments larger than eight bytes are passed
   by invisible reference.  As a GCC extension, we also pass anything
   with a zero or variable size by reference.

   The 64-bit runtime does not describe passing any types by invisible
   reference.  The internals of GCC can't currently handle passing
   empty structures, and zero or variable length arrays when they are
   not passed entirely on the stack or by reference.  Thus, as a GCC
   extension, we pass these types by reference.  The HP compiler doesn't
   support these types, so hopefully there shouldn't be any compatibility
   issues.  This may have to be revisited when HP releases a C99 compiler
   or updates the ABI.  */

static bool
pa_pass_by_reference (cumulative_args_t ca ATTRIBUTE_UNUSED,
                      enum machine_mode mode, const_tree type,
                      bool named ATTRIBUTE_UNUSED)
{
  HOST_WIDE_INT size;

  if (type)
    size = int_size_in_bytes (type);
  else
    size = GET_MODE_SIZE (mode);

  if (TARGET_64BIT)
    return size <= 0;
  else
    return size <= 0 || size > 8;
}
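
/* For example (illustrative, follows directly from the tests above):
   in the 32-bit runtime a 12-byte struct (size > 8) and a zero-sized
   struct (size <= 0) are both passed by reference, while in the 64-bit
   runtime only the zero/variable-size case is.  A plain int (size 4)
   is always passed by value.  */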

enum direction
pa_function_arg_padding (enum machine_mode mode, const_tree type)
{
  if (mode == BLKmode
      || (TARGET_64BIT
          && type
          && (AGGREGATE_TYPE_P (type)
              || TREE_CODE (type) == COMPLEX_TYPE
              || TREE_CODE (type) == VECTOR_TYPE)))
    {
      /* Return none if justification is not required.  */
      if (type
          && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
          && (int_size_in_bytes (type) * BITS_PER_UNIT) % PARM_BOUNDARY == 0)
        return none;

      /* The directions set here are ignored when a BLKmode argument larger
         than a word is placed in a register.  Different code is used for
         the stack and registers.  This makes it difficult to have a
         consistent data representation for both the stack and registers.
         For both runtimes, the justification and padding for arguments on
         the stack and in registers should be identical.  */
      if (TARGET_64BIT)
        /* The 64-bit runtime specifies left justification for aggregates.  */
        return upward;
      else
        /* The 32-bit runtime architecture specifies right justification.
           When the argument is passed on the stack, the argument is padded
           with garbage on the left.  The HP compiler pads with zeros.  */
        return downward;
    }

  if (GET_MODE_BITSIZE (mode) < PARM_BOUNDARY)
    return downward;
  else
    return none;
}
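
/* Illustrative example (added commentary): a 3-byte BLKmode struct is
   not a multiple of PARM_BOUNDARY, so it is right justified (padded
   downward) in the 32-bit runtime and left justified (padded upward)
   in the 64-bit runtime.  In the 32-bit runtime a 4-byte struct is an
   exact multiple of PARM_BOUNDARY and needs no justification at all.  */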


/* Do what is necessary for `va_start'.  We look at the current function
   to determine if stdargs or varargs is used and fill in an initial
   va_list.  A pointer to this constructor is returned.  */

static rtx
hppa_builtin_saveregs (void)
{
  rtx offset, dest;
  tree fntype = TREE_TYPE (current_function_decl);
  int argadj = ((!stdarg_p (fntype))
                ? UNITS_PER_WORD : 0);

  if (argadj)
    offset = plus_constant (crtl->args.arg_offset_rtx, argadj);
  else
    offset = crtl->args.arg_offset_rtx;

  if (TARGET_64BIT)
    {
      int i, off;

      /* Adjust for varargs/stdarg differences.  */
      if (argadj)
        offset = plus_constant (crtl->args.arg_offset_rtx, -argadj);
      else
        offset = crtl->args.arg_offset_rtx;

      /* We need to save %r26 .. %r19 inclusive starting at offset -64
         from the incoming arg pointer and growing to larger addresses.  */
      for (i = 26, off = -64; i >= 19; i--, off += 8)
        emit_move_insn (gen_rtx_MEM (word_mode,
                                     plus_constant (arg_pointer_rtx, off)),
                        gen_rtx_REG (word_mode, i));

      /* The incoming args pointer points just beyond the flushback area;
         normally this is not a serious concern.  However, when we are doing
         varargs/stdargs we want to make the arg pointer point to the start
         of the incoming argument area.  */
      emit_move_insn (virtual_incoming_args_rtx,
                      plus_constant (arg_pointer_rtx, -64));

      /* Now return a pointer to the first anonymous argument.  */
      return copy_to_reg (expand_binop (Pmode, add_optab,
                                        virtual_incoming_args_rtx,
                                        offset, 0, 0, OPTAB_LIB_WIDEN));
    }

  /* Store general registers on the stack.  */
  dest = gen_rtx_MEM (BLKmode,
                      plus_constant (crtl->args.internal_arg_pointer,
                                     -16));
  set_mem_alias_set (dest, get_varargs_alias_set ());
  set_mem_align (dest, BITS_PER_WORD);
  move_block_from_reg (23, dest, 4);

  /* move_block_from_reg will emit code to store the argument registers
     individually as scalar stores.

     However, other insns may later load from the same addresses for
     a structure load (passing a struct to a varargs routine).

     The alias code assumes that such aliasing can never happen, so we
     have to keep memory referencing insns from moving up beyond the
     last argument register store.  So we emit a blockage insn here.  */
  emit_insn (gen_blockage ());

  return copy_to_reg (expand_binop (Pmode, add_optab,
                                    crtl->args.internal_arg_pointer,
                                    offset, 0, 0, OPTAB_LIB_WIDEN));
}
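
/* Illustrative picture of the 32-bit case above (added commentary):
   move_block_from_reg stores %r23..%r26 into the four words at
   internal_arg_pointer-16 through internal_arg_pointer-4, i.e. into
   the stack words that serve as those registers' argument slots, so
   later va_arg accesses can find anonymous register arguments with
   plain memory loads.  */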

static void
hppa_va_start (tree valist, rtx nextarg)
{
  nextarg = expand_builtin_saveregs ();
  std_expand_builtin_va_start (valist, nextarg);
}

static tree
hppa_gimplify_va_arg_expr (tree valist, tree type, gimple_seq *pre_p,
                           gimple_seq *post_p)
{
  if (TARGET_64BIT)
    {
      /* Args grow upward.  We can use the generic routines.  */
      return std_gimplify_va_arg_expr (valist, type, pre_p, post_p);
    }
  else /* !TARGET_64BIT */
    {
      tree ptr = build_pointer_type (type);
      tree valist_type;
      tree t, u;
      unsigned int size, ofs;
      bool indirect;

      indirect = pass_by_reference (NULL, TYPE_MODE (type), type, 0);
      if (indirect)
        {
          type = ptr;
          ptr = build_pointer_type (type);
        }
      size = int_size_in_bytes (type);
      valist_type = TREE_TYPE (valist);

      /* Args grow down.  Not handled by generic routines.  */

      u = fold_convert (sizetype, size_in_bytes (type));
      u = fold_build1 (NEGATE_EXPR, sizetype, u);
      t = fold_build_pointer_plus (valist, u);

      /* Align to 4 or 8 byte boundary depending on argument size.  */

      u = build_int_cst (TREE_TYPE (t), (HOST_WIDE_INT)(size > 4 ? -8 : -4));
      t = build2 (BIT_AND_EXPR, TREE_TYPE (t), t, u);
      t = fold_convert (valist_type, t);

      t = build2 (MODIFY_EXPR, valist_type, valist, t);

      ofs = (8 - size) % 4;
      if (ofs != 0)
        t = fold_build_pointer_plus_hwi (t, ofs);

      t = fold_convert (ptr, t);
      t = build_va_arg_indirect_ref (t);

      if (indirect)
        t = build_va_arg_indirect_ref (t);

      return t;
    }
}
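
/* Worked example for the 32-bit path above (illustrative values):
   fetching a char (size 1) from valist == 0x100 computes
   0x100 - 1 = 0xff, aligns with & -4 to 0xfc (the new valist), then
   adds ofs = (8 - 1) % 4 = 3, so the byte is read from 0xff -- the
   right-justified end of its 4-byte slot, matching the downward
   padding chosen by pa_function_arg_padding.  */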

/* True if MODE is valid for the target.  By "valid", we mean able to
   be manipulated in non-trivial ways.  In particular, this means all
   the arithmetic is supported.

   Currently, TImode is not valid as the HP 64-bit runtime documentation
   doesn't document the alignment and calling conventions for this type.
   Thus, we return false when PRECISION is 2 * BITS_PER_WORD and
   2 * BITS_PER_WORD isn't equal to LONG_LONG_TYPE_SIZE.  */

static bool
pa_scalar_mode_supported_p (enum machine_mode mode)
{
  int precision = GET_MODE_PRECISION (mode);

  switch (GET_MODE_CLASS (mode))
    {
    case MODE_PARTIAL_INT:
    case MODE_INT:
      if (precision == CHAR_TYPE_SIZE)
        return true;
      if (precision == SHORT_TYPE_SIZE)
        return true;
      if (precision == INT_TYPE_SIZE)
        return true;
      if (precision == LONG_TYPE_SIZE)
        return true;
      if (precision == LONG_LONG_TYPE_SIZE)
        return true;
      return false;

    case MODE_FLOAT:
      if (precision == FLOAT_TYPE_SIZE)
        return true;
      if (precision == DOUBLE_TYPE_SIZE)
        return true;
      if (precision == LONG_DOUBLE_TYPE_SIZE)
        return true;
      return false;

    case MODE_DECIMAL_FLOAT:
      return false;

    default:
      gcc_unreachable ();
    }
}

/* Return TRUE if INSN, a jump insn, has an unfilled delay slot and
   it branches into the delay slot.  Otherwise, return FALSE.  */

static bool
branch_to_delay_slot_p (rtx insn)
{
  rtx jump_insn;

  if (dbr_sequence_length ())
    return FALSE;

  jump_insn = next_active_insn (JUMP_LABEL (insn));
  while (insn)
    {
      insn = next_active_insn (insn);
      if (jump_insn == insn)
        return TRUE;

      /* We can't rely on the length of asms.  So, we return FALSE when
         the branch is followed by an asm.  */
      if (!insn
          || GET_CODE (PATTERN (insn)) == ASM_INPUT
          || extract_asm_operands (PATTERN (insn)) != NULL_RTX
          || get_attr_length (insn) > 0)
        break;
    }

  return FALSE;
}

/* Return TRUE if INSN, a forward jump insn, needs a nop in its delay slot.

   This occurs when INSN has an unfilled delay slot and is followed
   by an asm.  Disaster can occur if the asm is empty and the jump
   branches into the delay slot.  So, we add a nop in the delay slot
   when this occurs.  */

static bool
branch_needs_nop_p (rtx insn)
{
  rtx jump_insn;

  if (dbr_sequence_length ())
    return FALSE;

  jump_insn = next_active_insn (JUMP_LABEL (insn));
  while (insn)
    {
      insn = next_active_insn (insn);
      if (!insn || jump_insn == insn)
        return TRUE;

      if (!(GET_CODE (PATTERN (insn)) == ASM_INPUT
           || extract_asm_operands (PATTERN (insn)) != NULL_RTX)
          && get_attr_length (insn) > 0)
        break;
    }

  return FALSE;
}

/* Return TRUE if INSN, a forward jump insn, can use nullification
   to skip the following instruction.  This avoids an extra cycle due
   to a mis-predicted branch when we fall through.  */

static bool
use_skip_p (rtx insn)
{
  rtx jump_insn = next_active_insn (JUMP_LABEL (insn));

  while (insn)
    {
      insn = next_active_insn (insn);

      /* We can't rely on the length of asms, so we can't skip asms.  */
      if (!insn
          || GET_CODE (PATTERN (insn)) == ASM_INPUT
          || extract_asm_operands (PATTERN (insn)) != NULL_RTX)
        break;
      if (get_attr_length (insn) == 4
          && jump_insn == next_active_insn (insn))
        return TRUE;
      if (get_attr_length (insn) > 0)
        break;
    }

  return FALSE;
}
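
/* Illustrative use of the test above (schematic operands): a forward
   branch over exactly one 4-byte insn, e.g.

        cmpb,=,n %r4,%r5,L$1      ; branch over the add when equal
        add %r6,%r7,%r8
   L$1:

   can instead be emitted as a compare-and-clear that nullifies the add,

        cmpclr,= %r4,%r5,%r0      ; skip the add when %r4 == %r5
        add %r6,%r7,%r8

   avoiding the taken-branch penalty on the fall-through path.  */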

/* This routine handles all the normal conditional branch sequences we
   might need to generate.  It handles compare immediate vs compare
   register, nullification of delay slots, varying length branches,
   negated branches, and all combinations of the above.  It returns the
   output appropriate to emit the branch corresponding to all given
   parameters.  */

const char *
pa_output_cbranch (rtx *operands, int negated, rtx insn)
{
  static char buf[100];
  bool useskip;
  int nullify = INSN_ANNULLED_BRANCH_P (insn);
  int length = get_attr_length (insn);
  int xdelay;

  /* A conditional branch to the following instruction (e.g. the delay slot)
     is asking for a disaster.  This can happen when not optimizing and
     when jump optimization fails.

     While it is usually safe to emit nothing, this can fail if the
     preceding instruction is a nullified branch with an empty delay
     slot and the same branch target as this branch.  We could check
     for this but jump optimization should eliminate nop jumps.  It
     is always safe to emit a nop.  */
  if (branch_to_delay_slot_p (insn))
    return "nop";

  /* The doubleword form of the cmpib instruction doesn't have the LEU
     and GTU conditions while the cmpb instruction does.  Since we accept
     zero for cmpb, we must ensure that we use cmpb for the comparison.  */
  if (GET_MODE (operands[1]) == DImode && operands[2] == const0_rtx)
    operands[2] = gen_rtx_REG (DImode, 0);
  if (GET_MODE (operands[2]) == DImode && operands[1] == const0_rtx)
    operands[1] = gen_rtx_REG (DImode, 0);

  /* If this is a long branch with its delay slot unfilled, set `nullify'
     as it can nullify the delay slot and save a nop.  */
  if (length == 8 && dbr_sequence_length () == 0)
    nullify = 1;

  /* If this is a short forward conditional branch which did not get
     its delay slot filled, the delay slot can still be nullified.  */
  if (! nullify && length == 4 && dbr_sequence_length () == 0)
    nullify = forward_branch_p (insn);

  /* A forward branch over a single nullified insn can be done with a
     comclr instruction.  This avoids a single cycle penalty due to a
     mis-predicted branch if we fall through (branch not taken).  */
  useskip = (length == 4 && nullify) ? use_skip_p (insn) : FALSE;

  switch (length)
    {
      /* All short conditional branches except backwards with an unfilled
         delay slot.  */
      case 4:
        if (useskip)
          strcpy (buf, "{com%I2clr,|cmp%I2clr,}");
        else
          strcpy (buf, "{com%I2b,|cmp%I2b,}");
        if (GET_MODE (operands[1]) == DImode)
          strcat (buf, "*");
        if (negated)
          strcat (buf, "%B3");
        else
          strcat (buf, "%S3");
        if (useskip)
          strcat (buf, " %2,%r1,%%r0");
        else if (nullify)
          {
            if (branch_needs_nop_p (insn))
              strcat (buf, ",n %2,%r1,%0%#");
            else
              strcat (buf, ",n %2,%r1,%0");
          }
        else
          strcat (buf, " %2,%r1,%0");
        break;

     /* All long conditionals.  Note a short backward branch with an
        unfilled delay slot is treated just like a long backward branch
        with an unfilled delay slot.  */
      case 8:
        /* Handle weird backwards branch with a filled delay slot
           which is nullified.  */
        if (dbr_sequence_length () != 0
            && ! forward_branch_p (insn)
            && nullify)
          {
            strcpy (buf, "{com%I2b,|cmp%I2b,}");
            if (GET_MODE (operands[1]) == DImode)
              strcat (buf, "*");
            if (negated)
              strcat (buf, "%S3");
            else
              strcat (buf, "%B3");
            strcat (buf, ",n %2,%r1,.+12\n\tb %0");
          }
        /* Handle short backwards branch with an unfilled delay slot.
           Using a comb;nop rather than comiclr;bl saves 1 cycle for both
           taken and untaken branches.  */
        else if (dbr_sequence_length () == 0
                 && ! forward_branch_p (insn)
                 && INSN_ADDRESSES_SET_P ()
                 && VAL_14_BITS_P (INSN_ADDRESSES (INSN_UID (JUMP_LABEL (insn)))
                                    - INSN_ADDRESSES (INSN_UID (insn)) - 8))
          {
            strcpy (buf, "{com%I2b,|cmp%I2b,}");
            if (GET_MODE (operands[1]) == DImode)
              strcat (buf, "*");
            if (negated)
              strcat (buf, "%B3 %2,%r1,%0%#");
            else
              strcat (buf, "%S3 %2,%r1,%0%#");
          }
        else
          {
            strcpy (buf, "{com%I2clr,|cmp%I2clr,}");
            if (GET_MODE (operands[1]) == DImode)
              strcat (buf, "*");
            if (negated)
              strcat (buf, "%S3");
            else
              strcat (buf, "%B3");
            if (nullify)
              strcat (buf, " %2,%r1,%%r0\n\tb,n %0");
            else
              strcat (buf, " %2,%r1,%%r0\n\tb %0");
          }
        break;

      default:
        /* The reversed conditional branch must branch over one additional
           instruction if the delay slot is filled and needs to be extracted
           by pa_output_lbranch.  If the delay slot is empty or this is a
           nullified forward branch, the instruction after the reversed
           condition branch must be nullified.  */
        if (dbr_sequence_length () == 0
            || (nullify && forward_branch_p (insn)))
          {
            nullify = 1;
            xdelay = 0;
            operands[4] = GEN_INT (length);
          }
        else
          {
            xdelay = 1;
            operands[4] = GEN_INT (length + 4);
          }

        /* Create a reversed conditional branch which branches around
           the following insns.  */
        if (GET_MODE (operands[1]) != DImode)
          {
            if (nullify)
              {
                if (negated)
                  strcpy (buf,
                    "{com%I2b,%S3,n %2,%r1,.+%4|cmp%I2b,%S3,n %2,%r1,.+%4}");
                else
                  strcpy (buf,
                    "{com%I2b,%B3,n %2,%r1,.+%4|cmp%I2b,%B3,n %2,%r1,.+%4}");
              }
            else
              {
                if (negated)
                  strcpy (buf,
                    "{com%I2b,%S3 %2,%r1,.+%4|cmp%I2b,%S3 %2,%r1,.+%4}");
                else
                  strcpy (buf,
                    "{com%I2b,%B3 %2,%r1,.+%4|cmp%I2b,%B3 %2,%r1,.+%4}");
              }
          }
        else
          {
            if (nullify)
              {
                if (negated)
                  strcpy (buf,
                    "{com%I2b,*%S3,n %2,%r1,.+%4|cmp%I2b,*%S3,n %2,%r1,.+%4}");
                else
                  strcpy (buf,
                    "{com%I2b,*%B3,n %2,%r1,.+%4|cmp%I2b,*%B3,n %2,%r1,.+%4}");
              }
            else
              {
                if (negated)
                  strcpy (buf,
                    "{com%I2b,*%S3 %2,%r1,.+%4|cmp%I2b,*%S3 %2,%r1,.+%4}");
                else
                  strcpy (buf,
                    "{com%I2b,*%B3 %2,%r1,.+%4|cmp%I2b,*%B3 %2,%r1,.+%4}");
              }
          }

        output_asm_insn (buf, operands);
        return pa_output_lbranch (operands[0], insn, xdelay);
    }
  return buf;
}
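
/* Illustrative outputs from the templates above (schematic operands,
   added commentary): the 4-byte nullified forward form prints as

        cmpb,=,n %r25,%r26,L$0040

   while the 8-byte fall-through case becomes a compare-and-clear
   followed by an unconditional branch:

        cmpclr,<> %r25,%r26,%r0
        b,n L$0040
*/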

/* This routine handles output of long unconditional branches that
   exceed the maximum range of a simple branch instruction.  Since
   we don't have a register available for the branch, we save register
   %r1 in the frame marker, load the branch destination DEST into %r1,
   execute the branch, and restore %r1 in the delay slot of the branch.

   Since long branches may have an insn in the delay slot and the
   delay slot is used to restore %r1, we in general need to extract
   this insn and execute it before the branch.  However, to facilitate
   use of this function by conditional branches, we also provide an
   option to not extract the delay insn so that it will be emitted
   after the long branch.  So, if there is an insn in the delay slot,
   it is extracted if XDELAY is nonzero.

   The lengths of the various long-branch sequences are 20, 16 and 24
   bytes for the portable runtime, non-PIC and PIC cases, respectively.  */

const char *
pa_output_lbranch (rtx dest, rtx insn, int xdelay)
{
  rtx xoperands[2];

  xoperands[0] = dest;

  /* First, free up the delay slot.  */
  if (xdelay && dbr_sequence_length () != 0)
    {
      /* We can't handle a jump in the delay slot.  */
      gcc_assert (GET_CODE (NEXT_INSN (insn)) != JUMP_INSN);

      final_scan_insn (NEXT_INSN (insn), asm_out_file,
                       optimize, 0, NULL);

      /* Now delete the delay insn.  */
      SET_INSN_DELETED (NEXT_INSN (insn));
    }

  /* Output an insn to save %r1.  The runtime documentation doesn't
     specify whether the "Clean Up" slot in the caller's frame can
     be clobbered by the callee.  It isn't copied by HP's builtin
     alloca, so this suggests that it can be clobbered if necessary.
     The "Static Link" location is copied by HP builtin alloca, so
     we avoid using it.  Using the cleanup slot might be a problem
     if we have to interoperate with languages that pass cleanup
     information.  However, it should be possible to handle these
     situations with GCC's asm feature.

     The "Current RP" slot is reserved for the called procedure, so
     we try to use it when we don't have a frame of our own.  It's
     rather unlikely that we won't have a frame when we need to emit
     a very long branch.

     Really the way to go long term is a register scavenger; goto
     the target of the jump and find a register which we can use
     as a scratch to hold the value in %r1.  Then, we wouldn't have
     to free up the delay slot or clobber a slot that may be needed
     for other purposes.  */
  if (TARGET_64BIT)
    {
      if (actual_fsize == 0 && !df_regs_ever_live_p (2))
        /* Use the return pointer slot in the frame marker.  */
        output_asm_insn ("std %%r1,-16(%%r30)", xoperands);
      else
        /* Use the slot at -40 in the frame marker since HP builtin
           alloca doesn't copy it.  */
        output_asm_insn ("std %%r1,-40(%%r30)", xoperands);
    }
  else
    {
      if (actual_fsize == 0 && !df_regs_ever_live_p (2))
        /* Use the return pointer slot in the frame marker.  */
        output_asm_insn ("stw %%r1,-20(%%r30)", xoperands);
      else
        /* Use the "Clean Up" slot in the frame marker.  In GCC,
           the only other use of this location is for copying a
           floating point double argument from a floating-point
           register to two general registers.  The copy is done
           as an "atomic" operation when outputting a call, so it
           won't interfere with our using the location here.  */
        output_asm_insn ("stw %%r1,-12(%%r30)", xoperands);
    }

  if (TARGET_PORTABLE_RUNTIME)
    {
      output_asm_insn ("ldil L'%0,%%r1", xoperands);
      output_asm_insn ("ldo R'%0(%%r1),%%r1", xoperands);
      output_asm_insn ("bv %%r0(%%r1)", xoperands);
    }
  else if (flag_pic)
    {
      output_asm_insn ("{bl|b,l} .+8,%%r1", xoperands);
      if (TARGET_SOM || !TARGET_GAS)
        {
          xoperands[1] = gen_label_rtx ();
          output_asm_insn ("addil L'%l0-%l1,%%r1", xoperands);
          targetm.asm_out.internal_label (asm_out_file, "L",
                                          CODE_LABEL_NUMBER (xoperands[1]));
          output_asm_insn ("ldo R'%l0-%l1(%%r1),%%r1", xoperands);
        }
      else
        {
          output_asm_insn ("addil L'%l0-$PIC_pcrel$0+4,%%r1", xoperands);
          output_asm_insn ("ldo R'%l0-$PIC_pcrel$0+8(%%r1),%%r1", xoperands);
        }
      output_asm_insn ("bv %%r0(%%r1)", xoperands);
    }
  else
    /* Now output a very long branch to the original target.  */
    output_asm_insn ("ldil L'%l0,%%r1\n\tbe R'%l0(%%sr4,%%r1)", xoperands);

  /* Now restore the value of %r1 in the delay slot.  */
  if (TARGET_64BIT)
    {
      if (actual_fsize == 0 && !df_regs_ever_live_p (2))
        return "ldd -16(%%r30),%%r1";
      else
        return "ldd -40(%%r30),%%r1";
    }
  else
    {
      if (actual_fsize == 0 && !df_regs_ever_live_p (2))
        return "ldw -20(%%r30),%%r1";
      else
        return "ldw -12(%%r30),%%r1";
    }
}
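
/* Illustrative 32-bit non-PIC sequence produced above for a function
   with a frame (16 bytes, matching the length comment before the
   function; schematic target):

        stw %r1,-12(%r30)          ; save %r1 in the "Clean Up" slot
        ldil L'target,%r1          ; left half of the target address
        be R'target(%sr4,%r1)      ; inter-space branch to the target
        ldw -12(%r30),%r1          ; delay slot: restore %r1
*/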

/* This routine handles all the branch-on-bit conditional branch sequences we
   might need to generate.  It handles nullification of delay slots,
   varying length branches, negated branches and all combinations of the
   above.  It returns the appropriate output template to emit the branch.  */

const char *
pa_output_bb (rtx *operands ATTRIBUTE_UNUSED, int negated, rtx insn, int which)
{
  static char buf[100];
  bool useskip;
  int nullify = INSN_ANNULLED_BRANCH_P (insn);
  int length = get_attr_length (insn);
  int xdelay;

  /* A conditional branch to the following instruction (e.g. the delay slot) is
     asking for a disaster.  I do not think this can happen as this pattern
     is only used when optimizing; jump optimization should eliminate the
     jump.  But be prepared just in case.  */

  if (branch_to_delay_slot_p (insn))
    return "nop";

  /* If this is a long branch with its delay slot unfilled, set `nullify'
     as it can nullify the delay slot and save a nop.  */
  if (length == 8 && dbr_sequence_length () == 0)
    nullify = 1;

  /* If this is a short forward conditional branch which did not get
     its delay slot filled, the delay slot can still be nullified.  */
  if (! nullify && length == 4 && dbr_sequence_length () == 0)
    nullify = forward_branch_p (insn);

  /* A forward branch over a single nullified insn can be done with an
     extrs instruction.  This avoids a single cycle penalty due to a
     mis-predicted branch if we fall through (branch not taken).  */
  useskip = (length == 4 && nullify) ? use_skip_p (insn) : FALSE;

  switch (length)
    {

      /* All short conditional branches except backwards with an unfilled
         delay slot.  */
      case 4:
        if (useskip)
          strcpy (buf, "{extrs,|extrw,s,}");
        else
          strcpy (buf, "bb,");
        if (useskip && GET_MODE (operands[0]) == DImode)
          strcpy (buf, "extrd,s,*");
        else if (GET_MODE (operands[0]) == DImode)
          strcpy (buf, "bb,*");
        if ((which == 0 && negated)
             || (which == 1 && ! negated))
          strcat (buf, ">=");
        else
          strcat (buf, "<");
        if (useskip)
          strcat (buf, " %0,%1,1,%%r0");
        else if (nullify && negated)
          {
            if (branch_needs_nop_p (insn))
              strcat (buf, ",n %0,%1,%3%#");
            else
              strcat (buf, ",n %0,%1,%3");
          }
        else if (nullify && ! negated)
          {
            if (branch_needs_nop_p (insn))
              strcat (buf, ",n %0,%1,%2%#");
            else
              strcat (buf, ",n %0,%1,%2");
          }
        else if (! nullify && negated)
          strcat (buf, " %0,%1,%3");
        else if (! nullify && ! negated)
          strcat (buf, " %0,%1,%2");
        break;

     /* All long conditionals.  Note a short backward branch with an
        unfilled delay slot is treated just like a long backward branch
        with an unfilled delay slot.  */
      case 8:
        /* Handle weird backwards branch with a filled delay slot
           which is nullified.  */
        if (dbr_sequence_length () != 0
            && ! forward_branch_p (insn)
            && nullify)
          {
            strcpy (buf, "bb,");
            if (GET_MODE (operands[0]) == DImode)
              strcat (buf, "*");
            if ((which == 0 && negated)
                || (which == 1 && ! negated))
              strcat (buf, "<");
            else
              strcat (buf, ">=");
            if (negated)
              strcat (buf, ",n %0,%1,.+12\n\tb %3");
            else
              strcat (buf, ",n %0,%1,.+12\n\tb %2");
          }
        /* Handle short backwards branch with an unfilled delay slot.
           Using a bb;nop rather than extrs;bl saves 1 cycle for both
           taken and untaken branches.  */
        else if (dbr_sequence_length () == 0
                 && ! forward_branch_p (insn)
                 && INSN_ADDRESSES_SET_P ()
                 && VAL_14_BITS_P (INSN_ADDRESSES (INSN_UID (JUMP_LABEL (insn)))
                                    - INSN_ADDRESSES (INSN_UID (insn)) - 8))
          {
            strcpy (buf, "bb,");
            if (GET_MODE (operands[0]) == DImode)
              strcat (buf, "*");
            if ((which == 0 && negated)
                || (which == 1 && ! negated))
              strcat (buf, ">=");
            else
              strcat (buf, "<");
            if (negated)
              strcat (buf, " %0,%1,%3%#");
            else
              strcat (buf, " %0,%1,%2%#");
          }
        else
          {
            if (GET_MODE (operands[0]) == DImode)
              strcpy (buf, "extrd,s,*");
            else
              strcpy (buf, "{extrs,|extrw,s,}");
            if ((which == 0 && negated)
                || (which == 1 && ! negated))
              strcat (buf, "<");
            else
              strcat (buf, ">=");
            if (nullify && negated)
              strcat (buf, " %0,%1,1,%%r0\n\tb,n %3");
            else if (nullify && ! negated)
              strcat (buf, " %0,%1,1,%%r0\n\tb,n %2");
            else if (negated)
              strcat (buf, " %0,%1,1,%%r0\n\tb %3");
            else
              strcat (buf, " %0,%1,1,%%r0\n\tb %2");
          }
        break;

      default:
        /* The reversed conditional branch must branch over one additional
           instruction if the delay slot is filled and needs to be extracted
           by pa_output_lbranch.  If the delay slot is empty or this is a
           nullified forward branch, the instruction after the reversed
           condition branch must be nullified.  */
        if (dbr_sequence_length () == 0
            || (nullify && forward_branch_p (insn)))
          {
            nullify = 1;
            xdelay = 0;
            operands[4] = GEN_INT (length);
          }
        else
          {
            xdelay = 1;
            operands[4] = GEN_INT (length + 4);
          }

        if (GET_MODE (operands[0]) == DImode)
          strcpy (buf, "bb,*");
        else
          strcpy (buf, "bb,");
        if ((which == 0 && negated)
            || (which == 1 && !negated))
          strcat (buf, "<");
        else
          strcat (buf, ">=");
        if (nullify)
          strcat (buf, ",n %0,%1,.+%4");
        else
          strcat (buf, " %0,%1,.+%4");
        output_asm_insn (buf, operands);
        return pa_output_lbranch (negated ? operands[3] : operands[2],
                                  insn, xdelay);
    }
  return buf;
}
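
/* Illustrative short forms from the case 4 code above (schematic
   operands): branching when bit 31 (the least significant bit on a
   32-bit target) of %r26 is set prints as

        bb,< %r26,31,L$0030

   and the skip variant extracts the bit instead, nullifying the next
   insn when the condition holds:

        extrw,s,< %r26,31,1,%r0
*/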

/* This routine handles all the branch-on-variable-bit conditional branch
   sequences we might need to generate.  It handles nullification of delay
   slots, varying length branches, negated branches and all combinations
   of the above.  It returns the appropriate output template to emit the
   branch.  */

const char *
pa_output_bvb (rtx *operands ATTRIBUTE_UNUSED, int negated, rtx insn,
               int which)
{
  static char buf[100];
  bool useskip;
  int nullify = INSN_ANNULLED_BRANCH_P (insn);
  int length = get_attr_length (insn);
  int xdelay;

  /* A conditional branch to the following instruction (e.g. the delay slot) is
     asking for a disaster.  I do not think this can happen as this pattern
     is only used when optimizing; jump optimization should eliminate the
     jump.  But be prepared just in case.  */

  if (branch_to_delay_slot_p (insn))
    return "nop";

  /* If this is a long branch with its delay slot unfilled, set `nullify'
     as it can nullify the delay slot and save a nop.  */
  if (length == 8 && dbr_sequence_length () == 0)
    nullify = 1;

  /* If this is a short forward conditional branch which did not get
     its delay slot filled, the delay slot can still be nullified.  */
  if (! nullify && length == 4 && dbr_sequence_length () == 0)
    nullify = forward_branch_p (insn);

  /* A forward branch over a single nullified insn can be done with an
     extrs instruction.  This avoids a single cycle penalty due to a
     mis-predicted branch if we fall through (branch not taken).  */
  useskip = (length == 4 && nullify) ? use_skip_p (insn) : FALSE;

  switch (length)
    {

      /* All short conditional branches except backwards with an unfilled
         delay slot.  */
      case 4:
        if (useskip)
          strcpy (buf, "{vextrs,|extrw,s,}");
        else
          strcpy (buf, "{bvb,|bb,}");
        if (useskip && GET_MODE (operands[0]) == DImode)
          strcpy (buf, "extrd,s,*");
        else if (GET_MODE (operands[0]) == DImode)
          strcpy (buf, "bb,*");
        if ((which == 0 && negated)
             || (which == 1 && ! negated))
          strcat (buf, ">=");
        else
          strcat (buf, "<");
        if (useskip)
          strcat (buf, "{ %0,1,%%r0| %0,%%sar,1,%%r0}");
        else if (nullify && negated)
          {
            if (branch_needs_nop_p (insn))
              strcat (buf, "{,n %0,%3%#|,n %0,%%sar,%3%#}");
            else
              strcat (buf, "{,n %0,%3|,n %0,%%sar,%3}");
          }
        else if (nullify && ! negated)
          {
            if (branch_needs_nop_p (insn))
              strcat (buf, "{,n %0,%2%#|,n %0,%%sar,%2%#}");
            else
              strcat (buf, "{,n %0,%2|,n %0,%%sar,%2}");
          }
        else if (! nullify && negated)
          strcat (buf, "{ %0,%3| %0,%%sar,%3}");
        else if (! nullify && ! negated)
          strcat (buf, "{ %0,%2| %0,%%sar,%2}");
        break;

     /* All long conditionals.  Note a short backward branch with an
        unfilled delay slot is treated just like a long backward branch
        with an unfilled delay slot.  */
      case 8:
        /* Handle weird backwards branch with a filled delay slot
           which is nullified.  */
        if (dbr_sequence_length () != 0
            && ! forward_branch_p (insn)
            && nullify)
          {
            strcpy (buf, "{bvb,|bb,}");
            if (GET_MODE (operands[0]) == DImode)
              strcat (buf, "*");
            if ((which == 0 && negated)
                || (which == 1 && ! negated))
              strcat (buf, "<");
            else
              strcat (buf, ">=");
            if (negated)
              strcat (buf, "{,n %0,.+12\n\tb %3|,n %0,%%sar,.+12\n\tb %3}");
            else
              strcat (buf, "{,n %0,.+12\n\tb %2|,n %0,%%sar,.+12\n\tb %2}");
          }
        /* Handle short backwards branch with an unfilled delay slot.
           Using a bb;nop rather than extrs;bl saves 1 cycle for both
           taken and untaken branches.  */
        else if (dbr_sequence_length () == 0
                 && ! forward_branch_p (insn)
                 && INSN_ADDRESSES_SET_P ()
                 && VAL_14_BITS_P (INSN_ADDRESSES (INSN_UID (JUMP_LABEL (insn)))
                                    - INSN_ADDRESSES (INSN_UID (insn)) - 8))
          {
            strcpy (buf, "{bvb,|bb,}");
            if (GET_MODE (operands[0]) == DImode)
              strcat (buf, "*");
            if ((which == 0 && negated)
                || (which == 1 && ! negated))
              strcat (buf, ">=");
            else
              strcat (buf, "<");
            if (negated)
              strcat (buf, "{ %0,%3%#| %0,%%sar,%3%#}");
            else
              strcat (buf, "{ %0,%2%#| %0,%%sar,%2%#}");
          }
        else
          {
            strcpy (buf, "{vextrs,|extrw,s,}");
            if (GET_MODE (operands[0]) == DImode)
              strcpy (buf, "extrd,s,*");
            if ((which == 0 && negated)
                || (which == 1 && ! negated))
              strcat (buf, "<");
            else
              strcat (buf, ">=");
            if (nullify && negated)
              strcat (buf, "{ %0,1,%%r0\n\tb,n %3| %0,%%sar,1,%%r0\n\tb,n %3}");
            else if (nullify && ! negated)
              strcat (buf, "{ %0,1,%%r0\n\tb,n %2| %0,%%sar,1,%%r0\n\tb,n %2}");
            else if (negated)
              strcat (buf, "{ %0,1,%%r0\n\tb %3| %0,%%sar,1,%%r0\n\tb %3}");
            else
              strcat (buf, "{ %0,1,%%r0\n\tb %2| %0,%%sar,1,%%r0\n\tb %2}");
          }
        break;

      default:
        /* The reversed conditional branch must branch over one additional
           instruction if the delay slot is filled and needs to be extracted
           by pa_output_lbranch.  If the delay slot is empty or this is a
           nullified forward branch, the instruction after the reversed
           condition branch must be nullified.  */
        if (dbr_sequence_length () == 0
            || (nullify && forward_branch_p (insn)))
          {
            nullify = 1;
            xdelay = 0;
            operands[4] = GEN_INT (length);
          }
        else
          {
            xdelay = 1;
            operands[4] = GEN_INT (length + 4);
          }

        if (GET_MODE (operands[0]) == DImode)
          strcpy (buf, "bb,*");
        else
          strcpy (buf, "{bvb,|bb,}");
        if ((which == 0 && negated)
            || (which == 1 && !negated))
          strcat (buf, "<");
        else
          strcat (buf, ">=");
        if (nullify)
          strcat (buf, ",n {%0,.+%4|%0,%%sar,.+%4}");
        else
          strcat (buf, " {%0,.+%4|%0,%%sar,.+%4}");
        output_asm_insn (buf, operands);
        return pa_output_lbranch (negated ? operands[3] : operands[2],
                                  insn, xdelay);
    }
  return buf;
}

/* Return the output template for emitting a dbra type insn.

   Note it may perform some output operations on its own before
   returning the final output string.  */
const char *
pa_output_dbra (rtx *operands, rtx insn, int which_alternative)
{
  int length = get_attr_length (insn);

  /* A conditional branch to the following instruction (e.g. the delay slot) is
     asking for a disaster.  Be prepared!  */

  if (branch_to_delay_slot_p (insn))
    {
      if (which_alternative == 0)
        return "ldo %1(%0),%0";
      else if (which_alternative == 1)
        {
          output_asm_insn ("{fstws|fstw} %0,-16(%%r30)", operands);
          output_asm_insn ("ldw -16(%%r30),%4", operands);
          output_asm_insn ("ldo %1(%4),%4\n\tstw %4,-16(%%r30)", operands);
          return "{fldws|fldw} -16(%%r30),%0";
        }
      else
        {
          output_asm_insn ("ldw %0,%4", operands);
          return "ldo %1(%4),%4\n\tstw %4,%0";
        }
    }

  if (which_alternative == 0)
    {
      int nullify = INSN_ANNULLED_BRANCH_P (insn);
      int xdelay;

      /* If this is a long branch with its delay slot unfilled, set `nullify'
         as it can nullify the delay slot and save a nop.  */
      if (length == 8 && dbr_sequence_length () == 0)
        nullify = 1;

      /* If this is a short forward conditional branch which did not get
         its delay slot filled, the delay slot can still be nullified.  */
      if (! nullify && length == 4 && dbr_sequence_length () == 0)
        nullify = forward_branch_p (insn);

      switch (length)
        {
        case 4:
          if (nullify)
            {
              if (branch_needs_nop_p (insn))
                return "addib,%C2,n %1,%0,%3%#";
              else
                return "addib,%C2,n %1,%0,%3";
            }
          else
            return "addib,%C2 %1,%0,%3";

        case 8:
          /* Handle weird backwards branch with a filled delay slot
             which is nullified.  */
          if (dbr_sequence_length () != 0
              && ! forward_branch_p (insn)
              && nullify)
            return "addib,%N2,n %1,%0,.+12\n\tb %3";
          /* Handle short backwards branch with an unfilled delay slot.
             Using an addb;nop rather than addi;bl saves 1 cycle for both
             taken and untaken branches.  */
          else if (dbr_sequence_length () == 0
                   && ! forward_branch_p (insn)
                   && INSN_ADDRESSES_SET_P ()
                   && VAL_14_BITS_P (INSN_ADDRESSES (INSN_UID (JUMP_LABEL (insn)))
                                      - INSN_ADDRESSES (INSN_UID (insn)) - 8))
              return "addib,%C2 %1,%0,%3%#";

          /* Handle normal cases.  */
          if (nullify)
            return "addi,%N2 %1,%0,%0\n\tb,n %3";
          else
            return "addi,%N2 %1,%0,%0\n\tb %3";

        default:
          /* The reversed conditional branch must branch over one additional
             instruction if the delay slot is filled and needs to be extracted
             by pa_output_lbranch.  If the delay slot is empty or this is a
             nullified forward branch, the instruction after the reversed
             condition branch must be nullified.  */
          if (dbr_sequence_length () == 0
              || (nullify && forward_branch_p (insn)))
            {
              nullify = 1;
              xdelay = 0;
              operands[4] = GEN_INT (length);
            }
          else
            {
              xdelay = 1;
              operands[4] = GEN_INT (length + 4);
            }

          if (nullify)
            output_asm_insn ("addib,%N2,n %1,%0,.+%4", operands);
          else
            output_asm_insn ("addib,%N2 %1,%0,.+%4", operands);

          return pa_output_lbranch (operands[3], insn, xdelay);
        }

    }
  /* Deal with gross reload from FP register case.  */
  else if (which_alternative == 1)
    {
      /* Move loop counter from FP register to MEM then into a GR,
         increment the GR, store the GR into MEM, and finally reload
         the FP register from MEM from within the branch's delay slot.  */
      output_asm_insn ("{fstws|fstw} %0,-16(%%r30)\n\tldw -16(%%r30),%4",
                       operands);
      output_asm_insn ("ldo %1(%4),%4\n\tstw %4,-16(%%r30)", operands);
      if (length == 24)
        return "{comb|cmpb},%S2 %%r0,%4,%3\n\t{fldws|fldw} -16(%%r30),%0";
      else if (length == 28)
        return "{comclr|cmpclr},%B2 %%r0,%4,%%r0\n\tb %3\n\t{fldws|fldw} -16(%%r30),%0";
      else
        {
          operands[5] = GEN_INT (length - 16);
          output_asm_insn ("{comb|cmpb},%B2 %%r0,%4,.+%5", operands);
          output_asm_insn ("{fldws|fldw} -16(%%r30),%0", operands);
          return pa_output_lbranch (operands[3], insn, 0);
        }
    }
  /* Deal with gross reload from memory case.  */
  else
    {
      /* Reload loop counter from memory, the store back to memory
         happens in the branch's delay slot.  */
      output_asm_insn ("ldw %0,%4", operands);
      if (length == 12)
        return "addib,%C2 %1,%4,%3\n\tstw %4,%0";
      else if (length == 16)
        return "addi,%N2 %1,%4,%4\n\tb %3\n\tstw %4,%0";
      else
        {
          operands[5] = GEN_INT (length - 4);
          output_asm_insn ("addib,%N2 %1,%4,.+%5\n\tstw %4,%0", operands);
          return pa_output_lbranch (operands[3], insn, 0);
        }
    }
}
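
/* Illustrative decrement-and-branch loop emitted by the register
   alternative above (schematic operands):

        addib,> -1,%r3,L$loop      ; %r3 -= 1, branch while result > 0

   which combines the induction-variable update and the conditional
   branch in a single instruction.  */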
7183
 
7184
/* Return the output template for emitting a movb type insn.
7185
 
7186
   Note it may perform some output operations on its own before
7187
   returning the final output string.  */
7188
const char *
7189
pa_output_movb (rtx *operands, rtx insn, int which_alternative,
7190
             int reverse_comparison)
7191
{
7192
  int length = get_attr_length (insn);
7193
 
7194
  /* A conditional branch to the following instruction (e.g. the delay slot) is
7195
     asking for a disaster.  Be prepared!  */
7196
 
7197
  if (branch_to_delay_slot_p (insn))
7198
    {
7199
      if (which_alternative == 0)
7200
        return "copy %1,%0";
7201
      else if (which_alternative == 1)
7202
        {
7203
          output_asm_insn ("stw %1,-16(%%r30)", operands);
7204
          return "{fldws|fldw} -16(%%r30),%0";
7205
        }
7206
      else if (which_alternative == 2)
7207
        return "stw %1,%0";
7208
      else
7209
        return "mtsar %r1";
7210
    }
7211
 
7212
  /* Support the second variant.  */
7213
  if (reverse_comparison)
7214
    PUT_CODE (operands[2], reverse_condition (GET_CODE (operands[2])));
7215
 
7216
  if (which_alternative == 0)
7217
    {
7218
      int nullify = INSN_ANNULLED_BRANCH_P (insn);
7219
      int xdelay;
7220
 
7221
      /* If this is a long branch with its delay slot unfilled, set `nullify'
7222
         as it can nullify the delay slot and save a nop.  */
7223
      if (length == 8 && dbr_sequence_length () == 0)
7224
        nullify = 1;
7225
 
7226
      /* If this is a short forward conditional branch which did not get
7227
         its delay slot filled, the delay slot can still be nullified.  */
7228
      if (! nullify && length == 4 && dbr_sequence_length () == 0)
7229
        nullify = forward_branch_p (insn);
7230
 
7231
      switch (length)
7232
        {
7233
        case 4:
7234
          if (nullify)
7235
            {
7236
              if (branch_needs_nop_p (insn))
7237
                return "movb,%C2,n %1,%0,%3%#";
7238
              else
7239
                return "movb,%C2,n %1,%0,%3";
7240
            }
7241
          else
7242
            return "movb,%C2 %1,%0,%3";
7243
 
7244
        case 8:
7245
          /* Handle weird backwards branch with a filled delay slot
7246
             which is nullified.  */
7247
          if (dbr_sequence_length () != 0
7248
              && ! forward_branch_p (insn)
7249
              && nullify)
7250
            return "movb,%N2,n %1,%0,.+12\n\tb %3";
7251
 
7252
          /* Handle short backwards branch with an unfilled delay slot.
7253
             Using a movb;nop rather than or;bl saves 1 cycle for both
7254
             taken and untaken branches.  */
7255
          else if (dbr_sequence_length () == 0
7256
                   && ! forward_branch_p (insn)
7257
                   && INSN_ADDRESSES_SET_P ()
7258
                   && VAL_14_BITS_P (INSN_ADDRESSES (INSN_UID (JUMP_LABEL (insn)))
7259
                                      - INSN_ADDRESSES (INSN_UID (insn)) - 8))
7260
            return "movb,%C2 %1,%0,%3%#";
7261
          /* Handle normal cases.  */
7262
          if (nullify)
7263
            return "or,%N2 %1,%%r0,%0\n\tb,n %3";
7264
          else
7265
            return "or,%N2 %1,%%r0,%0\n\tb %3";
7266
 
7267
        default:
7268
          /* The reversed conditional branch must branch over one additional
7269
             instruction if the delay slot is filled and needs to be extracted
7270
             by pa_output_lbranch.  If the delay slot is empty or this is a
7271
             nullified forward branch, the instruction after the reversed
7272
             condition branch must be nullified.  */
7273
          if (dbr_sequence_length () == 0
7274
              || (nullify && forward_branch_p (insn)))
7275
            {
7276
              nullify = 1;
7277
              xdelay = 0;
7278
              operands[4] = GEN_INT (length);
7279
            }
7280
          else
7281
            {
7282
              xdelay = 1;
7283
              operands[4] = GEN_INT (length + 4);
7284
            }
7285
 
7286
          if (nullify)
7287
            output_asm_insn ("movb,%N2,n %1,%0,.+%4", operands);
7288
          else
7289
            output_asm_insn ("movb,%N2 %1,%0,.+%4", operands);
7290
 
7291
          return pa_output_lbranch (operands[3], insn, xdelay);
7292
        }
7293
    }
7294
  /* Deal with gross reload for FP destination register case.  */
7295
  else if (which_alternative == 1)
7296
    {
7297
      /* Move source register to MEM, perform the branch test, then
7298
         finally load the FP register from MEM from within the branch's
7299
         delay slot.  */
7300
      output_asm_insn ("stw %1,-16(%%r30)", operands);
7301
      if (length == 12)
7302
        return "{comb|cmpb},%S2 %%r0,%1,%3\n\t{fldws|fldw} -16(%%r30),%0";
7303
      else if (length == 16)
7304
        return "{comclr|cmpclr},%B2 %%r0,%1,%%r0\n\tb %3\n\t{fldws|fldw} -16(%%r30),%0";
7305
      else
7306
        {
7307
          operands[4] = GEN_INT (length - 4);
7308
          output_asm_insn ("{comb|cmpb},%B2 %%r0,%1,.+%4", operands);
7309
          output_asm_insn ("{fldws|fldw} -16(%%r30),%0", operands);
7310
          return pa_output_lbranch (operands[3], insn, 0);
7311
        }
7312
    }
7313
  /* Deal with gross reload from memory case.  */
7314
  else if (which_alternative == 2)
7315
    {
      /* Reload the loop counter from memory; the store back to memory
         happens in the branch's delay slot.  */
      if (length == 8)
        return "{comb|cmpb},%S2 %%r0,%1,%3\n\tstw %1,%0";
      else if (length == 12)
        return "{comclr|cmpclr},%B2 %%r0,%1,%%r0\n\tb %3\n\tstw %1,%0";
      else
        {
          operands[4] = GEN_INT (length);
          output_asm_insn ("{comb|cmpb},%B2 %%r0,%1,.+%4\n\tstw %1,%0",
                           operands);
          return pa_output_lbranch (operands[3], insn, 0);
        }
    }
  /* Handle SAR as a destination.  */
  else
    {
      if (length == 8)
        return "{comb|cmpb},%S2 %%r0,%1,%3\n\tmtsar %r1";
      else if (length == 12)
        return "{comclr|cmpclr},%B2 %%r0,%1,%%r0\n\tb %3\n\tmtsar %r1";
      else
        {
          operands[4] = GEN_INT (length);
          output_asm_insn ("{comb|cmpb},%B2 %%r0,%1,.+%4\n\tmtsar %r1",
                           operands);
          return pa_output_lbranch (operands[3], insn, 0);
        }
    }
}

/* Copy any FP arguments in INSN into integer registers.  */
static void
copy_fp_args (rtx insn)
{
  rtx link;
  rtx xoperands[2];

  for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
    {
      int arg_mode, regno;
      rtx use = XEXP (link, 0);

      if (! (GET_CODE (use) == USE
          && GET_CODE (XEXP (use, 0)) == REG
          && FUNCTION_ARG_REGNO_P (REGNO (XEXP (use, 0)))))
        continue;

      arg_mode = GET_MODE (XEXP (use, 0));
      regno = REGNO (XEXP (use, 0));

      /* Is it a floating point register?  */
      if (regno >= 32 && regno <= 39)
        {
          /* Copy the FP register into an integer register via memory.  */
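          /* Regnos 32-39 are the FP argument registers on this port; as
             the arithmetic below suggests, each pair of regnos maps onto
             one of the GR argument registers %r26-%r23.  */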
          if (arg_mode == SFmode)
            {
              xoperands[0] = XEXP (use, 0);
              xoperands[1] = gen_rtx_REG (SImode, 26 - (regno - 32) / 2);
              output_asm_insn ("{fstws|fstw} %0,-16(%%sr0,%%r30)", xoperands);
              output_asm_insn ("ldw -16(%%sr0,%%r30),%1", xoperands);
            }
          else
            {
              xoperands[0] = XEXP (use, 0);
              xoperands[1] = gen_rtx_REG (DImode, 25 - (regno - 34) / 2);
              output_asm_insn ("{fstds|fstd} %0,-16(%%sr0,%%r30)", xoperands);
              output_asm_insn ("ldw -12(%%sr0,%%r30),%R1", xoperands);
              output_asm_insn ("ldw -16(%%sr0,%%r30),%1", xoperands);
            }
        }
    }
}

/* Compute length of the FP argument copy sequence for INSN.  */
static int
length_fp_args (rtx insn)
{
  int length = 0;
  rtx link;

  for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
    {
      int arg_mode, regno;
      rtx use = XEXP (link, 0);

      if (! (GET_CODE (use) == USE
          && GET_CODE (XEXP (use, 0)) == REG
          && FUNCTION_ARG_REGNO_P (REGNO (XEXP (use, 0)))))
        continue;

      arg_mode = GET_MODE (XEXP (use, 0));
      regno = REGNO (XEXP (use, 0));

      /* Is it a floating point register?  */
      if (regno >= 32 && regno <= 39)
        {
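          /* These sizes match copy_fp_args: an SFmode copy is two 4-byte
             insns (fstw/ldw); a DFmode copy is three (fstd plus two ldw).  */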
          if (arg_mode == SFmode)
            length += 8;
          else
            length += 12;
        }
    }

  return length;
}

/* Return the attribute length for the millicode call instruction INSN.
   The length must match the code generated by pa_output_millicode_call.
   We include the delay slot in the returned length as it is better to
   overestimate the length than to underestimate it.  */

int
pa_attr_length_millicode_call (rtx insn)
{
  unsigned long distance = -1;
  unsigned long total = IN_NAMED_SECTION_P (cfun->decl) ? 0 : total_code_bytes;

  if (INSN_ADDRESSES_SET_P ())
    {
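      /* The addition relies on unsigned wraparound: on overflow the sum
         is smaller than TOTAL, so the distance is treated as unknown
         (-1, i.e. the maximum).  */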
      distance = (total + insn_current_reference_address (insn));
      if (distance < total)
        distance = -1;
    }

  if (TARGET_64BIT)
    {
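      /* 7600000 conservatively bounds the +-8MB reach of the 22-bit
         pc-relative branch on PA 2.0; 240000 below likewise
         under-approximates the +-256KB reach of the 17-bit branch.  */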
      if (!TARGET_LONG_CALLS && distance < 7600000)
        return 8;

      return 20;
    }
  else if (TARGET_PORTABLE_RUNTIME)
    return 24;
  else
    {
      if (!TARGET_LONG_CALLS && distance < 240000)
        return 8;

      if (TARGET_LONG_ABS_CALL && !flag_pic)
        return 12;

      return 24;
    }
}

/* INSN is a function call.  It may have an unconditional jump
   in its delay slot.

   CALL_DEST is the routine we are calling.  */

const char *
pa_output_millicode_call (rtx insn, rtx call_dest)
{
  int attr_length = get_attr_length (insn);
  int seq_length = dbr_sequence_length ();
  int distance;
  rtx seq_insn;
  rtx xoperands[3];

  xoperands[0] = call_dest;
  xoperands[2] = gen_rtx_REG (Pmode, TARGET_64BIT ? 2 : 31);

  /* Handle the common case where we are sure that the branch will
     reach the beginning of the $CODE$ subspace.  The within-reach
     form of the $$sh_func_adrs call has a length of 28.  Because
     it has an attribute type of multi, it never has a nonzero
     sequence length.  The length of the $$sh_func_adrs call is the
     same as that of certain out-of-reach PIC calls to other routines.  */
  if (!TARGET_LONG_CALLS
      && ((seq_length == 0
           && (attr_length == 12
               || (attr_length == 28 && get_attr_type (insn) == TYPE_MULTI)))
          || (seq_length != 0 && attr_length == 8)))
    {
      output_asm_insn ("{bl|b,l} %0,%2", xoperands);
    }
  else
    {
      if (TARGET_64BIT)
        {
          /* It might seem that one insn could be saved by accessing
             the millicode function using the linkage table.  However,
             this doesn't work in shared libraries and other dynamically
             loaded objects.  Using a pc-relative sequence also avoids
             problems related to the implicit use of the gp register.  */
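          /* The b,l to .+8 effectively falls through (its delay slot
             insn executes on the way) and only serves to capture the
             return point in %r1; the addil/ldo pair below then forms
             the target address pc-relatively from that base.  */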
          output_asm_insn ("b,l .+8,%%r1", xoperands);

          if (TARGET_GAS)
            {
              output_asm_insn ("addil L'%0-$PIC_pcrel$0+4,%%r1", xoperands);
              output_asm_insn ("ldo R'%0-$PIC_pcrel$0+8(%%r1),%%r1", xoperands);
            }
          else
            {
              xoperands[1] = gen_label_rtx ();
              output_asm_insn ("addil L'%0-%l1,%%r1", xoperands);
              targetm.asm_out.internal_label (asm_out_file, "L",
                                         CODE_LABEL_NUMBER (xoperands[1]));
              output_asm_insn ("ldo R'%0-%l1(%%r1),%%r1", xoperands);
            }

          output_asm_insn ("bve,l (%%r1),%%r2", xoperands);
        }
      else if (TARGET_PORTABLE_RUNTIME)
        {
          /* Pure portable runtime doesn't allow be/ble; we also don't
             have PIC support in the assembler/linker, so this sequence
             is needed.  */

          /* Get the address of our target into %r1.  */
          output_asm_insn ("ldil L'%0,%%r1", xoperands);
          output_asm_insn ("ldo R'%0(%%r1),%%r1", xoperands);

          /* Get our return address into %r31.  */
          output_asm_insn ("{bl|b,l} .+8,%%r31", xoperands);
          output_asm_insn ("addi 8,%%r31,%%r31", xoperands);

          /* Jump to our target address in %r1.  */
          output_asm_insn ("bv %%r0(%%r1)", xoperands);
        }
      else if (!flag_pic)
        {
          output_asm_insn ("ldil L'%0,%%r1", xoperands);
          if (TARGET_PA_20)
            output_asm_insn ("be,l R'%0(%%sr4,%%r1),%%sr0,%%r31", xoperands);
          else
            output_asm_insn ("ble R'%0(%%sr4,%%r1)", xoperands);
        }
      else
        {
          output_asm_insn ("{bl|b,l} .+8,%%r1", xoperands);
          output_asm_insn ("addi 16,%%r1,%%r31", xoperands);

          if (TARGET_SOM || !TARGET_GAS)
            {
              /* The HP assembler can generate relocations for the
                 difference of two symbols.  GAS can do this for a
                 millicode symbol but not an arbitrary external
                 symbol when generating SOM output.  */
              xoperands[1] = gen_label_rtx ();
              targetm.asm_out.internal_label (asm_out_file, "L",
                                         CODE_LABEL_NUMBER (xoperands[1]));
              output_asm_insn ("addil L'%0-%l1,%%r1", xoperands);
              output_asm_insn ("ldo R'%0-%l1(%%r1),%%r1", xoperands);
            }
          else
            {
              output_asm_insn ("addil L'%0-$PIC_pcrel$0+8,%%r1", xoperands);
              output_asm_insn ("ldo R'%0-$PIC_pcrel$0+12(%%r1),%%r1",
                               xoperands);
            }

          /* Jump to our target address in %r1.  */
          output_asm_insn ("bv %%r0(%%r1)", xoperands);
        }
    }

  if (seq_length == 0)
    output_asm_insn ("nop", xoperands);

  /* We are done if there isn't a jump in the delay slot.  */
  if (seq_length == 0 || GET_CODE (NEXT_INSN (insn)) != JUMP_INSN)
    return "";

  /* This call has an unconditional jump in its delay slot.  */
  xoperands[0] = XEXP (PATTERN (NEXT_INSN (insn)), 1);

  /* See if the return address can be adjusted.  Use the containing
     sequence insn's address.  */
  if (INSN_ADDRESSES_SET_P ())
    {
      seq_insn = NEXT_INSN (PREV_INSN (XVECEXP (final_sequence, 0, 0)));
      distance = (INSN_ADDRESSES (INSN_UID (JUMP_LABEL (NEXT_INSN (insn))))
                  - INSN_ADDRESSES (INSN_UID (seq_insn)) - 8);

      if (VAL_14_BITS_P (distance))
        {
          xoperands[1] = gen_label_rtx ();
          output_asm_insn ("ldo %0-%1(%2),%2", xoperands);
          targetm.asm_out.internal_label (asm_out_file, "L",
                                          CODE_LABEL_NUMBER (xoperands[1]));
        }
      else
        /* ??? This branch may not reach its target.  */
        output_asm_insn ("nop\n\tb,n %0", xoperands);
    }
  else
    /* ??? This branch may not reach its target.  */
    output_asm_insn ("nop\n\tb,n %0", xoperands);

  /* Delete the jump.  */
  SET_INSN_DELETED (NEXT_INSN (insn));

  return "";
}

/* Return the attribute length of the call instruction INSN.  The SIBCALL
   flag indicates whether INSN is a regular call or a sibling call.  The
   length returned must be longer than the code actually generated by
   pa_output_call.  Since branch shortening is done before delay branch
   sequencing, there is no way to determine whether or not the delay
   slot will be filled during branch shortening.  Even when the delay
   slot is filled, we may have to add a nop if the delay slot contains
   a branch that can't reach its target.  Thus, we always have to include
   the delay slot in the length estimate.  This used to be done in
   pa_adjust_insn_length but we do it here now as some sequences always
   fill the delay slot and we can save four bytes in the estimate for
   these sequences.  */

int
pa_attr_length_call (rtx insn, int sibcall)
{
  int local_call;
  rtx call, call_dest;
  tree call_decl;
  int length = 0;
  rtx pat = PATTERN (insn);
  unsigned long distance = -1;

  gcc_assert (GET_CODE (insn) == CALL_INSN);

  if (INSN_ADDRESSES_SET_P ())
    {
      unsigned long total;

      total = IN_NAMED_SECTION_P (cfun->decl) ? 0 : total_code_bytes;
      distance = (total + insn_current_reference_address (insn));
      if (distance < total)
        distance = -1;
    }

  gcc_assert (GET_CODE (pat) == PARALLEL);

  /* Get the call rtx.  */
  call = XVECEXP (pat, 0, 0);
  if (GET_CODE (call) == SET)
    call = SET_SRC (call);

  gcc_assert (GET_CODE (call) == CALL);

  /* Determine if this is a local call.  */
  call_dest = XEXP (XEXP (call, 0), 0);
  call_decl = SYMBOL_REF_DECL (call_dest);
  local_call = call_decl && targetm.binds_local_p (call_decl);

  /* pc-relative branch.  */
  if (!TARGET_LONG_CALLS
      && ((TARGET_PA_20 && !sibcall && distance < 7600000)
          || distance < 240000))
    length += 8;

  /* 64-bit plabel sequence.  */
  else if (TARGET_64BIT && !local_call)
    length += sibcall ? 28 : 24;

  /* non-pic long absolute branch sequence.  */
  else if ((TARGET_LONG_ABS_CALL || local_call) && !flag_pic)
    length += 12;

  /* long pc-relative branch sequence.  */
  else if (TARGET_LONG_PIC_SDIFF_CALL
           || (TARGET_GAS && !TARGET_SOM
               && (TARGET_LONG_PIC_PCREL_CALL || local_call)))
    {
      length += 20;

      if (!TARGET_PA_20 && !TARGET_NO_SPACE_REGS && (!local_call || flag_pic))
        length += 8;
    }

  /* 32-bit plabel sequence.  */
  else
    {
      length += 32;

      if (TARGET_SOM)
        length += length_fp_args (insn);

      if (flag_pic)
        length += 4;

      if (!TARGET_PA_20)
        {
          if (!sibcall)
            length += 8;

          if (!TARGET_NO_SPACE_REGS && (!local_call || flag_pic))
            length += 8;
        }
    }

  return length;
}

/* INSN is a function call.  It may have an unconditional jump
   in its delay slot.

   CALL_DEST is the routine we are calling.  */

const char *
pa_output_call (rtx insn, rtx call_dest, int sibcall)
{
  int delay_insn_deleted = 0;
  int delay_slot_filled = 0;
  int seq_length = dbr_sequence_length ();
  tree call_decl = SYMBOL_REF_DECL (call_dest);
  int local_call = call_decl && targetm.binds_local_p (call_decl);
  rtx xoperands[2];

  xoperands[0] = call_dest;

  /* Handle the common case where we're sure that the branch will reach
     the beginning of the "$CODE$" subspace.  This is the beginning of
     the current function if we are in a named section.  */
  if (!TARGET_LONG_CALLS && pa_attr_length_call (insn, sibcall) == 8)
    {
      xoperands[1] = gen_rtx_REG (word_mode, sibcall ? 0 : 2);
      output_asm_insn ("{bl|b,l} %0,%1", xoperands);
    }
  else
    {
      if (TARGET_64BIT && !local_call)
        {
          /* ??? As far as I can tell, the HP linker doesn't support the
             long pc-relative sequence described in the 64-bit runtime
             architecture.  So, we use a slightly longer indirect call.  */
          xoperands[0] = pa_get_deferred_plabel (call_dest);
          xoperands[1] = gen_label_rtx ();

          /* If this isn't a sibcall, we put the load of %r27 into the
             delay slot.  We can't do this in a sibcall as we don't
             have a second call-clobbered scratch register available.  */
          if (seq_length != 0
              && GET_CODE (NEXT_INSN (insn)) != JUMP_INSN
              && !sibcall)
            {
              final_scan_insn (NEXT_INSN (insn), asm_out_file,
                               optimize, 0, NULL);

              /* Now delete the delay insn.  */
              SET_INSN_DELETED (NEXT_INSN (insn));
              delay_insn_deleted = 1;
            }

          output_asm_insn ("addil LT'%0,%%r27", xoperands);
          output_asm_insn ("ldd RT'%0(%%r1),%%r1", xoperands);
          output_asm_insn ("ldd 0(%%r1),%%r1", xoperands);

          if (sibcall)
            {
              output_asm_insn ("ldd 24(%%r1),%%r27", xoperands);
              output_asm_insn ("ldd 16(%%r1),%%r1", xoperands);
              output_asm_insn ("bve (%%r1)", xoperands);
            }
          else
            {
              output_asm_insn ("ldd 16(%%r1),%%r2", xoperands);
              output_asm_insn ("bve,l (%%r2),%%r2", xoperands);
              output_asm_insn ("ldd 24(%%r1),%%r27", xoperands);
              delay_slot_filled = 1;
            }
        }
      else
        {
          int indirect_call = 0;

          /* Emit a long call.  There are several different sequences
             of increasing length and complexity.  In most cases,
             they don't allow an instruction in the delay slot.  */
          if (!((TARGET_LONG_ABS_CALL || local_call) && !flag_pic)
              && !TARGET_LONG_PIC_SDIFF_CALL
              && !(TARGET_GAS && !TARGET_SOM
                   && (TARGET_LONG_PIC_PCREL_CALL || local_call))
              && !TARGET_64BIT)
            indirect_call = 1;

          if (seq_length != 0
              && GET_CODE (NEXT_INSN (insn)) != JUMP_INSN
              && !sibcall
              && (!TARGET_PA_20
                  || indirect_call
                  || ((TARGET_LONG_ABS_CALL || local_call) && !flag_pic)))
            {
              /* A non-jump insn in the delay slot.  By definition we can
                 emit this insn before the call (and in fact before argument
                 relocating).  */
              final_scan_insn (NEXT_INSN (insn), asm_out_file, optimize, 0,
                               NULL);

              /* Now delete the delay insn.  */
              SET_INSN_DELETED (NEXT_INSN (insn));
              delay_insn_deleted = 1;
            }

          if ((TARGET_LONG_ABS_CALL || local_call) && !flag_pic)
            {
              /* This is the best sequence for making long calls in
                 non-pic code.  Unfortunately, GNU ld doesn't provide
                 the stub needed for external calls, and GAS's support
                 for this with the SOM linker is buggy.  It is safe
                 to use this for local calls.  */
              output_asm_insn ("ldil L'%0,%%r1", xoperands);
              if (sibcall)
                output_asm_insn ("be R'%0(%%sr4,%%r1)", xoperands);
              else
                {
                  if (TARGET_PA_20)
                    output_asm_insn ("be,l R'%0(%%sr4,%%r1),%%sr0,%%r31",
                                     xoperands);
                  else
                    output_asm_insn ("ble R'%0(%%sr4,%%r1)", xoperands);

                  output_asm_insn ("copy %%r31,%%r2", xoperands);
                  delay_slot_filled = 1;
                }
            }
          else
            {
              if (TARGET_LONG_PIC_SDIFF_CALL)
                {
                  /* The HP assembler and linker can handle relocations
                     for the difference of two symbols.  The HP assembler
                     recognizes the sequence as a pc-relative call and
                     the linker provides stubs when needed.  */
                  xoperands[1] = gen_label_rtx ();
                  output_asm_insn ("{bl|b,l} .+8,%%r1", xoperands);
                  output_asm_insn ("addil L'%0-%l1,%%r1", xoperands);
                  targetm.asm_out.internal_label (asm_out_file, "L",
                                             CODE_LABEL_NUMBER (xoperands[1]));
                  output_asm_insn ("ldo R'%0-%l1(%%r1),%%r1", xoperands);
                }
              else if (TARGET_GAS && !TARGET_SOM
                       && (TARGET_LONG_PIC_PCREL_CALL || local_call))
                {
                  /*  GAS currently can't generate the relocations that
                      are needed for the SOM linker under HP-UX using this
                      sequence.  The GNU linker doesn't generate the stubs
                      that are needed for external calls on TARGET_ELF32
                      with this sequence.  For now, we have to use a
                      longer plabel sequence when using GAS.  */
                  output_asm_insn ("{bl|b,l} .+8,%%r1", xoperands);
                  output_asm_insn ("addil L'%0-$PIC_pcrel$0+4,%%r1",
                                   xoperands);
                  output_asm_insn ("ldo R'%0-$PIC_pcrel$0+8(%%r1),%%r1",
                                   xoperands);
                }
              else
                {
                  /* Emit a long plabel-based call sequence.  This is
                     essentially an inline implementation of $$dyncall.
                     We don't actually try to call $$dyncall as this is
                     as difficult as calling the function itself.  */
                  xoperands[0] = pa_get_deferred_plabel (call_dest);
                  xoperands[1] = gen_label_rtx ();

                  /* Since the call is indirect, FP arguments in registers
                     need to be copied to the general registers.  Then, the
                     argument relocation stub will copy them back.  */
                  if (TARGET_SOM)
                    copy_fp_args (insn);

                  if (flag_pic)
                    {
                      output_asm_insn ("addil LT'%0,%%r19", xoperands);
                      output_asm_insn ("ldw RT'%0(%%r1),%%r1", xoperands);
                      output_asm_insn ("ldw 0(%%r1),%%r1", xoperands);
                    }
                  else
                    {
                      output_asm_insn ("addil LR'%0-$global$,%%r27",
                                       xoperands);
                      output_asm_insn ("ldw RR'%0-$global$(%%r1),%%r1",
                                       xoperands);
                    }

                  output_asm_insn ("bb,>=,n %%r1,30,.+16", xoperands);
                  output_asm_insn ("depi 0,31,2,%%r1", xoperands);
                  output_asm_insn ("ldw 4(%%sr0,%%r1),%%r19", xoperands);
                  output_asm_insn ("ldw 0(%%sr0,%%r1),%%r1", xoperands);

                  if (!sibcall && !TARGET_PA_20)
                    {
                      output_asm_insn ("{bl|b,l} .+8,%%r2", xoperands);
                      if (TARGET_NO_SPACE_REGS || (local_call && !flag_pic))
                        output_asm_insn ("addi 8,%%r2,%%r2", xoperands);
                      else
                        output_asm_insn ("addi 16,%%r2,%%r2", xoperands);
                    }
                }

              if (TARGET_PA_20)
                {
                  if (sibcall)
                    output_asm_insn ("bve (%%r1)", xoperands);
                  else
                    {
                      if (indirect_call)
                        {
                          output_asm_insn ("bve,l (%%r1),%%r2", xoperands);
                          output_asm_insn ("stw %%r2,-24(%%sp)", xoperands);
                          delay_slot_filled = 1;
                        }
                      else
                        output_asm_insn ("bve,l (%%r1),%%r2", xoperands);
                    }
                }
              else
                {
                  if (!TARGET_NO_SPACE_REGS && (!local_call || flag_pic))
                    output_asm_insn ("ldsid (%%r1),%%r31\n\tmtsp %%r31,%%sr0",
                                     xoperands);

                  if (sibcall)
                    {
                      if (TARGET_NO_SPACE_REGS || (local_call && !flag_pic))
                        output_asm_insn ("be 0(%%sr4,%%r1)", xoperands);
                      else
                        output_asm_insn ("be 0(%%sr0,%%r1)", xoperands);
                    }
                  else
                    {
                      if (TARGET_NO_SPACE_REGS || (local_call && !flag_pic))
                        output_asm_insn ("ble 0(%%sr4,%%r1)", xoperands);
                      else
                        output_asm_insn ("ble 0(%%sr0,%%r1)", xoperands);

                      if (indirect_call)
                        output_asm_insn ("stw %%r31,-24(%%sp)", xoperands);
                      else
                        output_asm_insn ("copy %%r31,%%r2", xoperands);
                      delay_slot_filled = 1;
                    }
                }
            }
        }
    }

  if (!delay_slot_filled && (seq_length == 0 || delay_insn_deleted))
    output_asm_insn ("nop", xoperands);

  /* We are done if there isn't a jump in the delay slot.  */
  if (seq_length == 0
      || delay_insn_deleted
      || GET_CODE (NEXT_INSN (insn)) != JUMP_INSN)
    return "";

  /* A sibcall should never have a branch in the delay slot.  */
  gcc_assert (!sibcall);

  /* This call has an unconditional jump in its delay slot.  */
  xoperands[0] = XEXP (PATTERN (NEXT_INSN (insn)), 1);

  if (!delay_slot_filled && INSN_ADDRESSES_SET_P ())
    {
      /* See if the return address can be adjusted.  Use the containing
         sequence insn's address.  This would break the regular call/return
         relationship assumed by the table-based eh unwinder, so only do
         that if the call is not possibly throwing.  */
      rtx seq_insn = NEXT_INSN (PREV_INSN (XVECEXP (final_sequence, 0, 0)));
      int distance = (INSN_ADDRESSES (INSN_UID (JUMP_LABEL (NEXT_INSN (insn))))
                      - INSN_ADDRESSES (INSN_UID (seq_insn)) - 8);

      if (VAL_14_BITS_P (distance)
          && !(can_throw_internal (insn) || can_throw_external (insn)))
        {
          xoperands[1] = gen_label_rtx ();
          output_asm_insn ("ldo %0-%1(%%r2),%%r2", xoperands);
          targetm.asm_out.internal_label (asm_out_file, "L",
                                          CODE_LABEL_NUMBER (xoperands[1]));
        }
      else
        output_asm_insn ("nop\n\tb,n %0", xoperands);
    }
  else
    output_asm_insn ("b,n %0", xoperands);

  /* Delete the jump.  */
  SET_INSN_DELETED (NEXT_INSN (insn));

  return "";
}

/* Return the attribute length of the indirect call instruction INSN.
   The length must match the code generated by pa_output_indirect_call.
   The returned length includes the delay slot.  Currently, the delay
   slot of an indirect call sequence is not exposed and it is used by
   the sequence itself.  */

int
pa_attr_length_indirect_call (rtx insn)
{
  unsigned long distance = -1;
  unsigned long total = IN_NAMED_SECTION_P (cfun->decl) ? 0 : total_code_bytes;

  if (INSN_ADDRESSES_SET_P ())
    {
      distance = (total + insn_current_reference_address (insn));
      if (distance < total)
        distance = -1;
    }

  if (TARGET_64BIT)
    return 12;

  if (TARGET_FAST_INDIRECT_CALLS
      || (!TARGET_PORTABLE_RUNTIME
          && ((TARGET_PA_20 && !TARGET_SOM && distance < 7600000)
              || distance < 240000)))
    return 8;

  if (flag_pic)
    return 24;

  if (TARGET_PORTABLE_RUNTIME)
    return 20;

  /* Out of reach, can use ble.  */
  return 12;
}

const char *
pa_output_indirect_call (rtx insn, rtx call_dest)
{
  rtx xoperands[1];

  if (TARGET_64BIT)
    {
      xoperands[0] = call_dest;
      output_asm_insn ("ldd 16(%0),%%r2", xoperands);
      output_asm_insn ("bve,l (%%r2),%%r2\n\tldd 24(%0),%%r27", xoperands);
      return "";
    }

  /* First the special case for kernels, level 0 systems, etc.  */
  if (TARGET_FAST_INDIRECT_CALLS)
    return "ble 0(%%sr4,%%r22)\n\tcopy %%r31,%%r2";

  /* Now the normal case -- we can reach $$dyncall directly or
     we're sure that we can get there via a long-branch stub.

     No need to check target flags as the length uniquely identifies
     the remaining cases.  */
  if (pa_attr_length_indirect_call (insn) == 8)
    {
      /* The HP linker sometimes substitutes a BLE for BL/B,L calls to
         $$dyncall.  Since BLE uses %r31 as the link register, the 22-bit
         variant of the B,L instruction can't be used on the SOM target.  */
      if (TARGET_PA_20 && !TARGET_SOM)
        return ".CALL\tARGW0=GR\n\tb,l $$dyncall,%%r2\n\tcopy %%r2,%%r31";
      else
        return ".CALL\tARGW0=GR\n\tbl $$dyncall,%%r31\n\tcopy %%r31,%%r2";
    }

  /* Long millicode call, but we are not generating PIC or portable runtime
     code.  */
  if (pa_attr_length_indirect_call (insn) == 12)
    return ".CALL\tARGW0=GR\n\tldil L'$$dyncall,%%r2\n\tble R'$$dyncall(%%sr4,%%r2)\n\tcopy %%r31,%%r2";

  /* Long millicode call for portable runtime.  */
  if (pa_attr_length_indirect_call (insn) == 20)
    return "ldil L'$$dyncall,%%r31\n\tldo R'$$dyncall(%%r31),%%r31\n\tblr %%r0,%%r2\n\tbv,n %%r0(%%r31)\n\tnop";

  /* We need a long PIC call to $$dyncall.  */
  xoperands[0] = NULL_RTX;
  output_asm_insn ("{bl|b,l} .+8,%%r1", xoperands);
  if (TARGET_SOM || !TARGET_GAS)
    {
      xoperands[0] = gen_label_rtx ();
      output_asm_insn ("addil L'$$dyncall-%0,%%r1", xoperands);
      targetm.asm_out.internal_label (asm_out_file, "L",
                                      CODE_LABEL_NUMBER (xoperands[0]));
      output_asm_insn ("ldo R'$$dyncall-%0(%%r1),%%r1", xoperands);
    }
  else
    {
      output_asm_insn ("addil L'$$dyncall-$PIC_pcrel$0+4,%%r1", xoperands);
      output_asm_insn ("ldo R'$$dyncall-$PIC_pcrel$0+8(%%r1),%%r1",
                       xoperands);
    }
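  /* blr %r0,%r2 sets %r2 to the address of the following nop (the
     call's return point) while the bv,n in its delay slot enters
     $$dyncall through %r1; the nop is skipped on the way in but is
     where the callee returns.  */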
  output_asm_insn ("blr %%r0,%%r2", xoperands);
  output_asm_insn ("bv,n %%r0(%%r1)\n\tnop", xoperands);
  return "";
}

/* In HPUX 8.0's shared library scheme, special relocations are needed
   for function labels if they might be passed to a function
   in a shared library (because shared libraries don't live in code
   space), and special magic is needed to construct their address.  */

void
pa_encode_label (rtx sym)
{
  const char *str = XSTR (sym, 0);
  int len = strlen (str) + 1;
  char *newstr, *p;

  p = newstr = XALLOCAVEC (char, len + 1);
  *p++ = '@';
  strcpy (p, str);

  XSTR (sym, 0) = ggc_alloc_string (newstr, len);
}

static void
pa_encode_section_info (tree decl, rtx rtl, int first)
{
  int old_referenced = 0;

  if (!first && MEM_P (rtl) && GET_CODE (XEXP (rtl, 0)) == SYMBOL_REF)
    old_referenced
      = SYMBOL_REF_FLAGS (XEXP (rtl, 0)) & SYMBOL_FLAG_REFERENCED;

  default_encode_section_info (decl, rtl, first);

  if (first && TEXT_SPACE_P (decl))
    {
      SYMBOL_REF_FLAG (XEXP (rtl, 0)) = 1;
      if (TREE_CODE (decl) == FUNCTION_DECL)
        pa_encode_label (XEXP (rtl, 0));
    }
  else if (old_referenced)
    SYMBOL_REF_FLAGS (XEXP (rtl, 0)) |= old_referenced;
}

/* This is roughly the inverse of pa_encode_section_info.  */

static const char *
pa_strip_name_encoding (const char *str)
{
  str += (*str == '@');
  str += (*str == '*');
  return str;
}

/* Returns 1 if OP is a function label involved in a simple addition
   with a constant.  Used to keep certain patterns from matching
   during instruction combination.  */
int
pa_is_function_label_plus_const (rtx op)
{
  /* Strip off any CONST.  */
  if (GET_CODE (op) == CONST)
    op = XEXP (op, 0);

  return (GET_CODE (op) == PLUS
          && function_label_operand (XEXP (op, 0), VOIDmode)
          && GET_CODE (XEXP (op, 1)) == CONST_INT);
}

/* Output assembly code for a thunk to FUNCTION.  */

static void
pa_asm_output_mi_thunk (FILE *file, tree thunk_fndecl, HOST_WIDE_INT delta,
                        HOST_WIDE_INT vcall_offset ATTRIBUTE_UNUSED,
                        tree function)
{
  static unsigned int current_thunk_number;
  int val_14 = VAL_14_BITS_P (delta);
  unsigned int old_last_address = last_address, nbytes = 0;
  char label[16];
  rtx xoperands[4];

  xoperands[0] = XEXP (DECL_RTL (function), 0);
  xoperands[1] = XEXP (DECL_RTL (thunk_fndecl), 0);
  xoperands[2] = GEN_INT (delta);

  ASM_OUTPUT_LABEL (file, XSTR (xoperands[1], 0));
  fprintf (file, "\t.PROC\n\t.CALLINFO FRAME=0,NO_CALLS\n\t.ENTRY\n");

  /* Output the thunk.  We know that the function is in the same
     translation unit (i.e., the same space) as the thunk, and that
     thunks are output after their method.  Thus, we don't need an
     external branch to reach the function.  With SOM and GAS,
     functions and thunks are effectively in different sections.
     Thus, we can always use an IA-relative branch and the linker
     will add a long branch stub if necessary.

     However, we have to be careful when generating PIC code on the
     SOM port to ensure that the sequence does not transfer to an
     import stub for the target function as this could clobber the
     return value saved at SP-24.  This would also apply to the
     32-bit linux port if the multi-space model is implemented.  */
  if ((!TARGET_LONG_CALLS && TARGET_SOM && !TARGET_PORTABLE_RUNTIME
       && !(flag_pic && TREE_PUBLIC (function))
       && (TARGET_GAS || last_address < 262132))
      || (!TARGET_LONG_CALLS && !TARGET_SOM && !TARGET_PORTABLE_RUNTIME
          && ((targetm_common.have_named_sections
               && DECL_SECTION_NAME (thunk_fndecl) != NULL
               /* The GNU 64-bit linker has rather poor stub management.
                  So, we use a long branch from thunks that aren't in
                  the same section as the target function.  */
               && ((!TARGET_64BIT
                    && (DECL_SECTION_NAME (thunk_fndecl)
                        != DECL_SECTION_NAME (function)))
                   || ((DECL_SECTION_NAME (thunk_fndecl)
                        == DECL_SECTION_NAME (function))
                       && last_address < 262132)))
              || (targetm_common.have_named_sections
                  && DECL_SECTION_NAME (thunk_fndecl) == NULL
                  && DECL_SECTION_NAME (function) == NULL
                  && last_address < 262132)
              || (!targetm_common.have_named_sections
                  && last_address < 262132))))
    {
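      /* Adjust the "this" pointer in %r26 (the first argument register)
         by DELTA on the way to the target; when DELTA fits in 14 bits
         the ldo rides in the branch's delay slot.  The 262132 tests
         above conservatively bound the +-256KB reach of the branch.  */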
      if (!val_14)
        output_asm_insn ("addil L'%2,%%r26", xoperands);

      output_asm_insn ("b %0", xoperands);

      if (val_14)
        {
          output_asm_insn ("ldo %2(%%r26),%%r26", xoperands);
          nbytes += 8;
        }
      else
        {
          output_asm_insn ("ldo R'%2(%%r1),%%r26", xoperands);
          nbytes += 12;
        }
    }
  else if (TARGET_64BIT)
    {
      /* We only have one call-clobbered scratch register, so we can't
         make use of the delay slot if delta doesn't fit in 14 bits.  */
      if (!val_14)
        {
          output_asm_insn ("addil L'%2,%%r26", xoperands);
          output_asm_insn ("ldo R'%2(%%r1),%%r26", xoperands);
        }

      output_asm_insn ("b,l .+8,%%r1", xoperands);

      if (TARGET_GAS)
        {
          output_asm_insn ("addil L'%0-$PIC_pcrel$0+4,%%r1", xoperands);
          output_asm_insn ("ldo R'%0-$PIC_pcrel$0+8(%%r1),%%r1", xoperands);
        }
      else
        {
          xoperands[3] = GEN_INT (val_14 ? 8 : 16);
          output_asm_insn ("addil L'%0-%1-%3,%%r1", xoperands);
        }

      if (val_14)
        {
          output_asm_insn ("bv %%r0(%%r1)", xoperands);
          output_asm_insn ("ldo %2(%%r26),%%r26", xoperands);
          nbytes += 20;
        }
      else
        {
          output_asm_insn ("bv,n %%r0(%%r1)", xoperands);
          nbytes += 24;
        }
    }
  else if (TARGET_PORTABLE_RUNTIME)
    {
      output_asm_insn ("ldil L'%0,%%r1", xoperands);
      output_asm_insn ("ldo R'%0(%%r1),%%r22", xoperands);

      if (!val_14)
        output_asm_insn ("addil L'%2,%%r26", xoperands);

      output_asm_insn ("bv %%r0(%%r22)", xoperands);

      if (val_14)
        {
          output_asm_insn ("ldo %2(%%r26),%%r26", xoperands);
          nbytes += 16;
        }
      else
        {
          output_asm_insn ("ldo R'%2(%%r1),%%r26", xoperands);
          nbytes += 20;
        }
    }
  else if (TARGET_SOM && flag_pic && TREE_PUBLIC (function))
    {
      /* The function is accessible from outside this module.  The only
         way to avoid an import stub between the thunk and function is to
         call the function directly with an indirect sequence similar to
         that used by $$dyncall.  This is possible because $$dyncall acts
         as the import stub in an indirect call.  */
      ASM_GENERATE_INTERNAL_LABEL (label, "LTHN", current_thunk_number);
      xoperands[3] = gen_rtx_SYMBOL_REF (Pmode, label);
      output_asm_insn ("addil LT'%3,%%r19", xoperands);
      output_asm_insn ("ldw RT'%3(%%r1),%%r22", xoperands);
      output_asm_insn ("ldw 0(%%sr0,%%r22),%%r22", xoperands);
      output_asm_insn ("bb,>=,n %%r22,30,.+16", xoperands);
      output_asm_insn ("depi 0,31,2,%%r22", xoperands);
      output_asm_insn ("ldw 4(%%sr0,%%r22),%%r19", xoperands);
      output_asm_insn ("ldw 0(%%sr0,%%r22),%%r22", xoperands);

      if (!val_14)
        {
          output_asm_insn ("addil L'%2,%%r26", xoperands);
          nbytes += 4;
        }

      if (TARGET_PA_20)
        {
          output_asm_insn ("bve (%%r22)", xoperands);
          nbytes += 36;
        }
      else if (TARGET_NO_SPACE_REGS)
        {
          output_asm_insn ("be 0(%%sr4,%%r22)", xoperands);
          nbytes += 36;
        }
      else
        {
          output_asm_insn ("ldsid (%%sr0,%%r22),%%r21", xoperands);
          output_asm_insn ("mtsp %%r21,%%sr0", xoperands);
          output_asm_insn ("be 0(%%sr0,%%r22)", xoperands);
          nbytes += 44;
        }

      if (val_14)
        output_asm_insn ("ldo %2(%%r26),%%r26", xoperands);
      else
        output_asm_insn ("ldo R'%2(%%r1),%%r26", xoperands);
    }
  else if (flag_pic)
    {
      output_asm_insn ("{bl|b,l} .+8,%%r1", xoperands);

      if (TARGET_SOM || !TARGET_GAS)
        {
          output_asm_insn ("addil L'%0-%1-8,%%r1", xoperands);
          output_asm_insn ("ldo R'%0-%1-8(%%r1),%%r22", xoperands);
        }
      else
        {
          output_asm_insn ("addil L'%0-$PIC_pcrel$0+4,%%r1", xoperands);
          output_asm_insn ("ldo R'%0-$PIC_pcrel$0+8(%%r1),%%r22", xoperands);
        }

      if (!val_14)
        output_asm_insn ("addil L'%2,%%r26", xoperands);

      output_asm_insn ("bv %%r0(%%r22)", xoperands);

      if (val_14)
        {
          output_asm_insn ("ldo %2(%%r26),%%r26", xoperands);
          nbytes += 20;
        }
      else
        {
          output_asm_insn ("ldo R'%2(%%r1),%%r26", xoperands);
          nbytes += 24;
        }
    }
  else
    {
      if (!val_14)
        output_asm_insn ("addil L'%2,%%r26", xoperands);

      output_asm_insn ("ldil L'%0,%%r22", xoperands);
      output_asm_insn ("be R'%0(%%sr4,%%r22)", xoperands);

      if (val_14)
        {
          output_asm_insn ("ldo %2(%%r26),%%r26", xoperands);
          nbytes += 12;
        }
      else
        {
          output_asm_insn ("ldo R'%2(%%r1),%%r26", xoperands);
          nbytes += 16;
        }
    }

  fprintf (file, "\t.EXIT\n\t.PROCEND\n");

  if (TARGET_SOM && TARGET_GAS)
    {
      /* We're done with this subspace except possibly for some additional
         debug information.  Forget that we are in this subspace to ensure
         that the next function is output in its own subspace.  */
      in_section = NULL;
      cfun->machine->in_nsubspa = 2;
    }

  if (TARGET_SOM && flag_pic && TREE_PUBLIC (function))
    {
      switch_to_section (data_section);
      output_asm_insn (".align 4", xoperands);
      ASM_OUTPUT_LABEL (file, label);
      output_asm_insn (".word P'%0", xoperands);
    }

  current_thunk_number++;
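  /* Round the thunk's size up to a multiple of FUNCTION_BOUNDARY so
     LAST_ADDRESS tracks the aligned start of the next function; a
     wrapped counter is pinned at UINT_MAX below.  */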
  nbytes = ((nbytes + FUNCTION_BOUNDARY / BITS_PER_UNIT - 1)
            & ~(FUNCTION_BOUNDARY / BITS_PER_UNIT - 1));
  last_address += nbytes;
  if (old_last_address > last_address)
    last_address = UINT_MAX;
  update_total_code_bytes (nbytes);
}

/* Only direct calls to static functions are allowed to be sibling (tail)
   call optimized.

   This restriction is necessary because some linker-generated stubs will
   in some cases store return pointers into rp', which might clobber a
   live value already in rp'.

   In a sibcall the current function and the target function share stack
   space.  Thus if the path to the current function and the path to the
   target function save a value in rp', they save the value into the
   same stack slot, which has undesirable consequences.

   Because of the deferred binding nature of shared libraries any function
   with external scope could be in a different load module and thus require
   rp' to be saved when calling that function.  So sibcall optimizations
   can only be safe for static functions.

   Note that GCC never needs return value relocations, so we don't have to
   worry about static calls with return value relocations (which require
   saving rp').

   It is safe to perform a sibcall optimization when the target function
   will never return.  */
static bool
pa_function_ok_for_sibcall (tree decl, tree exp ATTRIBUTE_UNUSED)
{
  if (TARGET_PORTABLE_RUNTIME)
    return false;

  /* Sibcalls are ok for TARGET_ELF32 as long as the linker is used in
     single subspace mode and the call is not indirect.  As far as I know,
     there is no operating system support for the multiple subspace mode.
     It might be possible to support indirect calls if we didn't use
     $$dyncall (see the indirect sequence generated in pa_output_call).  */
  if (TARGET_ELF32)
    return (decl != NULL_TREE);

  /* Sibcalls are not ok because the arg pointer register is not a fixed
     register.  This prevents the sibcall optimization from occurring.  In
     addition, there are problems with stub placement using GNU ld.  This
     is because a normal sibcall branch uses a 17-bit relocation while
     a regular call branch uses a 22-bit relocation.  As a result, more
     care needs to be taken in the placement of long-branch stubs.  */
  if (TARGET_64BIT)
    return false;

  /* Sibcalls are only ok within a translation unit.  */
  return (decl && !TREE_PUBLIC (decl));
}

/* ??? Addition is not commutative on the PA due to the weird implicit
   space register selection rules for memory addresses.  Therefore, we
   don't consider a + b == b + a, as this might be inside a MEM.  */
static bool
pa_commutative_p (const_rtx x, int outer_code)
{
  return (COMMUTATIVE_P (x)
          && (TARGET_NO_SPACE_REGS
              || (outer_code != UNKNOWN && outer_code != MEM)
              || GET_CODE (x) != PLUS));
}

/* Returns 1 if the 6 operands specified in OPERANDS are suitable for
   use in fmpyadd instructions.  */
int
pa_fmpyaddoperands (rtx *operands)
{
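  /* Operand layout assumed by the checks below: operands 0-2 are the
     multiply's destination and sources, operands 3-5 the add's.  */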
  enum machine_mode mode = GET_MODE (operands[0]);

  /* Must be a floating point mode.  */
  if (mode != SFmode && mode != DFmode)
    return 0;

  /* All modes must be the same.  */
  if (! (mode == GET_MODE (operands[1])
         && mode == GET_MODE (operands[2])
         && mode == GET_MODE (operands[3])
         && mode == GET_MODE (operands[4])
         && mode == GET_MODE (operands[5])))
    return 0;

  /* All operands must be registers.  */
  if (! (GET_CODE (operands[1]) == REG
         && GET_CODE (operands[2]) == REG
         && GET_CODE (operands[3]) == REG
         && GET_CODE (operands[4]) == REG
         && GET_CODE (operands[5]) == REG))
    return 0;

  /* Only 2 real operands to the addition.  One of the input operands must
     be the same as the output operand.  */
  if (! rtx_equal_p (operands[3], operands[4])
      && ! rtx_equal_p (operands[3], operands[5]))
    return 0;

  /* Inout operand of add cannot conflict with any operands from multiply.  */
  if (rtx_equal_p (operands[3], operands[0])
     || rtx_equal_p (operands[3], operands[1])
     || rtx_equal_p (operands[3], operands[2]))
    return 0;

  /* The multiply cannot feed into the addition operands.  */
  if (rtx_equal_p (operands[4], operands[0])
      || rtx_equal_p (operands[5], operands[0]))
    return 0;

  /* SFmode limits the registers to the upper 32 of the 32-bit FP regs.  */
  if (mode == SFmode
      && (REGNO_REG_CLASS (REGNO (operands[0])) != FPUPPER_REGS
          || REGNO_REG_CLASS (REGNO (operands[1])) != FPUPPER_REGS
          || REGNO_REG_CLASS (REGNO (operands[2])) != FPUPPER_REGS
          || REGNO_REG_CLASS (REGNO (operands[3])) != FPUPPER_REGS
          || REGNO_REG_CLASS (REGNO (operands[4])) != FPUPPER_REGS
          || REGNO_REG_CLASS (REGNO (operands[5])) != FPUPPER_REGS))
    return 0;

  /* Passed.  Operands are suitable for fmpyadd.  */
  return 1;
}

#if !defined(USE_COLLECT2)
static void
pa_asm_out_constructor (rtx symbol, int priority)
{
  if (!function_label_operand (symbol, VOIDmode))
    pa_encode_label (symbol);

#ifdef CTORS_SECTION_ASM_OP
  default_ctor_section_asm_out_constructor (symbol, priority);
#else
# ifdef TARGET_ASM_NAMED_SECTION
  default_named_section_asm_out_constructor (symbol, priority);
# else
  default_stabs_asm_out_constructor (symbol, priority);
# endif
#endif
}

static void
pa_asm_out_destructor (rtx symbol, int priority)
{
  if (!function_label_operand (symbol, VOIDmode))
    pa_encode_label (symbol);

#ifdef DTORS_SECTION_ASM_OP
  default_dtor_section_asm_out_destructor (symbol, priority);
#else
# ifdef TARGET_ASM_NAMED_SECTION
  default_named_section_asm_out_destructor (symbol, priority);
# else
  default_stabs_asm_out_destructor (symbol, priority);
# endif
#endif
}
#endif

/* This function places uninitialized global data in the bss section.
   The ASM_OUTPUT_ALIGNED_BSS macro needs to be defined to call this
   function on the SOM port to prevent uninitialized global data from
   being placed in the data section.  */

void
pa_asm_output_aligned_bss (FILE *stream,
                           const char *name,
                           unsigned HOST_WIDE_INT size,
                           unsigned int align)
{
  switch_to_section (bss_section);
  fprintf (stream, "\t.align %u\n", align / BITS_PER_UNIT);

#ifdef ASM_OUTPUT_TYPE_DIRECTIVE
  ASM_OUTPUT_TYPE_DIRECTIVE (stream, name, "object");
#endif

#ifdef ASM_OUTPUT_SIZE_DIRECTIVE
  ASM_OUTPUT_SIZE_DIRECTIVE (stream, name, size);
#endif

  fprintf (stream, "\t.align %u\n", align / BITS_PER_UNIT);
  ASM_OUTPUT_LABEL (stream, name);
  fprintf (stream, "\t.block "HOST_WIDE_INT_PRINT_UNSIGNED"\n", size);
}

/* Both the HP and GNU assemblers under HP-UX provide a .comm directive
   that doesn't allow the alignment of global common storage to be directly
   specified.  The SOM linker aligns common storage based on the rounded
   value of the NUM_BYTES parameter in the .comm directive.  It's not
   possible to use the .align directive as it doesn't affect the alignment
   of the label associated with a .comm directive.  */

void
pa_asm_output_aligned_common (FILE *stream,
                              const char *name,
                              unsigned HOST_WIDE_INT size,
                              unsigned int align)
{
  unsigned int max_common_align;

  max_common_align = TARGET_64BIT ? 128 : (size >= 4096 ? 256 : 64);
  if (align > max_common_align)
    {
      warning (0, "alignment (%u) for %s exceeds maximum alignment "
               "for global common data.  Using %u",
               align / BITS_PER_UNIT, name, max_common_align / BITS_PER_UNIT);
      align = max_common_align;
    }

  switch_to_section (bss_section);

  assemble_name (stream, name);
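  /* Requesting at least ALIGN bytes is what actually aligns the symbol:
     as noted above, the SOM linker derives the alignment from the
     rounded size.  */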
  fprintf (stream, "\t.comm "HOST_WIDE_INT_PRINT_UNSIGNED"\n",
           MAX (size, align / BITS_PER_UNIT));
}

/* We can't use .comm for local common storage as the SOM linker effectively
   treats the symbol as universal and uses the same storage for local symbols
   with the same name in different object files.  The .block directive
   reserves an uninitialized block of storage.  However, it's not common
   storage.  Fortunately, GCC never requests common storage with the same
   name in any given translation unit.  */

void
pa_asm_output_aligned_local (FILE *stream,
                             const char *name,
                             unsigned HOST_WIDE_INT size,
                             unsigned int align)
{
  switch_to_section (bss_section);
  fprintf (stream, "\t.align %u\n", align / BITS_PER_UNIT);

#ifdef LOCAL_ASM_OP
  fprintf (stream, "%s", LOCAL_ASM_OP);
  assemble_name (stream, name);
  fprintf (stream, "\n");
#endif

  ASM_OUTPUT_LABEL (stream, name);
  fprintf (stream, "\t.block "HOST_WIDE_INT_PRINT_UNSIGNED"\n", size);
}

/* Returns 1 if the 6 operands specified in OPERANDS are suitable for
   use in fmpysub instructions.  */
int
pa_fmpysuboperands (rtx *operands)
{
  enum machine_mode mode = GET_MODE (operands[0]);

  /* Must be a floating point mode.  */
  if (mode != SFmode && mode != DFmode)
    return 0;

  /* All modes must be the same.  */
  if (! (mode == GET_MODE (operands[1])
         && mode == GET_MODE (operands[2])
         && mode == GET_MODE (operands[3])
         && mode == GET_MODE (operands[4])
         && mode == GET_MODE (operands[5])))
    return 0;

  /* All operands must be registers.  */
  if (! (GET_CODE (operands[1]) == REG
         && GET_CODE (operands[2]) == REG
         && GET_CODE (operands[3]) == REG
         && GET_CODE (operands[4]) == REG
         && GET_CODE (operands[5]) == REG))
    return 0;

  /* Only 2 real operands to the subtraction.  Subtraction is not a commutative
     operation, so operands[4] must be the same as operands[3].  */
  if (! rtx_equal_p (operands[3], operands[4]))
    return 0;

  /* The multiply cannot feed into the subtraction.  */
  if (rtx_equal_p (operands[5], operands[0]))
    return 0;

  /* Inout operand of sub cannot conflict with any operands from multiply.  */
  if (rtx_equal_p (operands[3], operands[0])
     || rtx_equal_p (operands[3], operands[1])
     || rtx_equal_p (operands[3], operands[2]))
    return 0;

  /* SFmode limits the registers to the upper 32 of the 32-bit FP regs.  */
  if (mode == SFmode
      && (REGNO_REG_CLASS (REGNO (operands[0])) != FPUPPER_REGS
          || REGNO_REG_CLASS (REGNO (operands[1])) != FPUPPER_REGS
          || REGNO_REG_CLASS (REGNO (operands[2])) != FPUPPER_REGS
          || REGNO_REG_CLASS (REGNO (operands[3])) != FPUPPER_REGS
          || REGNO_REG_CLASS (REGNO (operands[4])) != FPUPPER_REGS
          || REGNO_REG_CLASS (REGNO (operands[5])) != FPUPPER_REGS))
    return 0;

  /* Passed.  Operands are suitable for fmpysub.  */
  return 1;
}

/* Return 1 if the given constant is 2, 4, or 8.  These are the valid
   constants for shadd instructions.  */
int
pa_shadd_constant_p (int val)
{
  if (val == 2 || val == 4 || val == 8)
    return 1;
  else
    return 0;
}

/* Return TRUE if INSN branches forward.  */

static bool
forward_branch_p (rtx insn)
{
  rtx lab = JUMP_LABEL (insn);

  /* The INSN must have a jump label.  */
  gcc_assert (lab != NULL_RTX);

  if (INSN_ADDRESSES_SET_P ())
    return INSN_ADDRESSES (INSN_UID (lab)) > INSN_ADDRESSES (INSN_UID (insn));

  while (insn)
8739
    {
8740
      if (insn == lab)
8741
        return true;
8742
      else
8743
        insn = NEXT_INSN (insn);
8744
    }
8745
 
8746
  return false;
8747
}

/* Return 1 if INSN is in the delay slot of a call instruction.  */
int
pa_jump_in_call_delay (rtx insn)
{
  if (GET_CODE (insn) != JUMP_INSN)
    return 0;

  if (PREV_INSN (insn)
      && PREV_INSN (PREV_INSN (insn))
      && GET_CODE (next_real_insn (PREV_INSN (PREV_INSN (insn)))) == INSN)
    {
      rtx test_insn = next_real_insn (PREV_INSN (PREV_INSN (insn)));

      return (GET_CODE (PATTERN (test_insn)) == SEQUENCE
              && XVECEXP (PATTERN (test_insn), 0, 1) == insn);
    }
  else
    return 0;
}

/* Output an unconditional move and branch insn.  */

const char *
pa_output_parallel_movb (rtx *operands, rtx insn)
{
  int length = get_attr_length (insn);

  /* These are the cases in which we win.  */
  if (length == 4)
    return "mov%I1b,tr %1,%0,%2";

  /* None of the following cases win, but they don't lose either.  */
  if (length == 8)
    {
      if (dbr_sequence_length () == 0)
        {
          /* Nothing in the delay slot, fake it by putting the combined
             insn (the copy or add) in the delay slot of a bl.  */
          if (GET_CODE (operands[1]) == CONST_INT)
            return "b %2\n\tldi %1,%0";
          else
            return "b %2\n\tcopy %1,%0";
        }
      else
        {
          /* Something in the delay slot, but we've got a long branch.  */
          if (GET_CODE (operands[1]) == CONST_INT)
            return "ldi %1,%0\n\tb %2";
          else
            return "copy %1,%0\n\tb %2";
        }
    }

  if (GET_CODE (operands[1]) == CONST_INT)
    output_asm_insn ("ldi %1,%0", operands);
  else
    output_asm_insn ("copy %1,%0", operands);
  return pa_output_lbranch (operands[2], insn, 1);
}
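
/* For example, when the branch target is in range (length == 4), the
   copy and the unconditional jump collapse into one instruction along
   the lines of

     movb,tr %r4,%r3,L$0010

   where the ",tr" completer makes the branch always taken (register
   numbers and label are illustrative).  */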

/* Output an unconditional add and branch insn.  */

const char *
pa_output_parallel_addb (rtx *operands, rtx insn)
{
  int length = get_attr_length (insn);

  /* To make life easy we want operand0 to be the shared input/output
     operand and operand1 to be the readonly operand.  */
  if (operands[0] == operands[1])
    operands[1] = operands[2];

  /* These are the cases in which we win.  */
  if (length == 4)
    return "add%I1b,tr %1,%0,%3";

  /* None of the following cases win, but they don't lose either.  */
  if (length == 8)
    {
      if (dbr_sequence_length () == 0)
        /* Nothing in the delay slot, fake it by putting the combined
           insn (the copy or add) in the delay slot of a bl.  */
        return "b %3\n\tadd%I1 %1,%0,%0";
      else
        /* Something in the delay slot, but we've got a long branch.  */
        return "add%I1 %1,%0,%0\n\tb %3";
    }

  output_asm_insn ("add%I1 %1,%0,%0", operands);
  return pa_output_lbranch (operands[3], insn, 1);
}

/* Return nonzero if INSN (a jump insn) immediately follows a call
   to a named function.  This is used to avoid filling the delay slot
   of the jump since it can usually be eliminated by modifying RP in
   the delay slot of the call.  */

int
pa_following_call (rtx insn)
{
  if (! TARGET_JUMP_IN_DELAY)
    return 0;

  /* Find the previous real insn, skipping NOTEs.  */
  insn = PREV_INSN (insn);
  while (insn && GET_CODE (insn) == NOTE)
    insn = PREV_INSN (insn);

  /* Check for CALL_INSNs and millicode calls.  */
  if (insn
      && ((GET_CODE (insn) == CALL_INSN
           && get_attr_type (insn) != TYPE_DYNCALL)
          || (GET_CODE (insn) == INSN
              && GET_CODE (PATTERN (insn)) != SEQUENCE
              && GET_CODE (PATTERN (insn)) != USE
              && GET_CODE (PATTERN (insn)) != CLOBBER
              && get_attr_type (insn) == TYPE_MILLI)))
    return 1;

  return 0;
}

/* We use this hook to perform a PA specific optimization which is difficult
   to do in earlier passes.

   We want the delay slots of branches within jump tables to be filled.
   None of the compiler passes at the moment even has the notion that a
   PA jump table doesn't contain addresses, but instead contains actual
   instructions!

   Because we actually jump into the table, the addresses of each entry
   must stay constant in relation to the beginning of the table (which
   itself must stay constant relative to the instruction to jump into
   it).  I don't believe we can guarantee earlier passes of the compiler
   will adhere to those rules.

   So, late in the compilation process we find all the jump tables, and
   expand them into real code -- e.g. each entry in the jump table vector
   will get an appropriate label followed by a jump to the final target.

   Reorg and the final jump pass can then optimize these branches and
   fill their delay slots.  We end up with smaller, more efficient code.

   The jump instructions within the table are special; we must be able
   to identify them during assembly output (if the jumps don't get filled
   we need to emit a nop rather than nullifying the delay slot).  We
   identify jumps in switch tables by using insns with the attribute
   type TYPE_BTABLE_BRANCH.

   We also surround the jump table itself with BEGIN_BRTAB and END_BRTAB
   insns.  This serves two purposes: first, it prevents jump.c from
   noticing that the last N entries in the table jump to the instruction
   immediately after the table and deleting the jumps.  Second, those
   insns mark where we should emit .begin_brtab and .end_brtab directives
   when using GAS (allows for better link time optimizations).  */
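
/* As a sketch (labels invented for illustration), a three-entry table
   that once held code-label addresses is rewritten into real branches:

        .begin_brtab
   L$0100:      b L$case0
                nop
   L$0101:      b L$case1
                nop
   L$0102:      b L$case2
                nop
        .end_brtab

   so the computed jump lands on branch instructions whose delay slots
   reorg can then try to fill.  */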

static void
pa_reorg (void)
{
  rtx insn;

  remove_useless_addtr_insns (1);

  if (pa_cpu < PROCESSOR_8000)
    pa_combine_instructions ();

  /* This is fairly cheap, so always run it if optimizing.  */
  if (optimize > 0 && !TARGET_BIG_SWITCH)
    {
      /* Find and explode all ADDR_VEC or ADDR_DIFF_VEC insns.  */
      for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
        {
          rtx pattern, tmp, location, label;
          unsigned int length, i;

          /* Find an ADDR_VEC or ADDR_DIFF_VEC insn to explode.  */
          if (GET_CODE (insn) != JUMP_INSN
              || (GET_CODE (PATTERN (insn)) != ADDR_VEC
                  && GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC))
            continue;

          /* Emit marker for the beginning of the branch table.  */
          emit_insn_before (gen_begin_brtab (), insn);

          pattern = PATTERN (insn);
          location = PREV_INSN (insn);
          length = XVECLEN (pattern, GET_CODE (pattern) == ADDR_DIFF_VEC);

          for (i = 0; i < length; i++)
            {
              /* Emit a label before each jump to keep jump.c from
                 removing this code.  */
              tmp = gen_label_rtx ();
              LABEL_NUSES (tmp) = 1;
              emit_label_after (tmp, location);
              location = NEXT_INSN (location);

              if (GET_CODE (pattern) == ADDR_VEC)
                label = XEXP (XVECEXP (pattern, 0, i), 0);
              else
                label = XEXP (XVECEXP (pattern, 1, i), 0);

              tmp = gen_short_jump (label);

              /* Emit the jump itself.  */
              tmp = emit_jump_insn_after (tmp, location);
              JUMP_LABEL (tmp) = label;
              LABEL_NUSES (label)++;
              location = NEXT_INSN (location);

              /* Emit a BARRIER after the jump.  */
              emit_barrier_after (location);
              location = NEXT_INSN (location);
            }

          /* Emit marker for the end of the branch table.  */
          emit_insn_before (gen_end_brtab (), location);
          location = NEXT_INSN (location);
          emit_barrier_after (location);

          /* Delete the ADDR_VEC or ADDR_DIFF_VEC.  */
          delete_insn (insn);
        }
    }
  else
    {
      /* Still need brtab marker insns.  FIXME: the presence of these
         markers disables output of the branch table to readonly memory,
         and any alignment directives that might be needed.  Possibly,
         the begin_brtab insn should be output before the label for the
         table.  This doesn't matter at the moment since the tables are
         always output in the text section.  */
      for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
        {
          /* Find an ADDR_VEC insn.  */
          if (GET_CODE (insn) != JUMP_INSN
              || (GET_CODE (PATTERN (insn)) != ADDR_VEC
                  && GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC))
            continue;

          /* Now generate markers for the beginning and end of the
             branch table.  */
          emit_insn_before (gen_begin_brtab (), insn);
          emit_insn_after (gen_end_brtab (), insn);
        }
    }
}

/* The PA has a number of odd instructions which can perform multiple
   tasks at once.  On first generation PA machines (PA1.0 and PA1.1)
   it may be profitable to combine two instructions into one instruction
   with two outputs.  It's not profitable on PA2.0 machines because the
   two outputs would take two slots in the reorder buffers.

   This routine finds instructions which can be combined and combines
   them.  We only support some of the potential combinations, and we
   only try common ways to find suitable instructions.

      * addb can add two registers or a register and a small integer
      and jump to a nearby (+-8k) location.  Normally the jump to the
      nearby location is conditional on the result of the add, but by
      using the "true" condition we can make the jump unconditional.
      Thus addb can perform two independent operations in one insn.

      * movb is similar to addb in that it can perform a reg->reg
      or small immediate->reg copy and jump to a nearby (+-8k) location.

      * fmpyadd and fmpysub can perform a FP multiply and either an
      FP add or FP sub if the operands of the multiply and add/sub are
      independent (there are other minor restrictions).  Note both
      the fmpy and fadd/fsub can in theory move to better spots according
      to data dependencies, but for now we require the fmpy stay at a
      fixed location.

      * Many of the memory operations can perform pre & post updates
      of index registers.  GCC's pre/post increment/decrement addressing
      is far too simple to take advantage of all the possibilities.  This
      pass may not be suitable since those insns may not be independent.

      * comclr can compare two ints or an int and a register, nullify
      the following instruction and zero some other register.  This
      is more difficult to use as it's harder to find an insn which
      will generate a comclr than finding something like an unconditional
      branch.  (conditional moves & long branches create comclr insns).

      * Most arithmetic operations can conditionally skip the next
      instruction.  They can be viewed as "perform this operation
      and conditionally jump to this nearby location" (where nearby
      is an insn away).  These are difficult to use due to the
      branch length restrictions.  */
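
/* A minimal sketch of the floating-point case (register numbers
   hypothetical):

     fmpy,dbl fr8,fr9,fr10     ; fr10 = fr8 * fr9
     fadd,dbl fr11,fr12,fr11   ; fr11 = fr11 + fr12

   The two insns share no registers, so they can be fused into a single
   two-output fmpyadd instruction; the exact operand order is dictated
   by the fmpyadd/fmpysub patterns in pa.md.  */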

static void
pa_combine_instructions (void)
{
  rtx anchor, new_rtx;

  /* This can get expensive since the basic algorithm is on the
     order of O(n^2) (or worse).  Only do it for -O2 or higher
     levels of optimization.  */
  if (optimize < 2)
    return;

  /* Walk down the list of insns looking for "anchor" insns which
     may be combined with "floating" insns.  As the name implies,
     "anchor" instructions don't move, while "floating" insns may
     move around.  */
  new_rtx = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, NULL_RTX, NULL_RTX));
  new_rtx = make_insn_raw (new_rtx);

  for (anchor = get_insns (); anchor; anchor = NEXT_INSN (anchor))
    {
      enum attr_pa_combine_type anchor_attr;
      enum attr_pa_combine_type floater_attr;

      /* We only care about INSNs, JUMP_INSNs, and CALL_INSNs.
         Also ignore any special USE insns.  */
      if ((GET_CODE (anchor) != INSN
          && GET_CODE (anchor) != JUMP_INSN
          && GET_CODE (anchor) != CALL_INSN)
          || GET_CODE (PATTERN (anchor)) == USE
          || GET_CODE (PATTERN (anchor)) == CLOBBER
          || GET_CODE (PATTERN (anchor)) == ADDR_VEC
          || GET_CODE (PATTERN (anchor)) == ADDR_DIFF_VEC)
        continue;

      anchor_attr = get_attr_pa_combine_type (anchor);
      /* See if anchor is an insn suitable for combination.  */
      if (anchor_attr == PA_COMBINE_TYPE_FMPY
          || anchor_attr == PA_COMBINE_TYPE_FADDSUB
          || (anchor_attr == PA_COMBINE_TYPE_UNCOND_BRANCH
              && ! forward_branch_p (anchor)))
        {
          rtx floater;

          for (floater = PREV_INSN (anchor);
               floater;
               floater = PREV_INSN (floater))
            {
              if (GET_CODE (floater) == NOTE
                  || (GET_CODE (floater) == INSN
                      && (GET_CODE (PATTERN (floater)) == USE
                          || GET_CODE (PATTERN (floater)) == CLOBBER)))
                continue;

              /* Anything except a regular INSN will stop our search.  */
              if (GET_CODE (floater) != INSN
                  || GET_CODE (PATTERN (floater)) == ADDR_VEC
                  || GET_CODE (PATTERN (floater)) == ADDR_DIFF_VEC)
                {
                  floater = NULL_RTX;
                  break;
                }

              /* See if FLOATER is suitable for combination with the
                 anchor.  */
              floater_attr = get_attr_pa_combine_type (floater);
              if ((anchor_attr == PA_COMBINE_TYPE_FMPY
                   && floater_attr == PA_COMBINE_TYPE_FADDSUB)
                  || (anchor_attr == PA_COMBINE_TYPE_FADDSUB
                      && floater_attr == PA_COMBINE_TYPE_FMPY))
                {
                  /* If ANCHOR and FLOATER can be combined, then we're
                     done with this pass.  */
                  if (pa_can_combine_p (new_rtx, anchor, floater, 0,
                                        SET_DEST (PATTERN (floater)),
                                        XEXP (SET_SRC (PATTERN (floater)), 0),
                                        XEXP (SET_SRC (PATTERN (floater)), 1)))
                    break;
                }

              else if (anchor_attr == PA_COMBINE_TYPE_UNCOND_BRANCH
                       && floater_attr == PA_COMBINE_TYPE_ADDMOVE)
                {
                  if (GET_CODE (SET_SRC (PATTERN (floater))) == PLUS)
                    {
                      if (pa_can_combine_p (new_rtx, anchor, floater, 0,
                                            SET_DEST (PATTERN (floater)),
                                        XEXP (SET_SRC (PATTERN (floater)), 0),
                                        XEXP (SET_SRC (PATTERN (floater)), 1)))
                        break;
                    }
                  else
                    {
                      if (pa_can_combine_p (new_rtx, anchor, floater, 0,
                                            SET_DEST (PATTERN (floater)),
                                            SET_SRC (PATTERN (floater)),
                                            SET_SRC (PATTERN (floater))))
                        break;
                    }
                }
            }

          /* If we didn't find anything on the backwards scan try forwards.  */
          if (!floater
              && (anchor_attr == PA_COMBINE_TYPE_FMPY
                  || anchor_attr == PA_COMBINE_TYPE_FADDSUB))
            {
              for (floater = anchor; floater; floater = NEXT_INSN (floater))
                {
                  if (GET_CODE (floater) == NOTE
                      || (GET_CODE (floater) == INSN
                          && (GET_CODE (PATTERN (floater)) == USE
                              || GET_CODE (PATTERN (floater)) == CLOBBER)))
                    continue;

                  /* Anything except a regular INSN will stop our search.  */
                  if (GET_CODE (floater) != INSN
                      || GET_CODE (PATTERN (floater)) == ADDR_VEC
                      || GET_CODE (PATTERN (floater)) == ADDR_DIFF_VEC)
                    {
                      floater = NULL_RTX;
                      break;
                    }

                  /* See if FLOATER is suitable for combination with the
                     anchor.  */
                  floater_attr = get_attr_pa_combine_type (floater);
                  if ((anchor_attr == PA_COMBINE_TYPE_FMPY
                       && floater_attr == PA_COMBINE_TYPE_FADDSUB)
                      || (anchor_attr == PA_COMBINE_TYPE_FADDSUB
                          && floater_attr == PA_COMBINE_TYPE_FMPY))
                    {
                      /* If ANCHOR and FLOATER can be combined, then we're
                         done with this pass.  */
                      if (pa_can_combine_p (new_rtx, anchor, floater, 1,
                                            SET_DEST (PATTERN (floater)),
                                            XEXP (SET_SRC (PATTERN (floater)),
                                                  0),
                                            XEXP (SET_SRC (PATTERN (floater)),
                                                  1)))
                        break;
                    }
                }
            }

          /* FLOATER will be nonzero if we found a suitable floating
             insn for combination with ANCHOR.  */
          if (floater
              && (anchor_attr == PA_COMBINE_TYPE_FADDSUB
                  || anchor_attr == PA_COMBINE_TYPE_FMPY))
            {
              /* Emit the new instruction and delete the old anchor.  */
              emit_insn_before (gen_rtx_PARALLEL
                                (VOIDmode,
                                 gen_rtvec (2, PATTERN (anchor),
                                            PATTERN (floater))),
                                anchor);

              SET_INSN_DELETED (anchor);

              /* Emit a special USE insn for FLOATER, then delete
                 the floating insn.  */
              emit_insn_before (gen_rtx_USE (VOIDmode, floater), floater);
              delete_insn (floater);

              continue;
            }
          else if (floater
                   && anchor_attr == PA_COMBINE_TYPE_UNCOND_BRANCH)
            {
              rtx temp;
              /* Emit the new_jump instruction and delete the old anchor.  */
              temp
                = emit_jump_insn_before (gen_rtx_PARALLEL
                                         (VOIDmode,
                                          gen_rtvec (2, PATTERN (anchor),
                                                     PATTERN (floater))),
                                         anchor);

              JUMP_LABEL (temp) = JUMP_LABEL (anchor);
              SET_INSN_DELETED (anchor);

              /* Emit a special USE insn for FLOATER, then delete
                 the floating insn.  */
              emit_insn_before (gen_rtx_USE (VOIDmode, floater), floater);
              delete_insn (floater);
              continue;
            }
        }
    }
}

static int
pa_can_combine_p (rtx new_rtx, rtx anchor, rtx floater, int reversed, rtx dest,
                  rtx src1, rtx src2)
{
  int insn_code_number;
  rtx start, end;

  /* Create a PARALLEL with the patterns of ANCHOR and
     FLOATER, try to recognize it, then test constraints
     for the resulting pattern.

     If the pattern doesn't match or the constraints
     aren't met keep searching for a suitable floater
     insn.  */
  XVECEXP (PATTERN (new_rtx), 0, 0) = PATTERN (anchor);
  XVECEXP (PATTERN (new_rtx), 0, 1) = PATTERN (floater);
  INSN_CODE (new_rtx) = -1;
  insn_code_number = recog_memoized (new_rtx);
  if (insn_code_number < 0
      || (extract_insn (new_rtx), ! constrain_operands (1)))
    return 0;

  if (reversed)
    {
      start = anchor;
      end = floater;
    }
  else
    {
      start = floater;
      end = anchor;
    }

  /* There are up to three operands to consider.  One
     output and two inputs.

     The output must not be used between FLOATER & ANCHOR
     exclusive.  The inputs must not be set between
     FLOATER and ANCHOR exclusive.  */

  if (reg_used_between_p (dest, start, end))
    return 0;

  if (reg_set_between_p (src1, start, end))
    return 0;

  if (reg_set_between_p (src2, start, end))
    return 0;

  /* If we get here, then everything is good.  */
  return 1;
}
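
/* For instance (registers hypothetical), if FLOATER computes
   fr11 = fr11 + fr12 and some insn between FLOATER and ANCHOR sets
   fr12, the reg_set_between_p test on SRC2 rejects the combination;
   a use of the destination fr11 in that window is rejected by
   reg_used_between_p in the same way.  */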

/* Return nonzero if references for INSN are delayed.

   Millicode insns are actually function calls with some special
   constraints on arguments and register usage.

   Millicode calls always expect their arguments in the integer argument
   registers, and always return their result in %r29 (ret1).  They
   are expected to clobber their arguments, %r1, %r29, and the return
   pointer which is %r31 on 32-bit and %r2 on 64-bit, and nothing else.

   This function tells reorg that the references to arguments and
   millicode calls do not appear to happen until after the millicode call.
   This allows reorg to put insns which set the argument registers into the
   delay slot of the millicode call -- thus they act more like traditional
   CALL_INSNs.

   Note we cannot consider side effects of the insn to be delayed because
   the branch and link insn will clobber the return pointer.  If we happened
   to use the return pointer in the delay slot of the call, then we lose.

   get_attr_type will try to recognize the given insn, so make sure to
   filter out things it will not accept -- SEQUENCE, USE and CLOBBER insns
   in particular.  */
int
pa_insn_refs_are_delayed (rtx insn)
{
  return ((GET_CODE (insn) == INSN
           && GET_CODE (PATTERN (insn)) != SEQUENCE
           && GET_CODE (PATTERN (insn)) != USE
           && GET_CODE (PATTERN (insn)) != CLOBBER
           && get_attr_type (insn) == TYPE_MILLI));
}
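
/* For example (illustrative), reorg may rewrite

     ldi 10,%r26
     bl $$mulI,%r31

   as a millicode call with the argument setup in its delay slot:

     bl $$mulI,%r31
     ldi 10,%r26

   precisely because this hook reports the argument references as
   delayed.  */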

/* Promote the return value, but not the arguments.  */

static enum machine_mode
pa_promote_function_mode (const_tree type ATTRIBUTE_UNUSED,
                          enum machine_mode mode,
                          int *punsignedp ATTRIBUTE_UNUSED,
                          const_tree fntype ATTRIBUTE_UNUSED,
                          int for_return)
{
  if (for_return == 0)
    return mode;
  return promote_mode (type, mode, punsignedp);
}

/* On the HP-PA the value is found in register(s) 28(-29), unless
   the mode is SF or DF.  Then the value is returned in fr4 (32).

   This must perform the same promotions as PROMOTE_MODE, else promoting
   return values in TARGET_PROMOTE_FUNCTION_MODE will not work correctly.

   Small structures must be returned in a PARALLEL on PA64 in order
   to match the HP Compiler ABI.  */

static rtx
pa_function_value (const_tree valtype,
                   const_tree func ATTRIBUTE_UNUSED,
                   bool outgoing ATTRIBUTE_UNUSED)
{
  enum machine_mode valmode;

  if (AGGREGATE_TYPE_P (valtype)
      || TREE_CODE (valtype) == COMPLEX_TYPE
      || TREE_CODE (valtype) == VECTOR_TYPE)
    {
      if (TARGET_64BIT)
        {
          /* Aggregates with a size less than or equal to 128 bits are
             returned in GR 28(-29).  They are left justified.  The pad
             bits are undefined.  Larger aggregates are returned in
             memory.  */
          rtx loc[2];
          int i, offset = 0;
          int ub = int_size_in_bytes (valtype) <= UNITS_PER_WORD ? 1 : 2;

          for (i = 0; i < ub; i++)
            {
              loc[i] = gen_rtx_EXPR_LIST (VOIDmode,
                                          gen_rtx_REG (DImode, 28 + i),
                                          GEN_INT (offset));
              offset += 8;
            }

          return gen_rtx_PARALLEL (BLKmode, gen_rtvec_v (ub, loc));
        }
      else if (int_size_in_bytes (valtype) > UNITS_PER_WORD)
        {
          /* Aggregates 5 to 8 bytes in size are returned in general
             registers r28-r29 in the same manner as other non
             floating-point objects.  The data is right-justified and
             zero-extended to 64 bits.  This is opposite to the normal
             justification used on big endian targets and requires
             special treatment.  */
          rtx loc = gen_rtx_EXPR_LIST (VOIDmode,
                                       gen_rtx_REG (DImode, 28), const0_rtx);
          return gen_rtx_PARALLEL (BLKmode, gen_rtvec (1, loc));
        }
    }

  if ((INTEGRAL_TYPE_P (valtype)
       && GET_MODE_BITSIZE (TYPE_MODE (valtype)) < BITS_PER_WORD)
      || POINTER_TYPE_P (valtype))
    valmode = word_mode;
  else
    valmode = TYPE_MODE (valtype);

  if (TREE_CODE (valtype) == REAL_TYPE
      && !AGGREGATE_TYPE_P (valtype)
      && TYPE_MODE (valtype) != TFmode
      && !TARGET_SOFT_FLOAT)
    return gen_rtx_REG (valmode, 32);

  return gen_rtx_REG (valmode, 28);
}
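
/* Illustrative cases: on PA64 a 12-byte struct is returned left
   justified in a PARALLEL of DImode r28 at offset 0 and DImode r29 at
   offset 8; on the 32-bit target a 'double' comes back in fr4
   (register 32) and an 'int' in r28.  */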

/* Implement the TARGET_LIBCALL_VALUE hook.  */

static rtx
pa_libcall_value (enum machine_mode mode,
                  const_rtx fun ATTRIBUTE_UNUSED)
{
  if (! TARGET_SOFT_FLOAT
      && (mode == SFmode || mode == DFmode))
    return gen_rtx_REG (mode, 32);
  else
    return gen_rtx_REG (mode, 28);
}

/* Implement the TARGET_FUNCTION_VALUE_REGNO_P hook.  */

static bool
pa_function_value_regno_p (const unsigned int regno)
{
  if (regno == 28
      || (! TARGET_SOFT_FLOAT && regno == 32))
    return true;

  return false;
}

/* Update the data in CUM to advance over an argument
   of mode MODE and data type TYPE.
   (TYPE is null for libcalls where that information may not be available.)  */

static void
pa_function_arg_advance (cumulative_args_t cum_v, enum machine_mode mode,
                         const_tree type, bool named ATTRIBUTE_UNUSED)
{
  CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
  int arg_size = FUNCTION_ARG_SIZE (mode, type);

  cum->nargs_prototype--;
  cum->words += (arg_size
                 + ((cum->words & 01)
                    && type != NULL_TREE
                    && arg_size > 1));
}

/* Return the location of a parameter that is passed in a register or NULL
   if the parameter has any component that is passed in memory.

   This is new code and will be pushed into the net sources after
   further testing.

   ??? We might want to restructure this so that it looks more like other
   ports.  */
static rtx
pa_function_arg (cumulative_args_t cum_v, enum machine_mode mode,
                 const_tree type, bool named ATTRIBUTE_UNUSED)
{
  CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
  int max_arg_words = (TARGET_64BIT ? 8 : 4);
  int alignment = 0;
  int arg_size;
  int fpr_reg_base;
  int gpr_reg_base;
  rtx retval;

  if (mode == VOIDmode)
    return NULL_RTX;

  arg_size = FUNCTION_ARG_SIZE (mode, type);

  /* If this arg would be passed partially or totally on the stack, then
     this routine should return zero.  pa_arg_partial_bytes will
     handle arguments which are split between regs and stack slots if
     the ABI mandates split arguments.  */
  if (!TARGET_64BIT)
    {
      /* The 32-bit ABI does not split arguments.  */
      if (cum->words + arg_size > max_arg_words)
        return NULL_RTX;
    }
  else
    {
      if (arg_size > 1)
        alignment = cum->words & 1;
      if (cum->words + alignment >= max_arg_words)
        return NULL_RTX;
    }

  /* The 32bit ABIs and the 64bit ABIs are rather different,
     particularly in their handling of FP registers.  We might
     be able to cleverly share code between them, but I'm not
     going to bother in the hope that splitting them up results
     in code that is more easily understood.  */

  if (TARGET_64BIT)
    {
      /* Advance the base registers to their current locations.

         Remember, gprs grow towards smaller register numbers while
         fprs grow to higher register numbers.  Also remember that
         although FP regs are 32-bit addressable, we pretend that
         the registers are 64-bits wide.  */
      gpr_reg_base = 26 - cum->words;
      fpr_reg_base = 32 + cum->words;

      /* Arguments wider than one word and small aggregates need special
         treatment.  */
      if (arg_size > 1
          || mode == BLKmode
          || (type && (AGGREGATE_TYPE_P (type)
                       || TREE_CODE (type) == COMPLEX_TYPE
                       || TREE_CODE (type) == VECTOR_TYPE)))
        {
          /* Double-extended precision (80-bit), quad-precision (128-bit)
             and aggregates including complex numbers are aligned on
             128-bit boundaries.  The first eight 64-bit argument slots
             are associated one-to-one, with general registers r26
             through r19, and also with floating-point registers fr4
             through fr11.  Arguments larger than one word are always
             passed in general registers.

             Using a PARALLEL with a word mode register results in left
             justified data on a big-endian target.  */

          rtx loc[8];
          int i, offset = 0, ub = arg_size;

          /* Align the base register.  */
          gpr_reg_base -= alignment;

          ub = MIN (ub, max_arg_words - cum->words - alignment);
          for (i = 0; i < ub; i++)
            {
              loc[i] = gen_rtx_EXPR_LIST (VOIDmode,
                                          gen_rtx_REG (DImode, gpr_reg_base),
                                          GEN_INT (offset));
              gpr_reg_base -= 1;
              offset += 8;
            }

          return gen_rtx_PARALLEL (mode, gen_rtvec_v (ub, loc));
        }
    }
  else
    {
      /* If the argument is larger than a word, then we know precisely
         which registers we must use.  */
      if (arg_size > 1)
        {
          if (cum->words)
            {
              gpr_reg_base = 23;
              fpr_reg_base = 38;
            }
          else
            {
              gpr_reg_base = 25;
              fpr_reg_base = 34;
            }

          /* Structures 5 to 8 bytes in size are passed in the general
             registers in the same manner as other non floating-point
             objects.  The data is right-justified and zero-extended
             to 64 bits.  This is opposite to the normal justification
             used on big endian targets and requires special treatment.
             We now define BLOCK_REG_PADDING to pad these objects.
             Aggregates, complex and vector types are passed in the same
             manner as structures.  */
          if (mode == BLKmode
              || (type && (AGGREGATE_TYPE_P (type)
                           || TREE_CODE (type) == COMPLEX_TYPE
                           || TREE_CODE (type) == VECTOR_TYPE)))
            {
              rtx loc = gen_rtx_EXPR_LIST (VOIDmode,
                                           gen_rtx_REG (DImode, gpr_reg_base),
                                           const0_rtx);
              return gen_rtx_PARALLEL (BLKmode, gen_rtvec (1, loc));
            }
        }
      else
        {
           /* We have a single word (32 bits).  A simple computation
              will get us the register #s we need.  */
           gpr_reg_base = 26 - cum->words;
           fpr_reg_base = 32 + 2 * cum->words;
        }
    }

  /* Determine if the argument needs to be passed in both general and
     floating point registers.  */
  if (((TARGET_PORTABLE_RUNTIME || TARGET_64BIT || TARGET_ELF32)
       /* If we are doing soft-float with portable runtime, then there
          is no need to worry about FP regs.  */
       && !TARGET_SOFT_FLOAT
       /* The parameter must be some kind of scalar float, else we just
          pass it in integer registers.  */
       && GET_MODE_CLASS (mode) == MODE_FLOAT
       /* The target function must not have a prototype.  */
       && cum->nargs_prototype <= 0
       /* libcalls do not need to pass items in both FP and general
          registers.  */
       && type != NULL_TREE
       /* All this hair applies to "outgoing" args only.  This includes
          sibcall arguments setup with FUNCTION_INCOMING_ARG.  */
       && !cum->incoming)
      /* Also pass outgoing floating arguments in both registers in indirect
         calls with the 32 bit ABI and the HP assembler since there is no
         way to specify the argument locations in static functions.  */
      || (!TARGET_64BIT
          && !TARGET_GAS
          && !cum->incoming
          && cum->indirect
          && GET_MODE_CLASS (mode) == MODE_FLOAT))
    {
      retval
        = gen_rtx_PARALLEL
            (mode,
             gen_rtvec (2,
                        gen_rtx_EXPR_LIST (VOIDmode,
                                           gen_rtx_REG (mode, fpr_reg_base),
                                           const0_rtx),
                        gen_rtx_EXPR_LIST (VOIDmode,
                                           gen_rtx_REG (mode, gpr_reg_base),
                                           const0_rtx)));
    }
  else
    {
      /* See if we should pass this parameter in a general register.  */
      if (TARGET_SOFT_FLOAT
          /* Indirect calls in the normal 32bit ABI require all arguments
             to be passed in general registers.  */
          || (!TARGET_PORTABLE_RUNTIME
              && !TARGET_64BIT
              && !TARGET_ELF32
              && cum->indirect)
          /* If the parameter is not a scalar floating-point parameter,
             then it belongs in GPRs.  */
          || GET_MODE_CLASS (mode) != MODE_FLOAT
          /* Structure with single SFmode field belongs in GPR.  */
          || (type && AGGREGATE_TYPE_P (type)))
        retval = gen_rtx_REG (mode, gpr_reg_base);
      else
        retval = gen_rtx_REG (mode, fpr_reg_base);
    }
  return retval;
}
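
/* Illustrative outcomes: the first single-word integer argument lands
   in %r26; when the conditions above hold (e.g. an unprototyped callee
   on a 64-bit or ELF32 target), a scalar float is passed redundantly
   in both an FP register and the corresponding general register via
   the two-element PARALLEL, so the callee can pick it up either way.  */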

/* Arguments larger than one word are double word aligned.  */

static unsigned int
pa_function_arg_boundary (enum machine_mode mode, const_tree type)
{
  bool singleword = (type
                     ? (integer_zerop (TYPE_SIZE (type))
                        || !TREE_CONSTANT (TYPE_SIZE (type))
                        || int_size_in_bytes (type) <= UNITS_PER_WORD)
                     : GET_MODE_SIZE (mode) <= UNITS_PER_WORD);

  return singleword ? PARM_BOUNDARY : MAX_PARM_BOUNDARY;
}

/* If this arg would be passed totally in registers or totally on the stack,
   then this routine should return zero.  */

static int
pa_arg_partial_bytes (cumulative_args_t cum_v, enum machine_mode mode,
                      tree type, bool named ATTRIBUTE_UNUSED)
{
  CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
  unsigned int max_arg_words = 8;
  unsigned int offset = 0;

  if (!TARGET_64BIT)
    return 0;

  if (FUNCTION_ARG_SIZE (mode, type) > 1 && (cum->words & 1))
    offset = 1;

  if (cum->words + offset + FUNCTION_ARG_SIZE (mode, type) <= max_arg_words)
    /* Arg fits fully into registers.  */
    return 0;
  else if (cum->words + offset >= max_arg_words)
    /* Arg fully on the stack.  */
    return 0;
  else
    /* Arg is split.  */
    return (max_arg_words - cum->words - offset) * UNITS_PER_WORD;
}
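
/* Worked example (TARGET_64BIT): with max_arg_words == 8, a four-word
   aggregate arriving at cum->words == 6 with no alignment padding has
   words 6 and 7 in registers and the rest on the stack, so the return
   value is (8 - 6 - 0) * UNITS_PER_WORD == 16 bytes in registers.  */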

/* A get_unnamed_section callback for switching to the text section.

   This function is only used with SOM.  Because we don't support
   named subspaces, we can only create a new subspace or switch back
   to the default text subspace.  */

static void
som_output_text_section_asm_op (const void *data ATTRIBUTE_UNUSED)
{
  gcc_assert (TARGET_SOM);
  if (TARGET_GAS)
    {
      if (cfun && cfun->machine && !cfun->machine->in_nsubspa)
        {
          /* We only want to emit a .nsubspa directive once at the
             start of the function.  */
          cfun->machine->in_nsubspa = 1;

          /* Create a new subspace for the text.  This provides
             better stub placement and one-only functions.  */
          if (cfun->decl
              && DECL_ONE_ONLY (cfun->decl)
              && !DECL_WEAK (cfun->decl))
            {
              output_section_asm_op ("\t.SPACE $TEXT$\n"
                                     "\t.NSUBSPA $CODE$,QUAD=0,ALIGN=8,"
                                     "ACCESS=44,SORT=24,COMDAT");
              return;
            }
        }
      else
        {
          /* There isn't a current function or the body of the current
             function has been completed.  So, we are changing to the
             text section to output debugging information.  Thus, we
             need to forget that we are in the text section so that
             varasm.c will call us when text_section is selected again.  */
          gcc_assert (!cfun || !cfun->machine
                      || cfun->machine->in_nsubspa == 2);
          in_section = NULL;
        }
      output_section_asm_op ("\t.SPACE $TEXT$\n\t.NSUBSPA $CODE$");
      return;
    }
  output_section_asm_op ("\t.SPACE $TEXT$\n\t.SUBSPA $CODE$");
}

/* A get_unnamed_section callback for switching to comdat data
   sections.  This function is only used with SOM.  */

static void
som_output_comdat_data_section_asm_op (const void *data)
{
  in_section = NULL;
  output_section_asm_op (data);
}

/* Implement TARGET_ASM_INITIALIZE_SECTIONS.  */

static void
pa_som_asm_init_sections (void)
{
  text_section
    = get_unnamed_section (0, som_output_text_section_asm_op, NULL);

  /* SOM puts readonly data in the default $LIT$ subspace when PIC code
     is not being generated.  */
  som_readonly_data_section
    = get_unnamed_section (0, output_section_asm_op,
                           "\t.SPACE $TEXT$\n\t.SUBSPA $LIT$");

  /* When secondary definitions are not supported, SOM makes readonly
     data one-only by creating a new $LIT$ subspace in $TEXT$ with
     the comdat flag.  */
  som_one_only_readonly_data_section
    = get_unnamed_section (0, som_output_comdat_data_section_asm_op,
                           "\t.SPACE $TEXT$\n"
                           "\t.NSUBSPA $LIT$,QUAD=0,ALIGN=8,"
                           "ACCESS=0x2c,SORT=16,COMDAT");

  /* When secondary definitions are not supported, SOM makes data one-only
     by creating a new $DATA$ subspace in $PRIVATE$ with the comdat flag.  */
  som_one_only_data_section
    = get_unnamed_section (SECTION_WRITE,
                           som_output_comdat_data_section_asm_op,
                           "\t.SPACE $PRIVATE$\n"
                           "\t.NSUBSPA $DATA$,QUAD=1,ALIGN=8,"
                           "ACCESS=31,SORT=24,COMDAT");

  if (flag_tm)
    som_tm_clone_table_section
      = get_unnamed_section (0, output_section_asm_op,
                             "\t.SPACE $PRIVATE$\n\t.SUBSPA $TM_CLONE_TABLE$");

  /* FIXME: HPUX ld generates incorrect GOT entries for "T" fixups
     which reference data within the $TEXT$ space (for example constant
     strings in the $LIT$ subspace).

     The assemblers (GAS and HP as) both have problems with handling
     the difference of two symbols which is the other correct way to
     reference constant data during PIC code generation.

     So, there's no way to reference constant data which is in the
     $TEXT$ space during PIC generation.  Instead place all constant
     data into the $PRIVATE$ subspace (this reduces sharing, but it
     works correctly).  */
  readonly_data_section = flag_pic ? data_section : som_readonly_data_section;

  /* We must not have a reference to an external symbol defined in a
     shared library in a readonly section, else the SOM linker will
     complain.

     So, we force exception information into the data section.  */
  exception_section = data_section;
}

/* Implement TARGET_ASM_TM_CLONE_TABLE_SECTION.  */

static section *
pa_som_tm_clone_table_section (void)
{
  return som_tm_clone_table_section;
}

/* On hpux10, the linker will give an error if we have a reference
   in the read-only data section to a symbol defined in a shared
   library.  Therefore, expressions that might require a reloc cannot
   be placed in the read-only data section.  */

static section *
pa_select_section (tree exp, int reloc,
                   unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
{
  if (TREE_CODE (exp) == VAR_DECL
      && TREE_READONLY (exp)
      && !TREE_THIS_VOLATILE (exp)
      && DECL_INITIAL (exp)
      && (DECL_INITIAL (exp) == error_mark_node
          || TREE_CONSTANT (DECL_INITIAL (exp)))
      && !reloc)
    {
      if (TARGET_SOM
          && DECL_ONE_ONLY (exp)
          && !DECL_WEAK (exp))
        return som_one_only_readonly_data_section;
      else
        return readonly_data_section;
    }
  else if (CONSTANT_CLASS_P (exp) && !reloc)
    return readonly_data_section;
  else if (TARGET_SOM
           && TREE_CODE (exp) == VAR_DECL
           && DECL_ONE_ONLY (exp)
           && !DECL_WEAK (exp))
    return som_one_only_data_section;
  else
    return data_section;
}

static void
pa_globalize_label (FILE *stream, const char *name)
{
  /* We only handle DATA objects here, functions are globalized in
     ASM_DECLARE_FUNCTION_NAME.  */
  if (! FUNCTION_NAME_P (name))
    {
      fputs ("\t.EXPORT ", stream);
      assemble_name (stream, name);
      fputs (",DATA\n", stream);
    }
}

/* Worker function for TARGET_STRUCT_VALUE_RTX.  */

static rtx
pa_struct_value_rtx (tree fntype ATTRIBUTE_UNUSED,
                     int incoming ATTRIBUTE_UNUSED)
{
  return gen_rtx_REG (Pmode, PA_STRUCT_VALUE_REGNUM);
}

/* Worker function for TARGET_RETURN_IN_MEMORY.  */

bool
pa_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
{
  /* SOM ABI says that objects larger than 64 bits are returned in memory.
     PA64 ABI says that objects larger than 128 bits are returned in memory.
     Note, int_size_in_bytes can return -1 if the size of the object is
     variable or larger than the maximum value that can be expressed as
     a HOST_WIDE_INT.  It can also return zero for an empty type.  The
     simplest way to handle variable and empty types is to pass them in
     memory.  This avoids problems in defining the boundaries of argument
     slots, allocating registers, etc.  */
  return (int_size_in_bytes (type) > (TARGET_64BIT ? 16 : 8)
          || int_size_in_bytes (type) <= 0);
}

/* Structure to hold declaration and name of external symbols that are
   emitted by GCC.  We generate a vector of these symbols and output them
   at the end of the file if and only if SYMBOL_REF_REFERENCED_P is true.
   This avoids putting out names that are never really used.  */

typedef struct GTY(()) extern_symbol
{
  tree decl;
  const char *name;
} extern_symbol;

/* Define gc'd vector type for extern_symbol.  */
DEF_VEC_O(extern_symbol);
DEF_VEC_ALLOC_O(extern_symbol,gc);

/* Vector of extern_symbol pointers.  */
static GTY(()) VEC(extern_symbol,gc) *extern_symbols;

#ifdef ASM_OUTPUT_EXTERNAL_REAL
/* Mark DECL (name NAME) as an external reference (assembler output
   file FILE).  This saves the names to output at the end of the file
   if actually referenced.  */

void
pa_hpux_asm_output_external (FILE *file, tree decl, const char *name)
{
  extern_symbol * p = VEC_safe_push (extern_symbol, gc, extern_symbols, NULL);

  gcc_assert (file == asm_out_file);
  p->decl = decl;
  p->name = name;
}

/* Output text required at the end of an assembler file.
   This includes deferred plabels and .import directives for
   all external symbols that were actually referenced.  */

static void
pa_hpux_file_end (void)
{
  unsigned int i;
  extern_symbol *p;

  if (!NO_DEFERRED_PROFILE_COUNTERS)
    output_deferred_profile_counters ();

  output_deferred_plabels ();

  for (i = 0; VEC_iterate (extern_symbol, extern_symbols, i, p); i++)
    {
      tree decl = p->decl;

      if (!TREE_ASM_WRITTEN (decl)
          && SYMBOL_REF_REFERENCED_P (XEXP (DECL_RTL (decl), 0)))
        ASM_OUTPUT_EXTERNAL_REAL (asm_out_file, decl, p->name);
    }

  VEC_free (extern_symbol, gc, extern_symbols);
}
#endif

/* Return true if a change from mode FROM to mode TO for a register
   in register class RCLASS is invalid.  */

bool
pa_cannot_change_mode_class (enum machine_mode from, enum machine_mode to,
                             enum reg_class rclass)
{
  if (from == to)
    return false;

  /* Reject changes to/from complex and vector modes.  */
  if (COMPLEX_MODE_P (from) || VECTOR_MODE_P (from)
      || COMPLEX_MODE_P (to) || VECTOR_MODE_P (to))
    return true;

  if (GET_MODE_SIZE (from) == GET_MODE_SIZE (to))
    return false;

  /* There is no way to load QImode or HImode values directly from
     memory.  SImode loads to the FP registers are not zero extended.
     On the 64-bit target, this conflicts with the definition of
     LOAD_EXTEND_OP.  Thus, we can't allow changing between modes
     with different sizes in the floating-point registers.  */
  if (MAYBE_FP_REG_CLASS_P (rclass))
    return true;

  /* HARD_REGNO_MODE_OK places modes with sizes larger than a word
     in specific sets of registers.  Thus, we cannot allow changing
     to a larger mode when it's larger than a word.  */
  if (GET_MODE_SIZE (to) > UNITS_PER_WORD
      && GET_MODE_SIZE (to) > GET_MODE_SIZE (from))
    return true;

  return false;
}
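
/* For example, an SFmode to DFmode change is rejected for any class
   that may contain FP registers since the sizes differ, while an
   SImode to SFmode change is always allowed because the two modes
   have the same size.  */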

/* Returns TRUE if it is a good idea to tie two pseudo registers
   when one has mode MODE1 and one has mode MODE2.
   If HARD_REGNO_MODE_OK could produce different values for MODE1 and MODE2,
   for any hard reg, then this must be FALSE for correct output.

   We should return FALSE for QImode and HImode because these modes
   are not ok in the floating-point registers.  However, this prevents
   tying these modes to SImode and DImode in the general registers.
   So, this isn't a good idea.  We rely on HARD_REGNO_MODE_OK and
   CANNOT_CHANGE_MODE_CLASS to prevent these modes from being used
   in the floating-point registers.  */

bool
pa_modes_tieable_p (enum machine_mode mode1, enum machine_mode mode2)
{
  /* Don't tie modes in different classes.  */
  if (GET_MODE_CLASS (mode1) != GET_MODE_CLASS (mode2))
    return false;

  return true;
}

/* Length in units of the trampoline instruction code.  */

#define TRAMPOLINE_CODE_SIZE (TARGET_64BIT ? 24 : (TARGET_PA_20 ? 32 : 40))

/* Output assembler code for a block containing the constant parts
   of a trampoline, leaving space for the variable parts.

   The trampoline sets the static chain pointer to STATIC_CHAIN_REGNUM
   and then branches to the specified routine.

   This code template is copied from text segment to stack location
   and then patched with pa_trampoline_init to contain valid values,
   and then entered as a subroutine.

   It is best to keep this as small as possible to avoid having to
   flush multiple lines in the cache.  */

static void
pa_asm_trampoline_template (FILE *f)
{
  if (!TARGET_64BIT)
    {
      fputs ("\tldw     36(%r22),%r21\n", f);
      fputs ("\tbb,>=,n %r21,30,.+16\n", f);
      if (ASSEMBLER_DIALECT == 0)
        fputs ("\tdepi  0,31,2,%r21\n", f);
      else
        fputs ("\tdepwi 0,31,2,%r21\n", f);
      fputs ("\tldw     4(%r21),%r19\n", f);
      fputs ("\tldw     0(%r21),%r21\n", f);
      if (TARGET_PA_20)
        {
          fputs ("\tbve (%r21)\n", f);
          fputs ("\tldw 40(%r22),%r29\n", f);
          fputs ("\t.word       0\n", f);
          fputs ("\t.word       0\n", f);
        }
      else
        {
          fputs ("\tldsid       (%r21),%r1\n", f);
          fputs ("\tmtsp        %r1,%sr0\n", f);
          fputs ("\tbe  0(%sr0,%r21)\n", f);
          fputs ("\tldw 40(%r22),%r29\n", f);
        }
      fputs ("\t.word   0\n", f);
      fputs ("\t.word   0\n", f);
      fputs ("\t.word   0\n", f);
      fputs ("\t.word   0\n", f);
    }
  else
    {
      fputs ("\t.dword 0\n", f);
      fputs ("\t.dword 0\n", f);
      fputs ("\t.dword 0\n", f);
      fputs ("\t.dword 0\n", f);
      fputs ("\tmfia    %r31\n", f);
      fputs ("\tldd     24(%r31),%r1\n", f);
      fputs ("\tldd     24(%r1),%r27\n", f);
      fputs ("\tldd     16(%r1),%r1\n", f);
      fputs ("\tbve     (%r1)\n", f);
      fputs ("\tldd     32(%r31),%r31\n", f);
      fputs ("\t.dword 0  ; fptr\n", f);
      fputs ("\t.dword 0  ; static link\n", f);
    }
}
10078
 
10079
/* Emit RTL insns to initialize the variable parts of a trampoline.
10080
   FNADDR is an RTX for the address of the function's pure code.
10081
   CXT is an RTX for the static chain value for the function.
10082
 
10083
   Move the function address to the trampoline template at offset 36.
10084
   Move the static chain value to trampoline template at offset 40.
10085
   Move the trampoline address to trampoline template at offset 44.
10086
   Move r19 to trampoline template at offset 48.  The latter two
10087
   words create a plabel for the indirect call to the trampoline.
10088
 
10089
   A similar sequence is used for the 64-bit port but the plabel is
10090
   at the beginning of the trampoline.
10091
 
10092
   Finally, the cache entries for the trampoline code are flushed.
10093
   This is necessary to ensure that the trampoline instruction sequence
10094
   is written to memory prior to any attempts at prefetching the code
10095
   sequence.  */
10096
 
10097
static void
pa_trampoline_init (rtx m_tramp, tree fndecl, rtx chain_value)
{
  rtx fnaddr = XEXP (DECL_RTL (fndecl), 0);
  rtx start_addr = gen_reg_rtx (Pmode);
  rtx end_addr = gen_reg_rtx (Pmode);
  rtx line_length = gen_reg_rtx (Pmode);
  rtx r_tramp, tmp;
 
  emit_block_move (m_tramp, assemble_trampoline_template (),
                   GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);
  r_tramp = force_reg (Pmode, XEXP (m_tramp, 0));
 
  if (!TARGET_64BIT)
    {
      tmp = adjust_address (m_tramp, Pmode, 36);
      emit_move_insn (tmp, fnaddr);
      tmp = adjust_address (m_tramp, Pmode, 40);
      emit_move_insn (tmp, chain_value);
 
      /* Create a fat pointer for the trampoline.  */
      tmp = adjust_address (m_tramp, Pmode, 44);
      emit_move_insn (tmp, r_tramp);
      tmp = adjust_address (m_tramp, Pmode, 48);
      emit_move_insn (tmp, gen_rtx_REG (Pmode, 19));
 
      /* fdc and fic only use registers for the address to flush;
         they do not accept integer displacements.  We align the
         start and end addresses to the beginning of their respective
         cache lines to minimize the number of lines flushed.  */
      emit_insn (gen_andsi3 (start_addr, r_tramp,
                             GEN_INT (-MIN_CACHELINE_SIZE)));
      tmp = force_reg (Pmode, plus_constant (r_tramp, TRAMPOLINE_CODE_SIZE-1));
      emit_insn (gen_andsi3 (end_addr, tmp,
                             GEN_INT (-MIN_CACHELINE_SIZE)));
      emit_move_insn (line_length, GEN_INT (MIN_CACHELINE_SIZE));
      emit_insn (gen_dcacheflushsi (start_addr, end_addr, line_length));
      emit_insn (gen_icacheflushsi (start_addr, end_addr, line_length,
                                    gen_reg_rtx (Pmode),
                                    gen_reg_rtx (Pmode)));
    }
  else
    {
      tmp = adjust_address (m_tramp, Pmode, 56);
      emit_move_insn (tmp, fnaddr);
      tmp = adjust_address (m_tramp, Pmode, 64);
      emit_move_insn (tmp, chain_value);
 
      /* Create a fat pointer for the trampoline.  */
      tmp = adjust_address (m_tramp, Pmode, 16);
      emit_move_insn (tmp, force_reg (Pmode, plus_constant (r_tramp, 32)));
      tmp = adjust_address (m_tramp, Pmode, 24);
      emit_move_insn (tmp, gen_rtx_REG (Pmode, 27));
 
      /* fdc and fic only use registers for the address to flush;
         they do not accept integer displacements.  We align the
         start and end addresses to the beginning of their respective
         cache lines to minimize the number of lines flushed.  */
      tmp = force_reg (Pmode, plus_constant (r_tramp, 32));
      emit_insn (gen_anddi3 (start_addr, tmp,
                             GEN_INT (-MIN_CACHELINE_SIZE)));
      tmp = force_reg (Pmode, plus_constant (tmp, TRAMPOLINE_CODE_SIZE - 1));
      emit_insn (gen_anddi3 (end_addr, tmp,
                             GEN_INT (-MIN_CACHELINE_SIZE)));
      emit_move_insn (line_length, GEN_INT (MIN_CACHELINE_SIZE));
      emit_insn (gen_dcacheflushdi (start_addr, end_addr, line_length));
      emit_insn (gen_icacheflushdi (start_addr, end_addr, line_length,
                                    gen_reg_rtx (Pmode),
                                    gen_reg_rtx (Pmode)));
    }
}
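 
/* Illustrative only: the usual trigger for this machinery is taking
   the address of a nested function, a GNU C extension:

       int
       outer (int x)
       {
         int inner (int y) { return x + y; }
         int (*fp) (int) = inner;
         return fp (1);
       }

   Because inner needs outer's frame, fp cannot point directly at
   inner's code; it points at a stack trampoline, built as above,
   that loads the static chain register and then jumps to inner.  */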
 
/* Perform any machine-specific adjustment in the address of the trampoline.
   ADDR contains the address that was passed to pa_trampoline_init.
   Adjust the trampoline address to point to the plabel at offset 44.
   The extra 2 added below sets bit 30 of the address, the plabel bit,
   so that an indirect call through the resulting pointer is handled
   as a plabel by $$dyncall.  */
 
static rtx
pa_trampoline_adjust_address (rtx addr)
{
  if (!TARGET_64BIT)
    addr = memory_address (Pmode, plus_constant (addr, 46));
  return addr;
}
 
/* Undo the PIC legitimization of a DLT-indirect reference so that
   later passes (debug output in particular) can identify the symbol
   actually being referenced.  */

static rtx
pa_delegitimize_address (rtx orig_x)
{
  rtx x = delegitimize_mem_from_attrs (orig_x);
 
  if (GET_CODE (x) == LO_SUM
      && GET_CODE (XEXP (x, 1)) == UNSPEC
      && XINT (XEXP (x, 1), 1) == UNSPEC_DLTIND14R)
    return gen_const_mem (Pmode, XVECEXP (XEXP (x, 1), 0, 0));
  return x;
}
 
static rtx
pa_internal_arg_pointer (void)
{
  /* The argument pointer and the hard frame pointer are the same in
     the 32-bit runtime, so we don't need a copy.  */
  if (TARGET_64BIT)
    return copy_to_reg (virtual_incoming_args_rtx);
  else
    return virtual_incoming_args_rtx;
}
 
/* Given FROM and TO register numbers, say whether this elimination is
   allowed.  Frame pointer elimination is automatically handled.  */

static bool
pa_can_eliminate (const int from, const int to)
{
  /* The argument pointer cannot be eliminated in the 64-bit runtime.  */
  if (TARGET_64BIT && from == ARG_POINTER_REGNUM)
    return false;
 
  return (from == HARD_FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM
          ? ! frame_pointer_needed
          : true);
}
 
/* Define the offset between two registers, FROM to be eliminated and its
   replacement TO, at the start of a routine.  */

HOST_WIDE_INT
pa_initial_elimination_offset (int from, int to)
{
  HOST_WIDE_INT offset;
 
  if ((from == HARD_FRAME_POINTER_REGNUM || from == FRAME_POINTER_REGNUM)
      && to == STACK_POINTER_REGNUM)
    offset = -pa_compute_frame_size (get_frame_size (), 0);
  else if (from == FRAME_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
    offset = 0;
  else
    gcc_unreachable ();
 
  return offset;
}
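 
/* Note that the offset for the frame pointer elimination is negative:
   the PA stack grows upward, so the frame pointer sits below the
   stack pointer by the full frame size.  */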
 
/* Implement TARGET_CONDITIONAL_REGISTER_USAGE.  */

static void
pa_conditional_register_usage (void)
{
  int i;
 
  if (!TARGET_64BIT && !TARGET_PA_11)
    {
      for (i = 56; i <= FP_REG_LAST; i++)
        fixed_regs[i] = call_used_regs[i] = 1;
      for (i = 33; i < 56; i += 2)
        fixed_regs[i] = call_used_regs[i] = 1;
    }
  if (TARGET_DISABLE_FPREGS || TARGET_SOFT_FLOAT)
    {
      for (i = FP_REG_FIRST; i <= FP_REG_LAST; i++)
        fixed_regs[i] = call_used_regs[i] = 1;
    }
  if (flag_pic)
    fixed_regs[PIC_OFFSET_TABLE_REGNUM] = 1;
}
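 
/* In the PA 1.0 case above, only %fr4-%fr15 are usable (%fr0-%fr3
   serve as status and exception registers), and the 32-bit halves of
   the floating-point registers are not independently addressable;
   hence the upper registers and the odd (right-half) register numbers
   are made fixed.  */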
 
/* Target hook for c_mode_for_suffix.  */

static enum machine_mode
pa_c_mode_for_suffix (char suffix)
{
  if (HPUX_LONG_DOUBLE_LIBRARY)
    {
      if (suffix == 'q')
        return TFmode;
    }
 
  return VOIDmode;
}
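 
/* In practice, when an HP-UX long double library is in use, this lets
   the C front end accept floating constants written with a 'q'
   suffix, e.g.

       long double pi = 3.14159265358979323846q;

   giving the constant TFmode, the 128-bit format used for long double
   on HP-UX.  */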
 
/* Target hook for function_section.  */

static section *
pa_function_section (tree decl, enum node_frequency freq,
                     bool startup, bool exit)
{
  /* Put functions in the text section if the target doesn't have
     named sections.  */
  if (!targetm_common.have_named_sections)
    return text_section;
 
  /* Force nested functions into the same section as the containing
     function.  */
  if (decl
      && DECL_SECTION_NAME (decl) == NULL_TREE
      && DECL_CONTEXT (decl) != NULL_TREE
      && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL
      && DECL_SECTION_NAME (DECL_CONTEXT (decl)) == NULL_TREE)
    return function_section (DECL_CONTEXT (decl));
 
  /* Otherwise, use the default function section.  */
  return default_function_section (decl, freq, startup, exit);
}
 
/* Implement TARGET_LEGITIMATE_CONSTANT_P.
 
   In 64-bit mode, we reject CONST_DOUBLEs.  We also reject CONST_INTs
   that need more than three instructions to load prior to reload.
   This limit is somewhat arbitrary.  It takes three instructions to
   load a CONST_INT from memory, but two of them are memory accesses.
   It may be better to increase the allowed range for CONST_INTs.  We
   may also be able to handle CONST_DOUBLEs.  */
 
static bool
pa_legitimate_constant_p (enum machine_mode mode, rtx x)
{
  if (GET_MODE_CLASS (mode) == MODE_FLOAT && x != CONST0_RTX (mode))
    return false;
 
  if (!NEW_HP_ASSEMBLER && !TARGET_GAS && GET_CODE (x) == LABEL_REF)
    return false;
 
  /* SYMBOL_REFs using the TLS_MODEL_GLOBAL_DYNAMIC and
     TLS_MODEL_LOCAL_DYNAMIC models are not legitimate constants.  */
  if (PA_SYMBOL_REF_TLS_P (x))
    {
      enum tls_model model = SYMBOL_REF_TLS_MODEL (x);
 
      if (model == TLS_MODEL_GLOBAL_DYNAMIC || model == TLS_MODEL_LOCAL_DYNAMIC)
        return false;
    }
 
  if (TARGET_64BIT && GET_CODE (x) == CONST_DOUBLE)
    return false;
 
  if (TARGET_64BIT
      && HOST_BITS_PER_WIDE_INT > 32
      && GET_CODE (x) == CONST_INT
      && !reload_in_progress
      && !reload_completed
      && !LEGITIMATE_64BIT_CONST_INT_P (INTVAL (x))
      && !pa_cint_ok_for_move (INTVAL (x)))
    return false;
 
  if (function_label_operand (x, mode))
    return false;
 
  return true;
}
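 
/* For instance, in 64-bit mode an arbitrary wide constant such as
   0x123456789abcdef1 satisfies neither LEGITIMATE_64BIT_CONST_INT_P
   nor pa_cint_ok_for_move, so it is rejected here before reload and
   is loaded from memory instead of being synthesized inline.  */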
10340
 
10341
#include "gt-pa.h"
