OpenCores
URL https://opencores.org/ocsvn/openrisc/openrisc/trunk

Subversion Repositories openrisc

[/] [openrisc/] [trunk/] [gnu-old/] [gcc-4.2.2/] [gcc/] [config/] [rs6000/] [rs6000.c] - Blame information for rev 816

Details | Compare with Previous | View Log

Line No. Rev Author Line
1 38 julius
/* Subroutines used for code generation on IBM RS/6000.
2
   Copyright (C) 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3
   2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
4
   Free Software Foundation, Inc.
5
   Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu)
6
 
7
   This file is part of GCC.
8
 
9
   GCC is free software; you can redistribute it and/or modify it
10
   under the terms of the GNU General Public License as published
11
   by the Free Software Foundation; either version 3, or (at your
12
   option) any later version.
13
 
14
   GCC is distributed in the hope that it will be useful, but WITHOUT
15
   ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
16
   or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public
17
   License for more details.
18
 
19
   You should have received a copy of the GNU General Public License
20
   along with GCC; see the file COPYING3.  If not see
21
   <http://www.gnu.org/licenses/>.  */
22
 
23
#include "config.h"
24
#include "system.h"
25
#include "coretypes.h"
26
#include "tm.h"
27
#include "rtl.h"
28
#include "regs.h"
29
#include "hard-reg-set.h"
30
#include "real.h"
31
#include "insn-config.h"
32
#include "conditions.h"
33
#include "insn-attr.h"
34
#include "flags.h"
35
#include "recog.h"
36
#include "obstack.h"
37
#include "tree.h"
38
#include "expr.h"
39
#include "optabs.h"
40
#include "except.h"
41
#include "function.h"
42
#include "output.h"
43
#include "basic-block.h"
44
#include "integrate.h"
45
#include "toplev.h"
46
#include "ggc.h"
47
#include "hashtab.h"
48
#include "tm_p.h"
49
#include "target.h"
50
#include "target-def.h"
51
#include "langhooks.h"
52
#include "reload.h"
53
#include "cfglayout.h"
54
#include "sched-int.h"
55
#include "tree-gimple.h"
56
#include "intl.h"
57
#include "params.h"
58
#include "tm-constrs.h"
59
#if TARGET_XCOFF
60
#include "xcoffout.h"  /* get declarations of xcoff_*_section_name */
61
#endif
62
#if TARGET_MACHO
63
#include "gstab.h"  /* for N_SLINE */
64
#endif
65
 
66
#ifndef TARGET_NO_PROTOTYPE
67
#define TARGET_NO_PROTOTYPE 0
68
#endif
69
 
70
/* Simple minimum/maximum helpers.  NOTE: classic function-like macros —
   each argument may be evaluated more than once, so never pass
   expressions with side effects.  */
#define min(A,B)        ((A) < (B) ? (A) : (B))
#define max(A,B)        ((A) > (B) ? (A) : (B))
72
 
73
/* Structure used to define the rs6000 stack.  Filled in by
   rs6000_stack_info (declared below); offsets are relative to the
   incoming stack pointer unless noted otherwise.  */
typedef struct rs6000_stack {
  int first_gp_reg_save;        /* first callee saved GP register used */
  int first_fp_reg_save;        /* first callee saved FP register used */
  int first_altivec_reg_save;   /* first callee saved AltiVec register used */
  int lr_save_p;                /* true if the link reg needs to be saved */
  int cr_save_p;                /* true if the CR reg needs to be saved */
  unsigned int vrsave_mask;     /* mask of vec registers to save */
  int push_p;                   /* true if we need to allocate stack space */
  int calls_p;                  /* true if the function makes any calls */
  int world_save_p;             /* true if we're saving *everything*:
                                   r13-r31, cr, f14-f31, vrsave, v20-v31  */
  enum rs6000_abi abi;          /* which ABI to use */
  int gp_save_offset;           /* offset to save GP regs from initial SP */
  int fp_save_offset;           /* offset to save FP regs from initial SP */
  int altivec_save_offset;      /* offset to save AltiVec regs from initial SP */
  int lr_save_offset;           /* offset to save LR from initial SP */
  int cr_save_offset;           /* offset to save CR from initial SP */
  int vrsave_save_offset;       /* offset to save VRSAVE from initial SP */
  int spe_gp_save_offset;       /* offset to save spe 64-bit gprs  */
  int varargs_save_offset;      /* offset to save the varargs registers */
  int ehrd_offset;              /* offset to EH return data */
  int reg_size;                 /* register size (4 or 8) */
  HOST_WIDE_INT vars_size;      /* variable save area size */
  int parm_size;                /* outgoing parameter size */
  int save_size;                /* save area size */
  int fixed_size;               /* fixed size of stack frame */
  int gp_size;                  /* size of saved GP registers */
  int fp_size;                  /* size of saved FP registers */
  int altivec_size;             /* size of saved AltiVec registers */
  int cr_size;                  /* size to hold CR if not in save_size */
  int vrsave_size;              /* size to hold VRSAVE if not in save_size */
  int altivec_padding_size;     /* size of altivec alignment padding if
                                   not in save_size */
  int spe_gp_size;              /* size of 64-bit GPR save size for SPE */
  int spe_padding_size;         /* alignment padding for the SPE GPR save
                                   area, by analogy with
                                   altivec_padding_size — TODO confirm in
                                   rs6000_stack_info */
  HOST_WIDE_INT total_size;     /* total bytes allocated for stack */
  int spe_64bit_regs_used;      /* nonzero if SPE 64-bit registers are in
                                   use (presumably set from
                                   spe_func_has_64bit_regs_p — verify) */
} rs6000_stack_t;
112
 
113
/* A C structure for machine-specific, per-function data.
   This is added to the cfun structure.  */
typedef struct machine_function GTY(())
{
  /* Flags if __builtin_return_address (n) with n >= 1 was used.  */
  int ra_needs_full_frame;
  /* Some local-dynamic symbol (cached; see
     rs6000_get_some_local_dynamic_name below).  */
  const char *some_ld_name;
  /* Whether the instruction chain has been scanned already.  */
  int insn_chain_scanned_p;
  /* Flags if __builtin_return_address (0) was used.  */
  int ra_need_lr;
  /* Offset from virtual_stack_vars_rtx to the start of the ABI_V4
     varargs save area.  */
  HOST_WIDE_INT varargs_save_offset;
} machine_function;
129
 
130
/* Target cpu type */

enum processor_type rs6000_cpu;

/* Option strings that select the CPU, and whether each one affects
   tuning, architecture, or both.  Entry 0 is the configure-time
   default; entries 1 and 2 correspond to -mcpu= and -mtune=.  */
struct rs6000_cpu_select rs6000_select[3] =
{
  /* switch             name,                   tune    arch */
  { (const char *)0,     "--with-cpu=",          1,      1 },
  { (const char *)0,     "-mcpu=",               1,      1 },
  { (const char *)0,     "-mtune=",              1,      0 },
};
140
 
141
/* Always emit branch hint bits.  */
static GTY(()) bool rs6000_always_hint;

/* Schedule instructions for group formation.  */
static GTY(()) bool rs6000_sched_groups;

/* Support for -msched-costly-dep option.  */
const char *rs6000_sched_costly_dep_str;
enum rs6000_dependence_cost rs6000_sched_costly_dep;

/* Support for -minsert-sched-nops option.  */
const char *rs6000_sched_insert_nops_str;
enum rs6000_nop_insertion rs6000_sched_insert_nops;

/* Support targetm.vectorize.builtin_mask_for_load.  */
static GTY(()) tree altivec_builtin_mask_for_load;

/* Size of long double.  */
int rs6000_long_double_type_size;

/* IEEE quad extended precision long double. */
int rs6000_ieeequad;

/* Whether -mabi=altivec has appeared.  */
int rs6000_altivec_abi;

/* Nonzero if we want SPE ABI extensions.  */
int rs6000_spe_abi;

/* Nonzero if floating point operations are done in the GPRs.  */
int rs6000_float_gprs = 0;

/* Nonzero if we want Darwin's struct-by-value-in-regs ABI.  */
int rs6000_darwin64_abi;

/* Set to nonzero once AIX common-mode calls have been defined.  */
static GTY(()) int common_mode_defined;

/* Save information from a "cmpxx" operation until the branch or scc is
   emitted.  */
rtx rs6000_compare_op0, rs6000_compare_op1;
int rs6000_compare_fp_p;

/* Label number of label created for -mrelocatable, to call to so we can
   get the address of the GOT section */
int rs6000_pic_labelno;

#ifdef USING_ELFOS_H
/* Which abi to adhere to */
const char *rs6000_abi_name;

/* Semantics of the small data area */
enum rs6000_sdata_type rs6000_sdata = SDATA_DATA;

/* Which small data model to use */
const char *rs6000_sdata_name = (char *)0;

/* Counter for labels which are to be placed in .fixup.  */
int fixuplabelno = 0;
#endif

/* Bit size of immediate TLS offsets and string from which it is decoded.  */
int rs6000_tls_size = 32;
const char *rs6000_tls_size_string;

/* ABI enumeration available for subtarget to use.  */
enum rs6000_abi rs6000_current_abi;

/* Whether to use variant of AIX ABI for PowerPC64 Linux.  */
int dot_symbols;

/* Debug flags */
const char *rs6000_debug_name;
int rs6000_debug_stack;         /* debug stack applications */
int rs6000_debug_arg;           /* debug argument handling */

/* Value is TRUE if register/mode pair is acceptable.  */
bool rs6000_hard_regno_mode_ok_p[NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];

/* Built in types.  */

tree rs6000_builtin_types[RS6000_BTI_MAX];
tree rs6000_builtin_decls[RS6000_BUILTIN_COUNT];

/* Traceback-table style selected for the current function (string form
   of the option kept separately in rs6000_traceback_name).  */
const char *rs6000_traceback_name;
static enum {
  traceback_default = 0,
  traceback_none,
  traceback_part,
  traceback_full
} rs6000_traceback;

/* Flag to say the TOC is initialized */
int toc_initialized;
char toc_label_name[10];

/* Sections used for output; set up lazily by the init-sections hooks
   below.  */
static GTY(()) section *read_only_data_section;
static GTY(()) section *private_data_section;
static GTY(()) section *read_only_private_data_section;
static GTY(()) section *sdata2_section;
static GTY(()) section *toc_section;

/* Control alignment for fields within structures.  */
/* String from -malign-XXXXX.  */
int rs6000_alignment_flags;

/* True for any options that were explicitly set.  */
struct {
  bool aix_struct_ret;          /* True if -maix-struct-ret was used.  */
  bool alignment;               /* True if -malign- was used.  */
  bool abi;                     /* True if -mabi=spe/nospe was used.  */
  bool spe;                     /* True if -mspe= was used.  */
  bool float_gprs;              /* True if -mfloat-gprs= was used.  */
  bool isel;                    /* True if -misel was used. */
  bool long_double;             /* True if -mlong-double- was used.  */
  bool ieee;                    /* True if -mabi=ieee/ibmlongdouble used.  */
} rs6000_explicit_options;
258
 
259
/* Describes one target builtin: the target-flag mask under which it is
   enabled, the insn that implements it, its user-visible name, and its
   rs6000 builtin code.  */
struct builtin_description
{
  /* mask is not const because we're going to alter it below.  This
     nonsense will go away when we rewrite the -march infrastructure
     to give us more target flag bits.  */
  unsigned int mask;
  const enum insn_code icode;
  const char *const name;
  const enum rs6000_builtins code;
};
269
 
270
/* Target cpu costs.  All entries are expressed with COSTS_N_INSNS,
   i.e. relative to the cost of a single add instruction.  */

struct processor_costs {
  const int mulsi;        /* cost of SImode multiplication.  */
  const int mulsi_const;  /* cost of SImode multiplication by constant.  */
  const int mulsi_const9; /* cost of SImode mult by short constant.  */
  const int muldi;        /* cost of DImode multiplication.  */
  const int divsi;        /* cost of SImode division.  */
  const int divdi;        /* cost of DImode division.  */
  const int fp;           /* cost of simple SFmode and DFmode insns.  */
  const int dmul;         /* cost of DFmode multiplication (and fmadd).  */
  const int sdiv;         /* cost of SFmode division (fdivs).  */
  const int ddiv;         /* cost of DFmode division (fdiv).  */
};

/* Cost table selected for the processor we are tuning for.  */
const struct processor_costs *rs6000_cost;
286
 
287
/* Processor costs (relative to an add).  One table per supported CPU;
   rs6000_cost is pointed at the appropriate entry when options are
   processed.  The two size*_cost tables make every operation cost one
   insn, for optimizing for size rather than speed.  */

/* Instruction size costs on 32bit processors.  */
static const
struct processor_costs size32_cost = {
  COSTS_N_INSNS (1),    /* mulsi */
  COSTS_N_INSNS (1),    /* mulsi_const */
  COSTS_N_INSNS (1),    /* mulsi_const9 */
  COSTS_N_INSNS (1),    /* muldi */
  COSTS_N_INSNS (1),    /* divsi */
  COSTS_N_INSNS (1),    /* divdi */
  COSTS_N_INSNS (1),    /* fp */
  COSTS_N_INSNS (1),    /* dmul */
  COSTS_N_INSNS (1),    /* sdiv */
  COSTS_N_INSNS (1),    /* ddiv */
};

/* Instruction size costs on 64bit processors.  */
static const
struct processor_costs size64_cost = {
  COSTS_N_INSNS (1),    /* mulsi */
  COSTS_N_INSNS (1),    /* mulsi_const */
  COSTS_N_INSNS (1),    /* mulsi_const9 */
  COSTS_N_INSNS (1),    /* muldi */
  COSTS_N_INSNS (1),    /* divsi */
  COSTS_N_INSNS (1),    /* divdi */
  COSTS_N_INSNS (1),    /* fp */
  COSTS_N_INSNS (1),    /* dmul */
  COSTS_N_INSNS (1),    /* sdiv */
  COSTS_N_INSNS (1),    /* ddiv */
};

/* Instruction costs on RIOS1 processors.  */
static const
struct processor_costs rios1_cost = {
  COSTS_N_INSNS (5),    /* mulsi */
  COSTS_N_INSNS (4),    /* mulsi_const */
  COSTS_N_INSNS (3),    /* mulsi_const9 */
  COSTS_N_INSNS (5),    /* muldi */
  COSTS_N_INSNS (19),   /* divsi */
  COSTS_N_INSNS (19),   /* divdi */
  COSTS_N_INSNS (2),    /* fp */
  COSTS_N_INSNS (2),    /* dmul */
  COSTS_N_INSNS (19),   /* sdiv */
  COSTS_N_INSNS (19),   /* ddiv */
};

/* Instruction costs on RIOS2 processors.  */
static const
struct processor_costs rios2_cost = {
  COSTS_N_INSNS (2),    /* mulsi */
  COSTS_N_INSNS (2),    /* mulsi_const */
  COSTS_N_INSNS (2),    /* mulsi_const9 */
  COSTS_N_INSNS (2),    /* muldi */
  COSTS_N_INSNS (13),   /* divsi */
  COSTS_N_INSNS (13),   /* divdi */
  COSTS_N_INSNS (2),    /* fp */
  COSTS_N_INSNS (2),    /* dmul */
  COSTS_N_INSNS (17),   /* sdiv */
  COSTS_N_INSNS (17),   /* ddiv */
};

/* Instruction costs on RS64A processors.  */
static const
struct processor_costs rs64a_cost = {
  COSTS_N_INSNS (20),   /* mulsi */
  COSTS_N_INSNS (12),   /* mulsi_const */
  COSTS_N_INSNS (8),    /* mulsi_const9 */
  COSTS_N_INSNS (34),   /* muldi */
  COSTS_N_INSNS (65),   /* divsi */
  COSTS_N_INSNS (67),   /* divdi */
  COSTS_N_INSNS (4),    /* fp */
  COSTS_N_INSNS (4),    /* dmul */
  COSTS_N_INSNS (31),   /* sdiv */
  COSTS_N_INSNS (31),   /* ddiv */
};

/* Instruction costs on MPCCORE processors.  */
static const
struct processor_costs mpccore_cost = {
  COSTS_N_INSNS (2),    /* mulsi */
  COSTS_N_INSNS (2),    /* mulsi_const */
  COSTS_N_INSNS (2),    /* mulsi_const9 */
  COSTS_N_INSNS (2),    /* muldi */
  COSTS_N_INSNS (6),    /* divsi */
  COSTS_N_INSNS (6),    /* divdi */
  COSTS_N_INSNS (4),    /* fp */
  COSTS_N_INSNS (5),    /* dmul */
  COSTS_N_INSNS (10),   /* sdiv */
  COSTS_N_INSNS (17),   /* ddiv */
};

/* Instruction costs on PPC403 processors.  */
static const
struct processor_costs ppc403_cost = {
  COSTS_N_INSNS (4),    /* mulsi */
  COSTS_N_INSNS (4),    /* mulsi_const */
  COSTS_N_INSNS (4),    /* mulsi_const9 */
  COSTS_N_INSNS (4),    /* muldi */
  COSTS_N_INSNS (33),   /* divsi */
  COSTS_N_INSNS (33),   /* divdi */
  COSTS_N_INSNS (11),   /* fp */
  COSTS_N_INSNS (11),   /* dmul */
  COSTS_N_INSNS (11),   /* sdiv */
  COSTS_N_INSNS (11),   /* ddiv */
};

/* Instruction costs on PPC405 processors.  */
static const
struct processor_costs ppc405_cost = {
  COSTS_N_INSNS (5),    /* mulsi */
  COSTS_N_INSNS (4),    /* mulsi_const */
  COSTS_N_INSNS (3),    /* mulsi_const9 */
  COSTS_N_INSNS (5),    /* muldi */
  COSTS_N_INSNS (35),   /* divsi */
  COSTS_N_INSNS (35),   /* divdi */
  COSTS_N_INSNS (11),   /* fp */
  COSTS_N_INSNS (11),   /* dmul */
  COSTS_N_INSNS (11),   /* sdiv */
  COSTS_N_INSNS (11),   /* ddiv */
};

/* Instruction costs on PPC440 processors.  */
static const
struct processor_costs ppc440_cost = {
  COSTS_N_INSNS (3),    /* mulsi */
  COSTS_N_INSNS (2),    /* mulsi_const */
  COSTS_N_INSNS (2),    /* mulsi_const9 */
  COSTS_N_INSNS (3),    /* muldi */
  COSTS_N_INSNS (34),   /* divsi */
  COSTS_N_INSNS (34),   /* divdi */
  COSTS_N_INSNS (5),    /* fp */
  COSTS_N_INSNS (5),    /* dmul */
  COSTS_N_INSNS (19),   /* sdiv */
  COSTS_N_INSNS (33),   /* ddiv */
};

/* Instruction costs on PPC601 processors.  */
static const
struct processor_costs ppc601_cost = {
  COSTS_N_INSNS (5),    /* mulsi */
  COSTS_N_INSNS (5),    /* mulsi_const */
  COSTS_N_INSNS (5),    /* mulsi_const9 */
  COSTS_N_INSNS (5),    /* muldi */
  COSTS_N_INSNS (36),   /* divsi */
  COSTS_N_INSNS (36),   /* divdi */
  COSTS_N_INSNS (4),    /* fp */
  COSTS_N_INSNS (5),    /* dmul */
  COSTS_N_INSNS (17),   /* sdiv */
  COSTS_N_INSNS (31),   /* ddiv */
};

/* Instruction costs on PPC603 processors.  */
static const
struct processor_costs ppc603_cost = {
  COSTS_N_INSNS (5),    /* mulsi */
  COSTS_N_INSNS (3),    /* mulsi_const */
  COSTS_N_INSNS (2),    /* mulsi_const9 */
  COSTS_N_INSNS (5),    /* muldi */
  COSTS_N_INSNS (37),   /* divsi */
  COSTS_N_INSNS (37),   /* divdi */
  COSTS_N_INSNS (3),    /* fp */
  COSTS_N_INSNS (4),    /* dmul */
  COSTS_N_INSNS (18),   /* sdiv */
  COSTS_N_INSNS (33),   /* ddiv */
};

/* Instruction costs on PPC604 processors.  */
static const
struct processor_costs ppc604_cost = {
  COSTS_N_INSNS (4),    /* mulsi */
  COSTS_N_INSNS (4),    /* mulsi_const */
  COSTS_N_INSNS (4),    /* mulsi_const9 */
  COSTS_N_INSNS (4),    /* muldi */
  COSTS_N_INSNS (20),   /* divsi */
  COSTS_N_INSNS (20),   /* divdi */
  COSTS_N_INSNS (3),    /* fp */
  COSTS_N_INSNS (3),    /* dmul */
  COSTS_N_INSNS (18),   /* sdiv */
  COSTS_N_INSNS (32),   /* ddiv */
};

/* Instruction costs on PPC604e processors.  */
static const
struct processor_costs ppc604e_cost = {
  COSTS_N_INSNS (2),    /* mulsi */
  COSTS_N_INSNS (2),    /* mulsi_const */
  COSTS_N_INSNS (2),    /* mulsi_const9 */
  COSTS_N_INSNS (2),    /* muldi */
  COSTS_N_INSNS (20),   /* divsi */
  COSTS_N_INSNS (20),   /* divdi */
  COSTS_N_INSNS (3),    /* fp */
  COSTS_N_INSNS (3),    /* dmul */
  COSTS_N_INSNS (18),   /* sdiv */
  COSTS_N_INSNS (32),   /* ddiv */
};

/* Instruction costs on PPC620 processors.  */
static const
struct processor_costs ppc620_cost = {
  COSTS_N_INSNS (5),    /* mulsi */
  COSTS_N_INSNS (4),    /* mulsi_const */
  COSTS_N_INSNS (3),    /* mulsi_const9 */
  COSTS_N_INSNS (7),    /* muldi */
  COSTS_N_INSNS (21),   /* divsi */
  COSTS_N_INSNS (37),   /* divdi */
  COSTS_N_INSNS (3),    /* fp */
  COSTS_N_INSNS (3),    /* dmul */
  COSTS_N_INSNS (18),   /* sdiv */
  COSTS_N_INSNS (32),   /* ddiv */
};

/* Instruction costs on PPC630 processors.  */
static const
struct processor_costs ppc630_cost = {
  COSTS_N_INSNS (5),    /* mulsi */
  COSTS_N_INSNS (4),    /* mulsi_const */
  COSTS_N_INSNS (3),    /* mulsi_const9 */
  COSTS_N_INSNS (7),    /* muldi */
  COSTS_N_INSNS (21),   /* divsi */
  COSTS_N_INSNS (37),   /* divdi */
  COSTS_N_INSNS (3),    /* fp */
  COSTS_N_INSNS (3),    /* dmul */
  COSTS_N_INSNS (17),   /* sdiv */
  COSTS_N_INSNS (21),   /* ddiv */
};

/* Instruction costs on PPC750 and PPC7400 processors.  */
static const
struct processor_costs ppc750_cost = {
  COSTS_N_INSNS (5),    /* mulsi */
  COSTS_N_INSNS (3),    /* mulsi_const */
  COSTS_N_INSNS (2),    /* mulsi_const9 */
  COSTS_N_INSNS (5),    /* muldi */
  COSTS_N_INSNS (17),   /* divsi */
  COSTS_N_INSNS (17),   /* divdi */
  COSTS_N_INSNS (3),    /* fp */
  COSTS_N_INSNS (3),    /* dmul */
  COSTS_N_INSNS (17),   /* sdiv */
  COSTS_N_INSNS (31),   /* ddiv */
};

/* Instruction costs on PPC7450 processors.  */
static const
struct processor_costs ppc7450_cost = {
  COSTS_N_INSNS (4),    /* mulsi */
  COSTS_N_INSNS (3),    /* mulsi_const */
  COSTS_N_INSNS (3),    /* mulsi_const9 */
  COSTS_N_INSNS (4),    /* muldi */
  COSTS_N_INSNS (23),   /* divsi */
  COSTS_N_INSNS (23),   /* divdi */
  COSTS_N_INSNS (5),    /* fp */
  COSTS_N_INSNS (5),    /* dmul */
  COSTS_N_INSNS (21),   /* sdiv */
  COSTS_N_INSNS (35),   /* ddiv */
};

/* Instruction costs on PPC8540 processors.  */
static const
struct processor_costs ppc8540_cost = {
  COSTS_N_INSNS (4),    /* mulsi */
  COSTS_N_INSNS (4),    /* mulsi_const */
  COSTS_N_INSNS (4),    /* mulsi_const9 */
  COSTS_N_INSNS (4),    /* muldi */
  COSTS_N_INSNS (19),   /* divsi */
  COSTS_N_INSNS (19),   /* divdi */
  COSTS_N_INSNS (4),    /* fp */
  COSTS_N_INSNS (4),    /* dmul */
  COSTS_N_INSNS (29),   /* sdiv */
  COSTS_N_INSNS (29),   /* ddiv */
};

/* Instruction costs on POWER4 and POWER5 processors.  */
static const
struct processor_costs power4_cost = {
  COSTS_N_INSNS (3),    /* mulsi */
  COSTS_N_INSNS (2),    /* mulsi_const */
  COSTS_N_INSNS (2),    /* mulsi_const9 */
  COSTS_N_INSNS (4),    /* muldi */
  COSTS_N_INSNS (18),   /* divsi */
  COSTS_N_INSNS (34),   /* divdi */
  COSTS_N_INSNS (3),    /* fp */
  COSTS_N_INSNS (3),    /* dmul */
  COSTS_N_INSNS (17),   /* sdiv */
  COSTS_N_INSNS (17),   /* ddiv */
};
573
 
574
 
575
static bool rs6000_function_ok_for_sibcall (tree, tree);
576
static const char *rs6000_invalid_within_doloop (rtx);
577
static rtx rs6000_generate_compare (enum rtx_code);
578
static void rs6000_maybe_dead (rtx);
579
static void rs6000_emit_stack_tie (void);
580
static void rs6000_frame_related (rtx, rtx, HOST_WIDE_INT, rtx, rtx);
581
static rtx spe_synthesize_frame_save (rtx);
582
static bool spe_func_has_64bit_regs_p (void);
583
static void emit_frame_save (rtx, rtx, enum machine_mode, unsigned int,
584
                             int, HOST_WIDE_INT);
585
static rtx gen_frame_mem_offset (enum machine_mode, rtx, int);
586
static void rs6000_emit_allocate_stack (HOST_WIDE_INT, int);
587
static unsigned rs6000_hash_constant (rtx);
588
static unsigned toc_hash_function (const void *);
589
static int toc_hash_eq (const void *, const void *);
590
static int constant_pool_expr_1 (rtx, int *, int *);
591
static bool constant_pool_expr_p (rtx);
592
static bool legitimate_small_data_p (enum machine_mode, rtx);
593
static bool legitimate_indexed_address_p (rtx, int);
594
static bool legitimate_lo_sum_address_p (enum machine_mode, rtx, int);
595
static struct machine_function * rs6000_init_machine_status (void);
596
static bool rs6000_assemble_integer (rtx, unsigned int, int);
597
static bool no_global_regs_above (int);
598
#ifdef HAVE_GAS_HIDDEN
599
static void rs6000_assemble_visibility (tree, int);
600
#endif
601
static int rs6000_ra_ever_killed (void);
602
static tree rs6000_handle_longcall_attribute (tree *, tree, tree, int, bool *);
603
static tree rs6000_handle_altivec_attribute (tree *, tree, tree, int, bool *);
604
static bool rs6000_ms_bitfield_layout_p (tree);
605
static tree rs6000_handle_struct_attribute (tree *, tree, tree, int, bool *);
606
static void rs6000_eliminate_indexed_memrefs (rtx operands[2]);
607
static const char *rs6000_mangle_fundamental_type (tree);
608
extern const struct attribute_spec rs6000_attribute_table[];
609
static void rs6000_set_default_type_attributes (tree);
610
static void rs6000_output_function_prologue (FILE *, HOST_WIDE_INT);
611
static void rs6000_output_function_epilogue (FILE *, HOST_WIDE_INT);
612
static void rs6000_output_mi_thunk (FILE *, tree, HOST_WIDE_INT, HOST_WIDE_INT,
613
                                    tree);
614
static rtx rs6000_emit_set_long_const (rtx, HOST_WIDE_INT, HOST_WIDE_INT);
615
static bool rs6000_return_in_memory (tree, tree);
616
static void rs6000_file_start (void);
617
#if TARGET_ELF
618
static int rs6000_elf_reloc_rw_mask (void);
619
static void rs6000_elf_asm_out_constructor (rtx, int);
620
static void rs6000_elf_asm_out_destructor (rtx, int);
621
static void rs6000_elf_end_indicate_exec_stack (void) ATTRIBUTE_UNUSED;
622
static void rs6000_elf_asm_init_sections (void);
623
static section *rs6000_elf_select_rtx_section (enum machine_mode, rtx,
624
                                               unsigned HOST_WIDE_INT);
625
static void rs6000_elf_encode_section_info (tree, rtx, int)
626
     ATTRIBUTE_UNUSED;
627
#endif
628
static bool rs6000_use_blocks_for_constant_p (enum machine_mode, rtx);
629
#if TARGET_XCOFF
630
static void rs6000_xcoff_asm_output_anchor (rtx);
631
static void rs6000_xcoff_asm_globalize_label (FILE *, const char *);
632
static void rs6000_xcoff_asm_init_sections (void);
633
static int rs6000_xcoff_reloc_rw_mask (void);
634
static void rs6000_xcoff_asm_named_section (const char *, unsigned int, tree);
635
static section *rs6000_xcoff_select_section (tree, int,
636
                                             unsigned HOST_WIDE_INT);
637
static void rs6000_xcoff_unique_section (tree, int);
638
static section *rs6000_xcoff_select_rtx_section
639
  (enum machine_mode, rtx, unsigned HOST_WIDE_INT);
640
static const char * rs6000_xcoff_strip_name_encoding (const char *);
641
static unsigned int rs6000_xcoff_section_type_flags (tree, const char *, int);
642
static void rs6000_xcoff_file_start (void);
643
static void rs6000_xcoff_file_end (void);
644
#endif
645
static int rs6000_variable_issue (FILE *, int, rtx, int);
646
static bool rs6000_rtx_costs (rtx, int, int, int *);
647
static int rs6000_adjust_cost (rtx, rtx, rtx, int);
648
static bool is_microcoded_insn (rtx);
649
static int is_dispatch_slot_restricted (rtx);
650
static bool is_cracked_insn (rtx);
651
static bool is_branch_slot_insn (rtx);
652
static int rs6000_adjust_priority (rtx, int);
653
static int rs6000_issue_rate (void);
654
static bool rs6000_is_costly_dependence (rtx, rtx, rtx, int, int);
655
static rtx get_next_active_insn (rtx, rtx);
656
static bool insn_terminates_group_p (rtx , enum group_termination);
657
static bool is_costly_group (rtx *, rtx);
658
static int force_new_group (int, FILE *, rtx *, rtx, bool *, int, int *);
659
static int redefine_groups (FILE *, int, rtx, rtx);
660
static int pad_groups (FILE *, int, rtx, rtx);
661
static void rs6000_sched_finish (FILE *, int);
662
static int rs6000_use_sched_lookahead (void);
663
static tree rs6000_builtin_mask_for_load (void);
664
 
665
static void def_builtin (int, const char *, tree, int);
666
static bool rs6000_vector_alignment_reachable (tree, bool);
667
static void rs6000_init_builtins (void);
668
static rtx rs6000_expand_unop_builtin (enum insn_code, tree, rtx);
669
static rtx rs6000_expand_binop_builtin (enum insn_code, tree, rtx);
670
static rtx rs6000_expand_ternop_builtin (enum insn_code, tree, rtx);
671
static rtx rs6000_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
672
static void altivec_init_builtins (void);
673
static void rs6000_common_init_builtins (void);
674
static void rs6000_init_libfuncs (void);
675
 
676
static void enable_mask_for_builtins (struct builtin_description *, int,
677
                                      enum rs6000_builtins,
678
                                      enum rs6000_builtins);
679
static tree build_opaque_vector_type (tree, int);
680
static void spe_init_builtins (void);
681
static rtx spe_expand_builtin (tree, rtx, bool *);
682
static rtx spe_expand_stv_builtin (enum insn_code, tree);
683
static rtx spe_expand_predicate_builtin (enum insn_code, tree, rtx);
684
static rtx spe_expand_evsel_builtin (enum insn_code, tree, rtx);
685
static int rs6000_emit_int_cmove (rtx, rtx, rtx, rtx);
686
static rs6000_stack_t *rs6000_stack_info (void);
687
static void debug_stack_info (rs6000_stack_t *);
688
 
689
static rtx altivec_expand_builtin (tree, rtx, bool *);
690
static rtx altivec_expand_ld_builtin (tree, rtx, bool *);
691
static rtx altivec_expand_st_builtin (tree, rtx, bool *);
692
static rtx altivec_expand_dst_builtin (tree, rtx, bool *);
693
static rtx altivec_expand_abs_builtin (enum insn_code, tree, rtx);
694
static rtx altivec_expand_predicate_builtin (enum insn_code,
695
                                             const char *, tree, rtx);
696
static rtx altivec_expand_lv_builtin (enum insn_code, tree, rtx);
697
static rtx altivec_expand_stv_builtin (enum insn_code, tree);
698
static rtx altivec_expand_vec_init_builtin (tree, tree, rtx);
699
static rtx altivec_expand_vec_set_builtin (tree);
700
static rtx altivec_expand_vec_ext_builtin (tree, rtx);
701
static int get_element_number (tree, tree);
702
static bool rs6000_handle_option (size_t, const char *, int);
703
static void rs6000_parse_tls_size_option (void);
704
static void rs6000_parse_yes_no_option (const char *, const char *, int *);
705
static int first_altivec_reg_to_save (void);
706
static unsigned int compute_vrsave_mask (void);
707
static void compute_save_world_info (rs6000_stack_t *info_ptr);
708
static void is_altivec_return_reg (rtx, void *);
709
static rtx generate_set_vrsave (rtx, rs6000_stack_t *, int);
710
int easy_vector_constant (rtx, enum machine_mode);
711
static bool rs6000_is_opaque_type (tree);
712
static rtx rs6000_dwarf_register_span (rtx);
713
static rtx rs6000_legitimize_tls_address (rtx, enum tls_model);
714
static void rs6000_output_dwarf_dtprel (FILE *, int, rtx) ATTRIBUTE_UNUSED;
715
static rtx rs6000_tls_get_addr (void);
716
static rtx rs6000_got_sym (void);
717
static int rs6000_tls_symbol_ref_1 (rtx *, void *);
718
static const char *rs6000_get_some_local_dynamic_name (void);
719
static int rs6000_get_some_local_dynamic_name_1 (rtx *, void *);
720
static rtx rs6000_complex_function_value (enum machine_mode);
721
static rtx rs6000_spe_function_arg (CUMULATIVE_ARGS *,
722
                                    enum machine_mode, tree);
723
static void rs6000_darwin64_record_arg_advance_flush (CUMULATIVE_ARGS *,
724
                                                      HOST_WIDE_INT);
725
static void rs6000_darwin64_record_arg_advance_recurse (CUMULATIVE_ARGS *,
726
                                                        tree, HOST_WIDE_INT);
727
static void rs6000_darwin64_record_arg_flush (CUMULATIVE_ARGS *,
728
                                              HOST_WIDE_INT,
729
                                              rtx[], int *);
730
static void rs6000_darwin64_record_arg_recurse (CUMULATIVE_ARGS *,
731
                                               tree, HOST_WIDE_INT,
732
                                               rtx[], int *);
733
static rtx rs6000_darwin64_record_arg (CUMULATIVE_ARGS *, tree, int, bool);
734
static rtx rs6000_mixed_function_arg (enum machine_mode, tree, int);
735
static void rs6000_move_block_from_reg (int regno, rtx x, int nregs);
736
static void setup_incoming_varargs (CUMULATIVE_ARGS *,
737
                                    enum machine_mode, tree,
738
                                    int *, int);
739
static bool rs6000_pass_by_reference (CUMULATIVE_ARGS *, enum machine_mode,
740
                                      tree, bool);
741
static int rs6000_arg_partial_bytes (CUMULATIVE_ARGS *, enum machine_mode,
742
                                     tree, bool);
743
static const char *invalid_arg_for_unprototyped_fn (tree, tree, tree);
744
#if TARGET_MACHO
745
static void macho_branch_islands (void);
746
static int no_previous_def (tree function_name);
747
static tree get_prev_label (tree function_name);
748
static void rs6000_darwin_file_start (void);
749
#endif
750
 
751
static tree rs6000_build_builtin_va_list (void);
752
static tree rs6000_gimplify_va_arg (tree, tree, tree *, tree *);
753
static bool rs6000_must_pass_in_stack (enum machine_mode, tree);
754
static bool rs6000_scalar_mode_supported_p (enum machine_mode);
755
static bool rs6000_vector_mode_supported_p (enum machine_mode);
756
static int get_vec_cmp_insn (enum rtx_code, enum machine_mode,
757
                             enum machine_mode);
758
static rtx rs6000_emit_vector_compare (enum rtx_code, rtx, rtx,
759
                                       enum machine_mode);
760
static int get_vsel_insn (enum machine_mode);
761
static void rs6000_emit_vector_select (rtx, rtx, rtx, rtx);
762
static tree rs6000_stack_protect_fail (void);
763
 
764
/* Sentinel value meaning "no insn is available".  */
const int INSN_NOT_AVAILABLE = -1;
static enum machine_mode rs6000_eh_return_filter_mode (void);
766
 
767
/* Hash table stuff for keeping track of TOC entries.  */

struct toc_hash_struct GTY(())
{
  /* `key' will satisfy CONSTANT_P; in fact, it will satisfy
     ASM_OUTPUT_SPECIAL_POOL_ENTRY_P.  */
  rtx key;
  /* Machine mode associated with KEY.  */
  enum machine_mode key_mode;
  /* Label number recorded for this TOC entry.  */
  int labelno;
};

/* The hash table itself, keyed by toc_hash_struct entries.  */
static GTY ((param_is (struct toc_hash_struct))) htab_t toc_hash_table;
779
 
780
/* Default register names, indexed by hard register number.  The groups
   below follow the rs6000 hard register layout: 32 GPRs, 32 FPRs, the
   special registers, 8 CR fields, XER, 32 AltiVec registers, the AltiVec
   status registers, the SPE registers, and the soft frame pointer.  */
char rs6000_reg_names[][8] =
{
      /* GPRs 0-31.  */
      "0",  "1",  "2",  "3",  "4",  "5",  "6",  "7",
      "8",  "9", "10", "11", "12", "13", "14", "15",
     "16", "17", "18", "19", "20", "21", "22", "23",
     "24", "25", "26", "27", "28", "29", "30", "31",
      /* FPRs 0-31.  */
      "0",  "1",  "2",  "3",  "4",  "5",  "6",  "7",
      "8",  "9", "10", "11", "12", "13", "14", "15",
     "16", "17", "18", "19", "20", "21", "22", "23",
     "24", "25", "26", "27", "28", "29", "30", "31",
      /* MQ, link register, count register, argument pointer.  */
     "mq", "lr", "ctr","ap",
      /* Condition register fields 0-7.  */
      "0",  "1",  "2",  "3",  "4",  "5",  "6",  "7",
      "xer",
      /* AltiVec registers.  */
      "0",  "1",  "2",  "3",  "4",  "5",  "6", "7",
      "8",  "9",  "10", "11", "12", "13", "14", "15",
      "16", "17", "18", "19", "20", "21", "22", "23",
      "24", "25", "26", "27", "28", "29", "30", "31",
      "vrsave", "vscr",
      /* SPE registers.  */
      "spe_acc", "spefscr",
      /* Soft frame pointer.  */
      "sfp"
};
805
 
806
#ifdef TARGET_REGNAMES
807
static const char alt_reg_names[][8] =
808
{
809
   "%r0",   "%r1",  "%r2",  "%r3",  "%r4",  "%r5",  "%r6",  "%r7",
810
   "%r8",   "%r9", "%r10", "%r11", "%r12", "%r13", "%r14", "%r15",
811
  "%r16",  "%r17", "%r18", "%r19", "%r20", "%r21", "%r22", "%r23",
812
  "%r24",  "%r25", "%r26", "%r27", "%r28", "%r29", "%r30", "%r31",
813
   "%f0",   "%f1",  "%f2",  "%f3",  "%f4",  "%f5",  "%f6",  "%f7",
814
   "%f8",   "%f9", "%f10", "%f11", "%f12", "%f13", "%f14", "%f15",
815
  "%f16",  "%f17", "%f18", "%f19", "%f20", "%f21", "%f22", "%f23",
816
  "%f24",  "%f25", "%f26", "%f27", "%f28", "%f29", "%f30", "%f31",
817
    "mq",    "lr",  "ctr",   "ap",
818
  "%cr0",  "%cr1", "%cr2", "%cr3", "%cr4", "%cr5", "%cr6", "%cr7",
819
   "xer",
820
  /* AltiVec registers.  */
821
   "%v0",  "%v1",  "%v2",  "%v3",  "%v4",  "%v5",  "%v6", "%v7",
822
   "%v8",  "%v9", "%v10", "%v11", "%v12", "%v13", "%v14", "%v15",
823
  "%v16", "%v17", "%v18", "%v19", "%v20", "%v21", "%v22", "%v23",
824
  "%v24", "%v25", "%v26", "%v27", "%v28", "%v29", "%v30", "%v31",
825
  "vrsave", "vscr",
826
  /* SPE registers.  */
827
  "spe_acc", "spefscr",
828
  /* Soft frame pointer.  */
829
  "sfp"
830
};
831
#endif
832
 
833
#ifndef MASK_STRICT_ALIGN
834
#define MASK_STRICT_ALIGN 0
835
#endif
836
#ifndef TARGET_PROFILE_KERNEL
837
#define TARGET_PROFILE_KERNEL 0
838
#endif
839
 
840
/* The VRSAVE bitmask puts bit %v0 as the most significant bit.  */
841
#define ALTIVEC_REG_BIT(REGNO) (0x80000000 >> ((REGNO) - FIRST_ALTIVEC_REGNO))
842
 
843
/* Initialize the GCC target structure.  */
844
#undef TARGET_ATTRIBUTE_TABLE
845
#define TARGET_ATTRIBUTE_TABLE rs6000_attribute_table
846
#undef TARGET_SET_DEFAULT_TYPE_ATTRIBUTES
847
#define TARGET_SET_DEFAULT_TYPE_ATTRIBUTES rs6000_set_default_type_attributes
848
 
849
#undef TARGET_ASM_ALIGNED_DI_OP
850
#define TARGET_ASM_ALIGNED_DI_OP DOUBLE_INT_ASM_OP
851
 
852
/* Default unaligned ops are only provided for ELF.  Find the ops needed
853
   for non-ELF systems.  */
854
#ifndef OBJECT_FORMAT_ELF
855
#if TARGET_XCOFF
856
/* For XCOFF.  rs6000_assemble_integer will handle unaligned DIs on
857
   64-bit targets.  */
858
#undef TARGET_ASM_UNALIGNED_HI_OP
859
#define TARGET_ASM_UNALIGNED_HI_OP "\t.vbyte\t2,"
860
#undef TARGET_ASM_UNALIGNED_SI_OP
861
#define TARGET_ASM_UNALIGNED_SI_OP "\t.vbyte\t4,"
862
#undef TARGET_ASM_UNALIGNED_DI_OP
863
#define TARGET_ASM_UNALIGNED_DI_OP "\t.vbyte\t8,"
864
#else
865
/* For Darwin.  */
866
#undef TARGET_ASM_UNALIGNED_HI_OP
867
#define TARGET_ASM_UNALIGNED_HI_OP "\t.short\t"
868
#undef TARGET_ASM_UNALIGNED_SI_OP
869
#define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
870
#undef TARGET_ASM_UNALIGNED_DI_OP
871
#define TARGET_ASM_UNALIGNED_DI_OP "\t.quad\t"
872
#undef TARGET_ASM_ALIGNED_DI_OP
873
#define TARGET_ASM_ALIGNED_DI_OP "\t.quad\t"
874
#endif
875
#endif
876
 
877
/* This hook deals with fixups for relocatable code and DI-mode objects
878
   in 64-bit code.  */
879
#undef TARGET_ASM_INTEGER
880
#define TARGET_ASM_INTEGER rs6000_assemble_integer
881
 
882
#ifdef HAVE_GAS_HIDDEN
883
#undef TARGET_ASM_ASSEMBLE_VISIBILITY
884
#define TARGET_ASM_ASSEMBLE_VISIBILITY rs6000_assemble_visibility
885
#endif
886
 
887
#undef TARGET_HAVE_TLS
888
#define TARGET_HAVE_TLS HAVE_AS_TLS
889
 
890
#undef TARGET_CANNOT_FORCE_CONST_MEM
891
#define TARGET_CANNOT_FORCE_CONST_MEM rs6000_tls_referenced_p
892
 
893
#undef TARGET_ASM_FUNCTION_PROLOGUE
894
#define TARGET_ASM_FUNCTION_PROLOGUE rs6000_output_function_prologue
895
#undef TARGET_ASM_FUNCTION_EPILOGUE
896
#define TARGET_ASM_FUNCTION_EPILOGUE rs6000_output_function_epilogue
897
 
898
#undef  TARGET_SCHED_VARIABLE_ISSUE
899
#define TARGET_SCHED_VARIABLE_ISSUE rs6000_variable_issue
900
 
901
#undef TARGET_SCHED_ISSUE_RATE
902
#define TARGET_SCHED_ISSUE_RATE rs6000_issue_rate
903
#undef TARGET_SCHED_ADJUST_COST
904
#define TARGET_SCHED_ADJUST_COST rs6000_adjust_cost
905
#undef TARGET_SCHED_ADJUST_PRIORITY
906
#define TARGET_SCHED_ADJUST_PRIORITY rs6000_adjust_priority
907
#undef TARGET_SCHED_IS_COSTLY_DEPENDENCE
908
#define TARGET_SCHED_IS_COSTLY_DEPENDENCE rs6000_is_costly_dependence
909
#undef TARGET_SCHED_FINISH
910
#define TARGET_SCHED_FINISH rs6000_sched_finish
911
 
912
#undef TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD
913
#define TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD rs6000_use_sched_lookahead
914
 
915
#undef TARGET_VECTORIZE_BUILTIN_MASK_FOR_LOAD
916
#define TARGET_VECTORIZE_BUILTIN_MASK_FOR_LOAD rs6000_builtin_mask_for_load
917
 
918
#undef TARGET_VECTOR_ALIGNMENT_REACHABLE
919
#define TARGET_VECTOR_ALIGNMENT_REACHABLE rs6000_vector_alignment_reachable
920
 
921
#undef TARGET_INIT_BUILTINS
922
#define TARGET_INIT_BUILTINS rs6000_init_builtins
923
 
924
#undef TARGET_EXPAND_BUILTIN
925
#define TARGET_EXPAND_BUILTIN rs6000_expand_builtin
926
 
927
#undef TARGET_MANGLE_FUNDAMENTAL_TYPE
928
#define TARGET_MANGLE_FUNDAMENTAL_TYPE rs6000_mangle_fundamental_type
929
 
930
#undef TARGET_INIT_LIBFUNCS
931
#define TARGET_INIT_LIBFUNCS rs6000_init_libfuncs
932
 
933
#if TARGET_MACHO
934
#undef TARGET_BINDS_LOCAL_P
935
#define TARGET_BINDS_LOCAL_P darwin_binds_local_p
936
#endif
937
 
938
#undef TARGET_MS_BITFIELD_LAYOUT_P
939
#define TARGET_MS_BITFIELD_LAYOUT_P rs6000_ms_bitfield_layout_p
940
 
941
#undef TARGET_ASM_OUTPUT_MI_THUNK
942
#define TARGET_ASM_OUTPUT_MI_THUNK rs6000_output_mi_thunk
943
 
944
#undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
945
#define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_tree_hwi_hwi_tree_true
946
 
947
#undef TARGET_FUNCTION_OK_FOR_SIBCALL
948
#define TARGET_FUNCTION_OK_FOR_SIBCALL rs6000_function_ok_for_sibcall
949
 
950
#undef TARGET_INVALID_WITHIN_DOLOOP
951
#define TARGET_INVALID_WITHIN_DOLOOP rs6000_invalid_within_doloop
952
 
953
#undef TARGET_RTX_COSTS
954
#define TARGET_RTX_COSTS rs6000_rtx_costs
955
#undef TARGET_ADDRESS_COST
956
#define TARGET_ADDRESS_COST hook_int_rtx_0
957
 
958
#undef TARGET_VECTOR_OPAQUE_P
959
#define TARGET_VECTOR_OPAQUE_P rs6000_is_opaque_type
960
 
961
#undef TARGET_DWARF_REGISTER_SPAN
962
#define TARGET_DWARF_REGISTER_SPAN rs6000_dwarf_register_span
963
 
964
/* On rs6000, function arguments are promoted, as are function return
965
   values.  */
966
#undef TARGET_PROMOTE_FUNCTION_ARGS
967
#define TARGET_PROMOTE_FUNCTION_ARGS hook_bool_tree_true
968
#undef TARGET_PROMOTE_FUNCTION_RETURN
969
#define TARGET_PROMOTE_FUNCTION_RETURN hook_bool_tree_true
970
 
971
#undef TARGET_RETURN_IN_MEMORY
972
#define TARGET_RETURN_IN_MEMORY rs6000_return_in_memory
973
 
974
#undef TARGET_SETUP_INCOMING_VARARGS
975
#define TARGET_SETUP_INCOMING_VARARGS setup_incoming_varargs
976
 
977
/* Always strict argument naming on rs6000.  */
978
#undef TARGET_STRICT_ARGUMENT_NAMING
979
#define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
980
#undef TARGET_PRETEND_OUTGOING_VARARGS_NAMED
981
#define TARGET_PRETEND_OUTGOING_VARARGS_NAMED hook_bool_CUMULATIVE_ARGS_true
982
#undef TARGET_SPLIT_COMPLEX_ARG
983
#define TARGET_SPLIT_COMPLEX_ARG hook_bool_tree_true
984
#undef TARGET_MUST_PASS_IN_STACK
985
#define TARGET_MUST_PASS_IN_STACK rs6000_must_pass_in_stack
986
#undef TARGET_PASS_BY_REFERENCE
987
#define TARGET_PASS_BY_REFERENCE rs6000_pass_by_reference
988
#undef TARGET_ARG_PARTIAL_BYTES
989
#define TARGET_ARG_PARTIAL_BYTES rs6000_arg_partial_bytes
990
 
991
#undef TARGET_BUILD_BUILTIN_VA_LIST
992
#define TARGET_BUILD_BUILTIN_VA_LIST rs6000_build_builtin_va_list
993
 
994
#undef TARGET_GIMPLIFY_VA_ARG_EXPR
995
#define TARGET_GIMPLIFY_VA_ARG_EXPR rs6000_gimplify_va_arg
996
 
997
#undef TARGET_EH_RETURN_FILTER_MODE
998
#define TARGET_EH_RETURN_FILTER_MODE rs6000_eh_return_filter_mode
999
 
1000
#undef TARGET_SCALAR_MODE_SUPPORTED_P
1001
#define TARGET_SCALAR_MODE_SUPPORTED_P rs6000_scalar_mode_supported_p
1002
 
1003
#undef TARGET_VECTOR_MODE_SUPPORTED_P
1004
#define TARGET_VECTOR_MODE_SUPPORTED_P rs6000_vector_mode_supported_p
1005
 
1006
#undef TARGET_INVALID_ARG_FOR_UNPROTOTYPED_FN
1007
#define TARGET_INVALID_ARG_FOR_UNPROTOTYPED_FN invalid_arg_for_unprototyped_fn
1008
 
1009
#undef TARGET_HANDLE_OPTION
1010
#define TARGET_HANDLE_OPTION rs6000_handle_option
1011
 
1012
#undef TARGET_DEFAULT_TARGET_FLAGS
1013
#define TARGET_DEFAULT_TARGET_FLAGS \
1014
  (TARGET_DEFAULT)
1015
 
1016
#undef TARGET_STACK_PROTECT_FAIL
1017
#define TARGET_STACK_PROTECT_FAIL rs6000_stack_protect_fail
1018
 
1019
/* MPC604EUM 3.5.2 Weak Consistency between Multiple Processors
1020
   The PowerPC architecture requires only weak consistency among
1021
   processors--that is, memory accesses between processors need not be
1022
   sequentially consistent and memory accesses among processors can occur
1023
   in any order. The ability to order memory accesses weakly provides
1024
   opportunities for more efficient use of the system bus. Unless a
1025
   dependency exists, the 604e allows read operations to precede store
1026
   operations.  */
1027
#undef TARGET_RELAXED_ORDERING
1028
#define TARGET_RELAXED_ORDERING true
1029
 
1030
#ifdef HAVE_AS_TLS
1031
#undef TARGET_ASM_OUTPUT_DWARF_DTPREL
1032
#define TARGET_ASM_OUTPUT_DWARF_DTPREL rs6000_output_dwarf_dtprel
1033
#endif
1034
 
1035
/* Use a 32-bit anchor range.  This leads to sequences like:
1036
 
1037
        addis   tmp,anchor,high
1038
        add     dest,tmp,low
1039
 
1040
   where tmp itself acts as an anchor, and can be shared between
1041
   accesses to the same 64k page.  */
1042
#undef TARGET_MIN_ANCHOR_OFFSET
1043
#define TARGET_MIN_ANCHOR_OFFSET -0x7fffffff - 1
1044
#undef TARGET_MAX_ANCHOR_OFFSET
1045
#define TARGET_MAX_ANCHOR_OFFSET 0x7fffffff
1046
#undef TARGET_USE_BLOCKS_FOR_CONSTANT_P
1047
#define TARGET_USE_BLOCKS_FOR_CONSTANT_P rs6000_use_blocks_for_constant_p
1048
 
1049
/* The single global target hook vector, assembled from the TARGET_*
   macro definitions earlier in this file.  */
struct gcc_target targetm = TARGET_INITIALIZER;
1050
 
1051
 
1052
/* Value is 1 if hard register REGNO can hold a value of machine-mode
   MODE.  */
static int
rs6000_hard_regno_mode_ok (int regno, enum machine_mode mode)
{
  /* The GPRs can hold any mode, but values bigger than one register
     cannot go past R31.  */
  if (INT_REGNO_P (regno))
    return INT_REGNO_P (regno + HARD_REGNO_NREGS (regno, mode) - 1);

  /* The float registers can only hold floating modes and DImode.
     This also excludes decimal float modes.  */
  if (FP_REGNO_P (regno))
    return
      (SCALAR_FLOAT_MODE_P (mode)
       && !DECIMAL_FLOAT_MODE_P (mode)
       && FP_REGNO_P (regno + HARD_REGNO_NREGS (regno, mode) - 1))
      || (GET_MODE_CLASS (mode) == MODE_INT
          && GET_MODE_SIZE (mode) == UNITS_PER_FP_WORD);

  /* The CR register can only hold CC modes.  */
  if (CR_REGNO_P (regno))
    return GET_MODE_CLASS (mode) == MODE_CC;

  /* The XER register only holds PSImode.  */
  if (XER_REGNO_P (regno))
    return mode == PSImode;

  /* AltiVec modes only in AltiVec registers.  */
  if (ALTIVEC_REGNO_P (regno))
    return ALTIVEC_VECTOR_MODE (mode);

  /* ...but GPRs can hold SIMD data on the SPE in one register.  */
  if (SPE_SIMD_REGNO_P (regno) && TARGET_SPE && SPE_VECTOR_MODE (mode))
    return 1;

  /* We cannot put TImode anywhere except general register and it must be
     able to fit within the register set.  */

  return GET_MODE_SIZE (mode) <= UNITS_PER_WORD;
}
1092
 
1093
/* Initialize rs6000_hard_regno_mode_ok_p table.  */
1094
static void
1095
rs6000_init_hard_regno_mode_ok (void)
1096
{
1097
  int r, m;
1098
 
1099
  for (r = 0; r < FIRST_PSEUDO_REGISTER; ++r)
1100
    for (m = 0; m < NUM_MACHINE_MODES; ++m)
1101
      if (rs6000_hard_regno_mode_ok (r, m))
1102
        rs6000_hard_regno_mode_ok_p[m][r] = true;
1103
}
1104
 
1105
/* If not otherwise specified by a target, make 'long double' equivalent to
1106
   'double'.  */
1107
 
1108
#ifndef RS6000_DEFAULT_LONG_DOUBLE_SIZE
1109
#define RS6000_DEFAULT_LONG_DOUBLE_SIZE 64
1110
#endif
1111
 
1112
/* Override command line options.  Mostly we process the processor
   type and sometimes adjust other TARGET_ options.

   DEFAULT_CPU is the configured default -mcpu= value; it seeds
   rs6000_select[0] below and may be NULL or empty.  */

void
rs6000_override_options (const char *default_cpu)
{
  size_t i, j;
  struct rs6000_cpu_select *ptr;
  int set_masks;

  /* Simplifications for entries below.  */

  enum {
    POWERPC_BASE_MASK = MASK_POWERPC | MASK_NEW_MNEMONICS,
    POWERPC_7400_MASK = POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_ALTIVEC
  };

  /* This table occasionally claims that a processor does not support
     a particular feature even though it does, but the feature is slower
     than the alternative.  Thus, it shouldn't be relied on as a
     complete description of the processor's support.

     Please keep this list in order, and don't forget to update the
     documentation in invoke.texi when adding a new processor or
     flag.  */
  static struct ptt
    {
      const char *const name;           /* Canonical processor name.  */
      const enum processor_type processor; /* Processor type enum value.  */
      const int target_enable;  /* Target flags to enable.  */
    } const processor_target_table[]
      = {{"401", PROCESSOR_PPC403, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
         {"403", PROCESSOR_PPC403,
          POWERPC_BASE_MASK | MASK_SOFT_FLOAT | MASK_STRICT_ALIGN},
         {"405", PROCESSOR_PPC405,
          POWERPC_BASE_MASK | MASK_SOFT_FLOAT | MASK_MULHW | MASK_DLMZB},
         {"405fp", PROCESSOR_PPC405,
          POWERPC_BASE_MASK | MASK_MULHW | MASK_DLMZB},
         {"440", PROCESSOR_PPC440,
          POWERPC_BASE_MASK | MASK_SOFT_FLOAT | MASK_MULHW | MASK_DLMZB},
         {"440fp", PROCESSOR_PPC440,
          POWERPC_BASE_MASK | MASK_MULHW | MASK_DLMZB},
         {"505", PROCESSOR_MPCCORE, POWERPC_BASE_MASK},
         {"601", PROCESSOR_PPC601,
          MASK_POWER | POWERPC_BASE_MASK | MASK_MULTIPLE | MASK_STRING},
         {"602", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
         {"603", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
         {"603e", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
         {"604", PROCESSOR_PPC604, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
         {"604e", PROCESSOR_PPC604e, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
         {"620", PROCESSOR_PPC620,
          POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
         {"630", PROCESSOR_PPC630,
          POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
         {"740", PROCESSOR_PPC750, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
         {"7400", PROCESSOR_PPC7400, POWERPC_7400_MASK},
         {"7450", PROCESSOR_PPC7450, POWERPC_7400_MASK},
         {"750", PROCESSOR_PPC750, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
         {"801", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
         {"821", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
         {"823", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
         {"8540", PROCESSOR_PPC8540,
          POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_STRICT_ALIGN},
         /* 8548 has a dummy entry for now.  */
         {"8548", PROCESSOR_PPC8540,
          POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_STRICT_ALIGN},
         {"860", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
         {"970", PROCESSOR_POWER4,
          POWERPC_7400_MASK | MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64},
         {"common", PROCESSOR_COMMON, MASK_NEW_MNEMONICS},
         {"ec603e", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
         {"G3", PROCESSOR_PPC750, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
         {"G4",  PROCESSOR_PPC7450, POWERPC_7400_MASK},
         {"G5", PROCESSOR_POWER4,
          POWERPC_7400_MASK | MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64},
         {"power", PROCESSOR_POWER, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
         {"power2", PROCESSOR_POWER,
          MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING},
         {"power3", PROCESSOR_PPC630,
          POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
         {"power4", PROCESSOR_POWER4,
          POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_MFCRF | MASK_POWERPC64},
         {"power5", PROCESSOR_POWER5,
          POWERPC_BASE_MASK | MASK_POWERPC64 | MASK_PPC_GFXOPT
          | MASK_MFCRF | MASK_POPCNTB},
         {"power5+", PROCESSOR_POWER5,
          POWERPC_BASE_MASK | MASK_POWERPC64 | MASK_PPC_GFXOPT
          | MASK_MFCRF | MASK_POPCNTB | MASK_FPRND},
         {"power6", PROCESSOR_POWER5,
          POWERPC_7400_MASK | MASK_POWERPC64 | MASK_MFCRF | MASK_POPCNTB
          | MASK_FPRND},
         {"powerpc", PROCESSOR_POWERPC, POWERPC_BASE_MASK},
         {"powerpc64", PROCESSOR_POWERPC64,
          POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
         {"rios", PROCESSOR_RIOS1, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
         {"rios1", PROCESSOR_RIOS1, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
         {"rios2", PROCESSOR_RIOS2,
          MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING},
         {"rsc", PROCESSOR_PPC601, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
         {"rsc1", PROCESSOR_PPC601, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
         {"rs64", PROCESSOR_RS64A,
          POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64}
      };

  const size_t ptt_size = ARRAY_SIZE (processor_target_table);

  /* Some OSs don't support saving the high part of 64-bit registers on
     context switch.  Other OSs don't support saving Altivec registers.
     On those OSs, we don't touch the MASK_POWERPC64 or MASK_ALTIVEC
     settings; if the user wants either, the user must explicitly specify
     them and we won't interfere with the user's specification.  */

  enum {
    POWER_MASKS = MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING,
    POWERPC_MASKS = (POWERPC_BASE_MASK | MASK_PPC_GPOPT | MASK_STRICT_ALIGN
                     | MASK_PPC_GFXOPT | MASK_POWERPC64 | MASK_ALTIVEC
                     | MASK_MFCRF | MASK_POPCNTB | MASK_FPRND | MASK_MULHW
                     | MASK_DLMZB)
  };

  rs6000_init_hard_regno_mode_ok ();

  set_masks = POWER_MASKS | POWERPC_MASKS | MASK_SOFT_FLOAT;
#ifdef OS_MISSING_POWERPC64
  if (OS_MISSING_POWERPC64)
    set_masks &= ~MASK_POWERPC64;
#endif
#ifdef OS_MISSING_ALTIVEC
  if (OS_MISSING_ALTIVEC)
    set_masks &= ~MASK_ALTIVEC;
#endif

  /* Don't override by the processor default if given explicitly.  */
  set_masks &= ~target_flags_explicit;

  /* Identify the processor type.  */
  rs6000_select[0].string = default_cpu;
  rs6000_cpu = TARGET_POWERPC64 ? PROCESSOR_DEFAULT64 : PROCESSOR_DEFAULT;

  /* Walk the -mcpu=/-mtune= selections; each matching table entry may
     set the tuning cpu and/or rewrite the arch-related target flags
     (restricted to SET_MASKS so explicit user flags survive).  */
  for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
    {
      ptr = &rs6000_select[i];
      if (ptr->string != (char *)0 && ptr->string[0] != '\0')
        {
          for (j = 0; j < ptt_size; j++)
            if (! strcmp (ptr->string, processor_target_table[j].name))
              {
                if (ptr->set_tune_p)
                  rs6000_cpu = processor_target_table[j].processor;

                if (ptr->set_arch_p)
                  {
                    target_flags &= ~set_masks;
                    target_flags |= (processor_target_table[j].target_enable
                                     & set_masks);
                  }
                break;
              }

          if (j == ptt_size)
            error ("bad value (%s) for %s switch", ptr->string, ptr->name);
        }
    }

  /* Enable ISEL by default on E500 cores.  */
  if (TARGET_E500)
    rs6000_isel = 1;

  /* If we are optimizing big endian systems for space, use the load/store
     multiple and string instructions.  */
  if (BYTES_BIG_ENDIAN && optimize_size)
    target_flags |= ~target_flags_explicit & (MASK_MULTIPLE | MASK_STRING);

  /* Don't allow -mmultiple or -mstring on little endian systems
     unless the cpu is a 750, because the hardware doesn't support the
     instructions used in little endian mode, and causes an alignment
     trap.  The 750 does not cause an alignment trap (except when the
     target is unaligned).  */

  if (!BYTES_BIG_ENDIAN && rs6000_cpu != PROCESSOR_PPC750)
    {
      if (TARGET_MULTIPLE)
        {
          target_flags &= ~MASK_MULTIPLE;
          /* Only warn if the user asked for it explicitly.  */
          if ((target_flags_explicit & MASK_MULTIPLE) != 0)
            warning (0, "-mmultiple is not supported on little endian systems");
        }

      if (TARGET_STRING)
        {
          target_flags &= ~MASK_STRING;
          if ((target_flags_explicit & MASK_STRING) != 0)
            warning (0, "-mstring is not supported on little endian systems");
        }
    }

  /* Set debug flags from the -mdebug= string.  */
  if (rs6000_debug_name)
    {
      if (! strcmp (rs6000_debug_name, "all"))
        rs6000_debug_stack = rs6000_debug_arg = 1;
      else if (! strcmp (rs6000_debug_name, "stack"))
        rs6000_debug_stack = 1;
      else if (! strcmp (rs6000_debug_name, "arg"))
        rs6000_debug_arg = 1;
      else
        error ("unknown -mdebug-%s switch", rs6000_debug_name);
    }

  /* Parse -mtraceback=; prefixes are matched, so "partial" matches
     "part" and "none" matches "no".  */
  if (rs6000_traceback_name)
    {
      if (! strncmp (rs6000_traceback_name, "full", 4))
        rs6000_traceback = traceback_full;
      else if (! strncmp (rs6000_traceback_name, "part", 4))
        rs6000_traceback = traceback_part;
      else if (! strncmp (rs6000_traceback_name, "no", 2))
        rs6000_traceback = traceback_none;
      else
        error ("unknown -mtraceback arg %qs; expecting %<full%>, %<partial%> or %<none%>",
               rs6000_traceback_name);
    }

  if (!rs6000_explicit_options.long_double)
    rs6000_long_double_type_size = RS6000_DEFAULT_LONG_DOUBLE_SIZE;

#ifndef POWERPC_LINUX
  if (!rs6000_explicit_options.ieee)
    rs6000_ieeequad = 1;
#endif

  /* Set Altivec ABI as default for powerpc64 linux.  */
  if (TARGET_ELF && TARGET_64BIT)
    {
      rs6000_altivec_abi = 1;
      TARGET_ALTIVEC_VRSAVE = 1;
    }

  /* Set the Darwin64 ABI as default for 64-bit Darwin.  */
  if (DEFAULT_ABI == ABI_DARWIN && TARGET_64BIT)
    {
      rs6000_darwin64_abi = 1;
#if TARGET_MACHO
      darwin_one_byte_bool = 1;
#endif
      /* Default to natural alignment, for better performance.  */
      rs6000_alignment_flags = MASK_ALIGN_NATURAL;
    }

  /* Place FP constants in the constant pool instead of TOC
     if section anchors enabled.  */
  if (flag_section_anchors)
    TARGET_NO_FP_IN_TOC = 1;

  /* Handle -mtls-size option.  */
  rs6000_parse_tls_size_option ();

#ifdef SUBTARGET_OVERRIDE_OPTIONS
  SUBTARGET_OVERRIDE_OPTIONS;
#endif
#ifdef SUBSUBTARGET_OVERRIDE_OPTIONS
  SUBSUBTARGET_OVERRIDE_OPTIONS;
#endif
#ifdef SUB3TARGET_OVERRIDE_OPTIONS
  SUB3TARGET_OVERRIDE_OPTIONS;
#endif

  if (TARGET_E500)
    {
      if (TARGET_ALTIVEC)
        error ("AltiVec and E500 instructions cannot coexist");

      /* The e500 does not have string instructions, and we set
         MASK_STRING above when optimizing for size.  */
      if ((target_flags & MASK_STRING) != 0)
        target_flags = target_flags & ~MASK_STRING;
    }
  else if (rs6000_select[1].string != NULL)
    {
      /* For the powerpc-eabispe configuration, we set all these by
         default, so let's unset them if we manually set another
         CPU that is not the E500.  */
      if (!rs6000_explicit_options.abi)
        rs6000_spe_abi = 0;
      if (!rs6000_explicit_options.spe)
        rs6000_spe = 0;
      if (!rs6000_explicit_options.float_gprs)
        rs6000_float_gprs = 0;
      if (!rs6000_explicit_options.isel)
        rs6000_isel = 0;
      if (!rs6000_explicit_options.long_double)
        rs6000_long_double_type_size = RS6000_DEFAULT_LONG_DOUBLE_SIZE;
    }

  /* Scheduling behavior depends on whether we are tuning for a
     dispatch-group machine (POWER4/POWER5).  */
  rs6000_always_hint = (rs6000_cpu != PROCESSOR_POWER4
                        && rs6000_cpu != PROCESSOR_POWER5);
  rs6000_sched_groups = (rs6000_cpu == PROCESSOR_POWER4
                         || rs6000_cpu == PROCESSOR_POWER5);

  rs6000_sched_restricted_insns_priority
    = (rs6000_sched_groups ? 1 : 0);

  /* Handle -msched-costly-dep option.  */
  rs6000_sched_costly_dep
    = (rs6000_sched_groups ? store_to_load_dep_costly : no_dep_costly);

  if (rs6000_sched_costly_dep_str)
    {
      if (! strcmp (rs6000_sched_costly_dep_str, "no"))
        rs6000_sched_costly_dep = no_dep_costly;
      else if (! strcmp (rs6000_sched_costly_dep_str, "all"))
        rs6000_sched_costly_dep = all_deps_costly;
      else if (! strcmp (rs6000_sched_costly_dep_str, "true_store_to_load"))
        rs6000_sched_costly_dep = true_store_to_load_dep_costly;
      else if (! strcmp (rs6000_sched_costly_dep_str, "store_to_load"))
        rs6000_sched_costly_dep = store_to_load_dep_costly;
      else
        /* Any other string is treated as a numeric latency threshold.  */
        rs6000_sched_costly_dep = atoi (rs6000_sched_costly_dep_str);
    }

  /* Handle -minsert-sched-nops option.  */
  rs6000_sched_insert_nops
    = (rs6000_sched_groups ? sched_finish_regroup_exact : sched_finish_none);

  if (rs6000_sched_insert_nops_str)
    {
      if (! strcmp (rs6000_sched_insert_nops_str, "no"))
        rs6000_sched_insert_nops = sched_finish_none;
      else if (! strcmp (rs6000_sched_insert_nops_str, "pad"))
        rs6000_sched_insert_nops = sched_finish_pad_groups;
      else if (! strcmp (rs6000_sched_insert_nops_str, "regroup_exact"))
        rs6000_sched_insert_nops = sched_finish_regroup_exact;
      else
        /* Any other string is treated as a numeric scheme number.  */
        rs6000_sched_insert_nops = atoi (rs6000_sched_insert_nops_str);
    }

#ifdef TARGET_REGNAMES
  /* If the user desires alternate register names, copy in the
     alternate names now.  */
  if (TARGET_REGNAMES)
    memcpy (rs6000_reg_names, alt_reg_names, sizeof (rs6000_reg_names));
#endif

  /* Set aix_struct_return last, after the ABI is determined.
     If -maix-struct-return or -msvr4-struct-return was explicitly
     used, don't override with the ABI default.  */
  if (!rs6000_explicit_options.aix_struct_ret)
    aix_struct_return = (DEFAULT_ABI != ABI_V4 || DRAFT_V4_STRUCT_RET);

  if (TARGET_LONG_DOUBLE_128 && !TARGET_IEEEQUAD)
    REAL_MODE_FORMAT (TFmode) = &ibm_extended_format;

  if (TARGET_TOC)
    ASM_GENERATE_INTERNAL_LABEL (toc_label_name, "LCTOC", 1);

  /* We can only guarantee the availability of DI pseudo-ops when
     assembling for 64-bit targets.  */
  if (!TARGET_64BIT)
    {
      targetm.asm_out.aligned_op.di = NULL;
      targetm.asm_out.unaligned_op.di = NULL;
    }

  /* Set branch target alignment, if not optimizing for size.  */
  if (!optimize_size)
    {
      if (rs6000_sched_groups)
        {
          if (align_functions <= 0)
            align_functions = 16;
          if (align_jumps <= 0)
            align_jumps = 16;
          if (align_loops <= 0)
            align_loops = 16;
        }
      if (align_jumps_max_skip <= 0)
        align_jumps_max_skip = 15;
      if (align_loops_max_skip <= 0)
        align_loops_max_skip = 15;
    }

  /* Arrange to save and restore machine status around nested functions.  */
  init_machine_status = rs6000_init_machine_status;

  /* We should always be splitting complex arguments, but we can't break
     Linux and Darwin ABIs at the moment.  For now, only AIX is fixed.  */
  if (DEFAULT_ABI != ABI_AIX)
    targetm.calls.split_complex_arg = NULL;

  /* Initialize rs6000_cost with the appropriate target costs.  */
  if (optimize_size)
    rs6000_cost = TARGET_POWERPC64 ? &size64_cost : &size32_cost;
  else
    switch (rs6000_cpu)
      {
      case PROCESSOR_RIOS1:
        rs6000_cost = &rios1_cost;
        break;

      case PROCESSOR_RIOS2:
        rs6000_cost = &rios2_cost;
        break;

      case PROCESSOR_RS64A:
        rs6000_cost = &rs64a_cost;
        break;

      case PROCESSOR_MPCCORE:
        rs6000_cost = &mpccore_cost;
        break;

      case PROCESSOR_PPC403:
        rs6000_cost = &ppc403_cost;
        break;

      case PROCESSOR_PPC405:
        rs6000_cost = &ppc405_cost;
        break;

      case PROCESSOR_PPC440:
        rs6000_cost = &ppc440_cost;
        break;

      case PROCESSOR_PPC601:
        rs6000_cost = &ppc601_cost;
        break;

      case PROCESSOR_PPC603:
        rs6000_cost = &ppc603_cost;
        break;

      case PROCESSOR_PPC604:
        rs6000_cost = &ppc604_cost;
        break;

      case PROCESSOR_PPC604e:
        rs6000_cost = &ppc604e_cost;
        break;

      case PROCESSOR_PPC620:
        rs6000_cost = &ppc620_cost;
        break;

      case PROCESSOR_PPC630:
        rs6000_cost = &ppc630_cost;
        break;

      case PROCESSOR_PPC750:
      case PROCESSOR_PPC7400:
        rs6000_cost = &ppc750_cost;
        break;

      case PROCESSOR_PPC7450:
        rs6000_cost = &ppc7450_cost;
        break;

      case PROCESSOR_PPC8540:
        rs6000_cost = &ppc8540_cost;
        break;

      case PROCESSOR_POWER4:
      case PROCESSOR_POWER5:
        rs6000_cost = &power4_cost;
        break;

      default:
        gcc_unreachable ();
      }
}
1579
 
1580
/* Implement targetm.vectorize.builtin_mask_for_load.  */
1581
static tree
1582
rs6000_builtin_mask_for_load (void)
1583
{
1584
  if (TARGET_ALTIVEC)
1585
    return altivec_builtin_mask_for_load;
1586
  else
1587
    return 0;
1588
}
1589
 
1590
 
1591
/* Return true iff, data reference of TYPE can reach vector alignment (16)
1592
   after applying N number of iterations.  This routine does not determine
1593
   how may iterations are required to reach desired alignment.  */
1594
 
1595
static bool
1596
rs6000_vector_alignment_reachable (tree type ATTRIBUTE_UNUSED, bool is_packed)
1597
{
1598
  if (is_packed)
1599
    return false;
1600
 
1601
  if (TARGET_32BIT)
1602
    {
1603
      if (rs6000_alignment_flags == MASK_ALIGN_NATURAL)
1604
        return true;
1605
 
1606
      if (rs6000_alignment_flags ==  MASK_ALIGN_POWER)
1607
        return true;
1608
 
1609
      return false;
1610
    }
1611
  else
1612
    {
1613
      if (TARGET_MACHO)
1614
        return false;
1615
 
1616
      /* Assuming that all other types are naturally aligned. CHECKME!  */
1617
      return true;
1618
    }
1619
}
1620
 
1621
/* Handle generic options of the form -mfoo=yes/no.
   NAME is the option name.
   VALUE is the option value.
   FLAG is the pointer to the flag where to store a 1 or 0, depending on
   whether the option value is 'yes' or 'no' respectively.  */
static void
rs6000_parse_yes_no_option (const char *name, const char *value, int *flag)
{
  /* A NULL value means the option was not given at all.  */
  if (value == 0)
    return;

  if (strcmp (value, "yes") == 0)
    *flag = 1;
  else if (strcmp (value, "no") == 0)
    *flag = 0;
  else
    error ("unknown -m%s= option specified: '%s'", name, value);
}
1638
 
1639
/* Validate and record the size specified with the -mtls-size option.  */
1640
 
1641
static void
1642
rs6000_parse_tls_size_option (void)
1643
{
1644
  if (rs6000_tls_size_string == 0)
1645
    return;
1646
  else if (strcmp (rs6000_tls_size_string, "16") == 0)
1647
    rs6000_tls_size = 16;
1648
  else if (strcmp (rs6000_tls_size_string, "32") == 0)
1649
    rs6000_tls_size = 32;
1650
  else if (strcmp (rs6000_tls_size_string, "64") == 0)
1651
    rs6000_tls_size = 64;
1652
  else
1653
    error ("bad value %qs for -mtls-size switch", rs6000_tls_size_string);
1654
}
1655
 
1656
void
1657
optimization_options (int level ATTRIBUTE_UNUSED, int size ATTRIBUTE_UNUSED)
1658
{
1659
  if (DEFAULT_ABI == ABI_DARWIN)
1660
    /* The Darwin libraries never set errno, so we might as well
1661
       avoid calling them when that's the only reason we would.  */
1662
    flag_errno_math = 0;
1663
 
1664
  /* Double growth factor to counter reduced min jump length.  */
1665
  set_param_value ("max-grow-copy-bb-insns", 16);
1666
 
1667
  /* Enable section anchors by default.
1668
     Skip section anchors for Objective C and Objective C++
1669
     until front-ends fixed.  */
1670
  if (!TARGET_MACHO && lang_hooks.name[4] != 'O')
1671
    flag_section_anchors = 1;
1672
}
1673
 
1674
/* Implement TARGET_HANDLE_OPTION.

   CODE identifies the option (an OPT_* enumerator), ARG is its string
   argument (if any) and VALUE its integer value (0 for a negated
   switch).  Record the option in the rs6000 global state, returning
   false only when the argument is invalid so the driver reports it.  */

static bool
rs6000_handle_option (size_t code, const char *arg, int value)
{
  switch (code)
    {
    case OPT_mno_power:
      /* -mno-power also turns off the POWER2/multiple/string subsets.  */
      target_flags &= ~(MASK_POWER | MASK_POWER2
                        | MASK_MULTIPLE | MASK_STRING);
      target_flags_explicit |= (MASK_POWER | MASK_POWER2
                                | MASK_MULTIPLE | MASK_STRING);
      break;
    case OPT_mno_powerpc:
      /* -mno-powerpc disables every PowerPC-only feature set.  */
      target_flags &= ~(MASK_POWERPC | MASK_PPC_GPOPT
                        | MASK_PPC_GFXOPT | MASK_POWERPC64);
      target_flags_explicit |= (MASK_POWERPC | MASK_PPC_GPOPT
                                | MASK_PPC_GFXOPT | MASK_POWERPC64);
      break;
    case OPT_mfull_toc:
      target_flags &= ~MASK_MINIMAL_TOC;
      TARGET_NO_FP_IN_TOC = 0;
      TARGET_NO_SUM_IN_TOC = 0;
      target_flags_explicit |= MASK_MINIMAL_TOC;
#ifdef TARGET_USES_SYSV4_OPT
      /* Note, V.4 no longer uses a normal TOC, so make -mfull-toc, be
         just the same as -mminimal-toc.  */
      target_flags |= MASK_MINIMAL_TOC;
      target_flags_explicit |= MASK_MINIMAL_TOC;
#endif
      break;

#ifdef TARGET_USES_SYSV4_OPT
    case OPT_mtoc:
      /* Make -mtoc behave like -mminimal-toc.  */
      target_flags |= MASK_MINIMAL_TOC;
      target_flags_explicit |= MASK_MINIMAL_TOC;
      break;
#endif

#ifdef TARGET_USES_AIX64_OPT
    case OPT_maix64:
#else
    case OPT_m64:
#endif
      target_flags |= MASK_POWERPC64 | MASK_POWERPC;
      /* Enable GFXOPT only where the user has not set it explicitly.  */
      target_flags |= ~target_flags_explicit & MASK_PPC_GFXOPT;
      target_flags_explicit |= MASK_POWERPC64 | MASK_POWERPC;
      break;

#ifdef TARGET_USES_AIX64_OPT
    case OPT_maix32:
#else
    case OPT_m32:
#endif
      target_flags &= ~MASK_POWERPC64;
      target_flags_explicit |= MASK_POWERPC64;
      break;

    case OPT_minsert_sched_nops_:
      /* Parsed later; just remember the string.  */
      rs6000_sched_insert_nops_str = arg;
      break;

    case OPT_mminimal_toc:
      if (value == 1)
        {
          TARGET_NO_FP_IN_TOC = 0;
          TARGET_NO_SUM_IN_TOC = 0;
        }
      break;

    case OPT_mpower:
      if (value == 1)
        {
          target_flags |= (MASK_MULTIPLE | MASK_STRING);
          target_flags_explicit |= (MASK_MULTIPLE | MASK_STRING);
        }
      break;

    case OPT_mpower2:
      if (value == 1)
        {
          target_flags |= (MASK_POWER | MASK_MULTIPLE | MASK_STRING);
          target_flags_explicit |= (MASK_POWER | MASK_MULTIPLE | MASK_STRING);
        }
      break;

    case OPT_mpowerpc_gpopt:
    case OPT_mpowerpc_gfxopt:
      /* The optional-instruction subsets imply the PowerPC base set.  */
      if (value == 1)
        {
          target_flags |= MASK_POWERPC;
          target_flags_explicit |= MASK_POWERPC;
        }
      break;

    case OPT_maix_struct_return:
    case OPT_msvr4_struct_return:
      rs6000_explicit_options.aix_struct_ret = true;
      break;

    case OPT_mvrsave_:
      rs6000_parse_yes_no_option ("vrsave", arg, &(TARGET_ALTIVEC_VRSAVE));
      break;

    case OPT_misel_:
      rs6000_explicit_options.isel = true;
      rs6000_parse_yes_no_option ("isel", arg, &(rs6000_isel));
      break;

    case OPT_mspe_:
      rs6000_explicit_options.spe = true;
      rs6000_parse_yes_no_option ("spe", arg, &(rs6000_spe));
      /* No SPE means 64-bit long doubles, even if an E500.  */
      if (!rs6000_spe)
        rs6000_long_double_type_size = 64;
      break;

    case OPT_mdebug_:
      rs6000_debug_name = arg;
      break;

#ifdef TARGET_USES_SYSV4_OPT
    case OPT_mcall_:
      rs6000_abi_name = arg;
      break;

    case OPT_msdata_:
      rs6000_sdata_name = arg;
      break;

    case OPT_mtls_size_:
      /* Validated later by rs6000_parse_tls_size_option.  */
      rs6000_tls_size_string = arg;
      break;

    case OPT_mrelocatable:
      if (value == 1)
        {
          target_flags |= MASK_MINIMAL_TOC;
          target_flags_explicit |= MASK_MINIMAL_TOC;
          TARGET_NO_FP_IN_TOC = 1;
        }
      break;

    case OPT_mrelocatable_lib:
      if (value == 1)
        {
          target_flags |= MASK_RELOCATABLE | MASK_MINIMAL_TOC;
          target_flags_explicit |= MASK_RELOCATABLE | MASK_MINIMAL_TOC;
          TARGET_NO_FP_IN_TOC = 1;
        }
      else
        {
          target_flags &= ~MASK_RELOCATABLE;
          target_flags_explicit |= MASK_RELOCATABLE;
        }
      break;
#endif

    case OPT_mabi_:
      if (!strcmp (arg, "altivec"))
        {
          rs6000_explicit_options.abi = true;
          rs6000_altivec_abi = 1;
          rs6000_spe_abi = 0;
        }
      else if (! strcmp (arg, "no-altivec"))
        {
          /* ??? Don't set rs6000_explicit_options.abi here, to allow
             the default for rs6000_spe_abi to be chosen later.  */
          rs6000_altivec_abi = 0;
        }
      else if (! strcmp (arg, "spe"))
        {
          rs6000_explicit_options.abi = true;
          rs6000_spe_abi = 1;
          rs6000_altivec_abi = 0;
          if (!TARGET_SPE_ABI)
            error ("not configured for ABI: '%s'", arg);
        }
      else if (! strcmp (arg, "no-spe"))
        {
          rs6000_explicit_options.abi = true;
          rs6000_spe_abi = 0;
        }

      /* These are here for testing during development only, do not
         document in the manual please.  */
      else if (! strcmp (arg, "d64"))
        {
          rs6000_darwin64_abi = 1;
          warning (0, "Using darwin64 ABI");
        }
      else if (! strcmp (arg, "d32"))
        {
          rs6000_darwin64_abi = 0;
          warning (0, "Using old darwin ABI");
        }

      else if (! strcmp (arg, "ibmlongdouble"))
        {
          rs6000_explicit_options.ieee = true;
          rs6000_ieeequad = 0;
          warning (0, "Using IBM extended precision long double");
        }
      else if (! strcmp (arg, "ieeelongdouble"))
        {
          rs6000_explicit_options.ieee = true;
          rs6000_ieeequad = 1;
          warning (0, "Using IEEE extended precision long double");
        }

      else
        {
          error ("unknown ABI specified: '%s'", arg);
          return false;
        }
      break;

    case OPT_mcpu_:
      /* Slot 1 of rs6000_select holds the -mcpu= choice.  */
      rs6000_select[1].string = arg;
      break;

    case OPT_mtune_:
      /* Slot 2 of rs6000_select holds the -mtune= choice.  */
      rs6000_select[2].string = arg;
      break;

    case OPT_mtraceback_:
      rs6000_traceback_name = arg;
      break;

    case OPT_mfloat_gprs_:
      rs6000_explicit_options.float_gprs = true;
      if (! strcmp (arg, "yes") || ! strcmp (arg, "single"))
        rs6000_float_gprs = 1;
      else if (! strcmp (arg, "double"))
        rs6000_float_gprs = 2;
      else if (! strcmp (arg, "no"))
        rs6000_float_gprs = 0;
      else
        {
          error ("invalid option for -mfloat-gprs: '%s'", arg);
          return false;
        }
      break;

    case OPT_mlong_double_:
      rs6000_explicit_options.long_double = true;
      rs6000_long_double_type_size = RS6000_DEFAULT_LONG_DOUBLE_SIZE;
      if (value != 64 && value != 128)
        {
          error ("Unknown switch -mlong-double-%s", arg);
          rs6000_long_double_type_size = RS6000_DEFAULT_LONG_DOUBLE_SIZE;
          return false;
        }
      else
        rs6000_long_double_type_size = value;
      break;

    case OPT_msched_costly_dep_:
      rs6000_sched_costly_dep_str = arg;
      break;

    case OPT_malign_:
      rs6000_explicit_options.alignment = true;
      if (! strcmp (arg, "power"))
        {
          /* On 64-bit Darwin, power alignment is ABI-incompatible with
             some C library functions, so warn about it. The flag may be
             useful for performance studies from time to time though, so
             don't disable it entirely.  */
          if (DEFAULT_ABI == ABI_DARWIN && TARGET_64BIT)
            warning (0, "-malign-power is not supported for 64-bit Darwin;"
                     " it is incompatible with the installed C and C++ libraries");
          rs6000_alignment_flags = MASK_ALIGN_POWER;
        }
      else if (! strcmp (arg, "natural"))
        rs6000_alignment_flags = MASK_ALIGN_NATURAL;
      else
        {
          error ("unknown -malign-XXXXX option specified: '%s'", arg);
          return false;
        }
      break;
    }
  return true;
}
1961
 
1962
/* Do anything needed at the start of the asm file.  Emits the standard
   file prologue and, under -fverbose-asm, a comment line listing the
   cpu/tune selections and sdata settings in effect; on AIX (or ELF with
   -fPIC) it also primes the TOC and text sections.  */

static void
rs6000_file_start (void)
{
  size_t i;
  char buffer[80];
  /* START points at BUFFER (the "options:" banner) until something has
     been printed, then becomes "" so the banner appears only once.  */
  const char *start = buffer;
  struct rs6000_cpu_select *ptr;
  const char *default_cpu = TARGET_CPU_DEFAULT;
  FILE *file = asm_out_file;

  default_file_start ();

#ifdef TARGET_BI_ARCH
  /* When the selected word size differs from the configured default,
     the default cpu name no longer applies.  */
  if ((TARGET_DEFAULT ^ target_flags) & MASK_64BIT)
    default_cpu = 0;
#endif

  if (flag_verbose_asm)
    {
      sprintf (buffer, "\n%s rs6000/powerpc options:", ASM_COMMENT_START);
      rs6000_select[0].string = default_cpu;

      /* Print each non-empty -mcpu/-mtune style selection.  */
      for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
        {
          ptr = &rs6000_select[i];
          if (ptr->string != (char *)0 && ptr->string[0] != '\0')
            {
              fprintf (file, "%s %s%s", start, ptr->name, ptr->string);
              start = "";
            }
        }

      if (PPC405_ERRATUM77)
        {
          fprintf (file, "%s PPC405CR_ERRATUM77", start);
          start = "";
        }

#ifdef USING_ELFOS_H
      switch (rs6000_sdata)
        {
        case SDATA_NONE: fprintf (file, "%s -msdata=none", start); start = ""; break;
        case SDATA_DATA: fprintf (file, "%s -msdata=data", start); start = ""; break;
        case SDATA_SYSV: fprintf (file, "%s -msdata=sysv", start); start = ""; break;
        case SDATA_EABI: fprintf (file, "%s -msdata=eabi", start); start = ""; break;
        }

      if (rs6000_sdata && g_switch_value)
        {
          fprintf (file, "%s -G " HOST_WIDE_INT_PRINT_UNSIGNED, start,
                   g_switch_value);
          start = "";
        }
#endif

      /* START having been cleared means at least one item was printed;
         terminate the comment line.  */
      if (*start == '\0')
        putc ('\n', file);
    }

  if (DEFAULT_ABI == ABI_AIX || (TARGET_ELF && flag_pic == 2))
    {
      switch_to_section (toc_section);
      switch_to_section (text_section);
    }
}
2029
 
2030
 
2031
/* Return nonzero if this function is known to have a null epilogue.  */
2032
 
2033
int
2034
direct_return (void)
2035
{
2036
  if (reload_completed)
2037
    {
2038
      rs6000_stack_t *info = rs6000_stack_info ();
2039
 
2040
      if (info->first_gp_reg_save == 32
2041
          && info->first_fp_reg_save == 64
2042
          && info->first_altivec_reg_save == LAST_ALTIVEC_REGNO + 1
2043
          && ! info->lr_save_p
2044
          && ! info->cr_save_p
2045
          && info->vrsave_mask == 0
2046
          && ! info->push_p)
2047
        return 1;
2048
    }
2049
 
2050
  return 0;
2051
}
2052
 
2053
/* Return the number of instructions it takes to form a constant in an
2054
   integer register.  */
2055
 
2056
int
2057
num_insns_constant_wide (HOST_WIDE_INT value)
2058
{
2059
  /* signed constant loadable with {cal|addi} */
2060
  if ((unsigned HOST_WIDE_INT) (value + 0x8000) < 0x10000)
2061
    return 1;
2062
 
2063
  /* constant loadable with {cau|addis} */
2064
  else if ((value & 0xffff) == 0
2065
           && (value >> 31 == -1 || value >> 31 == 0))
2066
    return 1;
2067
 
2068
#if HOST_BITS_PER_WIDE_INT == 64
2069
  else if (TARGET_POWERPC64)
2070
    {
2071
      HOST_WIDE_INT low  = ((value & 0xffffffff) ^ 0x80000000) - 0x80000000;
2072
      HOST_WIDE_INT high = value >> 31;
2073
 
2074
      if (high == 0 || high == -1)
2075
        return 2;
2076
 
2077
      high >>= 1;
2078
 
2079
      if (low == 0)
2080
        return num_insns_constant_wide (high) + 1;
2081
      else
2082
        return (num_insns_constant_wide (high)
2083
                + num_insns_constant_wide (low) + 1);
2084
    }
2085
#endif
2086
 
2087
  else
2088
    return 2;
2089
}
2090
 
2091
/* Return the number of instructions needed to load constant OP (a
   CONST_INT or CONST_DOUBLE) of mode MODE into an integer register.  */

int
num_insns_constant (rtx op, enum machine_mode mode)
{
  HOST_WIDE_INT low, high;

  switch (GET_CODE (op))
    {
    case CONST_INT:
#if HOST_BITS_PER_WIDE_INT == 64
      /* A 64-bit mask constant can be built with two rotate-and-mask
         instructions even when it doesn't sign-extend from 32 bits.  */
      if ((INTVAL (op) >> 31) != 0 && (INTVAL (op) >> 31) != -1
          && mask64_operand (op, mode))
        return 2;
      else
#endif
        return num_insns_constant_wide (INTVAL (op));

      case CONST_DOUBLE:
        if (mode == SFmode)
          {
            long l;
            REAL_VALUE_TYPE rv;

            /* Count via the 32-bit target image of the float.  */
            REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
            REAL_VALUE_TO_TARGET_SINGLE (rv, l);
            return num_insns_constant_wide ((HOST_WIDE_INT) l);
          }

        if (mode == VOIDmode || mode == DImode)
          {
            /* Integer CONST_DOUBLE: the two words are stored directly.  */
            high = CONST_DOUBLE_HIGH (op);
            low  = CONST_DOUBLE_LOW (op);
          }
        else
          {
            long l[2];
            REAL_VALUE_TYPE rv;

            /* Convert the double to its target image, picking the word
               order according to target endianness.  */
            REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
            REAL_VALUE_TO_TARGET_DOUBLE (rv, l);
            high = l[WORDS_BIG_ENDIAN == 0];
            low  = l[WORDS_BIG_ENDIAN != 0];
          }

        if (TARGET_32BIT)
          /* Two registers, each loaded independently.  */
          return (num_insns_constant_wide (low)
                  + num_insns_constant_wide (high));
        else
          {
            /* Sign-extends from 32 bits: one word suffices.  */
            if ((high == 0 && low >= 0)
                || (high == -1 && low < 0))
              return num_insns_constant_wide (low);

            else if (mask64_operand (op, mode))
              return 2;

            else if (low == 0)
              return num_insns_constant_wide (high) + 1;

            else
              /* High part, low part, plus the shift joining them.  */
              return (num_insns_constant_wide (high)
                      + num_insns_constant_wide (low) + 1);
          }

    default:
      gcc_unreachable ();
    }
}
2158
 
2159
/* Interpret element ELT of the CONST_VECTOR OP as an integer value.
2160
   If the mode of OP is MODE_VECTOR_INT, this simply returns the
2161
   corresponding element of the vector, but for V4SFmode and V2SFmode,
2162
   the corresponding "float" is interpreted as an SImode integer.  */
2163
 
2164
static HOST_WIDE_INT
2165
const_vector_elt_as_int (rtx op, unsigned int elt)
2166
{
2167
  rtx tmp = CONST_VECTOR_ELT (op, elt);
2168
  if (GET_MODE (op) == V4SFmode
2169
      || GET_MODE (op) == V2SFmode)
2170
    tmp = gen_lowpart (SImode, tmp);
2171
  return INTVAL (tmp);
2172
}
2173
 
2174
/* Return true if OP can be synthesized with a particular vspltisb, vspltish
   or vspltisw instruction.  OP is a CONST_VECTOR.  Which instruction is used
   depends on STEP and COPIES, one of which will be 1.  If COPIES > 1,
   all items are set to the same value and contain COPIES replicas of the
   vsplt's operand; if STEP > 1, one in STEP elements is set to the vsplt's
   operand and the others are set to the value of the operand's msb.  */

static bool
vspltis_constant (rtx op, unsigned step, unsigned copies)
{
  enum machine_mode mode = GET_MODE (op);
  enum machine_mode inner = GET_MODE_INNER (mode);

  unsigned i;
  unsigned nunits = GET_MODE_NUNITS (mode);
  unsigned bitsize = GET_MODE_BITSIZE (inner);
  unsigned mask = GET_MODE_MASK (inner);

  /* VAL is the last (least significant for big-endian) element.  */
  HOST_WIDE_INT val = const_vector_elt_as_int (op, nunits - 1);
  HOST_WIDE_INT splat_val = val;
  HOST_WIDE_INT msb_val = val > 0 ? 0 : -1;

  /* Construct the value to be splatted, if possible.  If not, return 0.
     Each halving checks that the element really is two identical copies
     of the narrower value.  */
  for (i = 2; i <= copies; i *= 2)
    {
      HOST_WIDE_INT small_val;
      bitsize /= 2;
      small_val = splat_val >> bitsize;
      mask >>= bitsize;
      if (splat_val != ((small_val << bitsize) | (small_val & mask)))
        return false;
      splat_val = small_val;
    }

  /* Check if SPLAT_VAL can really be the operand of a vspltis[bhw].  */
  if (EASY_VECTOR_15 (splat_val))
    ;

  /* Also check if we can splat, and then add the result to itself.  Do so if
     the value is positive, or if the splat instruction is using OP's mode;
     for splat_val < 0, the splat and the add should use the same mode.  */
  else if (EASY_VECTOR_15_ADD_SELF (splat_val)
           && (splat_val >= 0 || (step == 1 && copies == 1)))
    ;

  else
    return false;

  /* Check if VAL is present in every STEP-th element, and the
     other elements are filled with its most significant bit.  */
  for (i = 0; i < nunits - 1; ++i)
    {
      HOST_WIDE_INT desired_val;
      if (((i + 1) & (step - 1)) == 0)
        desired_val = val;
      else
        desired_val = msb_val;

      if (desired_val != const_vector_elt_as_int (op, i))
        return false;
    }

  return true;
}
2238
 
2239
 
2240
/* Return true if OP is of the given MODE and can be synthesized
2241
   with a vspltisb, vspltish or vspltisw.  */
2242
 
2243
bool
2244
easy_altivec_constant (rtx op, enum machine_mode mode)
2245
{
2246
  unsigned step, copies;
2247
 
2248
  if (mode == VOIDmode)
2249
    mode = GET_MODE (op);
2250
  else if (mode != GET_MODE (op))
2251
    return false;
2252
 
2253
  /* Start with a vspltisw.  */
2254
  step = GET_MODE_NUNITS (mode) / 4;
2255
  copies = 1;
2256
 
2257
  if (vspltis_constant (op, step, copies))
2258
    return true;
2259
 
2260
  /* Then try with a vspltish.  */
2261
  if (step == 1)
2262
    copies <<= 1;
2263
  else
2264
    step >>= 1;
2265
 
2266
  if (vspltis_constant (op, step, copies))
2267
    return true;
2268
 
2269
  /* And finally a vspltisb.  */
2270
  if (step == 1)
2271
    copies <<= 1;
2272
  else
2273
    step >>= 1;
2274
 
2275
  if (vspltis_constant (op, step, copies))
2276
    return true;
2277
 
2278
  return false;
2279
}
2280
 
2281
/* Generate a VEC_DUPLICATE representing a vspltis[bhw] instruction whose
2282
   result is OP.  Abort if it is not possible.  */
2283
 
2284
rtx
2285
gen_easy_altivec_constant (rtx op)
2286
{
2287
  enum machine_mode mode = GET_MODE (op);
2288
  int nunits = GET_MODE_NUNITS (mode);
2289
  rtx last = CONST_VECTOR_ELT (op, nunits - 1);
2290
  unsigned step = nunits / 4;
2291
  unsigned copies = 1;
2292
 
2293
  /* Start with a vspltisw.  */
2294
  if (vspltis_constant (op, step, copies))
2295
    return gen_rtx_VEC_DUPLICATE (V4SImode, gen_lowpart (SImode, last));
2296
 
2297
  /* Then try with a vspltish.  */
2298
  if (step == 1)
2299
    copies <<= 1;
2300
  else
2301
    step >>= 1;
2302
 
2303
  if (vspltis_constant (op, step, copies))
2304
    return gen_rtx_VEC_DUPLICATE (V8HImode, gen_lowpart (HImode, last));
2305
 
2306
  /* And finally a vspltisb.  */
2307
  if (step == 1)
2308
    copies <<= 1;
2309
  else
2310
    step >>= 1;
2311
 
2312
  if (vspltis_constant (op, step, copies))
2313
    return gen_rtx_VEC_DUPLICATE (V16QImode, gen_lowpart (QImode, last));
2314
 
2315
  gcc_unreachable ();
2316
}
2317
 
2318
/* Return the assembler template for moving the easy vector constant
   OPERANDS[1] into vector register OPERANDS[0].  For AltiVec this is a
   vxor (zero) or a vspltis[bhw]; for SPE it is an li/evmergelo
   sequence.  May rewrite OPERANDS[1] (and [2]) for the template.  */

const char *
output_vec_const_move (rtx *operands)
{
  int cst, cst2;
  enum machine_mode mode;
  rtx dest, vec;

  dest = operands[0];
  vec = operands[1];
  mode = GET_MODE (dest);

  if (TARGET_ALTIVEC)
    {
      rtx splat_vec;
      /* All-zero vector: xor the register with itself.  */
      if (zero_constant (vec, mode))
        return "vxor %0,%0,%0";

      splat_vec = gen_easy_altivec_constant (vec);
      gcc_assert (GET_CODE (splat_vec) == VEC_DUPLICATE);
      /* Replace the vector operand by the scalar splat immediate.  */
      operands[1] = XEXP (splat_vec, 0);
      /* Out-of-range immediate: let a splitter handle it.  */
      if (!EASY_VECTOR_15 (INTVAL (operands[1])))
        return "#";

      /* The mode of the VEC_DUPLICATE selects the splat width.  */
      switch (GET_MODE (splat_vec))
        {
        case V4SImode:
          return "vspltisw %0,%1";

        case V8HImode:
          return "vspltish %0,%1";

        case V16QImode:
          return "vspltisb %0,%1";

        default:
          gcc_unreachable ();
        }
    }

  gcc_assert (TARGET_SPE);

  /* Vector constant 0 is handled as a splitter of V2SI, and in the
     pattern of V1DI, V4HI, and V2SF.

     FIXME: We should probably return # and add post reload
     splitters for these, but this way is so easy ;-).  */
  cst = INTVAL (CONST_VECTOR_ELT (vec, 0));
  cst2 = INTVAL (CONST_VECTOR_ELT (vec, 1));
  operands[1] = CONST_VECTOR_ELT (vec, 0);
  operands[2] = CONST_VECTOR_ELT (vec, 1);
  /* Equal halves need only one li; otherwise load the second half
     after the merge.  */
  if (cst == cst2)
    return "li %0,%1\n\tevmergelo %0,%0,%0";
  else
    return "li %0,%1\n\tevmergelo %0,%0,%0\n\tli %0,%2";
}
2373
 
2374
/* Initialize vector TARGET to VALS, choosing the cheapest strategy:
   zero via xor, immediate splat, constant-pool load, splat of a single
   variable element, constant load plus one element insert, or a full
   element-by-element store through a stack temporary.  */

void
rs6000_expand_vector_init (rtx target, rtx vals)
{
  enum machine_mode mode = GET_MODE (target);
  enum machine_mode inner_mode = GET_MODE_INNER (mode);
  int n_elts = GET_MODE_NUNITS (mode);
  int n_var = 0, one_var = -1;
  bool all_same = true, all_const_zero = true;
  rtx x, mem;
  int i;

  /* Classify the elements: count non-constants (remembering the last
     one), and track all-zero and all-identical patterns.  */
  for (i = 0; i < n_elts; ++i)
    {
      x = XVECEXP (vals, 0, i);
      if (!CONSTANT_P (x))
        ++n_var, one_var = i;
      else if (x != CONST0_RTX (inner_mode))
        all_const_zero = false;

      if (i > 0 && !rtx_equal_p (x, XVECEXP (vals, 0, 0)))
        all_same = false;
    }

  if (n_var == 0)
    {
      if (mode != V4SFmode && all_const_zero)
        {
          /* Zero register.  */
          emit_insn (gen_rtx_SET (VOIDmode, target,
                                  gen_rtx_XOR (mode, target, target)));
          return;
        }
      else if (mode != V4SFmode && easy_vector_constant (vals, mode))
        {
          /* Splat immediate.  */
          emit_insn (gen_rtx_SET (VOIDmode, target, vals));
          return;
        }
      else if (all_same)
        ;       /* Splat vector element.  */
      else
        {
          /* Load from constant pool.  */
          emit_move_insn (target, gen_rtx_CONST_VECTOR (mode, XVEC (vals, 0)));
          return;
        }
    }

  /* Store value to stack temp.  Load vector element.  Splat.  */
  if (all_same)
    {
      mem = assign_stack_temp (mode, GET_MODE_SIZE (inner_mode), 0);
      emit_move_insn (adjust_address_nv (mem, inner_mode, 0),
                      XVECEXP (vals, 0, 0));
      /* Pair the load with an UNSPEC_LVE marker in a PARALLEL.  */
      x = gen_rtx_UNSPEC (VOIDmode,
                          gen_rtvec (1, const0_rtx), UNSPEC_LVE);
      emit_insn (gen_rtx_PARALLEL (VOIDmode,
                                   gen_rtvec (2,
                                              gen_rtx_SET (VOIDmode,
                                                           target, mem),
                                              x)));
      /* Broadcast element 0 to every lane.  */
      x = gen_rtx_VEC_SELECT (inner_mode, target,
                              gen_rtx_PARALLEL (VOIDmode,
                                                gen_rtvec (1, const0_rtx)));
      emit_insn (gen_rtx_SET (VOIDmode, target,
                              gen_rtx_VEC_DUPLICATE (mode, x)));
      return;
    }

  /* One field is non-constant.  Load constant then overwrite
     varying field.  */
  if (n_var == 1)
    {
      rtx copy = copy_rtx (vals);

      /* Load constant part of vector, substitute neighboring value for
         varying element.  */
      XVECEXP (copy, 0, one_var) = XVECEXP (vals, 0, (one_var + 1) % n_elts);
      rs6000_expand_vector_init (target, copy);

      /* Insert variable.  */
      rs6000_expand_vector_set (target, XVECEXP (vals, 0, one_var), one_var);
      return;
    }

  /* Construct the vector in memory one field at a time
     and load the whole vector.  */
  mem = assign_stack_temp (mode, GET_MODE_SIZE (mode), 0);
  for (i = 0; i < n_elts; i++)
    emit_move_insn (adjust_address_nv (mem, inner_mode,
                                    i * GET_MODE_SIZE (inner_mode)),
                    XVECEXP (vals, 0, i));
  emit_move_insn (target, mem);
}
2470
 
2471
/* Set field ELT of TARGET to VAL.  Loads VAL into a scratch vector
   register via a stack temporary, then merges it into TARGET with a
   vperm whose mask routes the new element into lane ELT.  */

void
rs6000_expand_vector_set (rtx target, rtx val, int elt)
{
  enum machine_mode mode = GET_MODE (target);
  enum machine_mode inner_mode = GET_MODE_INNER (mode);
  rtx reg = gen_reg_rtx (mode);
  rtx mask, mem, x;
  int width = GET_MODE_SIZE (inner_mode);
  int i;

  /* Load single variable value.  */
  mem = assign_stack_temp (mode, GET_MODE_SIZE (inner_mode), 0);
  emit_move_insn (adjust_address_nv (mem, inner_mode, 0), val);
  /* Pair the load with an UNSPEC_LVE marker in a PARALLEL.  */
  x = gen_rtx_UNSPEC (VOIDmode,
                      gen_rtvec (1, const0_rtx), UNSPEC_LVE);
  emit_insn (gen_rtx_PARALLEL (VOIDmode,
                               gen_rtvec (2,
                                          gen_rtx_SET (VOIDmode,
                                                       reg, mem),
                                          x)));

  /* Linear sequence.  Bytes 0-15 select TARGET unchanged.  */
  mask = gen_rtx_PARALLEL (V16QImode, rtvec_alloc (16));
  for (i = 0; i < 16; ++i)
    XVECEXP (mask, 0, i) = GEN_INT (i);

  /* Set permute mask to insert element into target.  Byte indices
     0x10+ select from the second vperm input (REG).  */
  for (i = 0; i < width; ++i)
    XVECEXP (mask, 0, elt*width + i)
      = GEN_INT (i + 0x10);
  x = gen_rtx_CONST_VECTOR (V16QImode, XVEC (mask, 0));
  x = gen_rtx_UNSPEC (mode,
                      gen_rtvec (3, target, reg,
                                 force_reg (V16QImode, x)),
                      UNSPEC_VPERM);
  emit_insn (gen_rtx_SET (VOIDmode, target, x));
}
2510
 
2511
/* Extract field ELT from VEC into TARGET.  */
2512
 
2513
void
2514
rs6000_expand_vector_extract (rtx target, rtx vec, int elt)
2515
{
2516
  enum machine_mode mode = GET_MODE (vec);
2517
  enum machine_mode inner_mode = GET_MODE_INNER (mode);
2518
  rtx mem, x;
2519
 
2520
  /* Allocate mode-sized buffer.  */
2521
  mem = assign_stack_temp (mode, GET_MODE_SIZE (mode), 0);
2522
 
2523
  /* Add offset to field within buffer matching vector element.  */
2524
  mem = adjust_address_nv (mem, mode, elt * GET_MODE_SIZE (inner_mode));
2525
 
2526
  /* Store single field into mode-sized buffer.  */
2527
  x = gen_rtx_UNSPEC (VOIDmode,
2528
                      gen_rtvec (1, const0_rtx), UNSPEC_STVE);
2529
  emit_insn (gen_rtx_PARALLEL (VOIDmode,
2530
                               gen_rtvec (2,
2531
                                          gen_rtx_SET (VOIDmode,
2532
                                                       mem, vec),
2533
                                          x)));
2534
  emit_move_insn (target, adjust_address_nv (mem, inner_mode, 0));
2535
}
2536
 
2537
/* Generates shifts and masks for a pair of rldicl or rldicr insns to
   implement ANDing by the mask IN.

   IN must be a CONST_INT.  On return, OUT holds four CONST_INTs
   describing a rotate/mask, rotate-back/mask sequence:
     out[0] = first rotate count (64 - shift)
     out[1] = mask applied after the first rotate (m1)
     out[2] = second rotate count (shift), undoing the first rotate
     out[3] = mask applied after the second rotate (m2)

   Requires HOST_BITS_PER_WIDE_INT >= 64; otherwise aborts via
   gcc_unreachable.  */
void
build_mask64_2_operands (rtx in, rtx *out)
{
#if HOST_BITS_PER_WIDE_INT >= 64
  unsigned HOST_WIDE_INT c, lsb, m1, m2;
  int shift;

  gcc_assert (GET_CODE (in) == CONST_INT);

  c = INTVAL (in);
  if (c & 1)
    {
      /* Assume c initially something like 0x00fff000000fffff.  The idea
         is to rotate the word so that the middle ^^^^^^ group of zeros
         is at the MS end and can be cleared with an rldicl mask.  We then
         rotate back and clear off the MS    ^^ group of zeros with a
         second rldicl.  */
      c = ~c;                   /*   c == 0xff000ffffff00000 */
      lsb = c & -c;             /* lsb == 0x0000000000100000 */
      m1 = -lsb;                /*  m1 == 0xfffffffffff00000 */
      c = ~c;                   /*   c == 0x00fff000000fffff */
      c &= -lsb;                /*   c == 0x00fff00000000000 */
      lsb = c & -c;             /* lsb == 0x0000100000000000 */
      c = ~c;                   /*   c == 0xff000fffffffffff */
      c &= -lsb;                /*   c == 0xff00000000000000 */
      shift = 0;
      while ((lsb >>= 1) != 0)
        shift++;                /* shift == 44 on exit from loop */
      m1 <<= 64 - shift;        /*  m1 == 0xffffff0000000000 */
      m1 = ~m1;                 /*  m1 == 0x000000ffffffffff */
      m2 = ~c;                  /*  m2 == 0x00ffffffffffffff */
    }
  else
    {
      /* Assume c initially something like 0xff000f0000000000.  The idea
         is to rotate the word so that the     ^^^  middle group of zeros
         is at the LS end and can be cleared with an rldicr mask.  We then
         rotate back and clear off the LS group of ^^^^^^^^^^ zeros with
         a second rldicr.  */
      lsb = c & -c;             /* lsb == 0x0000010000000000 */
      m2 = -lsb;                /*  m2 == 0xffffff0000000000 */
      c = ~c;                   /*   c == 0x00fff0ffffffffff */
      c &= -lsb;                /*   c == 0x00fff00000000000 */
      lsb = c & -c;             /* lsb == 0x0000100000000000 */
      c = ~c;                   /*   c == 0xff000fffffffffff */
      c &= -lsb;                /*   c == 0xff00000000000000 */
      shift = 0;
      while ((lsb >>= 1) != 0)
        shift++;                /* shift == 44 on exit from loop */
      m1 = ~c;                  /*  m1 == 0x00ffffffffffffff */
      m1 >>= shift;             /*  m1 == 0x0000000000000fff */
      m1 = ~m1;                 /*  m1 == 0xfffffffffffff000 */
    }

  /* Note that when we only have two 0->1 and 1->0 transitions, one of the
     masks will be all 1's.  We are guaranteed more than one transition.  */
  out[0] = GEN_INT (64 - shift);
  out[1] = GEN_INT (m1);
  out[2] = GEN_INT (shift);
  out[3] = GEN_INT (m2);
#else
  (void)in;
  (void)out;
  gcc_unreachable ();
#endif
}
2605
 
2606
/* Return TRUE if OP is an invalid SUBREG operation on the e500.  */
2607
 
2608
bool
2609
invalid_e500_subreg (rtx op, enum machine_mode mode)
2610
{
2611
  if (TARGET_E500_DOUBLE)
2612
    {
2613
      /* Reject (subreg:SI (reg:DF)).  */
2614
      if (GET_CODE (op) == SUBREG
2615
          && mode == SImode
2616
          && REG_P (SUBREG_REG (op))
2617
          && GET_MODE (SUBREG_REG (op)) == DFmode)
2618
        return true;
2619
 
2620
      /* Reject (subreg:DF (reg:DI)).  */
2621
      if (GET_CODE (op) == SUBREG
2622
          && mode == DFmode
2623
          && REG_P (SUBREG_REG (op))
2624
          && GET_MODE (SUBREG_REG (op)) == DImode)
2625
        return true;
2626
    }
2627
 
2628
  if (TARGET_SPE
2629
      && GET_CODE (op) == SUBREG
2630
      && mode == SImode
2631
      && REG_P (SUBREG_REG (op))
2632
      && SPE_VECTOR_MODE (GET_MODE (SUBREG_REG (op))))
2633
    return true;
2634
 
2635
  return false;
2636
}
2637
 
2638
/* Darwin, AIX increases natural record alignment to doubleword if the first
   field is an FP double while the FP fields remain word aligned.

   TYPE is the record being laid out; COMPUTED and SPECIFIED are the
   alignments (in bits) already determined.  Returns the possibly
   increased alignment.  */

unsigned int
rs6000_special_round_type_align (tree type, unsigned int computed,
                                 unsigned int specified)
{
  /* Start from the larger of the two alignments already known.  */
  unsigned int align = MAX (computed, specified);
  tree field = TYPE_FIELDS (type);

  /* Skip all non field decls */
  while (field != NULL && TREE_CODE (field) != FIELD_DECL)
    field = TREE_CHAIN (field);

  if (field != NULL && field != type)
    {
      /* Look through arrays: a leading array of doubles also gets the
         doubleword bump.  */
      type = TREE_TYPE (field);
      while (TREE_CODE (type) == ARRAY_TYPE)
        type = TREE_TYPE (type);

      if (type != error_mark_node && TYPE_MODE (type) == DFmode)
        align = MAX (align, 64);
    }

  return align;
}
2664
 
2665
/* Return 1 for an operand in small memory on V.4/eabi.

   OP is either a SYMBOL_REF or (const (plus (symbol_ref) (const_int)));
   anything else is rejected.  Only meaningful for TARGET_ELF with the
   V.4 ABI and an sdata mode that uses small-data addressing.  */

int
small_data_operand (rtx op ATTRIBUTE_UNUSED,
                    enum machine_mode mode ATTRIBUTE_UNUSED)
{
#if TARGET_ELF
  rtx sym_ref;

  /* No small-data addressing for these -msdata modes (presumably the
     data lives in ordinary sections then — confirm against sysv4.h).  */
  if (rs6000_sdata == SDATA_NONE || rs6000_sdata == SDATA_DATA)
    return 0;

  if (DEFAULT_ABI != ABI_V4)
    return 0;

  if (GET_CODE (op) == SYMBOL_REF)
    sym_ref = op;

  else if (GET_CODE (op) != CONST
           || GET_CODE (XEXP (op, 0)) != PLUS
           || GET_CODE (XEXP (XEXP (op, 0), 0)) != SYMBOL_REF
           || GET_CODE (XEXP (XEXP (op, 0), 1)) != CONST_INT)
    return 0;

  else
    {
      rtx sum = XEXP (op, 0);
      HOST_WIDE_INT summand;

      /* We have to be careful here, because it is the referenced address
         that must be 32k from _SDA_BASE_, not just the symbol.  */
      summand = INTVAL (XEXP (sum, 1));
      if (summand < 0 || (unsigned HOST_WIDE_INT) summand > g_switch_value)
        return 0;

      sym_ref = XEXP (sum, 0);
    }

  return SYMBOL_REF_SMALL_P (sym_ref);
#else
  return 0;
#endif
}
2708
 
2709
/* Return true if either operand is a general purpose register.  */
2710
 
2711
bool
2712
gpr_or_gpr_p (rtx op0, rtx op1)
2713
{
2714
  return ((REG_P (op0) && INT_REGNO_P (REGNO (op0)))
2715
          || (REG_P (op1) && INT_REGNO_P (REGNO (op1))));
2716
}
2717
 
2718
 
2719
/* Subroutines of rs6000_legitimize_address and rs6000_legitimate_address.

   Recursively walk OP.  Set *HAVE_SYM when a constant-pool SYMBOL_REF
   whose constant qualifies for a special pool entry is seen, and
   *HAVE_TOC when the TOC base label itself is referenced.  Return
   nonzero iff OP is built entirely from such symbols, CONST_INTs, and
   PLUS/MINUS/CONST combinations thereof.  */

static int
constant_pool_expr_1 (rtx op, int *have_sym, int *have_toc)
{
  switch (GET_CODE (op))
    {
    case SYMBOL_REF:
      /* TLS symbols never qualify.  */
      if (RS6000_SYMBOL_REF_TLS_P (op))
        return 0;
      else if (CONSTANT_POOL_ADDRESS_P (op))
        {
          if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (op), Pmode))
            {
              *have_sym = 1;
              return 1;
            }
          else
            return 0;
        }
      else if (! strcmp (XSTR (op, 0), toc_label_name))
        {
          *have_toc = 1;
          return 1;
        }
      else
        return 0;
    case PLUS:
    case MINUS:
      /* Both arms must qualify.  */
      return (constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc)
              && constant_pool_expr_1 (XEXP (op, 1), have_sym, have_toc));
    case CONST:
      return constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc);
    case CONST_INT:
      return 1;
    default:
      return 0;
    }
}
2758
 
2759
static bool
2760
constant_pool_expr_p (rtx op)
2761
{
2762
  int have_sym = 0;
2763
  int have_toc = 0;
2764
  return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_sym;
2765
}
2766
 
2767
bool
2768
toc_relative_expr_p (rtx op)
2769
{
2770
  int have_sym = 0;
2771
  int have_toc = 0;
2772
  return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_toc;
2773
}
2774
 
2775
/* Return true if X is a TOC-relative constant-pool address:
   (plus (reg:TOC) (const-pool expr)).  With -mminimal-toc any base
   register is accepted, since the TOC pointer may have been copied.  */

bool
legitimate_constant_pool_address_p (rtx x)
{
  return (TARGET_TOC
          && GET_CODE (x) == PLUS
          && GET_CODE (XEXP (x, 0)) == REG
          && (TARGET_MINIMAL_TOC || REGNO (XEXP (x, 0)) == TOC_REGISTER)
          && constant_pool_expr_p (XEXP (x, 1)));
}
2784
 
2785
/* Return true if X is a symbolic reference that can be addressed via
   the V.4 small-data area.  Small data is only usable without PIC and
   without a TOC.  */

static bool
legitimate_small_data_p (enum machine_mode mode, rtx x)
{
  return (DEFAULT_ABI == ABI_V4
          && !flag_pic && !TARGET_TOC
          && (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST)
          && small_data_operand (x, mode));
}
2793
 
2794
/* SPE offset addressing is limited to 5-bits worth of double words.  */
#define SPE_CONST_OFFSET_OK(x) (((x) & ~0xf8) == 0)

/* Return true if X is a legitimate (reg + const) address for MODE.
   STRICT nonzero means the base register must satisfy the strict
   (post-reload) base-register check.  The offset must fit the signed
   16-bit displacement field, with EXTRA slack reserved for multi-word
   accesses that address the last word at offset + EXTRA.  */

bool
rs6000_legitimate_offset_address_p (enum machine_mode mode, rtx x, int strict)
{
  unsigned HOST_WIDE_INT offset, extra;

  if (GET_CODE (x) != PLUS)
    return false;
  if (GET_CODE (XEXP (x, 0)) != REG)
    return false;
  if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), strict))
    return false;
  if (legitimate_constant_pool_address_p (x))
    return true;
  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
    return false;

  offset = INTVAL (XEXP (x, 1));
  extra = 0;
  switch (mode)
    {
    case V16QImode:
    case V8HImode:
    case V4SFmode:
    case V4SImode:
      /* AltiVec vector modes.  Only reg+reg addressing is valid and
         constant offset zero should not occur due to canonicalization.
         Allow any offset when not strict before reload.  */
      return !strict;

    case V4HImode:
    case V2SImode:
    case V1DImode:
    case V2SFmode:
      /* SPE vector modes.  */
      return SPE_CONST_OFFSET_OK (offset);

    case DFmode:
      if (TARGET_E500_DOUBLE)
        return SPE_CONST_OFFSET_OK (offset);

      /* Fall through: non-e500 DFmode shares the DImode handling.  */
    case DImode:
      /* On e500v2, we may have:

           (subreg:DF (mem:DI (plus (reg) (const_int))) 0).

         Which gets addressed with evldd instructions.  */
      if (TARGET_E500_DOUBLE)
        return SPE_CONST_OFFSET_OK (offset);

      /* DFmode (and 32-bit DImode) is accessed as two words, so the
         displacement must also cover offset + 4.  64-bit ld/std need a
         word-aligned offset.  */
      if (mode == DFmode || !TARGET_POWERPC64)
        extra = 4;
      else if (offset & 3)
        return false;
      break;

    case TFmode:
    case TImode:
      /* Four-word accesses; same alignment restriction for 64-bit.  */
      if (mode == TFmode || !TARGET_POWERPC64)
        extra = 12;
      else if (offset & 3)
        return false;
      else
        extra = 8;
      break;

    default:
      break;
    }

  /* Bias into unsigned range: valid signed displacements are
     [-0x8000, 0x7fff], and the last word accessed must fit too.  */
  offset += 0x8000;
  return (offset < 0x10000) && (offset + extra < 0x10000);
}
2869
 
2870
/* Return true if X is a legitimate reg+reg (indexed) address.
   STRICT selects the strict (post-reload) register checks.  */

static bool
legitimate_indexed_address_p (rtx x, int strict)
{
  rtx op0, op1;

  if (GET_CODE (x) != PLUS)
    return false;

  op0 = XEXP (x, 0);
  op1 = XEXP (x, 1);

  /* Recognize the rtl generated by reload which we know will later be
     replaced with proper base and index regs.  */
  if (!strict
      && reload_in_progress
      && (REG_P (op0) || GET_CODE (op0) == PLUS)
      && REG_P (op1))
    return true;

  /* Either operand may serve as base with the other as index.  */
  return (REG_P (op0) && REG_P (op1)
          && ((INT_REG_OK_FOR_BASE_P (op0, strict)
               && INT_REG_OK_FOR_INDEX_P (op1, strict))
              || (INT_REG_OK_FOR_BASE_P (op1, strict)
                  && INT_REG_OK_FOR_INDEX_P (op0, strict))));
}
2895
 
2896
inline bool
2897
legitimate_indirect_address_p (rtx x, int strict)
2898
{
2899
  return GET_CODE (x) == REG && INT_REG_OK_FOR_BASE_P (x, strict);
2900
}
2901
 
2902
/* Return true if X is a Darwin PIC-style memory operand:
   (mem:SI (lo_sum (base reg) (constant))).  Only applies with
   TARGET_MACHO and -fpic.  */

bool
macho_lo_sum_memory_operand (rtx x, enum machine_mode mode)
{
  if (!TARGET_MACHO || !flag_pic
      || mode != SImode || GET_CODE (x) != MEM)
    return false;
  x = XEXP (x, 0);

  if (GET_CODE (x) != LO_SUM)
    return false;
  if (GET_CODE (XEXP (x, 0)) != REG)
    return false;
  /* Non-strict base-register check.  */
  if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), 0))
    return false;
  x = XEXP (x, 1);

  return CONSTANT_P (x);
}
2920
 
2921
/* Return true if X is a legitimate LO_SUM address for MODE:
   (lo_sum (base reg) (constant)).  STRICT selects the strict base
   register check.  Only ELF and Mach-O targets use LO_SUM addresses.  */

static bool
legitimate_lo_sum_address_p (enum machine_mode mode, rtx x, int strict)
{
  if (GET_CODE (x) != LO_SUM)
    return false;
  if (GET_CODE (XEXP (x, 0)) != REG)
    return false;
  if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), strict))
    return false;
  /* Restrict addressing for DI because of our SUBREG hackery.  */
  if (TARGET_E500_DOUBLE && (mode == DFmode || mode == DImode))
    return false;
  x = XEXP (x, 1);

  if (TARGET_ELF || TARGET_MACHO)
    {
      if (DEFAULT_ABI != ABI_AIX && DEFAULT_ABI != ABI_DARWIN && flag_pic)
        return false;
      if (TARGET_TOC)
        return false;
      if (GET_MODE_NUNITS (mode) != 1)
        return false;
      /* Multi-register values can't use a single lo_sum access, except
         DFmode when it lives in a floating-point register.  */
      if (GET_MODE_BITSIZE (mode) > 64
          || (GET_MODE_BITSIZE (mode) > 32 && !TARGET_POWERPC64
              && !(TARGET_HARD_FLOAT && TARGET_FPRS && mode == DFmode)))
        return false;

      return CONSTANT_P (x);
    }

  return false;
}
2953
 
2954
 
2955
/* Try machine-dependent ways of modifying an illegitimate address
   to be legitimate.  If we find one, return the new, valid address.
   This is used from only one place: `memory_address' in explow.c.

   OLDX is the address as it was before break_out_memory_refs was
   called.  In some cases it is useful to look at this to decide what
   needs to be done.

   MODE is passed so that this function can use GO_IF_LEGITIMATE_ADDRESS.

   It is always safe for this function to do nothing.  It exists to
   recognize opportunities to optimize the output.

   On RS/6000, first check for the sum of a register with a constant
   integer that is out of range.  If so, generate code to add the
   constant with the low-order 16 bits masked to the register and force
   this result into another register (this can be done with `cau').
   Then generate an address of REG+(CONST&0xffff), allowing for the
   possibility of bit 16 being a one.

   Then check for the sum of a register and something not constant, try to
   load the other things into a register and return the sum.  */

rtx
rs6000_legitimize_address (rtx x, rtx oldx ATTRIBUTE_UNUSED,
                           enum machine_mode mode)
{
  /* Thread-local symbols need their own access sequences.  */
  if (GET_CODE (x) == SYMBOL_REF)
    {
      enum tls_model model = SYMBOL_REF_TLS_MODEL (x);
      if (model != 0)
        return rs6000_legitimize_tls_address (x, model);
    }

  /* reg + out-of-range constant: split into a high part added in a
     register and a 16-bit signed low displacement.  */
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == REG
      && GET_CODE (XEXP (x, 1)) == CONST_INT
      && (unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 1)) + 0x8000) >= 0x10000)
    {
      HOST_WIDE_INT high_int, low_int;
      rtx sum;
      /* Sign-extend the low 16 bits, then peel them off.  */
      low_int = ((INTVAL (XEXP (x, 1)) & 0xffff) ^ 0x8000) - 0x8000;
      high_int = INTVAL (XEXP (x, 1)) - low_int;
      sum = force_operand (gen_rtx_PLUS (Pmode, XEXP (x, 0),
                                         GEN_INT (high_int)), 0);
      return gen_rtx_PLUS (Pmode, sum, GEN_INT (low_int));
    }
  /* reg + non-constant: force the addend into a register so the sum is
     a reg+reg address, where the mode permits indexed addressing.  */
  else if (GET_CODE (x) == PLUS
           && GET_CODE (XEXP (x, 0)) == REG
           && GET_CODE (XEXP (x, 1)) != CONST_INT
           && GET_MODE_NUNITS (mode) == 1
           && ((TARGET_HARD_FLOAT && TARGET_FPRS)
               || TARGET_POWERPC64
               || (((mode != DImode && mode != DFmode) || TARGET_E500_DOUBLE)
                   && mode != TFmode))
           && (TARGET_POWERPC64 || mode != DImode)
           && mode != TImode)
    {
      return gen_rtx_PLUS (Pmode, XEXP (x, 0),
                           force_reg (Pmode, force_operand (XEXP (x, 1), 0)));
    }
  /* AltiVec modes only support reg+reg (or reg-indirect) addressing.  */
  else if (ALTIVEC_VECTOR_MODE (mode))
    {
      rtx reg;

      /* Make sure both operands are registers.  */
      if (GET_CODE (x) == PLUS)
        return gen_rtx_PLUS (Pmode, force_reg (Pmode, XEXP (x, 0)),
                             force_reg (Pmode, XEXP (x, 1)));

      reg = force_reg (Pmode, x);
      return reg;
    }
  /* SPE vectors and e500 double: limited offset range.  */
  else if (SPE_VECTOR_MODE (mode)
           || (TARGET_E500_DOUBLE && (mode == DFmode
                                      || mode == DImode)))
    {
      if (mode == DImode)
        return NULL_RTX;
      /* We accept [reg + reg] and [reg + OFFSET].  */

      if (GET_CODE (x) == PLUS)
        {
          rtx op1 = XEXP (x, 0);
          rtx op2 = XEXP (x, 1);

          op1 = force_reg (Pmode, op1);

          /* Offsets outside the SPE 5-bit doubleword range go into a
             register, giving a reg+reg address.  */
          if (GET_CODE (op2) != REG
              && (GET_CODE (op2) != CONST_INT
                  || !SPE_CONST_OFFSET_OK (INTVAL (op2))))
            op2 = force_reg (Pmode, op2);

          return gen_rtx_PLUS (Pmode, op1, op2);
        }

      return force_reg (Pmode, x);
    }
  /* 32-bit ELF without a TOC: build the constant address with
     elf_high + lo_sum.  */
  else if (TARGET_ELF
           && TARGET_32BIT
           && TARGET_NO_TOC
           && ! flag_pic
           && GET_CODE (x) != CONST_INT
           && GET_CODE (x) != CONST_DOUBLE
           && CONSTANT_P (x)
           && GET_MODE_NUNITS (mode) == 1
           && (GET_MODE_BITSIZE (mode) <= 32
               || ((TARGET_HARD_FLOAT && TARGET_FPRS) && mode == DFmode)))
    {
      rtx reg = gen_reg_rtx (Pmode);
      emit_insn (gen_elf_high (reg, x));
      return gen_rtx_LO_SUM (Pmode, reg, x);
    }
  /* 32-bit Mach-O without a TOC: same idea with macho_high.  */
  else if (TARGET_MACHO && TARGET_32BIT && TARGET_NO_TOC
           && ! flag_pic
#if TARGET_MACHO
           && ! MACHO_DYNAMIC_NO_PIC_P
#endif
           && GET_CODE (x) != CONST_INT
           && GET_CODE (x) != CONST_DOUBLE
           && CONSTANT_P (x)
           && ((TARGET_HARD_FLOAT && TARGET_FPRS) || mode != DFmode)
           && mode != DImode
           && mode != TImode)
    {
      rtx reg = gen_reg_rtx (Pmode);
      emit_insn (gen_macho_high (reg, x));
      return gen_rtx_LO_SUM (Pmode, reg, x);
    }
  /* Constants that qualify for special pool entries become
     TOC references.  */
  else if (TARGET_TOC
           && constant_pool_expr_p (x)
           && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), Pmode))
    {
      return create_TOC_reference (x);
    }
  else
    /* Nothing we can improve; caller falls back to generic handling.  */
    return NULL_RTX;
}
3093
 
3094
/* This is called from dwarf2out.c via TARGET_ASM_OUTPUT_DWARF_DTPREL.
3095
   We need to emit DTP-relative relocations.  */
3096
 
3097
static void
3098
rs6000_output_dwarf_dtprel (FILE *file, int size, rtx x)
3099
{
3100
  switch (size)
3101
    {
3102
    case 4:
3103
      fputs ("\t.long\t", file);
3104
      break;
3105
    case 8:
3106
      fputs (DOUBLE_INT_ASM_OP, file);
3107
      break;
3108
    default:
3109
      gcc_unreachable ();
3110
    }
3111
  output_addr_const (file, x);
3112
  fputs ("@dtprel+0x8000", file);
3113
}
3114
 
3115
/* Construct the SYMBOL_REF for the tls_get_addr function.  */
3116
 
3117
static GTY(()) rtx rs6000_tls_symbol;
3118
static rtx
3119
rs6000_tls_get_addr (void)
3120
{
3121
  if (!rs6000_tls_symbol)
3122
    rs6000_tls_symbol = init_one_libfunc ("__tls_get_addr");
3123
 
3124
  return rs6000_tls_symbol;
3125
}
3126
 
3127
/* Construct the SYMBOL_REF for TLS GOT references.  Created lazily and
   cached (GTY-marked so the GC keeps it alive).  */

static GTY(()) rtx rs6000_got_symbol;
static rtx
rs6000_got_sym (void)
{
  if (!rs6000_got_symbol)
    {
      rs6000_got_symbol = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
      /* The GOT symbol is resolved locally at link time but is still an
         externally visible symbol.  */
      SYMBOL_REF_FLAGS (rs6000_got_symbol) |= SYMBOL_FLAG_LOCAL;
      SYMBOL_REF_FLAGS (rs6000_got_symbol) |= SYMBOL_FLAG_EXTERNAL;
    }

  return rs6000_got_symbol;
}
3142
 
3143
/* ADDR contains a thread-local SYMBOL_REF.  Generate code to compute
   this (thread-local) address.

   MODEL selects the TLS access model (local-exec, initial-exec,
   local-dynamic, or global-dynamic).  Emits the access sequence and
   returns a pseudo register holding the address.  The thread pointer
   lives in r13 on 64-bit and r2 on 32-bit targets (see the
   gen_rtx_REG calls below).  */

static rtx
rs6000_legitimize_tls_address (rtx addr, enum tls_model model)
{
  rtx dest, insn;

  dest = gen_reg_rtx (Pmode);
  if (model == TLS_MODEL_LOCAL_EXEC && rs6000_tls_size == 16)
    {
      /* Local-exec with a 16-bit offset: a single tprel add off the
         thread pointer.  */
      rtx tlsreg;

      if (TARGET_64BIT)
        {
          tlsreg = gen_rtx_REG (Pmode, 13);
          insn = gen_tls_tprel_64 (dest, tlsreg, addr);
        }
      else
        {
          tlsreg = gen_rtx_REG (Pmode, 2);
          insn = gen_tls_tprel_32 (dest, tlsreg, addr);
        }
      emit_insn (insn);
    }
  else if (model == TLS_MODEL_LOCAL_EXEC && rs6000_tls_size == 32)
    {
      /* Local-exec with a 32-bit offset: high-adjust then low part.  */
      rtx tlsreg, tmp;

      tmp = gen_reg_rtx (Pmode);
      if (TARGET_64BIT)
        {
          tlsreg = gen_rtx_REG (Pmode, 13);
          insn = gen_tls_tprel_ha_64 (tmp, tlsreg, addr);
        }
      else
        {
          tlsreg = gen_rtx_REG (Pmode, 2);
          insn = gen_tls_tprel_ha_32 (tmp, tlsreg, addr);
        }
      emit_insn (insn);
      if (TARGET_64BIT)
        insn = gen_tls_tprel_lo_64 (dest, tmp, addr);
      else
        insn = gen_tls_tprel_lo_32 (dest, tmp, addr);
      emit_insn (insn);
    }
  else
    {
      rtx r3, got, tga, tmp1, tmp2, eqv;

      /* We currently use relocations like @got@tlsgd for tls, which
         means the linker will handle allocation of tls entries, placing
         them in the .got section.  So use a pointer to the .got section,
         not one to secondary TOC sections used by 64-bit -mminimal-toc,
         or to secondary GOT sections used by 32-bit -fPIC.  */
      if (TARGET_64BIT)
        got = gen_rtx_REG (Pmode, 2);
      else
        {
          if (flag_pic == 1)
            got = gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM);
          else
            {
              rtx gsym = rs6000_got_sym ();
              got = gen_reg_rtx (Pmode);
              if (flag_pic == 0)
                rs6000_emit_move (got, gsym, Pmode);
              else
                {
                  /* -fPIC: materialize the GOT address via a
                     PC-relative load-toc sequence, bracketed with
                     REG_LIBCALL/REG_RETVAL notes so the whole group is
                     treated as one libcall.  */
                  rtx tempLR, tmp3, mem;
                  rtx first, last;

                  tempLR = gen_reg_rtx (Pmode);
                  tmp1 = gen_reg_rtx (Pmode);
                  tmp2 = gen_reg_rtx (Pmode);
                  tmp3 = gen_reg_rtx (Pmode);
                  mem = gen_const_mem (Pmode, tmp1);

                  first = emit_insn (gen_load_toc_v4_PIC_1b (tempLR, gsym));
                  emit_move_insn (tmp1, tempLR);
                  emit_move_insn (tmp2, mem);
                  emit_insn (gen_addsi3 (tmp3, tmp1, tmp2));
                  last = emit_move_insn (got, tmp3);
                  REG_NOTES (last) = gen_rtx_EXPR_LIST (REG_EQUAL, gsym,
                                                        REG_NOTES (last));
                  REG_NOTES (first) = gen_rtx_INSN_LIST (REG_LIBCALL, last,
                                                         REG_NOTES (first));
                  REG_NOTES (last) = gen_rtx_INSN_LIST (REG_RETVAL, first,
                                                        REG_NOTES (last));
                }
            }
        }

      if (model == TLS_MODEL_GLOBAL_DYNAMIC)
        {
          /* Global-dynamic: call __tls_get_addr with the GOT entry in
             r3; wrap the sequence as a libcall block.  */
          r3 = gen_rtx_REG (Pmode, 3);
          if (TARGET_64BIT)
            insn = gen_tls_gd_64 (r3, got, addr);
          else
            insn = gen_tls_gd_32 (r3, got, addr);
          start_sequence ();
          emit_insn (insn);
          tga = gen_rtx_MEM (Pmode, rs6000_tls_get_addr ());
          insn = gen_call_value (r3, tga, const0_rtx, const0_rtx);
          insn = emit_call_insn (insn);
          CONST_OR_PURE_CALL_P (insn) = 1;
          use_reg (&CALL_INSN_FUNCTION_USAGE (insn), r3);
          insn = get_insns ();
          end_sequence ();
          emit_libcall_block (insn, dest, r3, addr);
        }
      else if (model == TLS_MODEL_LOCAL_DYNAMIC)
        {
          /* Local-dynamic: one __tls_get_addr call yields the module
             base; each variable is then a dtprel offset from it.  */
          r3 = gen_rtx_REG (Pmode, 3);
          if (TARGET_64BIT)
            insn = gen_tls_ld_64 (r3, got);
          else
            insn = gen_tls_ld_32 (r3, got);
          start_sequence ();
          emit_insn (insn);
          tga = gen_rtx_MEM (Pmode, rs6000_tls_get_addr ());
          insn = gen_call_value (r3, tga, const0_rtx, const0_rtx);
          insn = emit_call_insn (insn);
          CONST_OR_PURE_CALL_P (insn) = 1;
          use_reg (&CALL_INSN_FUNCTION_USAGE (insn), r3);
          insn = get_insns ();
          end_sequence ();
          tmp1 = gen_reg_rtx (Pmode);
          eqv = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx),
                                UNSPEC_TLSLD);
          emit_libcall_block (insn, tmp1, r3, eqv);
          if (rs6000_tls_size == 16)
            {
              if (TARGET_64BIT)
                insn = gen_tls_dtprel_64 (dest, tmp1, addr);
              else
                insn = gen_tls_dtprel_32 (dest, tmp1, addr);
            }
          else if (rs6000_tls_size == 32)
            {
              tmp2 = gen_reg_rtx (Pmode);
              if (TARGET_64BIT)
                insn = gen_tls_dtprel_ha_64 (tmp2, tmp1, addr);
              else
                insn = gen_tls_dtprel_ha_32 (tmp2, tmp1, addr);
              emit_insn (insn);
              if (TARGET_64BIT)
                insn = gen_tls_dtprel_lo_64 (dest, tmp2, addr);
              else
                insn = gen_tls_dtprel_lo_32 (dest, tmp2, addr);
            }
          else
            {
              /* Full-width dtprel offset loaded from the GOT.  */
              tmp2 = gen_reg_rtx (Pmode);
              if (TARGET_64BIT)
                insn = gen_tls_got_dtprel_64 (tmp2, got, addr);
              else
                insn = gen_tls_got_dtprel_32 (tmp2, got, addr);
              emit_insn (insn);
              insn = gen_rtx_SET (Pmode, dest,
                                  gen_rtx_PLUS (Pmode, tmp2, tmp1));
            }
          emit_insn (insn);
        }
      else
        {
          /* IE, or 64 bit offset LE.  */
          tmp2 = gen_reg_rtx (Pmode);
          if (TARGET_64BIT)
            insn = gen_tls_got_tprel_64 (tmp2, got, addr);
          else
            insn = gen_tls_got_tprel_32 (tmp2, got, addr);
          emit_insn (insn);
          if (TARGET_64BIT)
            insn = gen_tls_tls_64 (dest, tmp2, addr);
          else
            insn = gen_tls_tls_32 (dest, tmp2, addr);
          emit_insn (insn);
        }
    }

  return dest;
}
3327
 
3328
/* Return 1 if X contains a thread-local symbol.  */
3329
 
3330
bool
3331
rs6000_tls_referenced_p (rtx x)
3332
{
3333
  if (! TARGET_HAVE_TLS)
3334
    return false;
3335
 
3336
  return for_each_rtx (&x, &rs6000_tls_symbol_ref_1, 0);
3337
}
3338
 
3339
/* Return 1 if *X is a thread-local symbol.  This is the same as
   rs6000_tls_symbol_ref except for the type of the unused argument.
   Callback shape required by for_each_rtx.  */

static int
rs6000_tls_symbol_ref_1 (rtx *x, void *data ATTRIBUTE_UNUSED)
{
  return RS6000_SYMBOL_REF_TLS_P (*x);
}
3347
 
3348
/* The convention appears to be to define this wherever it is used.
3349
   With legitimize_reload_address now defined here, REG_MODE_OK_FOR_BASE_P
3350
   is now used here.  */
3351
#ifndef REG_MODE_OK_FOR_BASE_P
3352
#define REG_MODE_OK_FOR_BASE_P(REGNO, MODE) REG_OK_FOR_BASE_P (REGNO)
3353
#endif
3354
 
3355
/* Our implementation of LEGITIMIZE_RELOAD_ADDRESS.  Returns a value to
3356
   replace the input X, or the original X if no replacement is called for.
3357
   The output parameter *WIN is 1 if the calling macro should goto WIN,
3358
 
3359
 
3360
   For RS/6000, we wish to handle large displacements off a base
3361
   register by splitting the addend across an addiu/addis and the mem insn.
3362
   This cuts number of extra insns needed from 3 to 1.
3363
 
3364
   On Darwin, we use this to generate code for floating point constants.
3365
   A movsf_low is generated so we wind up with 2 instructions rather than 3.
3366
   The Darwin code is inside #if TARGET_MACHO because only then is
3367
   machopic_function_base_name() defined.  */
3368
rtx
rs6000_legitimize_reload_address (rtx x, enum machine_mode mode,
                                  int opnum, int type,
                                  int ind_levels ATTRIBUTE_UNUSED, int *win)
{
  /* We must recognize output that we have already generated ourselves.  */
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == PLUS
      && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
      && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
      && GET_CODE (XEXP (x, 1)) == CONST_INT)
    {
      /* (plus (plus (reg) (const)) (const)): reload the inner sum into
         a base register and keep the outer constant as the offset.  */
      push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
                   BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
                   opnum, (enum reload_type)type);
      *win = 1;
      return x;
    }

#if TARGET_MACHO
  if (DEFAULT_ABI == ABI_DARWIN && flag_pic
      && GET_CODE (x) == LO_SUM
      && GET_CODE (XEXP (x, 0)) == PLUS
      && XEXP (XEXP (x, 0), 0) == pic_offset_table_rtx
      && GET_CODE (XEXP (XEXP (x, 0), 1)) == HIGH
      && GET_CODE (XEXP (XEXP (XEXP (x, 0), 1), 0)) == CONST
      && XEXP (XEXP (XEXP (x, 0), 1), 0) == XEXP (x, 1)
      && GET_CODE (XEXP (XEXP (x, 1), 0)) == MINUS
      && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 0)) == SYMBOL_REF
      && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 1)) == SYMBOL_REF)
    {
      /* Result of previous invocation of this function on Darwin
         floating point constant.  */
      push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
                   BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
                   opnum, (enum reload_type)type);
      *win = 1;
      return x;
    }
#endif

  /* Force ld/std non-word aligned offset into base register by wrapping
     in offset 0.  */
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == REG
      && REGNO (XEXP (x, 0)) < 32
      && REG_MODE_OK_FOR_BASE_P (XEXP (x, 0), mode)
      && GET_CODE (XEXP (x, 1)) == CONST_INT
      && (INTVAL (XEXP (x, 1)) & 3) != 0
      && !ALTIVEC_VECTOR_MODE (mode)
      && GET_MODE_SIZE (mode) >= UNITS_PER_WORD
      && TARGET_POWERPC64)
    {
      x = gen_rtx_PLUS (GET_MODE (x), x, GEN_INT (0));
      push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
                   BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
                   opnum, (enum reload_type) type);
      *win = 1;
      return x;
    }

  /* (plus (reg) (large const)): split the constant into a high part
     that goes into a base register and a 16-bit signed low part that
     stays in the address.  */
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == REG
      && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
      && REG_MODE_OK_FOR_BASE_P (XEXP (x, 0), mode)
      && GET_CODE (XEXP (x, 1)) == CONST_INT
      && !SPE_VECTOR_MODE (mode)
      && !(TARGET_E500_DOUBLE && (mode == DFmode
                                  || mode == DImode))
      && !ALTIVEC_VECTOR_MODE (mode))
    {
      HOST_WIDE_INT val = INTVAL (XEXP (x, 1));
      /* Sign-extend the low 16 bits, then round VAL to the nearest
         multiple of 0x10000 for the high part.  */
      HOST_WIDE_INT low = ((val & 0xffff) ^ 0x8000) - 0x8000;
      HOST_WIDE_INT high
        = (((val - low) & 0xffffffff) ^ 0x80000000) - 0x80000000;

      /* Check for 32-bit overflow.  */
      if (high + low != val)
        {
          *win = 0;
          return x;
        }

      /* Reload the high part into a base reg; leave the low part
         in the mem directly.  */

      x = gen_rtx_PLUS (GET_MODE (x),
                        gen_rtx_PLUS (GET_MODE (x), XEXP (x, 0),
                                      GEN_INT (high)),
                        GEN_INT (low));

      push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
                   BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
                   opnum, (enum reload_type)type);
      *win = 1;
      return x;
    }

  /* A bare SYMBOL_REF: turn it into a HIGH/LO_SUM pair (PIC-adjusted
     on Darwin) so only the high part needs a register.  */
  if (GET_CODE (x) == SYMBOL_REF
      && !ALTIVEC_VECTOR_MODE (mode)
      && !SPE_VECTOR_MODE (mode)
#if TARGET_MACHO
      && DEFAULT_ABI == ABI_DARWIN
      && (flag_pic || MACHO_DYNAMIC_NO_PIC_P)
#else
      && DEFAULT_ABI == ABI_V4
      && !flag_pic
#endif
      /* Don't do this for TFmode, since the result isn't offsettable.
         The same goes for DImode without 64-bit gprs and DFmode
         without fprs.  */
      && mode != TFmode
      && (mode != DImode || TARGET_POWERPC64)
      && (mode != DFmode || TARGET_POWERPC64
          || (TARGET_FPRS && TARGET_HARD_FLOAT)))
    {
#if TARGET_MACHO
      if (flag_pic)
        {
          rtx offset = gen_rtx_CONST (Pmode,
                         gen_rtx_MINUS (Pmode, x,
                                        machopic_function_base_sym ()));
          x = gen_rtx_LO_SUM (GET_MODE (x),
                gen_rtx_PLUS (Pmode, pic_offset_table_rtx,
                  gen_rtx_HIGH (Pmode, offset)), offset);
        }
      else
#endif
        x = gen_rtx_LO_SUM (GET_MODE (x),
              gen_rtx_HIGH (Pmode, x), x);

      push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
                   BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
                   opnum, (enum reload_type)type);
      *win = 1;
      return x;
    }

  /* Reload an offset address wrapped by an AND that represents the
     masking of the lower bits.  Strip the outer AND and let reload
     convert the offset address into an indirect address.  */
  if (TARGET_ALTIVEC
      && ALTIVEC_VECTOR_MODE (mode)
      && GET_CODE (x) == AND
      && GET_CODE (XEXP (x, 0)) == PLUS
      && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
      && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
      && GET_CODE (XEXP (x, 1)) == CONST_INT
      && INTVAL (XEXP (x, 1)) == -16)
    {
      x = XEXP (x, 0);
      *win = 1;
      return x;
    }

  /* Constants already placed in the TOC become TOC-relative
     references.  */
  if (TARGET_TOC
      && constant_pool_expr_p (x)
      && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), mode))
    {
      x = create_TOC_reference (x);
      *win = 1;
      return x;
    }
  *win = 0;
  return x;
}
3534
 
3535
/* GO_IF_LEGITIMATE_ADDRESS recognizes an RTL expression
   that is a valid memory address for an instruction.
   The MODE argument is the machine mode for the MEM expression
   that wants to use this address.

   On the RS/6000, there are four valid addresses: a SYMBOL_REF that
   refers to a constant pool entry of an address (or the sum of it
   plus a constant), a short (16-bit signed) constant plus a register,
   the sum of two registers, or a register indirect, possibly with an
   auto-increment.  For DFmode and DImode with a constant plus register,
   we must ensure that both words are addressable or PowerPC64 with offset
   word aligned.

   For modes spanning multiple registers (DFmode in 32-bit GPRs,
   32-bit DImode, TImode, TFmode), indexed addressing cannot be used because
   adjacent memory cells are accessed by adding word-sized offsets
   during assembly output.  */
int
rs6000_legitimate_address (enum machine_mode mode, rtx x, int reg_ok_strict)
{
  /* If this is an unaligned stvx/ldvx type address, discard the outer AND.  */
  if (TARGET_ALTIVEC
      && ALTIVEC_VECTOR_MODE (mode)
      && GET_CODE (x) == AND
      && GET_CODE (XEXP (x, 1)) == CONST_INT
      && INTVAL (XEXP (x, 1)) == -16)
    x = XEXP (x, 0);

  /* TLS symbols always need legitimizing first.  */
  if (RS6000_SYMBOL_REF_TLS_P (x))
    return 0;
  /* Plain register indirect.  */
  if (legitimate_indirect_address_p (x, reg_ok_strict))
    return 1;
  /* Pre-increment/decrement of a register, for update-form loads and
     stores.  */
  if ((GET_CODE (x) == PRE_INC || GET_CODE (x) == PRE_DEC)
      && !ALTIVEC_VECTOR_MODE (mode)
      && !SPE_VECTOR_MODE (mode)
      && mode != TFmode
      /* Restrict addressing for DI because of our SUBREG hackery.  */
      && !(TARGET_E500_DOUBLE && (mode == DFmode || mode == DImode))
      && TARGET_UPDATE
      && legitimate_indirect_address_p (XEXP (x, 0), reg_ok_strict))
    return 1;
  if (legitimate_small_data_p (mode, x))
    return 1;
  if (legitimate_constant_pool_address_p (x))
    return 1;
  /* If not REG_OK_STRICT (before reload) let pass any stack offset.  */
  if (! reg_ok_strict
      && GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == REG
      && (XEXP (x, 0) == virtual_stack_vars_rtx
          || XEXP (x, 0) == arg_pointer_rtx)
      && GET_CODE (XEXP (x, 1)) == CONST_INT)
    return 1;
  /* Register plus 16-bit signed displacement.  */
  if (rs6000_legitimate_offset_address_p (mode, x, reg_ok_strict))
    return 1;
  /* Register plus register (indexed), excluded for the multi-register
     modes described in the comment above.  */
  if (mode != TImode
      && mode != TFmode
      && ((TARGET_HARD_FLOAT && TARGET_FPRS)
          || TARGET_POWERPC64
          || ((mode != DFmode || TARGET_E500_DOUBLE) && mode != TFmode))
      && (TARGET_POWERPC64 || mode != DImode)
      && legitimate_indexed_address_p (x, reg_ok_strict))
    return 1;
  if (legitimate_lo_sum_address_p (mode, x, reg_ok_strict))
    return 1;
  return 0;
}
3602
 
3603
/* Go to LABEL if ADDR (a legitimate address expression)
3604
   has an effect that depends on the machine mode it is used for.
3605
 
3606
   On the RS/6000 this is true of all integral offsets (since AltiVec
3607
   modes don't allow them) or is a pre-increment or decrement.
3608
 
3609
   ??? Except that due to conceptual problems in offsettable_address_p
3610
   we can't really report the problems of integral offsets.  So leave
3611
   this assuming that the adjustable offset must be valid for the
3612
   sub-words of a TFmode operand, which is what we had before.  */
3613
 
3614
bool
3615
rs6000_mode_dependent_address (rtx addr)
3616
{
3617
  switch (GET_CODE (addr))
3618
    {
3619
    case PLUS:
3620
      if (GET_CODE (XEXP (addr, 1)) == CONST_INT)
3621
        {
3622
          unsigned HOST_WIDE_INT val = INTVAL (XEXP (addr, 1));
3623
          return val + 12 + 0x8000 >= 0x10000;
3624
        }
3625
      break;
3626
 
3627
    case LO_SUM:
3628
      return true;
3629
 
3630
    case PRE_INC:
3631
    case PRE_DEC:
3632
      return TARGET_UPDATE;
3633
 
3634
    default:
3635
      break;
3636
    }
3637
 
3638
  return false;
3639
}
3640
 
3641
/* More elaborate version of recog's offsettable_memref_p predicate
3642
   that works around the ??? note of rs6000_mode_dependent_address.
3643
   In particular it accepts
3644
 
3645
     (mem:DI (plus:SI (reg/f:SI 31 31) (const_int 32760 [0x7ff8])))
3646
 
3647
   in 32-bit mode, that the recog predicate rejects.  */
3648
 
3649
bool
3650
rs6000_offsettable_memref_p (rtx op)
3651
{
3652
  if (!MEM_P (op))
3653
    return false;
3654
 
3655
  /* First mimic offsettable_memref_p.  */
3656
  if (offsettable_address_p (1, GET_MODE (op), XEXP (op, 0)))
3657
    return true;
3658
 
3659
  /* offsettable_address_p invokes rs6000_mode_dependent_address, but
3660
     the latter predicate knows nothing about the mode of the memory
3661
     reference and, therefore, assumes that it is the largest supported
3662
     mode (TFmode).  As a consequence, legitimate offsettable memory
3663
     references are rejected.  rs6000_legitimate_offset_address_p contains
3664
     the correct logic for the PLUS case of rs6000_mode_dependent_address.  */
3665
  return rs6000_legitimate_offset_address_p (GET_MODE (op), XEXP (op, 0), 1);
3666
}
3667
 
3668
/* Return number of consecutive hard regs needed starting at reg REGNO
3669
   to hold something of mode MODE.
3670
   This is ordinarily the length in words of a value of mode MODE
3671
   but can be less for certain modes in special long registers.
3672
 
3673
   For the SPE, GPRs are 64 bits but only 32 bits are visible in
3674
   scalar instructions.  The upper 32 bits are only available to the
3675
   SIMD instructions.
3676
 
3677
   POWER and PowerPC GPRs hold 32 bits worth;
3678
   PowerPC64 GPRs and FPRs point register holds 64 bits worth.  */
3679
 
3680
int
3681
rs6000_hard_regno_nregs (int regno, enum machine_mode mode)
3682
{
3683
  if (FP_REGNO_P (regno))
3684
    return (GET_MODE_SIZE (mode) + UNITS_PER_FP_WORD - 1) / UNITS_PER_FP_WORD;
3685
 
3686
  if (SPE_SIMD_REGNO_P (regno) && TARGET_SPE && SPE_VECTOR_MODE (mode))
3687
    return (GET_MODE_SIZE (mode) + UNITS_PER_SPE_WORD - 1) / UNITS_PER_SPE_WORD;
3688
 
3689
  if (ALTIVEC_REGNO_P (regno))
3690
    return
3691
      (GET_MODE_SIZE (mode) + UNITS_PER_ALTIVEC_WORD - 1) / UNITS_PER_ALTIVEC_WORD;
3692
 
3693
  /* The value returned for SCmode in the E500 double case is 2 for
3694
     ABI compatibility; storing an SCmode value in a single register
3695
     would require function_arg and rs6000_spe_function_arg to handle
3696
     SCmode so as to pass the value correctly in a pair of
3697
     registers.  */
3698
  if (TARGET_E500_DOUBLE && FLOAT_MODE_P (mode) && mode != SCmode)
3699
    return (GET_MODE_SIZE (mode) + UNITS_PER_FP_WORD - 1) / UNITS_PER_FP_WORD;
3700
 
3701
  return (GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
3702
}
3703
 
3704
/* Change register usage conditional on target flags.  */
void
rs6000_conditional_register_usage (void)
{
  int i;

  /* Set MQ register fixed (already call_used) if not POWER
     architecture (RIOS1, RIOS2, RSC, and PPC601) so that it will not
     be allocated.  */
  if (! TARGET_POWER)
    fixed_regs[64] = 1;

  /* 64-bit AIX and Linux reserve GPR13 for thread-private data.  */
  if (TARGET_64BIT)
    fixed_regs[13] = call_used_regs[13]
      = call_really_used_regs[13] = 1;

  /* Conditionally disable FPRs.  */
  if (TARGET_SOFT_FLOAT || !TARGET_FPRS)
    for (i = 32; i < 64; i++)
      fixed_regs[i] = call_used_regs[i]
        = call_really_used_regs[i] = 1;

  /* The TOC register is not killed across calls in a way that is
     visible to the compiler.  */
  if (DEFAULT_ABI == ABI_AIX)
    call_really_used_regs[2] = 0;

  /* V4 with -fPIC: the PIC register is fixed.  */
  if (DEFAULT_ABI == ABI_V4
      && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
      && flag_pic == 2)
    fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;

  /* V4 with -fpic: the PIC register is fixed and call-clobbered.  */
  if (DEFAULT_ABI == ABI_V4
      && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
      && flag_pic == 1)
    fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
      = call_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
      = call_really_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;

  /* Darwin reserves the PIC register unconditionally.  */
  if (DEFAULT_ABI == ABI_DARWIN
      && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
      fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
      = call_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
      = call_really_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;

  /* With -mminimal-toc the TOC pointer register is not reallocatable.  */
  if (TARGET_TOC && TARGET_MINIMAL_TOC)
    fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
      = call_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;

  if (TARGET_ALTIVEC)
    global_regs[VSCR_REGNO] = 1;

  if (TARGET_SPE)
    {
      global_regs[SPEFSCR_REGNO] = 1;
      fixed_regs[FIXED_SCRATCH]
        = call_used_regs[FIXED_SCRATCH]
        = call_really_used_regs[FIXED_SCRATCH] = 1;
    }

  /* Without AltiVec, none of the vector registers (nor VRSAVE) are
     usable.  */
  if (! TARGET_ALTIVEC)
    {
      for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
        fixed_regs[i] = call_used_regs[i] = call_really_used_regs[i] = 1;
      call_really_used_regs[VRSAVE_REGNO] = 1;
    }

  /* Under the AltiVec ABI the first 20 vector registers (v0-v19) are
     call-clobbered.  */
  if (TARGET_ALTIVEC_ABI)
    for (i = FIRST_ALTIVEC_REGNO; i < FIRST_ALTIVEC_REGNO + 20; ++i)
      call_used_regs[i] = call_really_used_regs[i] = 1;
}
3776
 
3777
/* Try to output insns to set TARGET equal to the constant C if it can
   be done in less than N insns.  Do all computations in MODE.
   Returns the place where the output has been placed if it can be
   done and the insns have been emitted.  If it would take more than N
   insns, zero is returned and no insns are emitted.  */

rtx
rs6000_emit_set_const (rtx dest, enum machine_mode mode,
                       rtx source, int n ATTRIBUTE_UNUSED)
{
  rtx result, insn, set;
  HOST_WIDE_INT c0, c1;

  switch (mode)
    {
    case QImode:
    case HImode:
      /* Narrow modes always fit in a single move.  */
      if (dest == NULL)
        dest = gen_reg_rtx (mode);
      emit_insn (gen_rtx_SET (VOIDmode, dest, source));
      return dest;

    case SImode:
      /* Load the high 16 bits, then OR in the low 16 bits.  */
      result = no_new_pseudos ? dest : gen_reg_rtx (SImode);

      emit_insn (gen_rtx_SET (VOIDmode, result,
                              GEN_INT (INTVAL (source)
                                       & (~ (HOST_WIDE_INT) 0xffff))));
      emit_insn (gen_rtx_SET (VOIDmode, dest,
                              gen_rtx_IOR (SImode, result,
                                           GEN_INT (INTVAL (source) & 0xffff))));
      result = dest;
      break;

    case DImode:
      /* Split the constant into low (c0) and high (c1) halves, then
         defer to rs6000_emit_set_long_const.  */
      switch (GET_CODE (source))
        {
        case CONST_INT:
          c0 = INTVAL (source);
          c1 = -(c0 < 0);
          break;

        case CONST_DOUBLE:
#if HOST_BITS_PER_WIDE_INT >= 64
          c0 = CONST_DOUBLE_LOW (source);
          c1 = -(c0 < 0);
#else
          c0 = CONST_DOUBLE_LOW (source);
          c1 = CONST_DOUBLE_HIGH (source);
#endif
          break;

        default:
          gcc_unreachable ();
        }

      result = rs6000_emit_set_long_const (dest, c0, c1);
      break;

    default:
      gcc_unreachable ();
    }

  /* Attach a REG_EQUAL note recording the full constant to the last
     insn, unless its source is already constant.  */
  insn = get_last_insn ();
  set = single_set (insn);
  if (! CONSTANT_P (SET_SRC (set)))
    set_unique_reg_note (insn, REG_EQUAL, source);

  return result;
}
3847
 
3848
/* Having failed to find a 3 insn sequence in rs6000_emit_set_const,
   fall back to a straight forward decomposition.  We do this to avoid
   exponential run times encountered when looking for longer sequences
   with rs6000_emit_set_const.  */
static rtx
rs6000_emit_set_long_const (rtx dest, HOST_WIDE_INT c1, HOST_WIDE_INT c2)
{
  if (!TARGET_POWERPC64)
    {
      /* 32-bit target: move the two halves into the two subwords of
         DEST separately.  */
      rtx operand1, operand2;

      operand1 = operand_subword_force (dest, WORDS_BIG_ENDIAN == 0,
                                        DImode);
      operand2 = operand_subword_force (dest, WORDS_BIG_ENDIAN != 0,
                                        DImode);
      emit_move_insn (operand1, GEN_INT (c1));
      emit_move_insn (operand2, GEN_INT (c2));
    }
  else
    {
      /* 64-bit target: build the constant from its four 16-bit chunks
         ud4:ud3:ud2:ud1 (most to least significant).  */
      HOST_WIDE_INT ud1, ud2, ud3, ud4;

      ud1 = c1 & 0xffff;
      ud2 = (c1 & 0xffff0000) >> 16;
#if HOST_BITS_PER_WIDE_INT >= 64
      c2 = c1 >> 32;
#endif
      ud3 = c2 & 0xffff;
      ud4 = (c2 & 0xffff0000) >> 16;

      /* Value fits in a sign-extended 16-bit immediate: one insn.  */
      if ((ud4 == 0xffff && ud3 == 0xffff && ud2 == 0xffff && (ud1 & 0x8000))
          || (ud4 == 0 && ud3 == 0 && ud2 == 0 && ! (ud1 & 0x8000)))
        {
          if (ud1 & 0x8000)
            emit_move_insn (dest, GEN_INT (((ud1 ^ 0x8000) -  0x8000)));
          else
            emit_move_insn (dest, GEN_INT (ud1));
        }

      /* Value fits in a sign-extended 32-bit immediate: lis + ori.  */
      else if ((ud4 == 0xffff && ud3 == 0xffff && (ud2 & 0x8000))
               || (ud4 == 0 && ud3 == 0 && ! (ud2 & 0x8000)))
        {
          if (ud2 & 0x8000)
            emit_move_insn (dest, GEN_INT (((ud2 << 16) ^ 0x80000000)
                                           - 0x80000000));
          else
            emit_move_insn (dest, GEN_INT (ud2 << 16));
          if (ud1 != 0)
            emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
        }
      /* Value fits in a sign-extended 48-bit immediate: build the
         upper 32 bits, shift left 16, OR in the low chunk.  */
      else if ((ud4 == 0xffff && (ud3 & 0x8000))
               || (ud4 == 0 && ! (ud3 & 0x8000)))
        {
          if (ud3 & 0x8000)
            emit_move_insn (dest, GEN_INT (((ud3 << 16) ^ 0x80000000)
                                           - 0x80000000));
          else
            emit_move_insn (dest, GEN_INT (ud3 << 16));

          if (ud2 != 0)
            emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud2)));
          emit_move_insn (dest, gen_rtx_ASHIFT (DImode, dest, GEN_INT (16)));
          if (ud1 != 0)
            emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
        }
      /* Full 64-bit constant: build the upper 32 bits, shift left 32,
         then OR in the lower chunks.  */
      else
        {
          if (ud4 & 0x8000)
            emit_move_insn (dest, GEN_INT (((ud4 << 16) ^ 0x80000000)
                                           - 0x80000000));
          else
            emit_move_insn (dest, GEN_INT (ud4 << 16));

          if (ud3 != 0)
            emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud3)));

          emit_move_insn (dest, gen_rtx_ASHIFT (DImode, dest, GEN_INT (32)));
          if (ud2 != 0)
            emit_move_insn (dest, gen_rtx_IOR (DImode, dest,
                                               GEN_INT (ud2 << 16)));
          if (ud1 != 0)
            emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
        }
    }
  return dest;
}
3934
 
3935
/* Helper for the following.  Get rid of [r+r] memory refs
3936
   in cases where it won't work (TImode, TFmode).  */
3937
 
3938
static void
3939
rs6000_eliminate_indexed_memrefs (rtx operands[2])
3940
{
3941
  if (GET_CODE (operands[0]) == MEM
3942
      && GET_CODE (XEXP (operands[0], 0)) != REG
3943
      && ! legitimate_constant_pool_address_p (XEXP (operands[0], 0))
3944
      && ! reload_in_progress)
3945
    operands[0]
3946
      = replace_equiv_address (operands[0],
3947
                               copy_addr_to_reg (XEXP (operands[0], 0)));
3948
 
3949
  if (GET_CODE (operands[1]) == MEM
3950
      && GET_CODE (XEXP (operands[1], 0)) != REG
3951
      && ! legitimate_constant_pool_address_p (XEXP (operands[1], 0))
3952
      && ! reload_in_progress)
3953
    operands[1]
3954
      = replace_equiv_address (operands[1],
3955
                               copy_addr_to_reg (XEXP (operands[1], 0)));
3956
}
3957
 
3958
/* Emit a move from SOURCE to DEST in mode MODE.  */
3959
void
3960
rs6000_emit_move (rtx dest, rtx source, enum machine_mode mode)
3961
{
3962
  rtx operands[2];
3963
  operands[0] = dest;
3964
  operands[1] = source;
3965
 
3966
  /* Sanity checks.  Check that we get CONST_DOUBLE only when we should.  */
3967
  if (GET_CODE (operands[1]) == CONST_DOUBLE
3968
      && ! FLOAT_MODE_P (mode)
3969
      && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3970
    {
3971
      /* FIXME.  This should never happen.  */
3972
      /* Since it seems that it does, do the safe thing and convert
3973
         to a CONST_INT.  */
3974
      operands[1] = gen_int_mode (CONST_DOUBLE_LOW (operands[1]), mode);
3975
    }
3976
  gcc_assert (GET_CODE (operands[1]) != CONST_DOUBLE
3977
              || FLOAT_MODE_P (mode)
3978
              || ((CONST_DOUBLE_HIGH (operands[1]) != 0
3979
                   || CONST_DOUBLE_LOW (operands[1]) < 0)
3980
                  && (CONST_DOUBLE_HIGH (operands[1]) != -1
3981
                      || CONST_DOUBLE_LOW (operands[1]) >= 0)));
3982
 
3983
  /* Check if GCC is setting up a block move that will end up using FP
3984
     registers as temporaries.  We must make sure this is acceptable.  */
3985
  if (GET_CODE (operands[0]) == MEM
3986
      && GET_CODE (operands[1]) == MEM
3987
      && mode == DImode
3988
      && (SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[0]))
3989
          || SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[1])))
3990
      && ! (SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[0]) > 32
3991
                                            ? 32 : MEM_ALIGN (operands[0])))
3992
            || SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[1]) > 32
3993
                                               ? 32
3994
                                               : MEM_ALIGN (operands[1]))))
3995
      && ! MEM_VOLATILE_P (operands [0])
3996
      && ! MEM_VOLATILE_P (operands [1]))
3997
    {
3998
      emit_move_insn (adjust_address (operands[0], SImode, 0),
3999
                      adjust_address (operands[1], SImode, 0));
4000
      emit_move_insn (adjust_address (operands[0], SImode, 4),
4001
                      adjust_address (operands[1], SImode, 4));
4002
      return;
4003
    }
4004
 
4005
  if (!no_new_pseudos && GET_CODE (operands[0]) == MEM
4006
      && !gpc_reg_operand (operands[1], mode))
4007
    operands[1] = force_reg (mode, operands[1]);
4008
 
4009
  if (mode == SFmode && ! TARGET_POWERPC
4010
      && TARGET_HARD_FLOAT && TARGET_FPRS
4011
      && GET_CODE (operands[0]) == MEM)
4012
    {
4013
      int regnum;
4014
 
4015
      if (reload_in_progress || reload_completed)
4016
        regnum = true_regnum (operands[1]);
4017
      else if (GET_CODE (operands[1]) == REG)
4018
        regnum = REGNO (operands[1]);
4019
      else
4020
        regnum = -1;
4021
 
4022
      /* If operands[1] is a register, on POWER it may have
4023
         double-precision data in it, so truncate it to single
4024
         precision.  */
4025
      if (FP_REGNO_P (regnum) || regnum >= FIRST_PSEUDO_REGISTER)
4026
        {
4027
          rtx newreg;
4028
          newreg = (no_new_pseudos ? operands[1] : gen_reg_rtx (mode));
4029
          emit_insn (gen_aux_truncdfsf2 (newreg, operands[1]));
4030
          operands[1] = newreg;
4031
        }
4032
    }
4033
 
4034
  /* Recognize the case where operand[1] is a reference to thread-local
4035
     data and load its address to a register.  */
4036
  if (rs6000_tls_referenced_p (operands[1]))
4037
    {
4038
      enum tls_model model;
4039
      rtx tmp = operands[1];
4040
      rtx addend = NULL;
4041
 
4042
      if (GET_CODE (tmp) == CONST && GET_CODE (XEXP (tmp, 0)) == PLUS)
4043
        {
4044
          addend = XEXP (XEXP (tmp, 0), 1);
4045
          tmp = XEXP (XEXP (tmp, 0), 0);
4046
        }
4047
 
4048
      gcc_assert (GET_CODE (tmp) == SYMBOL_REF);
4049
      model = SYMBOL_REF_TLS_MODEL (tmp);
4050
      gcc_assert (model != 0);
4051
 
4052
      tmp = rs6000_legitimize_tls_address (tmp, model);
4053
      if (addend)
4054
        {
4055
          tmp = gen_rtx_PLUS (mode, tmp, addend);
4056
          tmp = force_operand (tmp, operands[0]);
4057
        }
4058
      operands[1] = tmp;
4059
    }
4060
 
4061
  /* Handle the case where reload calls us with an invalid address.  */
4062
  if (reload_in_progress && mode == Pmode
4063
      && (! general_operand (operands[1], mode)
4064
          || ! nonimmediate_operand (operands[0], mode)))
4065
    goto emit_set;
4066
 
4067
  /* 128-bit constant floating-point values on Darwin should really be
4068
     loaded as two parts.  */
4069
  if (!TARGET_IEEEQUAD && TARGET_LONG_DOUBLE_128
4070
      && mode == TFmode && GET_CODE (operands[1]) == CONST_DOUBLE)
4071
    {
4072
      /* DImode is used, not DFmode, because simplify_gen_subreg doesn't
4073
         know how to get a DFmode SUBREG of a TFmode.  */
4074
      rs6000_emit_move (simplify_gen_subreg (DImode, operands[0], mode, 0),
4075
                        simplify_gen_subreg (DImode, operands[1], mode, 0),
4076
                        DImode);
4077
      rs6000_emit_move (simplify_gen_subreg (DImode, operands[0], mode,
4078
                                             GET_MODE_SIZE (DImode)),
4079
                        simplify_gen_subreg (DImode, operands[1], mode,
4080
                                             GET_MODE_SIZE (DImode)),
4081
                        DImode);
4082
      return;
4083
    }
4084
 
4085
  /* FIXME:  In the long term, this switch statement should go away
4086
     and be replaced by a sequence of tests based on things like
4087
     mode == Pmode.  */
4088
  switch (mode)
4089
    {
4090
    case HImode:
4091
    case QImode:
4092
      if (CONSTANT_P (operands[1])
4093
          && GET_CODE (operands[1]) != CONST_INT)
4094
        operands[1] = force_const_mem (mode, operands[1]);
4095
      break;
4096
 
4097
    case TFmode:
4098
      rs6000_eliminate_indexed_memrefs (operands);
4099
      /* fall through */
4100
 
4101
    case DFmode:
4102
    case SFmode:
4103
      if (CONSTANT_P (operands[1])
4104
          && ! easy_fp_constant (operands[1], mode))
4105
        operands[1] = force_const_mem (mode, operands[1]);
4106
      break;
4107
 
4108
    case V16QImode:
4109
    case V8HImode:
4110
    case V4SFmode:
4111
    case V4SImode:
4112
    case V4HImode:
4113
    case V2SFmode:
4114
    case V2SImode:
4115
    case V1DImode:
4116
      if (CONSTANT_P (operands[1])
4117
          && !easy_vector_constant (operands[1], mode))
4118
        operands[1] = force_const_mem (mode, operands[1]);
4119
      break;
4120
 
4121
    case SImode:
4122
    case DImode:
4123
      /* Use default pattern for address of ELF small data */
4124
      if (TARGET_ELF
4125
          && mode == Pmode
4126
          && DEFAULT_ABI == ABI_V4
4127
          && (GET_CODE (operands[1]) == SYMBOL_REF
4128
              || GET_CODE (operands[1]) == CONST)
4129
          && small_data_operand (operands[1], mode))
4130
        {
4131
          emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
4132
          return;
4133
        }
4134
 
4135
      if (DEFAULT_ABI == ABI_V4
4136
          && mode == Pmode && mode == SImode
4137
          && flag_pic == 1 && got_operand (operands[1], mode))
4138
        {
4139
          emit_insn (gen_movsi_got (operands[0], operands[1]));
4140
          return;
4141
        }
4142
 
4143
      if ((TARGET_ELF || DEFAULT_ABI == ABI_DARWIN)
4144
          && TARGET_NO_TOC
4145
          && ! flag_pic
4146
          && mode == Pmode
4147
          && CONSTANT_P (operands[1])
4148
          && GET_CODE (operands[1]) != HIGH
4149
          && GET_CODE (operands[1]) != CONST_INT)
4150
        {
4151
          rtx target = (no_new_pseudos ? operands[0] : gen_reg_rtx (mode));
4152
 
4153
          /* If this is a function address on -mcall-aixdesc,
4154
             convert it to the address of the descriptor.  */
4155
          if (DEFAULT_ABI == ABI_AIX
4156
              && GET_CODE (operands[1]) == SYMBOL_REF
4157
              && XSTR (operands[1], 0)[0] == '.')
4158
            {
4159
              const char *name = XSTR (operands[1], 0);
4160
              rtx new_ref;
4161
              while (*name == '.')
4162
                name++;
4163
              new_ref = gen_rtx_SYMBOL_REF (Pmode, name);
4164
              CONSTANT_POOL_ADDRESS_P (new_ref)
4165
                = CONSTANT_POOL_ADDRESS_P (operands[1]);
4166
              SYMBOL_REF_FLAGS (new_ref) = SYMBOL_REF_FLAGS (operands[1]);
4167
              SYMBOL_REF_USED (new_ref) = SYMBOL_REF_USED (operands[1]);
4168
              SYMBOL_REF_DATA (new_ref) = SYMBOL_REF_DATA (operands[1]);
4169
              operands[1] = new_ref;
4170
            }
4171
 
4172
          if (DEFAULT_ABI == ABI_DARWIN)
4173
            {
4174
#if TARGET_MACHO
4175
              if (MACHO_DYNAMIC_NO_PIC_P)
4176
                {
4177
                  /* Take care of any required data indirection.  */
4178
                  operands[1] = rs6000_machopic_legitimize_pic_address (
4179
                                  operands[1], mode, operands[0]);
4180
                  if (operands[0] != operands[1])
4181
                    emit_insn (gen_rtx_SET (VOIDmode,
4182
                                            operands[0], operands[1]));
4183
                  return;
4184
                }
4185
#endif
4186
              emit_insn (gen_macho_high (target, operands[1]));
4187
              emit_insn (gen_macho_low (operands[0], target, operands[1]));
4188
              return;
4189
            }
4190
 
4191
          emit_insn (gen_elf_high (target, operands[1]));
4192
          emit_insn (gen_elf_low (operands[0], target, operands[1]));
4193
          return;
4194
        }
4195
 
4196
      /* If this is a SYMBOL_REF that refers to a constant pool entry,
4197
         and we have put it in the TOC, we just need to make a TOC-relative
4198
         reference to it.  */
4199
      if (TARGET_TOC
4200
          && GET_CODE (operands[1]) == SYMBOL_REF
4201
          && constant_pool_expr_p (operands[1])
4202
          && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (operands[1]),
4203
                                              get_pool_mode (operands[1])))
4204
        {
4205
          operands[1] = create_TOC_reference (operands[1]);
4206
        }
4207
      else if (mode == Pmode
4208
               && CONSTANT_P (operands[1])
4209
               && ((GET_CODE (operands[1]) != CONST_INT
4210
                    && ! easy_fp_constant (operands[1], mode))
4211
                   || (GET_CODE (operands[1]) == CONST_INT
4212
                       && num_insns_constant (operands[1], mode) > 2)
4213
                   || (GET_CODE (operands[0]) == REG
4214
                       && FP_REGNO_P (REGNO (operands[0]))))
4215
               && GET_CODE (operands[1]) != HIGH
4216
               && ! legitimate_constant_pool_address_p (operands[1])
4217
               && ! toc_relative_expr_p (operands[1]))
4218
        {
4219
          /* Emit a USE operation so that the constant isn't deleted if
4220
             expensive optimizations are turned on because nobody
4221
             references it.  This should only be done for operands that
4222
             contain SYMBOL_REFs with CONSTANT_POOL_ADDRESS_P set.
4223
             This should not be done for operands that contain LABEL_REFs.
4224
             For now, we just handle the obvious case.  */
4225
          if (GET_CODE (operands[1]) != LABEL_REF)
4226
            emit_insn (gen_rtx_USE (VOIDmode, operands[1]));
4227
 
4228
#if TARGET_MACHO
4229
          /* Darwin uses a special PIC legitimizer.  */
4230
          if (DEFAULT_ABI == ABI_DARWIN && MACHOPIC_INDIRECT)
4231
            {
4232
              operands[1] =
4233
                rs6000_machopic_legitimize_pic_address (operands[1], mode,
4234
                                                        operands[0]);
4235
              if (operands[0] != operands[1])
4236
                emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
4237
              return;
4238
            }
4239
#endif
4240
 
4241
          /* If we are to limit the number of things we put in the TOC and
4242
             this is a symbol plus a constant we can add in one insn,
4243
             just put the symbol in the TOC and add the constant.  Don't do
4244
             this if reload is in progress.  */
4245
          if (GET_CODE (operands[1]) == CONST
4246
              && TARGET_NO_SUM_IN_TOC && ! reload_in_progress
4247
              && GET_CODE (XEXP (operands[1], 0)) == PLUS
4248
              && add_operand (XEXP (XEXP (operands[1], 0), 1), mode)
4249
              && (GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == LABEL_REF
4250
                  || GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == SYMBOL_REF)
4251
              && ! side_effects_p (operands[0]))
4252
            {
4253
              rtx sym =
4254
                force_const_mem (mode, XEXP (XEXP (operands[1], 0), 0));
4255
              rtx other = XEXP (XEXP (operands[1], 0), 1);
4256
 
4257
              sym = force_reg (mode, sym);
4258
              if (mode == SImode)
4259
                emit_insn (gen_addsi3 (operands[0], sym, other));
4260
              else
4261
                emit_insn (gen_adddi3 (operands[0], sym, other));
4262
              return;
4263
            }
4264
 
4265
          operands[1] = force_const_mem (mode, operands[1]);
4266
 
4267
          if (TARGET_TOC
4268
              && constant_pool_expr_p (XEXP (operands[1], 0))
4269
              && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (
4270
                        get_pool_constant (XEXP (operands[1], 0)),
4271
                        get_pool_mode (XEXP (operands[1], 0))))
4272
            {
4273
              operands[1]
4274
                = gen_const_mem (mode,
4275
                                 create_TOC_reference (XEXP (operands[1], 0)));
4276
              set_mem_alias_set (operands[1], get_TOC_alias_set ());
4277
            }
4278
        }
4279
      break;
4280
 
4281
    case TImode:
4282
      rs6000_eliminate_indexed_memrefs (operands);
4283
 
4284
      if (TARGET_POWER)
4285
        {
4286
          emit_insn (gen_rtx_PARALLEL (VOIDmode,
4287
                       gen_rtvec (2,
4288
                                  gen_rtx_SET (VOIDmode,
4289
                                               operands[0], operands[1]),
4290
                                  gen_rtx_CLOBBER (VOIDmode,
4291
                                                   gen_rtx_SCRATCH (SImode)))));
4292
          return;
4293
        }
4294
      break;
4295
 
4296
    default:
4297
      gcc_unreachable ();
4298
    }
4299
 
4300
  /* Above, we may have called force_const_mem which may have returned
4301
     an invalid address.  If we can, fix this up; otherwise, reload will
4302
     have to deal with it.  */
4303
  if (GET_CODE (operands[1]) == MEM && ! reload_in_progress)
4304
    operands[1] = validize_mem (operands[1]);
4305
 
4306
 emit_set:
4307
  emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
4308
}
4309
 
4310
/* Nonzero if we can use a floating-point register to pass this arg.
   True when MODE is a binary (not decimal) scalar float mode, an FP
   argument register is still available in CUM, and hardware FP is
   enabled.  Note the TYPE argument is currently unused.  */
#define USE_FP_FOR_ARG_P(CUM,MODE,TYPE)         \
  (SCALAR_FLOAT_MODE_P (MODE)                   \
   && !DECIMAL_FLOAT_MODE_P (MODE)              \
   && (CUM)->fregno <= FP_ARG_MAX_REG           \
   && TARGET_HARD_FLOAT && TARGET_FPRS)

/* Nonzero if we can use an AltiVec register to pass this arg.
   True when MODE is an AltiVec vector mode, a vector argument
   register is still available in CUM, the AltiVec ABI is in effect,
   and the argument is named (anonymous/vararg vector args do not go
   in VRs).  Note the TYPE argument is currently unused.  */
#define USE_ALTIVEC_FOR_ARG_P(CUM,MODE,TYPE,NAMED)      \
  (ALTIVEC_VECTOR_MODE (MODE)                           \
   && (CUM)->vregno <= ALTIVEC_ARG_MAX_REG              \
   && TARGET_ALTIVEC_ABI                                \
   && (NAMED))
4323
 
4324
/* Return a nonzero value to say to return the function value in
   memory, just as large structures are always returned.  TYPE will be
   the data type of the value, and FNTYPE will be the type of the
   function doing the returning, or @code{NULL} for libcalls.

   The AIX ABI for the RS/6000 specifies that all structures are
   returned in memory.  The Darwin ABI does the same.  The SVR4 ABI
   specifies that structures <= 8 bytes are returned in r3/r4, but a
   draft put them in memory, and GCC used to implement the draft
   instead of the final standard.  Therefore, aix_struct_return
   controls this instead of DEFAULT_ABI; V.4 targets needing backward
   compatibility can change DRAFT_V4_STRUCT_RET to override the
   default, and -m switches get the final word.  See
   rs6000_override_options for more details.

   The PPC32 SVR4 ABI uses IEEE double extended for long double, if 128-bit
   long double support is enabled.  These values are returned in memory.

   int_size_in_bytes returns -1 for variable size objects, which go in
   memory always.  The cast to unsigned makes -1 > 8.  */

static bool
rs6000_return_in_memory (tree type, tree fntype ATTRIBUTE_UNUSED)
{
  /* In the darwin64 abi, try to use registers for larger structs
     if possible.  */
  if (rs6000_darwin64_abi
      && TREE_CODE (type) == RECORD_TYPE
      && int_size_in_bytes (type) > 0)
    {
      CUMULATIVE_ARGS valcum;
      rtx valret;

      /* Start a pristine argument scan at the first GPR word, FPR
         and vector argument registers.  */
      valcum.words = 0;
      valcum.fregno = FP_ARG_MIN_REG;
      valcum.vregno = ALTIVEC_ARG_MIN_REG;
      /* Do a trial code generation as if this were going to be passed
         as an argument; if any part goes in memory, we return NULL.  */
      valret = rs6000_darwin64_record_arg (&valcum, type, 1, true);
      if (valret)
        return false;
      /* Otherwise fall through to more conventional ABI rules.  */
    }

  /* Aggregates go in memory under AIX rules, or under SVR4 rules when
     larger than 8 bytes.  The unsigned cast routes variable-size (-1)
     aggregates here as well.  */
  if (AGGREGATE_TYPE_P (type)
      && (aix_struct_return
          || (unsigned HOST_WIDE_INT) int_size_in_bytes (type) > 8))
    return true;

  /* Allow -maltivec -mabi=no-altivec without warning.  Altivec vector
     modes only exist for GCC vector types if -maltivec.  */
  if (TARGET_32BIT && !TARGET_ALTIVEC_ABI
      && ALTIVEC_VECTOR_MODE (TYPE_MODE (type)))
    return false;

  /* Return synthetic vectors in memory.  */
  if (TREE_CODE (type) == VECTOR_TYPE
      && int_size_in_bytes (type) > (TARGET_ALTIVEC_ABI ? 16 : 8))
    {
      /* Warn at most once per compilation about this ABI extension.  */
      static bool warned_for_return_big_vectors = false;
      if (!warned_for_return_big_vectors)
        {
          warning (0, "GCC vector returned by reference: "
                   "non-standard ABI extension with no compatibility guarantee");
          warned_for_return_big_vectors = true;
        }
      return true;
    }

  /* 128-bit IEEE long double is returned in memory under 32-bit V.4.  */
  if (DEFAULT_ABI == ABI_V4 && TARGET_IEEEQUAD && TYPE_MODE (type) == TFmode)
    return true;

  return false;
}
4398
 
4399
/* Initialize a variable CUM of type CUMULATIVE_ARGS
   for a call to a function whose data type is FNTYPE.
   For a library call, FNTYPE is 0.

   INCOMING is nonzero when scanning a function's own incoming
   arguments; LIBCALL is nonzero for a library call (affects the V.4
   call cookie); N_NAMED_ARGS is the number of named arguments.

   For incoming args we set the number of arguments in the prototype large
   so we never return a PARALLEL.  */

void
init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype,
                      rtx libname ATTRIBUTE_UNUSED, int incoming,
                      int libcall, int n_named_args)
{
  static CUMULATIVE_ARGS zero_cumulative;

  /* Clear everything, then set the starting register numbers.  */
  *cum = zero_cumulative;
  cum->words = 0;
  cum->fregno = FP_ARG_MIN_REG;
  cum->vregno = ALTIVEC_ARG_MIN_REG;
  cum->prototype = (fntype && TYPE_ARG_TYPES (fntype));
  cum->call_cookie = ((DEFAULT_ABI == ABI_V4 && libcall)
                      ? CALL_LIBCALL : CALL_NORMAL);
  cum->sysv_gregno = GP_ARG_MIN_REG;
  /* The function is stdarg when its prototype's last parameter entry
     is not void_type_node, i.e. the list ends with "...".  */
  cum->stdarg = fntype
    && (TYPE_ARG_TYPES (fntype) != 0
        && (TREE_VALUE (tree_last  (TYPE_ARG_TYPES (fntype)))
            != void_type_node));

  cum->nargs_prototype = 0;
  if (incoming || cum->prototype)
    cum->nargs_prototype = n_named_args;

  /* Check for a longcall attribute.  An explicit "shortcall"
     attribute overrides it.  */
  if ((!fntype && rs6000_default_long_calls)
      || (fntype
          && lookup_attribute ("longcall", TYPE_ATTRIBUTES (fntype))
          && !lookup_attribute ("shortcall", TYPE_ATTRIBUTES (fntype))))
    cum->call_cookie |= CALL_LONG;

  if (TARGET_DEBUG_ARG)
    {
      fprintf (stderr, "\ninit_cumulative_args:");
      if (fntype)
        {
          tree ret_type = TREE_TYPE (fntype);
          fprintf (stderr, " ret code = %s,",
                   tree_code_name[ (int)TREE_CODE (ret_type) ]);
        }

      if (cum->call_cookie & CALL_LONG)
        fprintf (stderr, " longcall,");

      fprintf (stderr, " proto = %d, nargs = %d\n",
               cum->prototype, cum->nargs_prototype);
    }

  /* Diagnose functions whose return value would need a vector
     register while AltiVec instructions are disabled.  */
  if (fntype
      && !TARGET_ALTIVEC
      && TARGET_ALTIVEC_ABI
      && ALTIVEC_VECTOR_MODE (TYPE_MODE (TREE_TYPE (fntype))))
    {
      error ("cannot return value in vector register because"
             " altivec instructions are disabled, use -maltivec"
             " to enable them");
    }
}
4464
 
4465
/* Return true if TYPE must be passed on the stack and not in registers.  */
4466
 
4467
static bool
4468
rs6000_must_pass_in_stack (enum machine_mode mode, tree type)
4469
{
4470
  if (DEFAULT_ABI == ABI_AIX || TARGET_64BIT)
4471
    return must_pass_in_stack_var_size (mode, type);
4472
  else
4473
    return must_pass_in_stack_var_size_or_pad (mode, type);
4474
}
4475
 
4476
/* If defined, a C expression which determines whether, and in which
4477
   direction, to pad out an argument with extra space.  The value
4478
   should be of type `enum direction': either `upward' to pad above
4479
   the argument, `downward' to pad below, or `none' to inhibit
4480
   padding.
4481
 
4482
   For the AIX ABI structs are always stored left shifted in their
4483
   argument slot.  */
4484
 
4485
enum direction
4486
function_arg_padding (enum machine_mode mode, tree type)
4487
{
4488
#ifndef AGGREGATE_PADDING_FIXED
4489
#define AGGREGATE_PADDING_FIXED 0
4490
#endif
4491
#ifndef AGGREGATES_PAD_UPWARD_ALWAYS
4492
#define AGGREGATES_PAD_UPWARD_ALWAYS 0
4493
#endif
4494
 
4495
  if (!AGGREGATE_PADDING_FIXED)
4496
    {
4497
      /* GCC used to pass structures of the same size as integer types as
4498
         if they were in fact integers, ignoring FUNCTION_ARG_PADDING.
4499
         i.e. Structures of size 1 or 2 (or 4 when TARGET_64BIT) were
4500
         passed padded downward, except that -mstrict-align further
4501
         muddied the water in that multi-component structures of 2 and 4
4502
         bytes in size were passed padded upward.
4503
 
4504
         The following arranges for best compatibility with previous
4505
         versions of gcc, but removes the -mstrict-align dependency.  */
4506
      if (BYTES_BIG_ENDIAN)
4507
        {
4508
          HOST_WIDE_INT size = 0;
4509
 
4510
          if (mode == BLKmode)
4511
            {
4512
              if (type && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST)
4513
                size = int_size_in_bytes (type);
4514
            }
4515
          else
4516
            size = GET_MODE_SIZE (mode);
4517
 
4518
          if (size == 1 || size == 2 || size == 4)
4519
            return downward;
4520
        }
4521
      return upward;
4522
    }
4523
 
4524
  if (AGGREGATES_PAD_UPWARD_ALWAYS)
4525
    {
4526
      if (type != 0 && AGGREGATE_TYPE_P (type))
4527
        return upward;
4528
    }
4529
 
4530
  /* Fall back to the default.  */
4531
  return DEFAULT_FUNCTION_ARG_PADDING (mode, type);
4532
}
4533
 
4534
/* If defined, a C expression that gives the alignment boundary, in bits,
4535
   of an argument with the specified mode and type.  If it is not defined,
4536
   PARM_BOUNDARY is used for all arguments.
4537
 
4538
   V.4 wants long longs and doubles to be double word aligned.  Just
4539
   testing the mode size is a boneheaded way to do this as it means
4540
   that other types such as complex int are also double word aligned.
4541
   However, we're stuck with this because changing the ABI might break
4542
   existing library interfaces.
4543
 
4544
   Doubleword align SPE vectors.
4545
   Quadword align Altivec vectors.
4546
   Quadword align large synthetic vector types.   */
4547
 
4548
int
4549
function_arg_boundary (enum machine_mode mode, tree type)
4550
{
4551
  if (DEFAULT_ABI == ABI_V4
4552
      && (GET_MODE_SIZE (mode) == 8
4553
          || (TARGET_HARD_FLOAT
4554
              && TARGET_FPRS
4555
              && mode == TFmode)))
4556
    return 64;
4557
  else if (SPE_VECTOR_MODE (mode)
4558
           || (type && TREE_CODE (type) == VECTOR_TYPE
4559
               && int_size_in_bytes (type) >= 8
4560
               && int_size_in_bytes (type) < 16))
4561
    return 64;
4562
  else if (ALTIVEC_VECTOR_MODE (mode)
4563
           || (type && TREE_CODE (type) == VECTOR_TYPE
4564
               && int_size_in_bytes (type) >= 16))
4565
    return 128;
4566
  else if (rs6000_darwin64_abi && mode == BLKmode
4567
           && type && TYPE_ALIGN (type) > 64)
4568
    return 128;
4569
  else
4570
    return PARM_BOUNDARY;
4571
}
4572
 
4573
/* For a function parm of MODE and TYPE, return the starting word in
4574
   the parameter area.  NWORDS of the parameter area are already used.  */
4575
 
4576
static unsigned int
4577
rs6000_parm_start (enum machine_mode mode, tree type, unsigned int nwords)
4578
{
4579
  unsigned int align;
4580
  unsigned int parm_offset;
4581
 
4582
  align = function_arg_boundary (mode, type) / PARM_BOUNDARY - 1;
4583
  parm_offset = DEFAULT_ABI == ABI_V4 ? 2 : 6;
4584
  return nwords + (-(parm_offset + nwords) & align);
4585
}
4586
 
4587
/* Compute the size (in words) of a function argument.  */
4588
 
4589
static unsigned long
4590
rs6000_arg_size (enum machine_mode mode, tree type)
4591
{
4592
  unsigned long size;
4593
 
4594
  if (mode != BLKmode)
4595
    size = GET_MODE_SIZE (mode);
4596
  else
4597
    size = int_size_in_bytes (type);
4598
 
4599
  if (TARGET_32BIT)
4600
    return (size + 3) >> 2;
4601
  else
4602
    return (size + 7) >> 3;
4603
}
4604
 
4605
/* Use this to flush pending int fields.  CUM->intoffset is the bit
   offset where the current run of integer fields began, or -1 if no
   run is pending.  BITPOS is the bit offset just past the run.
   Advance CUM->words over the GPR-sized words the run occupies and
   reset CUM->intoffset to -1.  */

static void
rs6000_darwin64_record_arg_advance_flush (CUMULATIVE_ARGS *cum,
                                          HOST_WIDE_INT bitpos)
{
  unsigned int startbit, endbit;
  int intregs, intoffset;
  enum machine_mode mode;

  /* Nothing pending.  */
  if (cum->intoffset == -1)
    return;

  intoffset = cum->intoffset;
  cum->intoffset = -1;

  if (intoffset % BITS_PER_WORD != 0)
    {
      mode = mode_for_size (BITS_PER_WORD - intoffset % BITS_PER_WORD,
                            MODE_INT, 0);
      if (mode == BLKmode)
        {
          /* We couldn't find an appropriate mode, which happens,
             e.g., in packed structs when there are 3 bytes to load.
             Back intoffset back to the beginning of the word in this
             case.  */
          intoffset = intoffset & -BITS_PER_WORD;
        }
    }

  /* Count whole words from the word containing the run's start up to
     the word containing BITPOS.  */
  startbit = intoffset & -BITS_PER_WORD;
  endbit = (bitpos + BITS_PER_WORD - 1) & -BITS_PER_WORD;
  intregs = (endbit - startbit) / BITS_PER_WORD;
  cum->words += intregs;
}
4640
 
4641
/* The darwin64 ABI calls for us to recurse down through structs,
4642
   looking for elements passed in registers.  Unfortunately, we have
4643
   to track int register count here also because of misalignments
4644
   in powerpc alignment mode.  */
4645
 
4646
static void
4647
rs6000_darwin64_record_arg_advance_recurse (CUMULATIVE_ARGS *cum,
4648
                                            tree type,
4649
                                            HOST_WIDE_INT startbitpos)
4650
{
4651
  tree f;
4652
 
4653
  for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
4654
    if (TREE_CODE (f) == FIELD_DECL)
4655
      {
4656
        HOST_WIDE_INT bitpos = startbitpos;
4657
        tree ftype = TREE_TYPE (f);
4658
        enum machine_mode mode;
4659
        if (ftype == error_mark_node)
4660
          continue;
4661
        mode = TYPE_MODE (ftype);
4662
 
4663
        if (DECL_SIZE (f) != 0
4664
            && host_integerp (bit_position (f), 1))
4665
          bitpos += int_bit_position (f);
4666
 
4667
        /* ??? FIXME: else assume zero offset.  */
4668
 
4669
        if (TREE_CODE (ftype) == RECORD_TYPE)
4670
          rs6000_darwin64_record_arg_advance_recurse (cum, ftype, bitpos);
4671
        else if (USE_FP_FOR_ARG_P (cum, mode, ftype))
4672
          {
4673
            rs6000_darwin64_record_arg_advance_flush (cum, bitpos);
4674
            cum->fregno += (GET_MODE_SIZE (mode) + 7) >> 3;
4675
            cum->words += (GET_MODE_SIZE (mode) + 7) >> 3;
4676
          }
4677
        else if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, 1))
4678
          {
4679
            rs6000_darwin64_record_arg_advance_flush (cum, bitpos);
4680
            cum->vregno++;
4681
            cum->words += 2;
4682
          }
4683
        else if (cum->intoffset == -1)
4684
          cum->intoffset = bitpos;
4685
      }
4686
}
4687
 
4688
/* Update the data in CUM to advance over an argument
   of mode MODE and data type TYPE.
   (TYPE is null for libcalls where that information may not be available.)

   NAMED is nonzero for a named (non-variadic) argument; DEPTH is
   nonzero when this is a recursive call for a contained argument.

   Note that for args passed by reference, function_arg will be called
   with MODE and TYPE set to that of the pointer to the arg, not the arg
   itself.  */

void
function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode,
                      tree type, int named, int depth)
{
  int size;

  /* Only tick off an argument if we're not recursing.  */
  if (depth == 0)
    cum->nargs_prototype--;

  /* AltiVec vectors (and 16-byte synthetic vectors) under the
     AltiVec ABI.  */
  if (TARGET_ALTIVEC_ABI
      && (ALTIVEC_VECTOR_MODE (mode)
          || (type && TREE_CODE (type) == VECTOR_TYPE
              && int_size_in_bytes (type) == 16)))
    {
      bool stack = false;

      if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named))
        {
          cum->vregno++;
          if (!TARGET_ALTIVEC)
            error ("cannot pass argument in vector register because"
                   " altivec instructions are disabled, use -maltivec"
                   " to enable them");

          /* PowerPC64 Linux and AIX allocate GPRs for a vector argument
             even if it is going to be passed in a vector register.
             Darwin does the same for variable-argument functions.  */
          if ((DEFAULT_ABI == ABI_AIX && TARGET_64BIT)
              || (cum->stdarg && DEFAULT_ABI != ABI_V4))
            stack = true;
        }
      else
        stack = true;

      if (stack)
        {
          int align;

          /* Vector parameters must be 16-byte aligned.  This places
             them at 2 mod 4 in terms of words in 32-bit mode, since
             the parameter save area starts at offset 24 from the
             stack.  In 64-bit mode, they just have to start on an
             even word, since the parameter save area is 16-byte
             aligned.  Space for GPRs is reserved even if the argument
             will be passed in memory.  */
          if (TARGET_32BIT)
            align = (2 - cum->words) & 3;
          else
            align = cum->words & 1;
          cum->words += align + rs6000_arg_size (mode, type);

          if (TARGET_DEBUG_ARG)
            {
              fprintf (stderr, "function_adv: words = %2d, align=%d, ",
                       cum->words, align);
              fprintf (stderr, "nargs = %4d, proto = %d, mode = %4s\n",
                       cum->nargs_prototype, cum->prototype,
                       GET_MODE_NAME (mode));
            }
        }
    }
  /* Named SPE vectors consume a single 64-bit GPR when one remains.  */
  else if (TARGET_SPE_ABI && TARGET_SPE && SPE_VECTOR_MODE (mode)
           && !cum->stdarg
           && cum->sysv_gregno <= GP_ARG_MAX_REG)
    cum->sysv_gregno++;

  /* Darwin64 records are scanned field by field.  */
  else if (rs6000_darwin64_abi
           && mode == BLKmode
           && TREE_CODE (type) == RECORD_TYPE
           && (size = int_size_in_bytes (type)) > 0)
    {
      /* Variable sized types have size == -1 and are
         treated as if consisting entirely of ints.
         Pad to 16 byte boundary if needed.  */
      if (TYPE_ALIGN (type) >= 2 * BITS_PER_WORD
          && (cum->words % 2) != 0)
        cum->words++;
      /* For varargs, we can just go up by the size of the struct. */
      if (!named)
        cum->words += (size + 7) / 8;
      else
        {
          /* It is tempting to say int register count just goes up by
             sizeof(type)/8, but this is wrong in a case such as
             { int; double; int; } [powerpc alignment].  We have to
             grovel through the fields for these too.  */
          cum->intoffset = 0;
          rs6000_darwin64_record_arg_advance_recurse (cum, type, 0);
          rs6000_darwin64_record_arg_advance_flush (cum,
                                                    size * BITS_PER_UNIT);
        }
    }
  else if (DEFAULT_ABI == ABI_V4)
    {
      /* V.4 passes SF/DF (and non-IEEE TF) in FPRs while any remain,
         then spills to the stack.  */
      if (TARGET_HARD_FLOAT && TARGET_FPRS
          && (mode == SFmode || mode == DFmode
              || (mode == TFmode && !TARGET_IEEEQUAD)))
        {
          if (cum->fregno + (mode == TFmode ? 1 : 0) <= FP_ARG_V4_MAX_REG)
            cum->fregno += (GET_MODE_SIZE (mode) + 7) >> 3;
          else
            {
              cum->fregno = FP_ARG_V4_MAX_REG + 1;
              if (mode == DFmode || mode == TFmode)
                cum->words += cum->words & 1;
              cum->words += rs6000_arg_size (mode, type);
            }
        }
      else
        {
          int n_words = rs6000_arg_size (mode, type);
          int gregno = cum->sysv_gregno;

          /* Long long and SPE vectors are put in (r3,r4), (r5,r6),
             (r7,r8) or (r9,r10).  As does any other 2 word item such
             as complex int due to a historical mistake.  */
          if (n_words == 2)
            gregno += (1 - gregno) & 1;

          /* Multi-reg args are not split between registers and stack.  */
          if (gregno + n_words - 1 > GP_ARG_MAX_REG)
            {
              /* Long long and SPE vectors are aligned on the stack.
                 So are other 2 word items such as complex int due to
                 a historical mistake.  */
              if (n_words == 2)
                cum->words += cum->words & 1;
              cum->words += n_words;
            }

          /* Note: continuing to accumulate gregno past when we've started
             spilling to the stack indicates the fact that we've started
             spilling to the stack to expand_builtin_saveregs.  */
          cum->sysv_gregno = gregno + n_words;
        }

      if (TARGET_DEBUG_ARG)
        {
          fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
                   cum->words, cum->fregno);
          fprintf (stderr, "gregno = %2d, nargs = %4d, proto = %d, ",
                   cum->sysv_gregno, cum->nargs_prototype, cum->prototype);
          fprintf (stderr, "mode = %4s, named = %d\n",
                   GET_MODE_NAME (mode), named);
        }
    }
  else
    {
      /* AIX / Darwin: words advance past the aligned start plus the
         argument's size; scalar binary floats also consume FPRs.  */
      int n_words = rs6000_arg_size (mode, type);
      int start_words = cum->words;
      int align_words = rs6000_parm_start (mode, type, start_words);

      cum->words = align_words + n_words;

      if (SCALAR_FLOAT_MODE_P (mode)
          && !DECIMAL_FLOAT_MODE_P (mode)
          && TARGET_HARD_FLOAT && TARGET_FPRS)
        cum->fregno += (GET_MODE_SIZE (mode) + 7) >> 3;

      if (TARGET_DEBUG_ARG)
        {
          fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
                   cum->words, cum->fregno);
          fprintf (stderr, "nargs = %4d, proto = %d, mode = %4s, ",
                   cum->nargs_prototype, cum->prototype, GET_MODE_NAME (mode));
          fprintf (stderr, "named = %d, align = %d, depth = %d\n",
                   named, align_words - start_words, depth);
        }
    }
}
4867
 
4868
static rtx
4869
spe_build_register_parallel (enum machine_mode mode, int gregno)
4870
{
4871
  rtx r1, r3;
4872
 
4873
  switch (mode)
4874
    {
4875
    case DFmode:
4876
      r1 = gen_rtx_REG (DImode, gregno);
4877
      r1 = gen_rtx_EXPR_LIST (VOIDmode, r1, const0_rtx);
4878
      return gen_rtx_PARALLEL (mode, gen_rtvec (1, r1));
4879
 
4880
    case DCmode:
4881
      r1 = gen_rtx_REG (DImode, gregno);
4882
      r1 = gen_rtx_EXPR_LIST (VOIDmode, r1, const0_rtx);
4883
      r3 = gen_rtx_REG (DImode, gregno + 2);
4884
      r3 = gen_rtx_EXPR_LIST (VOIDmode, r3, GEN_INT (8));
4885
      return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r3));
4886
 
4887
    default:
4888
      gcc_unreachable ();
4889
    }
4890
}
4891
 
4892
/* Determine where to put a SIMD argument on the SPE.  Return the rtx
   describing the register(s) used, or NULL_RTX if the argument goes
   in memory.  */
static rtx
rs6000_spe_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
                         tree type)
{
  int gregno = cum->sysv_gregno;

  /* On E500 v2, double arithmetic is done on the full 64-bit GPR, but
     are passed and returned in a pair of GPRs for ABI compatibility.  */
  if (TARGET_E500_DOUBLE && (mode == DFmode || mode == DCmode))
    {
      int n_words = rs6000_arg_size (mode, type);

      /* Doubles go in an odd/even register pair (r5/r6, etc).  */
      if (mode == DFmode)
        gregno += (1 - gregno) & 1;

      /* Multi-reg args are not split between registers and stack.  */
      if (gregno + n_words - 1 > GP_ARG_MAX_REG)
        return NULL_RTX;

      return spe_build_register_parallel (mode, gregno);
    }
  if (cum->stdarg)
    {
      int n_words = rs6000_arg_size (mode, type);

      /* SPE vectors are put in odd registers.  */
      if (n_words == 2 && (gregno & 1) == 0)
        gregno += 1;

      if (gregno + n_words - 1 <= GP_ARG_MAX_REG)
        {
          rtx r1, r2;
          enum machine_mode m = SImode;

          /* Describe the vector as two SImode halves at byte offsets
             0 and 4 so the two words are individually visible.  */
          r1 = gen_rtx_REG (m, gregno);
          r1 = gen_rtx_EXPR_LIST (m, r1, const0_rtx);
          r2 = gen_rtx_REG (m, gregno + 1);
          r2 = gen_rtx_EXPR_LIST (m, r2, GEN_INT (4));
          return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r2));
        }
      else
        return NULL_RTX;
    }
  else
    {
      /* Named args: a single register if one is left, else memory.  */
      if (gregno <= GP_ARG_MAX_REG)
        return gen_rtx_REG (mode, gregno);
      else
        return NULL_RTX;
    }
}
4945
 
4946
/* A subroutine of rs6000_darwin64_record_arg.  Assign the bits of the
   structure between cum->intoffset and bitpos to integer registers.
   The resulting (register, byte-offset) pairs are appended to RVEC
   at index *K, which is advanced.  Sets cum->use_stack when the run
   does not fit entirely in the remaining GPRs.  */

static void
rs6000_darwin64_record_arg_flush (CUMULATIVE_ARGS *cum,
                                  HOST_WIDE_INT bitpos, rtx rvec[], int *k)
{
  enum machine_mode mode;
  unsigned int regno;
  unsigned int startbit, endbit;
  int this_regno, intregs, intoffset;
  rtx reg;

  /* No integer run pending.  */
  if (cum->intoffset == -1)
    return;

  intoffset = cum->intoffset;
  cum->intoffset = -1;

  /* If this is the trailing part of a word, try to only load that
     much into the register.  Otherwise load the whole register.  Note
     that in the latter case we may pick up unwanted bits.  It's not a
     problem at the moment but may wish to revisit.  */

  if (intoffset % BITS_PER_WORD != 0)
    {
      mode = mode_for_size (BITS_PER_WORD - intoffset % BITS_PER_WORD,
                          MODE_INT, 0);
      if (mode == BLKmode)
        {
          /* We couldn't find an appropriate mode, which happens,
             e.g., in packed structs when there are 3 bytes to load.
             Back intoffset back to the beginning of the word in this
             case.  */
         intoffset = intoffset & -BITS_PER_WORD;
         mode = word_mode;
        }
    }
  else
    mode = word_mode;

  /* Whole words from the word containing the run's start up to the
     word containing BITPOS.  */
  startbit = intoffset & -BITS_PER_WORD;
  endbit = (bitpos + BITS_PER_WORD - 1) & -BITS_PER_WORD;
  intregs = (endbit - startbit) / BITS_PER_WORD;
  this_regno = cum->words + intoffset / BITS_PER_WORD;

  if (intregs > 0 && intregs > GP_ARG_NUM_REG - this_regno)
    cum->use_stack = 1;

  /* Emit at most as many registers as remain available.  */
  intregs = MIN (intregs, GP_ARG_NUM_REG - this_regno);
  if (intregs <= 0)
    return;

  intoffset /= BITS_PER_UNIT;
  do
    {
      regno = GP_ARG_MIN_REG + this_regno;
      reg = gen_rtx_REG (mode, regno);
      rvec[(*k)++] =
        gen_rtx_EXPR_LIST (VOIDmode, reg, GEN_INT (intoffset));

      /* Subsequent registers are full words at word boundaries.  */
      this_regno += 1;
      intoffset = (intoffset | (UNITS_PER_WORD-1)) + 1;
      mode = word_mode;
      intregs -= 1;
    }
  while (intregs > 0);
}
5014
 
5015
/* Recursive workhorse for the following.  Walk the fields of record
   TYPE (at bit offset STARTBITPOS within the outermost record),
   appending (register, byte-offset) pairs to RVEC at index *K for FP
   and vector fields, and tracking runs of integer fields via
   cum->intoffset for rs6000_darwin64_record_arg_flush.  */

static void
rs6000_darwin64_record_arg_recurse (CUMULATIVE_ARGS *cum, tree type,
                                    HOST_WIDE_INT startbitpos, rtx rvec[],
                                    int *k)
{
  tree f;

  for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
    if (TREE_CODE (f) == FIELD_DECL)
      {
        HOST_WIDE_INT bitpos = startbitpos;
        tree ftype = TREE_TYPE (f);
        enum machine_mode mode;
        if (ftype == error_mark_node)
          continue;
        mode = TYPE_MODE (ftype);

        if (DECL_SIZE (f) != 0
            && host_integerp (bit_position (f), 1))
          bitpos += int_bit_position (f);

        /* ??? FIXME: else assume zero offset.  */

        if (TREE_CODE (ftype) == RECORD_TYPE)
          rs6000_darwin64_record_arg_recurse (cum, ftype, bitpos, rvec, k);
        else if (cum->named && USE_FP_FOR_ARG_P (cum, mode, ftype))
          {
#if 0
            switch (mode)
              {
              case SCmode: mode = SFmode; break;
              case DCmode: mode = DFmode; break;
              case TCmode: mode = TFmode; break;
              default: break;
              }
#endif
            /* Close out any pending integer run, then assign the next
               FPR at this field's byte offset.  TFmode uses two FPRs.  */
            rs6000_darwin64_record_arg_flush (cum, bitpos, rvec, k);
            rvec[(*k)++]
              = gen_rtx_EXPR_LIST (VOIDmode,
                                   gen_rtx_REG (mode, cum->fregno++),
                                   GEN_INT (bitpos / BITS_PER_UNIT));
            if (mode == TFmode)
              cum->fregno++;
          }
        else if (cum->named && USE_ALTIVEC_FOR_ARG_P (cum, mode, ftype, 1))
          {
            /* Vector fields get the next AltiVec register.  */
            rs6000_darwin64_record_arg_flush (cum, bitpos, rvec, k);
            rvec[(*k)++]
              = gen_rtx_EXPR_LIST (VOIDmode,
                                   gen_rtx_REG (mode, cum->vregno++),
                                   GEN_INT (bitpos / BITS_PER_UNIT));
          }
        else if (cum->intoffset == -1)
          cum->intoffset = bitpos;
      }
}
5073
 
5074
/* For the darwin64 ABI, we want to construct a PARALLEL consisting of
   the register(s) to be used for each field and subfield of a struct
   being passed by value, along with the offset of where the
   register's value may be found in the block.  FP fields go in FP
   register, vector fields go in vector registers, and everything
   else goes in int registers, packed as in memory.

   This code is also used for function return values.  RETVAL indicates
   whether this is the case.

   Much of this is taken from the SPARC V9 port, which has a similar
   calling convention.

   ORIG_CUM is not modified; we work on a local copy.  NAMED is nonzero
   for a named (non-ellipsis) parameter.  Returns NULL_RTX when the
   value does not go in registers at all.  */

static rtx
rs6000_darwin64_record_arg (CUMULATIVE_ARGS *orig_cum, tree type,
                            int named, bool retval)
{
  rtx rvec[FIRST_PSEUDO_REGISTER];
  int k = 1, kbase = 1;
  HOST_WIDE_INT typesize = int_size_in_bytes (type);
  /* This is a copy; modifications are not visible to our caller.  */
  CUMULATIVE_ARGS copy_cum = *orig_cum;
  CUMULATIVE_ARGS *cum = &copy_cum;

  /* Pad to 16 byte boundary if needed.  */
  if (!retval && TYPE_ALIGN (type) >= 2 * BITS_PER_WORD
      && (cum->words % 2) != 0)
    cum->words++;

  /* intoffset == 0 means a run of integer data is open starting at
     the beginning of the struct; flush/recurse manage it from here.  */
  cum->intoffset = 0;
  cum->use_stack = 0;
  cum->named = named;

  /* Put entries into rvec[] for individual FP and vector fields, and
     for the chunks of memory that go in int regs.  Note we start at
     element 1; 0 is reserved for an indication of using memory, and
     may or may not be filled in below. */
  rs6000_darwin64_record_arg_recurse (cum, type, 0, rvec, &k);
  rs6000_darwin64_record_arg_flush (cum, typesize * BITS_PER_UNIT, rvec, &k);

  /* If any part of the struct went on the stack put all of it there.
     This hack is because the generic code for
     FUNCTION_ARG_PARTIAL_NREGS cannot handle cases where the register
     parts of the struct are not at the beginning.  */
  if (cum->use_stack)
    {
      if (retval)
        return NULL_RTX;    /* doesn't go in registers at all */
      kbase = 0;
      rvec[0] = gen_rtx_EXPR_LIST (VOIDmode, NULL_RTX, const0_rtx);
    }
  if (k > 1 || cum->use_stack)
    return gen_rtx_PARALLEL (BLKmode, gen_rtvec_v (k - kbase, &rvec[kbase]));
  else
    return NULL_RTX;
}
5130
 
5131
/* Determine where to place an argument in 64-bit mode with 32-bit ABI.  */
5132
 
5133
static rtx
5134
rs6000_mixed_function_arg (enum machine_mode mode, tree type, int align_words)
5135
{
5136
  int n_units;
5137
  int i, k;
5138
  rtx rvec[GP_ARG_NUM_REG + 1];
5139
 
5140
  if (align_words >= GP_ARG_NUM_REG)
5141
    return NULL_RTX;
5142
 
5143
  n_units = rs6000_arg_size (mode, type);
5144
 
5145
  /* Optimize the simple case where the arg fits in one gpr, except in
5146
     the case of BLKmode due to assign_parms assuming that registers are
5147
     BITS_PER_WORD wide.  */
5148
  if (n_units == 0
5149
      || (n_units == 1 && mode != BLKmode))
5150
    return gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
5151
 
5152
  k = 0;
5153
  if (align_words + n_units > GP_ARG_NUM_REG)
5154
    /* Not all of the arg fits in gprs.  Say that it goes in memory too,
5155
       using a magic NULL_RTX component.
5156
       This is not strictly correct.  Only some of the arg belongs in
5157
       memory, not all of it.  However, the normal scheme using
5158
       function_arg_partial_nregs can result in unusual subregs, eg.
5159
       (subreg:SI (reg:DF) 4), which are not handled well.  The code to
5160
       store the whole arg to memory is often more efficient than code
5161
       to store pieces, and we know that space is available in the right
5162
       place for the whole arg.  */
5163
    rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, NULL_RTX, const0_rtx);
5164
 
5165
  i = 0;
5166
  do
5167
    {
5168
      rtx r = gen_rtx_REG (SImode, GP_ARG_MIN_REG + align_words);
5169
      rtx off = GEN_INT (i++ * 4);
5170
      rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, off);
5171
    }
5172
  while (++align_words < GP_ARG_NUM_REG && --n_units != 0);
5173
 
5174
  return gen_rtx_PARALLEL (mode, gen_rtvec_v (k, rvec));
5175
}
5176
 
5177
/* Determine where to put an argument to a function.
   Value is zero to push the argument on the stack,
   or a hard register in which to store the argument.

   MODE is the argument's machine mode.
   TYPE is the data type of the argument (as a tree).
    This is null for libcalls where that information may
    not be available.
   CUM is a variable of type CUMULATIVE_ARGS which gives info about
    the preceding args and about the function being called.  It is
    not modified in this routine.
   NAMED is nonzero if this argument is a named parameter
    (otherwise it is an extra parameter matching an ellipsis).

   On RS/6000 the first eight words of non-FP are normally in registers
   and the rest are pushed.  Under AIX, the first 13 FP args are in registers.
   Under V.4, the first 8 FP args are in registers.

   If this is floating-point and no prototype is specified, we use
   both an FP and integer register (or possibly FP reg and stack).  Library
   functions (when CALL_LIBCALL is set) always have the proper types for args,
   so we can pass the FP value just in one register.  emit_library_function
   doesn't support PARALLEL anyway.

   Note that for args passed by reference, function_arg will be called
   with MODE and TYPE set to that of the pointer to the arg, not the arg
   itself.  */

rtx
function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
              tree type, int named)
{
  enum rs6000_abi abi = DEFAULT_ABI;

  /* Return a marker to indicate whether CR1 needs to set or clear the
     bit that V.4 uses to say fp args were passed in registers.
     Assume that we don't need the marker for software floating point,
     or compiler generated library calls.  */
  if (mode == VOIDmode)
    {
      if (abi == ABI_V4
          && (cum->call_cookie & CALL_LIBCALL) == 0
          && (cum->stdarg
              || (cum->nargs_prototype < 0
                  && (cum->prototype || TARGET_NO_PROTOTYPE))))
        {
          /* For the SPE, we need to crxor CR6 always.  */
          if (TARGET_SPE_ABI)
            return GEN_INT (cum->call_cookie | CALL_V4_SET_FP_ARGS);
          else if (TARGET_HARD_FLOAT && TARGET_FPRS)
            return GEN_INT (cum->call_cookie
                            | ((cum->fregno == FP_ARG_MIN_REG)
                               ? CALL_V4_SET_FP_ARGS
                               : CALL_V4_CLEAR_FP_ARGS));
        }

      return GEN_INT (cum->call_cookie);
    }

  /* Darwin64 passes struct fields in FP/vector/int registers
     piecewise; fall through to normal handling if no register pieces
     result.  NOTE(review): TYPE is dereferenced unchecked here --
     BLKmode args are presumed always to carry a type; confirm this
     holds for libcalls.  */
  if (rs6000_darwin64_abi && mode == BLKmode
      && TREE_CODE (type) == RECORD_TYPE)
    {
      rtx rslt = rs6000_darwin64_record_arg (cum, type, named, false);
      if (rslt != NULL_RTX)
        return rslt;
      /* Else fall through to usual handling.  */
    }

  if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named))
    if (TARGET_64BIT && ! cum->prototype)
      {
        /* Vector parameters get passed in vector register
           and also in GPRs or memory, in absence of prototype.  */
        int align_words;
        rtx slot;
        /* Round the GPR word count up to an even boundary.  */
        align_words = (cum->words + 1) & ~1;

        if (align_words >= GP_ARG_NUM_REG)
          {
            slot = NULL_RTX;
          }
        else
          {
            slot = gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
          }
        return gen_rtx_PARALLEL (mode,
                 gen_rtvec (2,
                            gen_rtx_EXPR_LIST (VOIDmode,
                                               slot, const0_rtx),
                            gen_rtx_EXPR_LIST (VOIDmode,
                                               gen_rtx_REG (mode, cum->vregno),
                                               const0_rtx)));
      }
    else
      return gen_rtx_REG (mode, cum->vregno);
  else if (TARGET_ALTIVEC_ABI
           && (ALTIVEC_VECTOR_MODE (mode)
               || (type && TREE_CODE (type) == VECTOR_TYPE
                   && int_size_in_bytes (type) == 16)))
    {
      if (named || abi == ABI_V4)
        return NULL_RTX;
      else
        {
          /* Vector parameters to varargs functions under AIX or Darwin
             get passed in memory and possibly also in GPRs.  */
          int align, align_words, n_words;
          enum machine_mode part_mode;

          /* Vector parameters must be 16-byte aligned.  This places them at
             2 mod 4 in terms of words in 32-bit mode, since the parameter
             save area starts at offset 24 from the stack.  In 64-bit mode,
             they just have to start on an even word, since the parameter
             save area is 16-byte aligned.  */
          if (TARGET_32BIT)
            align = (2 - cum->words) & 3;
          else
            align = cum->words & 1;
          align_words = cum->words + align;

          /* Out of registers?  Memory, then.  */
          if (align_words >= GP_ARG_NUM_REG)
            return NULL_RTX;

          if (TARGET_32BIT && TARGET_POWERPC64)
            return rs6000_mixed_function_arg (mode, type, align_words);

          /* The vector value goes in GPRs.  Only the part of the
             value in GPRs is reported here.  */
          part_mode = mode;
          n_words = rs6000_arg_size (mode, type);
          if (align_words + n_words > GP_ARG_NUM_REG)
            /* Fortunately, there are only two possibilities, the value
               is either wholly in GPRs or half in GPRs and half not.  */
            part_mode = DImode;

          return gen_rtx_REG (part_mode, GP_ARG_MIN_REG + align_words);
        }
    }
  else if (TARGET_SPE_ABI && TARGET_SPE
           && (SPE_VECTOR_MODE (mode)
               || (TARGET_E500_DOUBLE && (mode == DFmode
                                          || mode == DCmode))))
    return rs6000_spe_function_arg (cum, mode, type);

  else if (abi == ABI_V4)
    {
      if (TARGET_HARD_FLOAT && TARGET_FPRS
          && (mode == SFmode || mode == DFmode
              || (mode == TFmode && !TARGET_IEEEQUAD)))
        {
          /* TFmode needs a second FP register, hence the extra slot
             required below.  */
          if (cum->fregno + (mode == TFmode ? 1 : 0) <= FP_ARG_V4_MAX_REG)
            return gen_rtx_REG (mode, cum->fregno);
          else
            return NULL_RTX;
        }
      else
        {
          int n_words = rs6000_arg_size (mode, type);
          int gregno = cum->sysv_gregno;

          /* Long long and SPE vectors are put in (r3,r4), (r5,r6),
             (r7,r8) or (r9,r10).  As does any other 2 word item such
             as complex int due to a historical mistake.  */
          if (n_words == 2)
            gregno += (1 - gregno) & 1;

          /* Multi-reg args are not split between registers and stack.  */
          if (gregno + n_words - 1 > GP_ARG_MAX_REG)
            return NULL_RTX;

          if (TARGET_32BIT && TARGET_POWERPC64)
            return rs6000_mixed_function_arg (mode, type,
                                              gregno - GP_ARG_MIN_REG);
          return gen_rtx_REG (mode, gregno);
        }
    }
  else
    {
      /* AIX / Darwin ABI.  */
      int align_words = rs6000_parm_start (mode, type, cum->words);

      if (USE_FP_FOR_ARG_P (cum, mode, type))
        {
          rtx rvec[GP_ARG_NUM_REG + 1];
          rtx r;
          int k;
          bool needs_psave;
          enum machine_mode fmode = mode;
          /* Number of 8-byte FP registers this value occupies.  */
          unsigned long n_fpreg = (GET_MODE_SIZE (mode) + 7) >> 3;

          if (cum->fregno + n_fpreg > FP_ARG_MAX_REG + 1)
            {
              /* Currently, we only ever need one reg here because complex
                 doubles are split.  */
              gcc_assert (cum->fregno == FP_ARG_MAX_REG && fmode == TFmode);

              /* Long double split over regs and memory.  */
              fmode = DFmode;
            }

          /* Do we also need to pass this arg in the parameter save
             area?  */
          needs_psave = (type
                         && (cum->nargs_prototype <= 0
                             || (DEFAULT_ABI == ABI_AIX
                                 && TARGET_XL_COMPAT
                                 && align_words >= GP_ARG_NUM_REG)));

          if (!needs_psave && mode == fmode)
            return gen_rtx_REG (fmode, cum->fregno);

          k = 0;
          if (needs_psave)
            {
              /* Describe the part that goes in gprs or the stack.
                 This piece must come first, before the fprs.  */
              if (align_words < GP_ARG_NUM_REG)
                {
                  unsigned long n_words = rs6000_arg_size (mode, type);

                  if (align_words + n_words > GP_ARG_NUM_REG
                      || (TARGET_32BIT && TARGET_POWERPC64))
                    {
                      /* If this is partially on the stack, then we only
                         include the portion actually in registers here.  */
                      enum machine_mode rmode = TARGET_32BIT ? SImode : DImode;
                      rtx off;
                      int i = 0;
                      if (align_words + n_words > GP_ARG_NUM_REG)
                        /* Not all of the arg fits in gprs.  Say that it
                           goes in memory too, using a magic NULL_RTX
                           component.  Also see comment in
                           rs6000_mixed_function_arg for why the normal
                           function_arg_partial_nregs scheme doesn't work
                           in this case. */
                        rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, NULL_RTX,
                                                       const0_rtx);
                      do
                        {
                          r = gen_rtx_REG (rmode,
                                           GP_ARG_MIN_REG + align_words);
                          off = GEN_INT (i++ * GET_MODE_SIZE (rmode));
                          rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, off);
                        }
                      while (++align_words < GP_ARG_NUM_REG && --n_words != 0);
                    }
                  else
                    {
                      /* The whole arg fits in gprs.  */
                      r = gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
                      rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, const0_rtx);
                    }
                }
              else
                /* It's entirely in memory.  */
                rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, NULL_RTX, const0_rtx);
            }

          /* Describe where this piece goes in the fprs.  */
          r = gen_rtx_REG (fmode, cum->fregno);
          rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, const0_rtx);

          return gen_rtx_PARALLEL (mode, gen_rtvec_v (k, rvec));
        }
      else if (align_words < GP_ARG_NUM_REG)
        {
          if (TARGET_32BIT && TARGET_POWERPC64)
            return rs6000_mixed_function_arg (mode, type, align_words);

          if (mode == BLKmode)
            mode = Pmode;

          return gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
        }
      else
        return NULL_RTX;
    }
}
5455
 
5456
/* For an arg passed partly in registers and partly in memory, this is
5457
   the number of bytes passed in registers.  For args passed entirely in
5458
   registers or entirely in memory, zero.  When an arg is described by a
5459
   PARALLEL, perhaps using more than one register type, this function
5460
   returns the number of bytes used by the first element of the PARALLEL.  */
5461
 
5462
static int
5463
rs6000_arg_partial_bytes (CUMULATIVE_ARGS *cum, enum machine_mode mode,
5464
                          tree type, bool named)
5465
{
5466
  int ret = 0;
5467
  int align_words;
5468
 
5469
  if (DEFAULT_ABI == ABI_V4)
5470
    return 0;
5471
 
5472
  if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named)
5473
      && cum->nargs_prototype >= 0)
5474
    return 0;
5475
 
5476
  /* In this complicated case we just disable the partial_nregs code.  */
5477
  if (rs6000_darwin64_abi && mode == BLKmode
5478
      && TREE_CODE (type) == RECORD_TYPE
5479
      && int_size_in_bytes (type) > 0)
5480
    return 0;
5481
 
5482
  align_words = rs6000_parm_start (mode, type, cum->words);
5483
 
5484
  if (USE_FP_FOR_ARG_P (cum, mode, type))
5485
    {
5486
      /* If we are passing this arg in the fixed parameter save area
5487
         (gprs or memory) as well as fprs, then this function should
5488
         return the number of partial bytes passed in the parameter
5489
         save area rather than partial bytes passed in fprs.  */
5490
      if (type
5491
          && (cum->nargs_prototype <= 0
5492
              || (DEFAULT_ABI == ABI_AIX
5493
                  && TARGET_XL_COMPAT
5494
                  && align_words >= GP_ARG_NUM_REG)))
5495
        return 0;
5496
      else if (cum->fregno + ((GET_MODE_SIZE (mode) + 7) >> 3)
5497
               > FP_ARG_MAX_REG + 1)
5498
        ret = (FP_ARG_MAX_REG + 1 - cum->fregno) * 8;
5499
      else if (cum->nargs_prototype >= 0)
5500
        return 0;
5501
    }
5502
 
5503
  if (align_words < GP_ARG_NUM_REG
5504
      && GP_ARG_NUM_REG < align_words + rs6000_arg_size (mode, type))
5505
    ret = (GP_ARG_NUM_REG - align_words) * (TARGET_32BIT ? 4 : 8);
5506
 
5507
  if (ret != 0 && TARGET_DEBUG_ARG)
5508
    fprintf (stderr, "rs6000_arg_partial_bytes: %d\n", ret);
5509
 
5510
  return ret;
5511
}
5512
 
5513
/* A C expression that indicates when an argument must be passed by
5514
   reference.  If nonzero for an argument, a copy of that argument is
5515
   made in memory and a pointer to the argument is passed instead of
5516
   the argument itself.  The pointer is passed in whatever way is
5517
   appropriate for passing a pointer to that type.
5518
 
5519
   Under V.4, aggregates and long double are passed by reference.
5520
 
5521
   As an extension to all 32-bit ABIs, AltiVec vectors are passed by
5522
   reference unless the AltiVec vector extension ABI is in force.
5523
 
5524
   As an extension to all ABIs, variable sized types are passed by
5525
   reference.  */
5526
 
5527
static bool
5528
rs6000_pass_by_reference (CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED,
5529
                          enum machine_mode mode, tree type,
5530
                          bool named ATTRIBUTE_UNUSED)
5531
{
5532
  if (DEFAULT_ABI == ABI_V4 && TARGET_IEEEQUAD && mode == TFmode)
5533
    {
5534
      if (TARGET_DEBUG_ARG)
5535
        fprintf (stderr, "function_arg_pass_by_reference: V4 long double\n");
5536
      return 1;
5537
    }
5538
 
5539
  if (!type)
5540
    return 0;
5541
 
5542
  if (DEFAULT_ABI == ABI_V4 && AGGREGATE_TYPE_P (type))
5543
    {
5544
      if (TARGET_DEBUG_ARG)
5545
        fprintf (stderr, "function_arg_pass_by_reference: V4 aggregate\n");
5546
      return 1;
5547
    }
5548
 
5549
  if (int_size_in_bytes (type) < 0)
5550
    {
5551
      if (TARGET_DEBUG_ARG)
5552
        fprintf (stderr, "function_arg_pass_by_reference: variable size\n");
5553
      return 1;
5554
    }
5555
 
5556
  /* Allow -maltivec -mabi=no-altivec without warning.  Altivec vector
5557
     modes only exist for GCC vector types if -maltivec.  */
5558
  if (TARGET_32BIT && !TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
5559
    {
5560
      if (TARGET_DEBUG_ARG)
5561
        fprintf (stderr, "function_arg_pass_by_reference: AltiVec\n");
5562
      return 1;
5563
    }
5564
 
5565
  /* Pass synthetic vectors in memory.  */
5566
  if (TREE_CODE (type) == VECTOR_TYPE
5567
      && int_size_in_bytes (type) > (TARGET_ALTIVEC_ABI ? 16 : 8))
5568
    {
5569
      static bool warned_for_pass_big_vectors = false;
5570
      if (TARGET_DEBUG_ARG)
5571
        fprintf (stderr, "function_arg_pass_by_reference: synthetic vector\n");
5572
      if (!warned_for_pass_big_vectors)
5573
        {
5574
          warning (0, "GCC vector passed by reference: "
5575
                   "non-standard ABI extension with no compatibility guarantee");
5576
          warned_for_pass_big_vectors = true;
5577
        }
5578
      return 1;
5579
    }
5580
 
5581
  return 0;
5582
}
5583
 
5584
static void
5585
rs6000_move_block_from_reg (int regno, rtx x, int nregs)
5586
{
5587
  int i;
5588
  enum machine_mode reg_mode = TARGET_32BIT ? SImode : DImode;
5589
 
5590
  if (nregs == 0)
5591
    return;
5592
 
5593
  for (i = 0; i < nregs; i++)
5594
    {
5595
      rtx tem = adjust_address_nv (x, reg_mode, i * GET_MODE_SIZE (reg_mode));
5596
      if (reload_completed)
5597
        {
5598
          if (! strict_memory_address_p (reg_mode, XEXP (tem, 0)))
5599
            tem = NULL_RTX;
5600
          else
5601
            tem = simplify_gen_subreg (reg_mode, x, BLKmode,
5602
                                       i * GET_MODE_SIZE (reg_mode));
5603
        }
5604
      else
5605
        tem = replace_equiv_address (tem, XEXP (tem, 0));
5606
 
5607
      gcc_assert (tem);
5608
 
5609
      emit_move_insn (tem, gen_rtx_REG (reg_mode, regno + i));
5610
    }
5611
}
5612
 
5613
/* Perform any needed actions needed for a function that is receiving a
   variable number of arguments.

   CUM is as above.

   MODE and TYPE are the mode and type of the current parameter.

   PRETEND_SIZE is a variable that should be set to the amount of stack
   that must be pushed by the prolog to pretend that our caller pushed
   it.

   Normally, this macro will push all remaining incoming registers on the
   stack and set PRETEND_SIZE to the length of the registers pushed.

   NO_RTL nonzero means only compute sizes/offsets; emit no RTL.  */

static void
setup_incoming_varargs (CUMULATIVE_ARGS *cum, enum machine_mode mode,
                        tree type, int *pretend_size ATTRIBUTE_UNUSED,
                        int no_rtl)
{
  CUMULATIVE_ARGS next_cum;
  int reg_size = TARGET_32BIT ? 4 : 8;
  rtx save_area = NULL_RTX, mem;
  int first_reg_offset, set;

  /* Skip the last named argument.  */
  next_cum = *cum;
  function_arg_advance (&next_cum, mode, type, 1, 0);

  if (DEFAULT_ABI == ABI_V4)
    {
      /* V.4: the register save area lives in our own frame, so we may
         size it to just the registers actually saved.  */
      first_reg_offset = next_cum.sysv_gregno - GP_ARG_MIN_REG;

      if (! no_rtl)
        {
          int gpr_reg_num = 0, gpr_size = 0, fpr_size = 0;
          HOST_WIDE_INT offset = 0;

          /* Try to optimize the size of the varargs save area.
             The ABI requires that ap.reg_save_area is doubleword
             aligned, but we don't need to allocate space for all
             the bytes, only those to which we actually will save
             anything.  */
          if (cfun->va_list_gpr_size && first_reg_offset < GP_ARG_NUM_REG)
            gpr_reg_num = GP_ARG_NUM_REG - first_reg_offset;
          if (TARGET_HARD_FLOAT && TARGET_FPRS
              && next_cum.fregno <= FP_ARG_V4_MAX_REG
              && cfun->va_list_fpr_size)
            {
              /* When GPRs are saved too, leave room for the FP slots
                 that precede the ones we save (layout keeps absolute
                 positions).  */
              if (gpr_reg_num)
                fpr_size = (next_cum.fregno - FP_ARG_MIN_REG)
                           * UNITS_PER_FP_WORD;
              if (cfun->va_list_fpr_size
                  < FP_ARG_V4_MAX_REG + 1 - next_cum.fregno)
                fpr_size += cfun->va_list_fpr_size * UNITS_PER_FP_WORD;
              else
                fpr_size += (FP_ARG_V4_MAX_REG + 1 - next_cum.fregno)
                            * UNITS_PER_FP_WORD;
            }
          if (gpr_reg_num)
            {
              /* Bias the area start so saved GPRs land at their ABI
                 positions; keep doubleword alignment.  */
              offset = -((first_reg_offset * reg_size) & ~7);
              if (!fpr_size && gpr_reg_num > cfun->va_list_gpr_size)
                {
                  gpr_reg_num = cfun->va_list_gpr_size;
                  if (reg_size == 4 && (first_reg_offset & 1))
                    gpr_reg_num++;
                }
              gpr_size = (gpr_reg_num * reg_size + 7) & ~7;
            }
          else if (fpr_size)
            /* Only FPRs are saved: skip backwards over the unsaved
               GPR block and the earlier FP slots.  */
            offset = - (int) (next_cum.fregno - FP_ARG_MIN_REG)
                       * UNITS_PER_FP_WORD
                     - (int) (GP_ARG_NUM_REG * reg_size);

          if (gpr_size + fpr_size)
            {
              /* Allocate the block and fold any constant displacement
                 of the slot address into OFFSET.  */
              rtx reg_save_area
                = assign_stack_local (BLKmode, gpr_size + fpr_size, 64);
              gcc_assert (GET_CODE (reg_save_area) == MEM);
              reg_save_area = XEXP (reg_save_area, 0);
              if (GET_CODE (reg_save_area) == PLUS)
                {
                  gcc_assert (XEXP (reg_save_area, 0)
                              == virtual_stack_vars_rtx);
                  gcc_assert (GET_CODE (XEXP (reg_save_area, 1)) == CONST_INT);
                  offset += INTVAL (XEXP (reg_save_area, 1));
                }
              else
                gcc_assert (reg_save_area == virtual_stack_vars_rtx);
            }

          cfun->machine->varargs_save_offset = offset;
          save_area = plus_constant (virtual_stack_vars_rtx, offset);
        }
    }
  else
    {
      /* AIX/Darwin: registers are saved in the caller-provided
         parameter save area.  */
      first_reg_offset = next_cum.words;
      save_area = virtual_incoming_args_rtx;

      if (targetm.calls.must_pass_in_stack (mode, type))
        first_reg_offset += rs6000_arg_size (TYPE_MODE (type), type);
    }

  set = get_varargs_alias_set ();
  if (! no_rtl && first_reg_offset < GP_ARG_NUM_REG
      && cfun->va_list_gpr_size)
    {
      int nregs = GP_ARG_NUM_REG - first_reg_offset;

      if (va_list_gpr_counter_field)
        {
          /* V4 va_list_gpr_size counts number of registers needed.  */
          if (nregs > cfun->va_list_gpr_size)
            nregs = cfun->va_list_gpr_size;
        }
      else
        {
          /* char * va_list instead counts number of bytes needed.  */
          if (nregs > cfun->va_list_gpr_size / reg_size)
            nregs = cfun->va_list_gpr_size / reg_size;
        }

      mem = gen_rtx_MEM (BLKmode,
                         plus_constant (save_area,
                                        first_reg_offset * reg_size));
      MEM_NOTRAP_P (mem) = 1;
      set_mem_alias_set (mem, set);
      set_mem_align (mem, BITS_PER_WORD);

      rs6000_move_block_from_reg (GP_ARG_MIN_REG + first_reg_offset, mem,
                                  nregs);
    }

  /* Save FP registers if needed.  */
  if (DEFAULT_ABI == ABI_V4
      && TARGET_HARD_FLOAT && TARGET_FPRS
      && ! no_rtl
      && next_cum.fregno <= FP_ARG_V4_MAX_REG
      && cfun->va_list_fpr_size)
    {
      int fregno = next_cum.fregno, nregs;
      rtx cr1 = gen_rtx_REG (CCmode, CR1_REGNO);
      rtx lab = gen_label_rtx ();
      int off = (GP_ARG_NUM_REG * reg_size) + ((fregno - FP_ARG_MIN_REG)
                                               * UNITS_PER_FP_WORD);

      /* Branch over the FP stores when CR1 says no FP args were
         passed in registers (the marker set via the V.4 call
         cookie -- see function_arg's VOIDmode case).  */
      emit_jump_insn
        (gen_rtx_SET (VOIDmode,
                      pc_rtx,
                      gen_rtx_IF_THEN_ELSE (VOIDmode,
                                            gen_rtx_NE (VOIDmode, cr1,
                                                        const0_rtx),
                                            gen_rtx_LABEL_REF (VOIDmode, lab),
                                            pc_rtx)));

      for (nregs = 0;
           fregno <= FP_ARG_V4_MAX_REG && nregs < cfun->va_list_fpr_size;
           fregno++, off += UNITS_PER_FP_WORD, nregs++)
        {
          mem = gen_rtx_MEM (DFmode, plus_constant (save_area, off));
          MEM_NOTRAP_P (mem) = 1;
          set_mem_alias_set (mem, set);
          set_mem_align (mem, GET_MODE_ALIGNMENT (DFmode));
          emit_move_insn (mem, gen_rtx_REG (DFmode, fregno));
        }

      emit_label (lab);
    }
}
5783
 
5784
/* Create the va_list data type.  */
5785
 
5786
/* Build the type used for va_list on this target.

   For all ABIs other than SVR4 (ABI_V4) the va_list type is simply
   'char *', matching what the AIX system headers expect.

   For ABI_V4, build the record type mandated by the SVR4 PowerPC ABI:

     struct __va_list_tag {
       unsigned char gpr;           /+ next GP arg register to use +/
       unsigned char fpr;           /+ next FP arg register to use +/
       unsigned short reserved;     /+ named padding, see below +/
       void *overflow_arg_area;     /+ args passed on the stack +/
       void *reg_save_area;         /+ saved argument registers +/
     };

   and return it wrapped in a one-element array type, so that va_list
   objects decay to a pointer when passed to functions.  */
static tree
rs6000_build_builtin_va_list (void)
{
  tree f_gpr, f_fpr, f_res, f_ovf, f_sav, record, type_decl;

  /* For AIX, prefer 'char *' because that's what the system
     header files like.  */
  if (DEFAULT_ABI != ABI_V4)
    return build_pointer_type (char_type_node);

  record = (*lang_hooks.types.make_type) (RECORD_TYPE);
  type_decl = build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);

  f_gpr = build_decl (FIELD_DECL, get_identifier ("gpr"),
                      unsigned_char_type_node);
  f_fpr = build_decl (FIELD_DECL, get_identifier ("fpr"),
                      unsigned_char_type_node);
  /* Give the two bytes of padding a name, so that -Wpadded won't warn on
     every user file.  */
  f_res = build_decl (FIELD_DECL, get_identifier ("reserved"),
                      short_unsigned_type_node);
  f_ovf = build_decl (FIELD_DECL, get_identifier ("overflow_arg_area"),
                      ptr_type_node);
  f_sav = build_decl (FIELD_DECL, get_identifier ("reg_save_area"),
                      ptr_type_node);

  /* Record the counter fields so the middle end can track how many
     register slots va_arg actually consumes (cfun->va_list_gpr_size
     et al.).  */
  va_list_gpr_counter_field = f_gpr;
  va_list_fpr_counter_field = f_fpr;

  DECL_FIELD_CONTEXT (f_gpr) = record;
  DECL_FIELD_CONTEXT (f_fpr) = record;
  DECL_FIELD_CONTEXT (f_res) = record;
  DECL_FIELD_CONTEXT (f_ovf) = record;
  DECL_FIELD_CONTEXT (f_sav) = record;

  /* Chain the fields in declaration order; the va_start/va_arg code
     below walks this chain positionally, so the order must match.  */
  TREE_CHAIN (record) = type_decl;
  TYPE_NAME (record) = type_decl;
  TYPE_FIELDS (record) = f_gpr;
  TREE_CHAIN (f_gpr) = f_fpr;
  TREE_CHAIN (f_fpr) = f_res;
  TREE_CHAIN (f_res) = f_ovf;
  TREE_CHAIN (f_ovf) = f_sav;

  layout_type (record);

  /* The correct type is an array type of one element.  */
  return build_array_type (record, build_index_type (size_zero_node));
}
5834
 
5835
/* Implement va_start.  */
5836
 
5837
/* Expand __builtin_va_start for VALIST.

   For non-SVR4 ABIs, defer to the generic char-* implementation.  For
   ABI_V4, initialize the fields of the __va_list_tag record built by
   rs6000_build_builtin_va_list: the gpr/fpr register counters, the
   overflow (stack) argument pointer, and the register save area
   pointer.  NEXTARG is passed through to the generic expander only.  */
void
rs6000_va_start (tree valist, rtx nextarg)
{
  HOST_WIDE_INT words, n_gpr, n_fpr;
  tree f_gpr, f_fpr, f_res, f_ovf, f_sav;
  tree gpr, fpr, ovf, sav, t;

  /* Only SVR4 needs something special.  */
  if (DEFAULT_ABI != ABI_V4)
    {
      std_expand_builtin_va_start (valist, nextarg);
      return;
    }

  /* Walk the va_list record's field chain; the order here must match
     the field order laid down in rs6000_build_builtin_va_list.  */
  f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
  f_fpr = TREE_CHAIN (f_gpr);
  f_res = TREE_CHAIN (f_fpr);
  f_ovf = TREE_CHAIN (f_res);
  f_sav = TREE_CHAIN (f_ovf);

  valist = build_va_arg_indirect_ref (valist);
  gpr = build3 (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr, NULL_TREE);
  fpr = build3 (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr, NULL_TREE);
  ovf = build3 (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf, NULL_TREE);
  sav = build3 (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav, NULL_TREE);

  /* Count number of gp and fp argument registers used.  */
  words = current_function_args_info.words;
  n_gpr = MIN (current_function_args_info.sysv_gregno - GP_ARG_MIN_REG,
               GP_ARG_NUM_REG);
  n_fpr = MIN (current_function_args_info.fregno - FP_ARG_MIN_REG,
               FP_ARG_NUM_REG);

  if (TARGET_DEBUG_ARG)
    fprintf (stderr, "va_start: words = "HOST_WIDE_INT_PRINT_DEC", n_gpr = "
             HOST_WIDE_INT_PRINT_DEC", n_fpr = "HOST_WIDE_INT_PRINT_DEC"\n",
             words, n_gpr, n_fpr);

  /* Initialize the register counters only if some va_arg use actually
     reads them (va_list_gpr_size / va_list_fpr_size are nonzero).  */
  if (cfun->va_list_gpr_size)
    {
      t = build2 (MODIFY_EXPR, TREE_TYPE (gpr), gpr,
                  build_int_cst (NULL_TREE, n_gpr));
      TREE_SIDE_EFFECTS (t) = 1;
      expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
    }

  if (cfun->va_list_fpr_size)
    {
      t = build2 (MODIFY_EXPR, TREE_TYPE (fpr), fpr,
                  build_int_cst (NULL_TREE, n_fpr));
      TREE_SIDE_EFFECTS (t) = 1;
      expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
    }

  /* Find the overflow area: incoming args plus the space already
     consumed by named (non-variadic) words.  */
  t = make_tree (TREE_TYPE (ovf), virtual_incoming_args_rtx);
  if (words != 0)
    t = build2 (PLUS_EXPR, TREE_TYPE (ovf), t,
                build_int_cst (NULL_TREE, words * UNITS_PER_WORD));
  t = build2 (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  /* If there were no va_arg invocations, don't set up the register
     save area.  */
  if (!cfun->va_list_gpr_size
      && !cfun->va_list_fpr_size
      && n_gpr < GP_ARG_NUM_REG
      && n_fpr < FP_ARG_V4_MAX_REG)
    return;

  /* Find the register save area.  */
  t = make_tree (TREE_TYPE (sav), virtual_stack_vars_rtx);
  if (cfun->machine->varargs_save_offset)
    t = build2 (PLUS_EXPR, TREE_TYPE (sav), t,
                build_int_cst (NULL_TREE, cfun->machine->varargs_save_offset));
  t = build2 (MODIFY_EXPR, TREE_TYPE (sav), sav, t);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
}
5917
 
5918
/* Implement va_arg.  */
5919
 
5920
/* Gimplify a va_arg expression: fetch the next variadic argument of
   TYPE from VALIST, appending setup statements to *PRE_P/*POST_P.

   For non-SVR4 ABIs this mostly defers to the generic expander
   (special-casing split complex args).  For ABI_V4 it emits the two-way
   choice mandated by the SVR4 PowerPC ABI: take the argument from the
   register save area if register slots remain, otherwise from the
   overflow (stack) area.  Returns a tree for the argument value.  */
tree
rs6000_gimplify_va_arg (tree valist, tree type, tree *pre_p, tree *post_p)
{
  tree f_gpr, f_fpr, f_res, f_ovf, f_sav;
  tree gpr, fpr, ovf, sav, reg, t, u;
  int size, rsize, n_reg, sav_ofs, sav_scale;
  tree lab_false, lab_over, addr;
  int align;
  tree ptrtype = build_pointer_type (type);

  /* Types passed by reference: fetch the pointer, then dereference.  */
  if (pass_by_reference (NULL, TYPE_MODE (type), type, false))
    {
      t = rs6000_gimplify_va_arg (valist, ptrtype, pre_p, post_p);
      return build_va_arg_indirect_ref (t);
    }

  if (DEFAULT_ABI != ABI_V4)
    {
      if (targetm.calls.split_complex_arg && TREE_CODE (type) == COMPLEX_TYPE)
        {
          tree elem_type = TREE_TYPE (type);
          enum machine_mode elem_mode = TYPE_MODE (elem_type);
          int elem_size = GET_MODE_SIZE (elem_mode);

          /* Sub-word complex parts were passed as two separate
             arguments; fetch real and imaginary parts individually.  */
          if (elem_size < UNITS_PER_WORD)
            {
              tree real_part, imag_part;
              tree post = NULL_TREE;

              real_part = rs6000_gimplify_va_arg (valist, elem_type, pre_p,
                                                  &post);
              /* Copy the value into a temporary, lest the formal temporary
                 be reused out from under us.  */
              real_part = get_initialized_tmp_var (real_part, pre_p, &post);
              append_to_statement_list (post, pre_p);

              imag_part = rs6000_gimplify_va_arg (valist, elem_type, pre_p,
                                                  post_p);

              return build2 (COMPLEX_EXPR, type, real_part, imag_part);
            }
        }

      return std_gimplify_va_arg_expr (valist, type, pre_p, post_p);
    }

  /* ABI_V4: pick apart the __va_list_tag record.  Field order must
     match rs6000_build_builtin_va_list.  */
  f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
  f_fpr = TREE_CHAIN (f_gpr);
  f_res = TREE_CHAIN (f_fpr);
  f_ovf = TREE_CHAIN (f_res);
  f_sav = TREE_CHAIN (f_ovf);

  valist = build_va_arg_indirect_ref (valist);
  gpr = build3 (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr, NULL_TREE);
  fpr = build3 (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr, NULL_TREE);
  ovf = build3 (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf, NULL_TREE);
  sav = build3 (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav, NULL_TREE);

  size = int_size_in_bytes (type);
  rsize = (size + 3) / 4;	/* size in 4-byte (GPR) words, rounded up.  */
  align = 1;

  if (TARGET_HARD_FLOAT && TARGET_FPRS
      && (TYPE_MODE (type) == SFmode
          || TYPE_MODE (type) == DFmode
          || TYPE_MODE (type) == TFmode))
    {
      /* FP args go in FP registers, if present.  */
      reg = fpr;
      n_reg = (size + 7) / 8;	/* number of 8-byte FP register slots.  */
      sav_ofs = 8*4;		/* FP save area follows the 8 4-byte GPRs.  */
      sav_scale = 8;
      if (TYPE_MODE (type) != SFmode)
        align = 8;
    }
  else
    {
      /* Otherwise into GP registers.  */
      reg = gpr;
      n_reg = rsize;
      sav_ofs = 0;
      sav_scale = 4;
      if (n_reg == 2)
        align = 8;
    }

  /* Pull the value out of the saved registers....  */

  lab_over = NULL;
  addr = create_tmp_var (ptr_type_node, "addr");
  DECL_POINTER_ALIAS_SET (addr) = get_varargs_alias_set ();

  /*  AltiVec vectors never go in registers when -mabi=altivec.  */
  if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (TYPE_MODE (type)))
    align = 16;
  else
    {
      lab_false = create_artificial_label ();
      lab_over = create_artificial_label ();

      /* Long long and SPE vectors are aligned in the registers.
         As are any other 2 gpr item such as complex int due to a
         historical mistake.  */
      u = reg;
      if (n_reg == 2 && reg == gpr)
        {
          /* Round the counter up to an even register by adding
             (reg & 1), done as a post-increment so U keeps the
             pre-rounding value for the limit comparison below.  */
          u = build2 (BIT_AND_EXPR, TREE_TYPE (reg), reg,
                     size_int (n_reg - 1));
          u = build2 (POSTINCREMENT_EXPR, TREE_TYPE (reg), reg, u);
        }

      /* If fewer than n_reg register slots remain (counter >= 8-n_reg+1),
         jump to the overflow-area path at lab_false.  */
      t = fold_convert (TREE_TYPE (reg), size_int (8 - n_reg + 1));
      t = build2 (GE_EXPR, boolean_type_node, u, t);
      u = build1 (GOTO_EXPR, void_type_node, lab_false);
      t = build3 (COND_EXPR, void_type_node, t, u, NULL_TREE);
      gimplify_and_add (t, pre_p);

      /* addr = sav + sav_ofs + (reg++ * sav_scale).  */
      t = sav;
      if (sav_ofs)
        t = build2 (PLUS_EXPR, ptr_type_node, sav, size_int (sav_ofs));

      u = build2 (POSTINCREMENT_EXPR, TREE_TYPE (reg), reg, size_int (n_reg));
      u = build1 (CONVERT_EXPR, integer_type_node, u);
      u = build2 (MULT_EXPR, integer_type_node, u, size_int (sav_scale));
      t = build2 (PLUS_EXPR, ptr_type_node, t, u);

      t = build2 (MODIFY_EXPR, void_type_node, addr, t);
      gimplify_and_add (t, pre_p);

      t = build1 (GOTO_EXPR, void_type_node, lab_over);
      gimplify_and_add (t, pre_p);

      t = build1 (LABEL_EXPR, void_type_node, lab_false);
      append_to_statement_list (t, pre_p);

      if ((n_reg == 2 && reg != gpr) || n_reg > 2)
        {
          /* Ensure that we don't find any more args in regs.
             Alignment has taken care of the n_reg == 2 gpr case.  */
          t = build2 (MODIFY_EXPR, TREE_TYPE (reg), reg, size_int (8));
          gimplify_and_add (t, pre_p);
        }
    }

  /* ... otherwise out of the overflow area.  */

  /* Care for on-stack alignment if needed.  */
  t = ovf;
  if (align != 1)
    {
      t = build2 (PLUS_EXPR, TREE_TYPE (t), t, size_int (align - 1));
      t = build2 (BIT_AND_EXPR, TREE_TYPE (t), t,
                  build_int_cst (NULL_TREE, -align));
    }
  gimplify_expr (&t, pre_p, NULL, is_gimple_val, fb_rvalue);

  u = build2 (MODIFY_EXPR, void_type_node, addr, t);
  gimplify_and_add (u, pre_p);

  /* Advance the overflow pointer past this argument.  */
  t = build2 (PLUS_EXPR, TREE_TYPE (t), t, size_int (size));
  t = build2 (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
  gimplify_and_add (t, pre_p);

  if (lab_over)
    {
      t = build1 (LABEL_EXPR, void_type_node, lab_over);
      append_to_statement_list (t, pre_p);
    }

  if (STRICT_ALIGNMENT
      && (TYPE_ALIGN (type)
          > (unsigned) BITS_PER_UNIT * (align < 4 ? 4 : align)))
    {
      /* The value (of type complex double, for example) may not be
         aligned in memory in the saved registers, so copy via a
         temporary.  (This is the same code as used for SPARC.)  */
      tree tmp = create_tmp_var (type, "va_arg_tmp");
      tree dest_addr = build_fold_addr_expr (tmp);

      tree copy = build_function_call_expr
        (implicit_built_in_decls[BUILT_IN_MEMCPY],
         tree_cons (NULL_TREE, dest_addr,
                    tree_cons (NULL_TREE, addr,
                               tree_cons (NULL_TREE, size_int (rsize * 4),
                                          NULL_TREE))));

      gimplify_and_add (copy, pre_p);
      addr = dest_addr;
    }

  addr = fold_convert (ptrtype, addr);
  return build_va_arg_indirect_ref (addr);
}
6113
 
6114
/* Builtins.  */
6115
 
6116
static void
6117
def_builtin (int mask, const char *name, tree type, int code)
6118
{
6119
  if (mask & target_flags)
6120
    {
6121
      if (rs6000_builtin_decls[code])
6122
        abort ();
6123
 
6124
      rs6000_builtin_decls[code] =
6125
        lang_hooks.builtin_function (name, type, code, BUILT_IN_MD,
6126
                                     NULL, NULL_TREE);
6127
    }
6128
}
6129
 
6130
/* Simple ternary operations: VECd = foo (VECa, VECb, VECc).  */
6131
 
6132
/* Table of ternary AltiVec builtins.  Each entry gives the enabling
   target-flag mask, the insn code to expand to, the builtin's source
   name, and its builtin code.  The first group maps directly onto
   AltiVec instructions; the CODE_FOR_nothing group at the end are the
   overloaded __builtin_vec_* names, which are resolved to one of the
   specific entries elsewhere (presumably during overload resolution —
   not visible in this chunk).  */
static const struct builtin_description bdesc_3arg[] =
{
  { MASK_ALTIVEC, CODE_FOR_altivec_vmaddfp, "__builtin_altivec_vmaddfp", ALTIVEC_BUILTIN_VMADDFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmhaddshs, "__builtin_altivec_vmhaddshs", ALTIVEC_BUILTIN_VMHADDSHS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmhraddshs, "__builtin_altivec_vmhraddshs", ALTIVEC_BUILTIN_VMHRADDSHS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmladduhm, "__builtin_altivec_vmladduhm", ALTIVEC_BUILTIN_VMLADDUHM},
  { MASK_ALTIVEC, CODE_FOR_altivec_vmsumubm, "__builtin_altivec_vmsumubm", ALTIVEC_BUILTIN_VMSUMUBM },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmsummbm, "__builtin_altivec_vmsummbm", ALTIVEC_BUILTIN_VMSUMMBM },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhm, "__builtin_altivec_vmsumuhm", ALTIVEC_BUILTIN_VMSUMUHM },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshm, "__builtin_altivec_vmsumshm", ALTIVEC_BUILTIN_VMSUMSHM },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhs, "__builtin_altivec_vmsumuhs", ALTIVEC_BUILTIN_VMSUMUHS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshs, "__builtin_altivec_vmsumshs", ALTIVEC_BUILTIN_VMSUMSHS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vnmsubfp, "__builtin_altivec_vnmsubfp", ALTIVEC_BUILTIN_VNMSUBFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vperm_v4sf, "__builtin_altivec_vperm_4sf", ALTIVEC_BUILTIN_VPERM_4SF },
  { MASK_ALTIVEC, CODE_FOR_altivec_vperm_v4si, "__builtin_altivec_vperm_4si", ALTIVEC_BUILTIN_VPERM_4SI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vperm_v8hi, "__builtin_altivec_vperm_8hi", ALTIVEC_BUILTIN_VPERM_8HI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vperm_v16qi, "__builtin_altivec_vperm_16qi", ALTIVEC_BUILTIN_VPERM_16QI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsel_v4sf, "__builtin_altivec_vsel_4sf", ALTIVEC_BUILTIN_VSEL_4SF },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsel_v4si, "__builtin_altivec_vsel_4si", ALTIVEC_BUILTIN_VSEL_4SI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsel_v8hi, "__builtin_altivec_vsel_8hi", ALTIVEC_BUILTIN_VSEL_8HI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsel_v16qi, "__builtin_altivec_vsel_16qi", ALTIVEC_BUILTIN_VSEL_16QI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_v16qi, "__builtin_altivec_vsldoi_16qi", ALTIVEC_BUILTIN_VSLDOI_16QI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_v8hi, "__builtin_altivec_vsldoi_8hi", ALTIVEC_BUILTIN_VSLDOI_8HI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_v4si, "__builtin_altivec_vsldoi_4si", ALTIVEC_BUILTIN_VSLDOI_4SI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_v4sf, "__builtin_altivec_vsldoi_4sf", ALTIVEC_BUILTIN_VSLDOI_4SF },

  /* Overloaded builtins: no direct insn (CODE_FOR_nothing).  */
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_madd", ALTIVEC_BUILTIN_VEC_MADD },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_madds", ALTIVEC_BUILTIN_VEC_MADDS },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mladd", ALTIVEC_BUILTIN_VEC_MLADD },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mradds", ALTIVEC_BUILTIN_VEC_MRADDS },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_msum", ALTIVEC_BUILTIN_VEC_MSUM },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumshm", ALTIVEC_BUILTIN_VEC_VMSUMSHM },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumuhm", ALTIVEC_BUILTIN_VEC_VMSUMUHM },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsummbm", ALTIVEC_BUILTIN_VEC_VMSUMMBM },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumubm", ALTIVEC_BUILTIN_VEC_VMSUMUBM },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_msums", ALTIVEC_BUILTIN_VEC_MSUMS },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumshs", ALTIVEC_BUILTIN_VEC_VMSUMSHS },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumuhs", ALTIVEC_BUILTIN_VEC_VMSUMUHS },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_nmsub", ALTIVEC_BUILTIN_VEC_NMSUB },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_perm", ALTIVEC_BUILTIN_VEC_PERM },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sel", ALTIVEC_BUILTIN_VEC_SEL },
};
6174
 
6175
/* DST operations: void foo (void *, const int, const char).  */
6176
 
6177
/* Table of AltiVec data-stream touch builtins, all taking
   (void *, const int, const char).  The first group maps onto the
   dst/dstt/dstst/dststt instructions; the CODE_FOR_nothing entries are
   the overloaded __builtin_vec_* names resolved elsewhere.  */
static const struct builtin_description bdesc_dst[] =
{
  { MASK_ALTIVEC, CODE_FOR_altivec_dst, "__builtin_altivec_dst", ALTIVEC_BUILTIN_DST },
  { MASK_ALTIVEC, CODE_FOR_altivec_dstt, "__builtin_altivec_dstt", ALTIVEC_BUILTIN_DSTT },
  { MASK_ALTIVEC, CODE_FOR_altivec_dstst, "__builtin_altivec_dstst", ALTIVEC_BUILTIN_DSTST },
  { MASK_ALTIVEC, CODE_FOR_altivec_dststt, "__builtin_altivec_dststt", ALTIVEC_BUILTIN_DSTSTT },

  /* Overloaded builtins: no direct insn (CODE_FOR_nothing).  */
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_dst", ALTIVEC_BUILTIN_VEC_DST },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_dstt", ALTIVEC_BUILTIN_VEC_DSTT },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_dstst", ALTIVEC_BUILTIN_VEC_DSTST },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_dststt", ALTIVEC_BUILTIN_VEC_DSTSTT }
};
6189
 
6190
/* Simple binary operations: VECc = foo (VECa, VECb).  */
6191
 
6192
static struct builtin_description bdesc_2arg[] =
6193
{
6194
  { MASK_ALTIVEC, CODE_FOR_addv16qi3, "__builtin_altivec_vaddubm", ALTIVEC_BUILTIN_VADDUBM },
6195
  { MASK_ALTIVEC, CODE_FOR_addv8hi3, "__builtin_altivec_vadduhm", ALTIVEC_BUILTIN_VADDUHM },
6196
  { MASK_ALTIVEC, CODE_FOR_addv4si3, "__builtin_altivec_vadduwm", ALTIVEC_BUILTIN_VADDUWM },
6197
  { MASK_ALTIVEC, CODE_FOR_addv4sf3, "__builtin_altivec_vaddfp", ALTIVEC_BUILTIN_VADDFP },
6198
  { MASK_ALTIVEC, CODE_FOR_altivec_vaddcuw, "__builtin_altivec_vaddcuw", ALTIVEC_BUILTIN_VADDCUW },
6199
  { MASK_ALTIVEC, CODE_FOR_altivec_vaddubs, "__builtin_altivec_vaddubs", ALTIVEC_BUILTIN_VADDUBS },
6200
  { MASK_ALTIVEC, CODE_FOR_altivec_vaddsbs, "__builtin_altivec_vaddsbs", ALTIVEC_BUILTIN_VADDSBS },
6201
  { MASK_ALTIVEC, CODE_FOR_altivec_vadduhs, "__builtin_altivec_vadduhs", ALTIVEC_BUILTIN_VADDUHS },
6202
  { MASK_ALTIVEC, CODE_FOR_altivec_vaddshs, "__builtin_altivec_vaddshs", ALTIVEC_BUILTIN_VADDSHS },
6203
  { MASK_ALTIVEC, CODE_FOR_altivec_vadduws, "__builtin_altivec_vadduws", ALTIVEC_BUILTIN_VADDUWS },
6204
  { MASK_ALTIVEC, CODE_FOR_altivec_vaddsws, "__builtin_altivec_vaddsws", ALTIVEC_BUILTIN_VADDSWS },
6205
  { MASK_ALTIVEC, CODE_FOR_andv4si3, "__builtin_altivec_vand", ALTIVEC_BUILTIN_VAND },
6206
  { MASK_ALTIVEC, CODE_FOR_andcv4si3, "__builtin_altivec_vandc", ALTIVEC_BUILTIN_VANDC },
6207
  { MASK_ALTIVEC, CODE_FOR_altivec_vavgub, "__builtin_altivec_vavgub", ALTIVEC_BUILTIN_VAVGUB },
6208
  { MASK_ALTIVEC, CODE_FOR_altivec_vavgsb, "__builtin_altivec_vavgsb", ALTIVEC_BUILTIN_VAVGSB },
6209
  { MASK_ALTIVEC, CODE_FOR_altivec_vavguh, "__builtin_altivec_vavguh", ALTIVEC_BUILTIN_VAVGUH },
6210
  { MASK_ALTIVEC, CODE_FOR_altivec_vavgsh, "__builtin_altivec_vavgsh", ALTIVEC_BUILTIN_VAVGSH },
6211
  { MASK_ALTIVEC, CODE_FOR_altivec_vavguw, "__builtin_altivec_vavguw", ALTIVEC_BUILTIN_VAVGUW },
6212
  { MASK_ALTIVEC, CODE_FOR_altivec_vavgsw, "__builtin_altivec_vavgsw", ALTIVEC_BUILTIN_VAVGSW },
6213
  { MASK_ALTIVEC, CODE_FOR_altivec_vcfux, "__builtin_altivec_vcfux", ALTIVEC_BUILTIN_VCFUX },
6214
  { MASK_ALTIVEC, CODE_FOR_altivec_vcfsx, "__builtin_altivec_vcfsx", ALTIVEC_BUILTIN_VCFSX },
6215
  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpbfp, "__builtin_altivec_vcmpbfp", ALTIVEC_BUILTIN_VCMPBFP },
6216
  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequb, "__builtin_altivec_vcmpequb", ALTIVEC_BUILTIN_VCMPEQUB },
6217
  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequh, "__builtin_altivec_vcmpequh", ALTIVEC_BUILTIN_VCMPEQUH },
6218
  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequw, "__builtin_altivec_vcmpequw", ALTIVEC_BUILTIN_VCMPEQUW },
6219
  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpeqfp, "__builtin_altivec_vcmpeqfp", ALTIVEC_BUILTIN_VCMPEQFP },
6220
  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgefp, "__builtin_altivec_vcmpgefp", ALTIVEC_BUILTIN_VCMPGEFP },
6221
  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtub, "__builtin_altivec_vcmpgtub", ALTIVEC_BUILTIN_VCMPGTUB },
6222
  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsb, "__builtin_altivec_vcmpgtsb", ALTIVEC_BUILTIN_VCMPGTSB },
6223
  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuh, "__builtin_altivec_vcmpgtuh", ALTIVEC_BUILTIN_VCMPGTUH },
6224
  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsh, "__builtin_altivec_vcmpgtsh", ALTIVEC_BUILTIN_VCMPGTSH },
6225
  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuw, "__builtin_altivec_vcmpgtuw", ALTIVEC_BUILTIN_VCMPGTUW },
6226
  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsw, "__builtin_altivec_vcmpgtsw", ALTIVEC_BUILTIN_VCMPGTSW },
6227
  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtfp, "__builtin_altivec_vcmpgtfp", ALTIVEC_BUILTIN_VCMPGTFP },
6228
  { MASK_ALTIVEC, CODE_FOR_altivec_vctsxs, "__builtin_altivec_vctsxs", ALTIVEC_BUILTIN_VCTSXS },
6229
  { MASK_ALTIVEC, CODE_FOR_altivec_vctuxs, "__builtin_altivec_vctuxs", ALTIVEC_BUILTIN_VCTUXS },
6230
  { MASK_ALTIVEC, CODE_FOR_umaxv16qi3, "__builtin_altivec_vmaxub", ALTIVEC_BUILTIN_VMAXUB },
6231
  { MASK_ALTIVEC, CODE_FOR_smaxv16qi3, "__builtin_altivec_vmaxsb", ALTIVEC_BUILTIN_VMAXSB },
6232
  { MASK_ALTIVEC, CODE_FOR_umaxv8hi3, "__builtin_altivec_vmaxuh", ALTIVEC_BUILTIN_VMAXUH },
6233
  { MASK_ALTIVEC, CODE_FOR_smaxv8hi3, "__builtin_altivec_vmaxsh", ALTIVEC_BUILTIN_VMAXSH },
6234
  { MASK_ALTIVEC, CODE_FOR_umaxv4si3, "__builtin_altivec_vmaxuw", ALTIVEC_BUILTIN_VMAXUW },
6235
  { MASK_ALTIVEC, CODE_FOR_smaxv4si3, "__builtin_altivec_vmaxsw", ALTIVEC_BUILTIN_VMAXSW },
6236
  { MASK_ALTIVEC, CODE_FOR_smaxv4sf3, "__builtin_altivec_vmaxfp", ALTIVEC_BUILTIN_VMAXFP },
6237
  { MASK_ALTIVEC, CODE_FOR_altivec_vmrghb, "__builtin_altivec_vmrghb", ALTIVEC_BUILTIN_VMRGHB },
6238
  { MASK_ALTIVEC, CODE_FOR_altivec_vmrghh, "__builtin_altivec_vmrghh", ALTIVEC_BUILTIN_VMRGHH },
6239
  { MASK_ALTIVEC, CODE_FOR_altivec_vmrghw, "__builtin_altivec_vmrghw", ALTIVEC_BUILTIN_VMRGHW },
6240
  { MASK_ALTIVEC, CODE_FOR_altivec_vmrglb, "__builtin_altivec_vmrglb", ALTIVEC_BUILTIN_VMRGLB },
6241
  { MASK_ALTIVEC, CODE_FOR_altivec_vmrglh, "__builtin_altivec_vmrglh", ALTIVEC_BUILTIN_VMRGLH },
6242
  { MASK_ALTIVEC, CODE_FOR_altivec_vmrglw, "__builtin_altivec_vmrglw", ALTIVEC_BUILTIN_VMRGLW },
6243
  { MASK_ALTIVEC, CODE_FOR_uminv16qi3, "__builtin_altivec_vminub", ALTIVEC_BUILTIN_VMINUB },
6244
  { MASK_ALTIVEC, CODE_FOR_sminv16qi3, "__builtin_altivec_vminsb", ALTIVEC_BUILTIN_VMINSB },
6245
  { MASK_ALTIVEC, CODE_FOR_uminv8hi3, "__builtin_altivec_vminuh", ALTIVEC_BUILTIN_VMINUH },
6246
  { MASK_ALTIVEC, CODE_FOR_sminv8hi3, "__builtin_altivec_vminsh", ALTIVEC_BUILTIN_VMINSH },
6247
  { MASK_ALTIVEC, CODE_FOR_uminv4si3, "__builtin_altivec_vminuw", ALTIVEC_BUILTIN_VMINUW },
6248
  { MASK_ALTIVEC, CODE_FOR_sminv4si3, "__builtin_altivec_vminsw", ALTIVEC_BUILTIN_VMINSW },
6249
  { MASK_ALTIVEC, CODE_FOR_sminv4sf3, "__builtin_altivec_vminfp", ALTIVEC_BUILTIN_VMINFP },
6250
  { MASK_ALTIVEC, CODE_FOR_altivec_vmuleub, "__builtin_altivec_vmuleub", ALTIVEC_BUILTIN_VMULEUB },
6251
  { MASK_ALTIVEC, CODE_FOR_altivec_vmulesb, "__builtin_altivec_vmulesb", ALTIVEC_BUILTIN_VMULESB },
6252
  { MASK_ALTIVEC, CODE_FOR_altivec_vmuleuh, "__builtin_altivec_vmuleuh", ALTIVEC_BUILTIN_VMULEUH },
6253
  { MASK_ALTIVEC, CODE_FOR_altivec_vmulesh, "__builtin_altivec_vmulesh", ALTIVEC_BUILTIN_VMULESH },
6254
  { MASK_ALTIVEC, CODE_FOR_altivec_vmuloub, "__builtin_altivec_vmuloub", ALTIVEC_BUILTIN_VMULOUB },
6255
  { MASK_ALTIVEC, CODE_FOR_altivec_vmulosb, "__builtin_altivec_vmulosb", ALTIVEC_BUILTIN_VMULOSB },
6256
  { MASK_ALTIVEC, CODE_FOR_altivec_vmulouh, "__builtin_altivec_vmulouh", ALTIVEC_BUILTIN_VMULOUH },
6257
  { MASK_ALTIVEC, CODE_FOR_altivec_vmulosh, "__builtin_altivec_vmulosh", ALTIVEC_BUILTIN_VMULOSH },
6258
  { MASK_ALTIVEC, CODE_FOR_altivec_norv4si3, "__builtin_altivec_vnor", ALTIVEC_BUILTIN_VNOR },
6259
  { MASK_ALTIVEC, CODE_FOR_iorv4si3, "__builtin_altivec_vor", ALTIVEC_BUILTIN_VOR },
6260
  { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhum, "__builtin_altivec_vpkuhum", ALTIVEC_BUILTIN_VPKUHUM },
6261
  { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwum, "__builtin_altivec_vpkuwum", ALTIVEC_BUILTIN_VPKUWUM },
6262
  { MASK_ALTIVEC, CODE_FOR_altivec_vpkpx, "__builtin_altivec_vpkpx", ALTIVEC_BUILTIN_VPKPX },
6263
  { MASK_ALTIVEC, CODE_FOR_altivec_vpkshss, "__builtin_altivec_vpkshss", ALTIVEC_BUILTIN_VPKSHSS },
6264
  { MASK_ALTIVEC, CODE_FOR_altivec_vpkswss, "__builtin_altivec_vpkswss", ALTIVEC_BUILTIN_VPKSWSS },
6265
  { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhus, "__builtin_altivec_vpkuhus", ALTIVEC_BUILTIN_VPKUHUS },
6266
  { MASK_ALTIVEC, CODE_FOR_altivec_vpkshus, "__builtin_altivec_vpkshus", ALTIVEC_BUILTIN_VPKSHUS },
6267
  { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwus, "__builtin_altivec_vpkuwus", ALTIVEC_BUILTIN_VPKUWUS },
6268
  { MASK_ALTIVEC, CODE_FOR_altivec_vpkswus, "__builtin_altivec_vpkswus", ALTIVEC_BUILTIN_VPKSWUS },
6269
  { MASK_ALTIVEC, CODE_FOR_altivec_vrlb, "__builtin_altivec_vrlb", ALTIVEC_BUILTIN_VRLB },
6270
  { MASK_ALTIVEC, CODE_FOR_altivec_vrlh, "__builtin_altivec_vrlh", ALTIVEC_BUILTIN_VRLH },
6271
  { MASK_ALTIVEC, CODE_FOR_altivec_vrlw, "__builtin_altivec_vrlw", ALTIVEC_BUILTIN_VRLW },
6272
  { MASK_ALTIVEC, CODE_FOR_altivec_vslb, "__builtin_altivec_vslb", ALTIVEC_BUILTIN_VSLB },
6273
  { MASK_ALTIVEC, CODE_FOR_altivec_vslh, "__builtin_altivec_vslh", ALTIVEC_BUILTIN_VSLH },
6274
  { MASK_ALTIVEC, CODE_FOR_altivec_vslw, "__builtin_altivec_vslw", ALTIVEC_BUILTIN_VSLW },
6275
  { MASK_ALTIVEC, CODE_FOR_altivec_vsl, "__builtin_altivec_vsl", ALTIVEC_BUILTIN_VSL },
6276
  { MASK_ALTIVEC, CODE_FOR_altivec_vslo, "__builtin_altivec_vslo", ALTIVEC_BUILTIN_VSLO },
6277
  { MASK_ALTIVEC, CODE_FOR_altivec_vspltb, "__builtin_altivec_vspltb", ALTIVEC_BUILTIN_VSPLTB },
6278
  { MASK_ALTIVEC, CODE_FOR_altivec_vsplth, "__builtin_altivec_vsplth", ALTIVEC_BUILTIN_VSPLTH },
6279
  { MASK_ALTIVEC, CODE_FOR_altivec_vspltw, "__builtin_altivec_vspltw", ALTIVEC_BUILTIN_VSPLTW },
6280
  { MASK_ALTIVEC, CODE_FOR_lshrv16qi3, "__builtin_altivec_vsrb", ALTIVEC_BUILTIN_VSRB },
6281
  { MASK_ALTIVEC, CODE_FOR_lshrv8hi3, "__builtin_altivec_vsrh", ALTIVEC_BUILTIN_VSRH },
6282
  { MASK_ALTIVEC, CODE_FOR_lshrv4si3, "__builtin_altivec_vsrw", ALTIVEC_BUILTIN_VSRW },
6283
  { MASK_ALTIVEC, CODE_FOR_ashrv16qi3, "__builtin_altivec_vsrab", ALTIVEC_BUILTIN_VSRAB },
6284
  { MASK_ALTIVEC, CODE_FOR_ashrv8hi3, "__builtin_altivec_vsrah", ALTIVEC_BUILTIN_VSRAH },
6285
  { MASK_ALTIVEC, CODE_FOR_ashrv4si3, "__builtin_altivec_vsraw", ALTIVEC_BUILTIN_VSRAW },
6286
  { MASK_ALTIVEC, CODE_FOR_altivec_vsr, "__builtin_altivec_vsr", ALTIVEC_BUILTIN_VSR },
6287
  { MASK_ALTIVEC, CODE_FOR_altivec_vsro, "__builtin_altivec_vsro", ALTIVEC_BUILTIN_VSRO },
6288
  { MASK_ALTIVEC, CODE_FOR_subv16qi3, "__builtin_altivec_vsububm", ALTIVEC_BUILTIN_VSUBUBM },
6289
  { MASK_ALTIVEC, CODE_FOR_subv8hi3, "__builtin_altivec_vsubuhm", ALTIVEC_BUILTIN_VSUBUHM },
6290
  { MASK_ALTIVEC, CODE_FOR_subv4si3, "__builtin_altivec_vsubuwm", ALTIVEC_BUILTIN_VSUBUWM },
6291
  { MASK_ALTIVEC, CODE_FOR_subv4sf3, "__builtin_altivec_vsubfp", ALTIVEC_BUILTIN_VSUBFP },
6292
  { MASK_ALTIVEC, CODE_FOR_altivec_vsubcuw, "__builtin_altivec_vsubcuw", ALTIVEC_BUILTIN_VSUBCUW },
6293
  { MASK_ALTIVEC, CODE_FOR_altivec_vsububs, "__builtin_altivec_vsububs", ALTIVEC_BUILTIN_VSUBUBS },
6294
  { MASK_ALTIVEC, CODE_FOR_altivec_vsubsbs, "__builtin_altivec_vsubsbs", ALTIVEC_BUILTIN_VSUBSBS },
6295
  { MASK_ALTIVEC, CODE_FOR_altivec_vsubuhs, "__builtin_altivec_vsubuhs", ALTIVEC_BUILTIN_VSUBUHS },
6296
  { MASK_ALTIVEC, CODE_FOR_altivec_vsubshs, "__builtin_altivec_vsubshs", ALTIVEC_BUILTIN_VSUBSHS },
6297
  { MASK_ALTIVEC, CODE_FOR_altivec_vsubuws, "__builtin_altivec_vsubuws", ALTIVEC_BUILTIN_VSUBUWS },
6298
  { MASK_ALTIVEC, CODE_FOR_altivec_vsubsws, "__builtin_altivec_vsubsws", ALTIVEC_BUILTIN_VSUBSWS },
6299
  { MASK_ALTIVEC, CODE_FOR_altivec_vsum4ubs, "__builtin_altivec_vsum4ubs", ALTIVEC_BUILTIN_VSUM4UBS },
6300
  { MASK_ALTIVEC, CODE_FOR_altivec_vsum4sbs, "__builtin_altivec_vsum4sbs", ALTIVEC_BUILTIN_VSUM4SBS },
6301
  { MASK_ALTIVEC, CODE_FOR_altivec_vsum4shs, "__builtin_altivec_vsum4shs", ALTIVEC_BUILTIN_VSUM4SHS },
6302
  { MASK_ALTIVEC, CODE_FOR_altivec_vsum2sws, "__builtin_altivec_vsum2sws", ALTIVEC_BUILTIN_VSUM2SWS },
6303
  { MASK_ALTIVEC, CODE_FOR_altivec_vsumsws, "__builtin_altivec_vsumsws", ALTIVEC_BUILTIN_VSUMSWS },
6304
  { MASK_ALTIVEC, CODE_FOR_xorv4si3, "__builtin_altivec_vxor", ALTIVEC_BUILTIN_VXOR },
6305
 
6306
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_add", ALTIVEC_BUILTIN_VEC_ADD },
6307
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddfp", ALTIVEC_BUILTIN_VEC_VADDFP },
6308
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vadduwm", ALTIVEC_BUILTIN_VEC_VADDUWM },
6309
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vadduhm", ALTIVEC_BUILTIN_VEC_VADDUHM },
6310
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddubm", ALTIVEC_BUILTIN_VEC_VADDUBM },
6311
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_addc", ALTIVEC_BUILTIN_VEC_ADDC },
6312
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_adds", ALTIVEC_BUILTIN_VEC_ADDS },
6313
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddsws", ALTIVEC_BUILTIN_VEC_VADDSWS },
6314
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vadduws", ALTIVEC_BUILTIN_VEC_VADDUWS },
6315
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddshs", ALTIVEC_BUILTIN_VEC_VADDSHS },
6316
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vadduhs", ALTIVEC_BUILTIN_VEC_VADDUHS },
6317
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddsbs", ALTIVEC_BUILTIN_VEC_VADDSBS },
6318
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddubs", ALTIVEC_BUILTIN_VEC_VADDUBS },
6319
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_and", ALTIVEC_BUILTIN_VEC_AND },
6320
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_andc", ALTIVEC_BUILTIN_VEC_ANDC },
6321
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_avg", ALTIVEC_BUILTIN_VEC_AVG },
6322
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavgsw", ALTIVEC_BUILTIN_VEC_VAVGSW },
6323
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavguw", ALTIVEC_BUILTIN_VEC_VAVGUW },
6324
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavgsh", ALTIVEC_BUILTIN_VEC_VAVGSH },
6325
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavguh", ALTIVEC_BUILTIN_VEC_VAVGUH },
6326
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavgsb", ALTIVEC_BUILTIN_VEC_VAVGSB },
6327
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavgub", ALTIVEC_BUILTIN_VEC_VAVGUB },
6328
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmpb", ALTIVEC_BUILTIN_VEC_CMPB },
6329
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmpeq", ALTIVEC_BUILTIN_VEC_CMPEQ },
6330
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpeqfp", ALTIVEC_BUILTIN_VEC_VCMPEQFP },
6331
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpequw", ALTIVEC_BUILTIN_VEC_VCMPEQUW },
6332
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpequh", ALTIVEC_BUILTIN_VEC_VCMPEQUH },
6333
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpequb", ALTIVEC_BUILTIN_VEC_VCMPEQUB },
6334
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmpge", ALTIVEC_BUILTIN_VEC_CMPGE },
6335
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmpgt", ALTIVEC_BUILTIN_VEC_CMPGT },
6336
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtfp", ALTIVEC_BUILTIN_VEC_VCMPGTFP },
6337
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtsw", ALTIVEC_BUILTIN_VEC_VCMPGTSW },
6338
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtuw", ALTIVEC_BUILTIN_VEC_VCMPGTUW },
6339
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtsh", ALTIVEC_BUILTIN_VEC_VCMPGTSH },
6340
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtuh", ALTIVEC_BUILTIN_VEC_VCMPGTUH },
6341
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtsb", ALTIVEC_BUILTIN_VEC_VCMPGTSB },
6342
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtub", ALTIVEC_BUILTIN_VEC_VCMPGTUB },
6343
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmple", ALTIVEC_BUILTIN_VEC_CMPLE },
6344
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmplt", ALTIVEC_BUILTIN_VEC_CMPLT },
6345
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_max", ALTIVEC_BUILTIN_VEC_MAX },
6346
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxfp", ALTIVEC_BUILTIN_VEC_VMAXFP },
6347
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxsw", ALTIVEC_BUILTIN_VEC_VMAXSW },
6348
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxuw", ALTIVEC_BUILTIN_VEC_VMAXUW },
6349
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxsh", ALTIVEC_BUILTIN_VEC_VMAXSH },
6350
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxuh", ALTIVEC_BUILTIN_VEC_VMAXUH },
6351
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxsb", ALTIVEC_BUILTIN_VEC_VMAXSB },
6352
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxub", ALTIVEC_BUILTIN_VEC_VMAXUB },
6353
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mergeh", ALTIVEC_BUILTIN_VEC_MERGEH },
6354
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrghw", ALTIVEC_BUILTIN_VEC_VMRGHW },
6355
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrghh", ALTIVEC_BUILTIN_VEC_VMRGHH },
6356
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrghb", ALTIVEC_BUILTIN_VEC_VMRGHB },
6357
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mergel", ALTIVEC_BUILTIN_VEC_MERGEL },
6358
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrglw", ALTIVEC_BUILTIN_VEC_VMRGLW },
6359
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrglh", ALTIVEC_BUILTIN_VEC_VMRGLH },
6360
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrglb", ALTIVEC_BUILTIN_VEC_VMRGLB },
6361
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_min", ALTIVEC_BUILTIN_VEC_MIN },
6362
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminfp", ALTIVEC_BUILTIN_VEC_VMINFP },
6363
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminsw", ALTIVEC_BUILTIN_VEC_VMINSW },
6364
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminuw", ALTIVEC_BUILTIN_VEC_VMINUW },
6365
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminsh", ALTIVEC_BUILTIN_VEC_VMINSH },
6366
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminuh", ALTIVEC_BUILTIN_VEC_VMINUH },
6367
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminsb", ALTIVEC_BUILTIN_VEC_VMINSB },
6368
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminub", ALTIVEC_BUILTIN_VEC_VMINUB },
6369
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mule", ALTIVEC_BUILTIN_VEC_MULE },
6370
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmuleub", ALTIVEC_BUILTIN_VEC_VMULEUB },
6371
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulesb", ALTIVEC_BUILTIN_VEC_VMULESB },
6372
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmuleuh", ALTIVEC_BUILTIN_VEC_VMULEUH },
6373
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulesh", ALTIVEC_BUILTIN_VEC_VMULESH },
6374
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mulo", ALTIVEC_BUILTIN_VEC_MULO },
6375
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulosh", ALTIVEC_BUILTIN_VEC_VMULOSH },
6376
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulouh", ALTIVEC_BUILTIN_VEC_VMULOUH },
6377
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulosb", ALTIVEC_BUILTIN_VEC_VMULOSB },
6378
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmuloub", ALTIVEC_BUILTIN_VEC_VMULOUB },
6379
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_nor", ALTIVEC_BUILTIN_VEC_NOR },
6380
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_or", ALTIVEC_BUILTIN_VEC_OR },
6381
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_pack", ALTIVEC_BUILTIN_VEC_PACK },
6382
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkuwum", ALTIVEC_BUILTIN_VEC_VPKUWUM },
6383
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkuhum", ALTIVEC_BUILTIN_VEC_VPKUHUM },
6384
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_packpx", ALTIVEC_BUILTIN_VEC_PACKPX },
6385
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_packs", ALTIVEC_BUILTIN_VEC_PACKS },
6386
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkswss", ALTIVEC_BUILTIN_VEC_VPKSWSS },
6387
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkuwus", ALTIVEC_BUILTIN_VEC_VPKUWUS },
6388
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkshss", ALTIVEC_BUILTIN_VEC_VPKSHSS },
6389
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkuhus", ALTIVEC_BUILTIN_VEC_VPKUHUS },
6390
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_packsu", ALTIVEC_BUILTIN_VEC_PACKSU },
6391
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkswus", ALTIVEC_BUILTIN_VEC_VPKSWUS },
6392
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkshus", ALTIVEC_BUILTIN_VEC_VPKSHUS },
6393
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_rl", ALTIVEC_BUILTIN_VEC_RL },
6394
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vrlw", ALTIVEC_BUILTIN_VEC_VRLW },
6395
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vrlh", ALTIVEC_BUILTIN_VEC_VRLH },
6396
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vrlb", ALTIVEC_BUILTIN_VEC_VRLB },
6397
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sl", ALTIVEC_BUILTIN_VEC_SL },
6398
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vslw", ALTIVEC_BUILTIN_VEC_VSLW },
6399
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vslh", ALTIVEC_BUILTIN_VEC_VSLH },
6400
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vslb", ALTIVEC_BUILTIN_VEC_VSLB },
6401
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sll", ALTIVEC_BUILTIN_VEC_SLL },
6402
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_slo", ALTIVEC_BUILTIN_VEC_SLO },
6403
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sr", ALTIVEC_BUILTIN_VEC_SR },
6404
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrw", ALTIVEC_BUILTIN_VEC_VSRW },
6405
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrh", ALTIVEC_BUILTIN_VEC_VSRH },
6406
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrb", ALTIVEC_BUILTIN_VEC_VSRB },
6407
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sra", ALTIVEC_BUILTIN_VEC_SRA },
6408
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsraw", ALTIVEC_BUILTIN_VEC_VSRAW },
6409
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrah", ALTIVEC_BUILTIN_VEC_VSRAH },
6410
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrab", ALTIVEC_BUILTIN_VEC_VSRAB },
6411
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_srl", ALTIVEC_BUILTIN_VEC_SRL },
6412
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sro", ALTIVEC_BUILTIN_VEC_SRO },
6413
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sub", ALTIVEC_BUILTIN_VEC_SUB },
6414
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubfp", ALTIVEC_BUILTIN_VEC_VSUBFP },
6415
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubuwm", ALTIVEC_BUILTIN_VEC_VSUBUWM },
6416
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubuhm", ALTIVEC_BUILTIN_VEC_VSUBUHM },
6417
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsububm", ALTIVEC_BUILTIN_VEC_VSUBUBM },
6418
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_subc", ALTIVEC_BUILTIN_VEC_SUBC },
6419
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_subs", ALTIVEC_BUILTIN_VEC_SUBS },
6420
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubsws", ALTIVEC_BUILTIN_VEC_VSUBSWS },
6421
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubuws", ALTIVEC_BUILTIN_VEC_VSUBUWS },
6422
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubshs", ALTIVEC_BUILTIN_VEC_VSUBSHS },
6423
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubuhs", ALTIVEC_BUILTIN_VEC_VSUBUHS },
6424
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubsbs", ALTIVEC_BUILTIN_VEC_VSUBSBS },
6425
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsububs", ALTIVEC_BUILTIN_VEC_VSUBUBS },
6426
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sum4s", ALTIVEC_BUILTIN_VEC_SUM4S },
6427
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsum4shs", ALTIVEC_BUILTIN_VEC_VSUM4SHS },
6428
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsum4sbs", ALTIVEC_BUILTIN_VEC_VSUM4SBS },
6429
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsum4ubs", ALTIVEC_BUILTIN_VEC_VSUM4UBS },
6430
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sum2s", ALTIVEC_BUILTIN_VEC_SUM2S },
6431
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sums", ALTIVEC_BUILTIN_VEC_SUMS },
6432
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_xor", ALTIVEC_BUILTIN_VEC_XOR },
6433
 
6434
  /* Place holder, leave as first spe builtin.  */
6435
  { 0, CODE_FOR_spe_evaddw, "__builtin_spe_evaddw", SPE_BUILTIN_EVADDW },
6436
  { 0, CODE_FOR_spe_evand, "__builtin_spe_evand", SPE_BUILTIN_EVAND },
6437
  { 0, CODE_FOR_spe_evandc, "__builtin_spe_evandc", SPE_BUILTIN_EVANDC },
6438
  { 0, CODE_FOR_spe_evdivws, "__builtin_spe_evdivws", SPE_BUILTIN_EVDIVWS },
6439
  { 0, CODE_FOR_spe_evdivwu, "__builtin_spe_evdivwu", SPE_BUILTIN_EVDIVWU },
6440
  { 0, CODE_FOR_spe_eveqv, "__builtin_spe_eveqv", SPE_BUILTIN_EVEQV },
6441
  { 0, CODE_FOR_spe_evfsadd, "__builtin_spe_evfsadd", SPE_BUILTIN_EVFSADD },
6442
  { 0, CODE_FOR_spe_evfsdiv, "__builtin_spe_evfsdiv", SPE_BUILTIN_EVFSDIV },
6443
  { 0, CODE_FOR_spe_evfsmul, "__builtin_spe_evfsmul", SPE_BUILTIN_EVFSMUL },
6444
  { 0, CODE_FOR_spe_evfssub, "__builtin_spe_evfssub", SPE_BUILTIN_EVFSSUB },
6445
  { 0, CODE_FOR_spe_evmergehi, "__builtin_spe_evmergehi", SPE_BUILTIN_EVMERGEHI },
6446
  { 0, CODE_FOR_spe_evmergehilo, "__builtin_spe_evmergehilo", SPE_BUILTIN_EVMERGEHILO },
6447
  { 0, CODE_FOR_spe_evmergelo, "__builtin_spe_evmergelo", SPE_BUILTIN_EVMERGELO },
6448
  { 0, CODE_FOR_spe_evmergelohi, "__builtin_spe_evmergelohi", SPE_BUILTIN_EVMERGELOHI },
6449
  { 0, CODE_FOR_spe_evmhegsmfaa, "__builtin_spe_evmhegsmfaa", SPE_BUILTIN_EVMHEGSMFAA },
6450
  { 0, CODE_FOR_spe_evmhegsmfan, "__builtin_spe_evmhegsmfan", SPE_BUILTIN_EVMHEGSMFAN },
6451
  { 0, CODE_FOR_spe_evmhegsmiaa, "__builtin_spe_evmhegsmiaa", SPE_BUILTIN_EVMHEGSMIAA },
6452
  { 0, CODE_FOR_spe_evmhegsmian, "__builtin_spe_evmhegsmian", SPE_BUILTIN_EVMHEGSMIAN },
6453
  { 0, CODE_FOR_spe_evmhegumiaa, "__builtin_spe_evmhegumiaa", SPE_BUILTIN_EVMHEGUMIAA },
6454
  { 0, CODE_FOR_spe_evmhegumian, "__builtin_spe_evmhegumian", SPE_BUILTIN_EVMHEGUMIAN },
6455
  { 0, CODE_FOR_spe_evmhesmf, "__builtin_spe_evmhesmf", SPE_BUILTIN_EVMHESMF },
6456
  { 0, CODE_FOR_spe_evmhesmfa, "__builtin_spe_evmhesmfa", SPE_BUILTIN_EVMHESMFA },
6457
  { 0, CODE_FOR_spe_evmhesmfaaw, "__builtin_spe_evmhesmfaaw", SPE_BUILTIN_EVMHESMFAAW },
6458
  { 0, CODE_FOR_spe_evmhesmfanw, "__builtin_spe_evmhesmfanw", SPE_BUILTIN_EVMHESMFANW },
6459
  { 0, CODE_FOR_spe_evmhesmi, "__builtin_spe_evmhesmi", SPE_BUILTIN_EVMHESMI },
6460
  { 0, CODE_FOR_spe_evmhesmia, "__builtin_spe_evmhesmia", SPE_BUILTIN_EVMHESMIA },
6461
  { 0, CODE_FOR_spe_evmhesmiaaw, "__builtin_spe_evmhesmiaaw", SPE_BUILTIN_EVMHESMIAAW },
6462
  { 0, CODE_FOR_spe_evmhesmianw, "__builtin_spe_evmhesmianw", SPE_BUILTIN_EVMHESMIANW },
6463
  { 0, CODE_FOR_spe_evmhessf, "__builtin_spe_evmhessf", SPE_BUILTIN_EVMHESSF },
6464
  { 0, CODE_FOR_spe_evmhessfa, "__builtin_spe_evmhessfa", SPE_BUILTIN_EVMHESSFA },
6465
  { 0, CODE_FOR_spe_evmhessfaaw, "__builtin_spe_evmhessfaaw", SPE_BUILTIN_EVMHESSFAAW },
6466
  { 0, CODE_FOR_spe_evmhessfanw, "__builtin_spe_evmhessfanw", SPE_BUILTIN_EVMHESSFANW },
6467
  { 0, CODE_FOR_spe_evmhessiaaw, "__builtin_spe_evmhessiaaw", SPE_BUILTIN_EVMHESSIAAW },
6468
  { 0, CODE_FOR_spe_evmhessianw, "__builtin_spe_evmhessianw", SPE_BUILTIN_EVMHESSIANW },
6469
  { 0, CODE_FOR_spe_evmheumi, "__builtin_spe_evmheumi", SPE_BUILTIN_EVMHEUMI },
6470
  { 0, CODE_FOR_spe_evmheumia, "__builtin_spe_evmheumia", SPE_BUILTIN_EVMHEUMIA },
6471
  { 0, CODE_FOR_spe_evmheumiaaw, "__builtin_spe_evmheumiaaw", SPE_BUILTIN_EVMHEUMIAAW },
6472
  { 0, CODE_FOR_spe_evmheumianw, "__builtin_spe_evmheumianw", SPE_BUILTIN_EVMHEUMIANW },
6473
  { 0, CODE_FOR_spe_evmheusiaaw, "__builtin_spe_evmheusiaaw", SPE_BUILTIN_EVMHEUSIAAW },
6474
  { 0, CODE_FOR_spe_evmheusianw, "__builtin_spe_evmheusianw", SPE_BUILTIN_EVMHEUSIANW },
6475
  { 0, CODE_FOR_spe_evmhogsmfaa, "__builtin_spe_evmhogsmfaa", SPE_BUILTIN_EVMHOGSMFAA },
6476
  { 0, CODE_FOR_spe_evmhogsmfan, "__builtin_spe_evmhogsmfan", SPE_BUILTIN_EVMHOGSMFAN },
6477
  { 0, CODE_FOR_spe_evmhogsmiaa, "__builtin_spe_evmhogsmiaa", SPE_BUILTIN_EVMHOGSMIAA },
6478
  { 0, CODE_FOR_spe_evmhogsmian, "__builtin_spe_evmhogsmian", SPE_BUILTIN_EVMHOGSMIAN },
6479
  { 0, CODE_FOR_spe_evmhogumiaa, "__builtin_spe_evmhogumiaa", SPE_BUILTIN_EVMHOGUMIAA },
6480
  { 0, CODE_FOR_spe_evmhogumian, "__builtin_spe_evmhogumian", SPE_BUILTIN_EVMHOGUMIAN },
6481
  { 0, CODE_FOR_spe_evmhosmf, "__builtin_spe_evmhosmf", SPE_BUILTIN_EVMHOSMF },
6482
  { 0, CODE_FOR_spe_evmhosmfa, "__builtin_spe_evmhosmfa", SPE_BUILTIN_EVMHOSMFA },
6483
  { 0, CODE_FOR_spe_evmhosmfaaw, "__builtin_spe_evmhosmfaaw", SPE_BUILTIN_EVMHOSMFAAW },
6484
  { 0, CODE_FOR_spe_evmhosmfanw, "__builtin_spe_evmhosmfanw", SPE_BUILTIN_EVMHOSMFANW },
6485
  { 0, CODE_FOR_spe_evmhosmi, "__builtin_spe_evmhosmi", SPE_BUILTIN_EVMHOSMI },
6486
  { 0, CODE_FOR_spe_evmhosmia, "__builtin_spe_evmhosmia", SPE_BUILTIN_EVMHOSMIA },
6487
  { 0, CODE_FOR_spe_evmhosmiaaw, "__builtin_spe_evmhosmiaaw", SPE_BUILTIN_EVMHOSMIAAW },
6488
  { 0, CODE_FOR_spe_evmhosmianw, "__builtin_spe_evmhosmianw", SPE_BUILTIN_EVMHOSMIANW },
6489
  { 0, CODE_FOR_spe_evmhossf, "__builtin_spe_evmhossf", SPE_BUILTIN_EVMHOSSF },
6490
  { 0, CODE_FOR_spe_evmhossfa, "__builtin_spe_evmhossfa", SPE_BUILTIN_EVMHOSSFA },
6491
  { 0, CODE_FOR_spe_evmhossfaaw, "__builtin_spe_evmhossfaaw", SPE_BUILTIN_EVMHOSSFAAW },
6492
  { 0, CODE_FOR_spe_evmhossfanw, "__builtin_spe_evmhossfanw", SPE_BUILTIN_EVMHOSSFANW },
6493
  { 0, CODE_FOR_spe_evmhossiaaw, "__builtin_spe_evmhossiaaw", SPE_BUILTIN_EVMHOSSIAAW },
6494
  { 0, CODE_FOR_spe_evmhossianw, "__builtin_spe_evmhossianw", SPE_BUILTIN_EVMHOSSIANW },
6495
  { 0, CODE_FOR_spe_evmhoumi, "__builtin_spe_evmhoumi", SPE_BUILTIN_EVMHOUMI },
6496
  { 0, CODE_FOR_spe_evmhoumia, "__builtin_spe_evmhoumia", SPE_BUILTIN_EVMHOUMIA },
6497
  { 0, CODE_FOR_spe_evmhoumiaaw, "__builtin_spe_evmhoumiaaw", SPE_BUILTIN_EVMHOUMIAAW },
6498
  { 0, CODE_FOR_spe_evmhoumianw, "__builtin_spe_evmhoumianw", SPE_BUILTIN_EVMHOUMIANW },
6499
  { 0, CODE_FOR_spe_evmhousiaaw, "__builtin_spe_evmhousiaaw", SPE_BUILTIN_EVMHOUSIAAW },
6500
  { 0, CODE_FOR_spe_evmhousianw, "__builtin_spe_evmhousianw", SPE_BUILTIN_EVMHOUSIANW },
6501
  { 0, CODE_FOR_spe_evmwhsmf, "__builtin_spe_evmwhsmf", SPE_BUILTIN_EVMWHSMF },
6502
  { 0, CODE_FOR_spe_evmwhsmfa, "__builtin_spe_evmwhsmfa", SPE_BUILTIN_EVMWHSMFA },
6503
  { 0, CODE_FOR_spe_evmwhsmi, "__builtin_spe_evmwhsmi", SPE_BUILTIN_EVMWHSMI },
6504
  { 0, CODE_FOR_spe_evmwhsmia, "__builtin_spe_evmwhsmia", SPE_BUILTIN_EVMWHSMIA },
6505
  { 0, CODE_FOR_spe_evmwhssf, "__builtin_spe_evmwhssf", SPE_BUILTIN_EVMWHSSF },
6506
  { 0, CODE_FOR_spe_evmwhssfa, "__builtin_spe_evmwhssfa", SPE_BUILTIN_EVMWHSSFA },
6507
  { 0, CODE_FOR_spe_evmwhumi, "__builtin_spe_evmwhumi", SPE_BUILTIN_EVMWHUMI },
6508
  { 0, CODE_FOR_spe_evmwhumia, "__builtin_spe_evmwhumia", SPE_BUILTIN_EVMWHUMIA },
6509
  { 0, CODE_FOR_spe_evmwlsmiaaw, "__builtin_spe_evmwlsmiaaw", SPE_BUILTIN_EVMWLSMIAAW },
6510
  { 0, CODE_FOR_spe_evmwlsmianw, "__builtin_spe_evmwlsmianw", SPE_BUILTIN_EVMWLSMIANW },
6511
  { 0, CODE_FOR_spe_evmwlssiaaw, "__builtin_spe_evmwlssiaaw", SPE_BUILTIN_EVMWLSSIAAW },
6512
  { 0, CODE_FOR_spe_evmwlssianw, "__builtin_spe_evmwlssianw", SPE_BUILTIN_EVMWLSSIANW },
6513
  { 0, CODE_FOR_spe_evmwlumi, "__builtin_spe_evmwlumi", SPE_BUILTIN_EVMWLUMI },
6514
  { 0, CODE_FOR_spe_evmwlumia, "__builtin_spe_evmwlumia", SPE_BUILTIN_EVMWLUMIA },
6515
  { 0, CODE_FOR_spe_evmwlumiaaw, "__builtin_spe_evmwlumiaaw", SPE_BUILTIN_EVMWLUMIAAW },
6516
  { 0, CODE_FOR_spe_evmwlumianw, "__builtin_spe_evmwlumianw", SPE_BUILTIN_EVMWLUMIANW },
6517
  { 0, CODE_FOR_spe_evmwlusiaaw, "__builtin_spe_evmwlusiaaw", SPE_BUILTIN_EVMWLUSIAAW },
6518
  { 0, CODE_FOR_spe_evmwlusianw, "__builtin_spe_evmwlusianw", SPE_BUILTIN_EVMWLUSIANW },
6519
  { 0, CODE_FOR_spe_evmwsmf, "__builtin_spe_evmwsmf", SPE_BUILTIN_EVMWSMF },
6520
  { 0, CODE_FOR_spe_evmwsmfa, "__builtin_spe_evmwsmfa", SPE_BUILTIN_EVMWSMFA },
6521
  { 0, CODE_FOR_spe_evmwsmfaa, "__builtin_spe_evmwsmfaa", SPE_BUILTIN_EVMWSMFAA },
6522
  { 0, CODE_FOR_spe_evmwsmfan, "__builtin_spe_evmwsmfan", SPE_BUILTIN_EVMWSMFAN },
6523
  { 0, CODE_FOR_spe_evmwsmi, "__builtin_spe_evmwsmi", SPE_BUILTIN_EVMWSMI },
6524
  { 0, CODE_FOR_spe_evmwsmia, "__builtin_spe_evmwsmia", SPE_BUILTIN_EVMWSMIA },
6525
  { 0, CODE_FOR_spe_evmwsmiaa, "__builtin_spe_evmwsmiaa", SPE_BUILTIN_EVMWSMIAA },
6526
  { 0, CODE_FOR_spe_evmwsmian, "__builtin_spe_evmwsmian", SPE_BUILTIN_EVMWSMIAN },
6527
  { 0, CODE_FOR_spe_evmwssf, "__builtin_spe_evmwssf", SPE_BUILTIN_EVMWSSF },
6528
  { 0, CODE_FOR_spe_evmwssfa, "__builtin_spe_evmwssfa", SPE_BUILTIN_EVMWSSFA },
6529
  { 0, CODE_FOR_spe_evmwssfaa, "__builtin_spe_evmwssfaa", SPE_BUILTIN_EVMWSSFAA },
6530
  { 0, CODE_FOR_spe_evmwssfan, "__builtin_spe_evmwssfan", SPE_BUILTIN_EVMWSSFAN },
6531
  { 0, CODE_FOR_spe_evmwumi, "__builtin_spe_evmwumi", SPE_BUILTIN_EVMWUMI },
6532
  { 0, CODE_FOR_spe_evmwumia, "__builtin_spe_evmwumia", SPE_BUILTIN_EVMWUMIA },
6533
  { 0, CODE_FOR_spe_evmwumiaa, "__builtin_spe_evmwumiaa", SPE_BUILTIN_EVMWUMIAA },
6534
  { 0, CODE_FOR_spe_evmwumian, "__builtin_spe_evmwumian", SPE_BUILTIN_EVMWUMIAN },
6535
  { 0, CODE_FOR_spe_evnand, "__builtin_spe_evnand", SPE_BUILTIN_EVNAND },
6536
  { 0, CODE_FOR_spe_evnor, "__builtin_spe_evnor", SPE_BUILTIN_EVNOR },
6537
  { 0, CODE_FOR_spe_evor, "__builtin_spe_evor", SPE_BUILTIN_EVOR },
6538
  { 0, CODE_FOR_spe_evorc, "__builtin_spe_evorc", SPE_BUILTIN_EVORC },
6539
  { 0, CODE_FOR_spe_evrlw, "__builtin_spe_evrlw", SPE_BUILTIN_EVRLW },
6540
  { 0, CODE_FOR_spe_evslw, "__builtin_spe_evslw", SPE_BUILTIN_EVSLW },
6541
  { 0, CODE_FOR_spe_evsrws, "__builtin_spe_evsrws", SPE_BUILTIN_EVSRWS },
6542
  { 0, CODE_FOR_spe_evsrwu, "__builtin_spe_evsrwu", SPE_BUILTIN_EVSRWU },
6543
  { 0, CODE_FOR_spe_evsubfw, "__builtin_spe_evsubfw", SPE_BUILTIN_EVSUBFW },
6544
 
6545
  /* SPE binary operations expecting a 5-bit unsigned literal.  */
6546
  { 0, CODE_FOR_spe_evaddiw, "__builtin_spe_evaddiw", SPE_BUILTIN_EVADDIW },
6547
 
6548
  { 0, CODE_FOR_spe_evrlwi, "__builtin_spe_evrlwi", SPE_BUILTIN_EVRLWI },
6549
  { 0, CODE_FOR_spe_evslwi, "__builtin_spe_evslwi", SPE_BUILTIN_EVSLWI },
6550
  { 0, CODE_FOR_spe_evsrwis, "__builtin_spe_evsrwis", SPE_BUILTIN_EVSRWIS },
6551
  { 0, CODE_FOR_spe_evsrwiu, "__builtin_spe_evsrwiu", SPE_BUILTIN_EVSRWIU },
6552
  { 0, CODE_FOR_spe_evsubifw, "__builtin_spe_evsubifw", SPE_BUILTIN_EVSUBIFW },
6553
  { 0, CODE_FOR_spe_evmwhssfaa, "__builtin_spe_evmwhssfaa", SPE_BUILTIN_EVMWHSSFAA },
6554
  { 0, CODE_FOR_spe_evmwhssmaa, "__builtin_spe_evmwhssmaa", SPE_BUILTIN_EVMWHSSMAA },
6555
  { 0, CODE_FOR_spe_evmwhsmfaa, "__builtin_spe_evmwhsmfaa", SPE_BUILTIN_EVMWHSMFAA },
6556
  { 0, CODE_FOR_spe_evmwhsmiaa, "__builtin_spe_evmwhsmiaa", SPE_BUILTIN_EVMWHSMIAA },
6557
  { 0, CODE_FOR_spe_evmwhusiaa, "__builtin_spe_evmwhusiaa", SPE_BUILTIN_EVMWHUSIAA },
6558
  { 0, CODE_FOR_spe_evmwhumiaa, "__builtin_spe_evmwhumiaa", SPE_BUILTIN_EVMWHUMIAA },
6559
  { 0, CODE_FOR_spe_evmwhssfan, "__builtin_spe_evmwhssfan", SPE_BUILTIN_EVMWHSSFAN },
6560
  { 0, CODE_FOR_spe_evmwhssian, "__builtin_spe_evmwhssian", SPE_BUILTIN_EVMWHSSIAN },
6561
  { 0, CODE_FOR_spe_evmwhsmfan, "__builtin_spe_evmwhsmfan", SPE_BUILTIN_EVMWHSMFAN },
6562
  { 0, CODE_FOR_spe_evmwhsmian, "__builtin_spe_evmwhsmian", SPE_BUILTIN_EVMWHSMIAN },
6563
  { 0, CODE_FOR_spe_evmwhusian, "__builtin_spe_evmwhusian", SPE_BUILTIN_EVMWHUSIAN },
6564
  { 0, CODE_FOR_spe_evmwhumian, "__builtin_spe_evmwhumian", SPE_BUILTIN_EVMWHUMIAN },
6565
  { 0, CODE_FOR_spe_evmwhgssfaa, "__builtin_spe_evmwhgssfaa", SPE_BUILTIN_EVMWHGSSFAA },
6566
  { 0, CODE_FOR_spe_evmwhgsmfaa, "__builtin_spe_evmwhgsmfaa", SPE_BUILTIN_EVMWHGSMFAA },
6567
  { 0, CODE_FOR_spe_evmwhgsmiaa, "__builtin_spe_evmwhgsmiaa", SPE_BUILTIN_EVMWHGSMIAA },
6568
  { 0, CODE_FOR_spe_evmwhgumiaa, "__builtin_spe_evmwhgumiaa", SPE_BUILTIN_EVMWHGUMIAA },
6569
  { 0, CODE_FOR_spe_evmwhgssfan, "__builtin_spe_evmwhgssfan", SPE_BUILTIN_EVMWHGSSFAN },
6570
  { 0, CODE_FOR_spe_evmwhgsmfan, "__builtin_spe_evmwhgsmfan", SPE_BUILTIN_EVMWHGSMFAN },
6571
  { 0, CODE_FOR_spe_evmwhgsmian, "__builtin_spe_evmwhgsmian", SPE_BUILTIN_EVMWHGSMIAN },
6572
  { 0, CODE_FOR_spe_evmwhgumian, "__builtin_spe_evmwhgumian", SPE_BUILTIN_EVMWHGUMIAN },
6573
  { 0, CODE_FOR_spe_brinc, "__builtin_spe_brinc", SPE_BUILTIN_BRINC },
6574
 
6575
  /* Place-holder.  Leave as last binary SPE builtin.  */
6576
  { 0, CODE_FOR_xorv2si3, "__builtin_spe_evxor", SPE_BUILTIN_EVXOR }
6577
};
 
/* AltiVec predicates.  */

/* Descriptor for an AltiVec predicate builtin (the vec_all_* / vec_any_*
   family).  One table entry maps a source-level __builtin_* name to the
   insn pattern and assembler opcode that implement it.  */
struct builtin_description_predicates
{
  /* Target flag (MASK_*) that must be enabled for this builtin.  */
  const unsigned int mask;
  /* Insn pattern used to emit the comparison; CODE_FOR_nothing-style
     zero entries mark overloaded builtins resolved elsewhere.  */
  const enum insn_code icode;
  /* Assembler mnemonic, passed to the pattern as a SYMBOL_REF (see
     altivec_expand_predicate_builtin).  */
  const char *opcode;
  /* Source-level builtin name.  */
  const char *const name;
  /* Builtin function code identifying this entry.  */
  const enum rs6000_builtins code;
};
6589
 
6590
/* Table of AltiVec predicate builtins.  The '*' prefix on each opcode
   selects the record-form (dot) comparison whose result lands in CR6.
   The trailing entries with a zero icode and NULL opcode are the
   overloaded vec_* forms, resolved to a specific entry during folding.  */
static const struct builtin_description_predicates bdesc_altivec_preds[] =
{
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpbfp.", "__builtin_altivec_vcmpbfp_p", ALTIVEC_BUILTIN_VCMPBFP_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpeqfp.", "__builtin_altivec_vcmpeqfp_p", ALTIVEC_BUILTIN_VCMPEQFP_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgefp.", "__builtin_altivec_vcmpgefp_p", ALTIVEC_BUILTIN_VCMPGEFP_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgtfp.", "__builtin_altivec_vcmpgtfp_p", ALTIVEC_BUILTIN_VCMPGTFP_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpequw.", "__builtin_altivec_vcmpequw_p", ALTIVEC_BUILTIN_VCMPEQUW_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtsw.", "__builtin_altivec_vcmpgtsw_p", ALTIVEC_BUILTIN_VCMPGTSW_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtuw.", "__builtin_altivec_vcmpgtuw_p", ALTIVEC_BUILTIN_VCMPGTUW_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtuh.", "__builtin_altivec_vcmpgtuh_p", ALTIVEC_BUILTIN_VCMPGTUH_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtsh.", "__builtin_altivec_vcmpgtsh_p", ALTIVEC_BUILTIN_VCMPGTSH_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpequh.", "__builtin_altivec_vcmpequh_p", ALTIVEC_BUILTIN_VCMPEQUH_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpequb.", "__builtin_altivec_vcmpequb_p", ALTIVEC_BUILTIN_VCMPEQUB_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtsb.", "__builtin_altivec_vcmpgtsb_p", ALTIVEC_BUILTIN_VCMPGTSB_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtub.", "__builtin_altivec_vcmpgtub_p", ALTIVEC_BUILTIN_VCMPGTUB_P },

  /* Overloaded predicates; no direct insn pattern or opcode.  */
  { MASK_ALTIVEC, 0, NULL, "__builtin_vec_vcmpeq_p", ALTIVEC_BUILTIN_VCMPEQ_P },
  { MASK_ALTIVEC, 0, NULL, "__builtin_vec_vcmpgt_p", ALTIVEC_BUILTIN_VCMPGT_P },
  { MASK_ALTIVEC, 0, NULL, "__builtin_vec_vcmpge_p", ALTIVEC_BUILTIN_VCMPGE_P }
};
6610
 
6611
/* SPE predicates.  */
/* Table of SPE comparison-predicate builtins.  The builtin-expansion
   code iterates this table by range, so the first and last entries must
   remain first and last (see the place-holder comments below).  */
static struct builtin_description bdesc_spe_predicates[] =
{
  /* Place-holder.  Leave as first.  */
  { 0, CODE_FOR_spe_evcmpeq, "__builtin_spe_evcmpeq", SPE_BUILTIN_EVCMPEQ },
  { 0, CODE_FOR_spe_evcmpgts, "__builtin_spe_evcmpgts", SPE_BUILTIN_EVCMPGTS },
  { 0, CODE_FOR_spe_evcmpgtu, "__builtin_spe_evcmpgtu", SPE_BUILTIN_EVCMPGTU },
  { 0, CODE_FOR_spe_evcmplts, "__builtin_spe_evcmplts", SPE_BUILTIN_EVCMPLTS },
  { 0, CODE_FOR_spe_evcmpltu, "__builtin_spe_evcmpltu", SPE_BUILTIN_EVCMPLTU },
  { 0, CODE_FOR_spe_evfscmpeq, "__builtin_spe_evfscmpeq", SPE_BUILTIN_EVFSCMPEQ },
  { 0, CODE_FOR_spe_evfscmpgt, "__builtin_spe_evfscmpgt", SPE_BUILTIN_EVFSCMPGT },
  { 0, CODE_FOR_spe_evfscmplt, "__builtin_spe_evfscmplt", SPE_BUILTIN_EVFSCMPLT },
  { 0, CODE_FOR_spe_evfststeq, "__builtin_spe_evfststeq", SPE_BUILTIN_EVFSTSTEQ },
  { 0, CODE_FOR_spe_evfststgt, "__builtin_spe_evfststgt", SPE_BUILTIN_EVFSTSTGT },
  /* Place-holder.  Leave as last.  */
  { 0, CODE_FOR_spe_evfststlt, "__builtin_spe_evfststlt", SPE_BUILTIN_EVFSTSTLT },
};
6628
 
6629
/* SPE evsel predicates.  */
/* Table of builtins combining an SPE comparison with an evsel select.
   Iterated by range during expansion, so the first and last entries
   must remain in place.  */
static struct builtin_description bdesc_spe_evsel[] =
{
  /* Place-holder.  Leave as first.  */
  { 0, CODE_FOR_spe_evcmpgts, "__builtin_spe_evsel_gts", SPE_BUILTIN_EVSEL_CMPGTS },
  { 0, CODE_FOR_spe_evcmpgtu, "__builtin_spe_evsel_gtu", SPE_BUILTIN_EVSEL_CMPGTU },
  { 0, CODE_FOR_spe_evcmplts, "__builtin_spe_evsel_lts", SPE_BUILTIN_EVSEL_CMPLTS },
  { 0, CODE_FOR_spe_evcmpltu, "__builtin_spe_evsel_ltu", SPE_BUILTIN_EVSEL_CMPLTU },
  { 0, CODE_FOR_spe_evcmpeq, "__builtin_spe_evsel_eq", SPE_BUILTIN_EVSEL_CMPEQ },
  { 0, CODE_FOR_spe_evfscmpgt, "__builtin_spe_evsel_fsgt", SPE_BUILTIN_EVSEL_FSCMPGT },
  { 0, CODE_FOR_spe_evfscmplt, "__builtin_spe_evsel_fslt", SPE_BUILTIN_EVSEL_FSCMPLT },
  { 0, CODE_FOR_spe_evfscmpeq, "__builtin_spe_evsel_fseq", SPE_BUILTIN_EVSEL_FSCMPEQ },
  { 0, CODE_FOR_spe_evfststgt, "__builtin_spe_evsel_fststgt", SPE_BUILTIN_EVSEL_FSTSTGT },
  { 0, CODE_FOR_spe_evfststlt, "__builtin_spe_evsel_fststlt", SPE_BUILTIN_EVSEL_FSTSTLT },
  /* Place-holder.  Leave as last.  */
  { 0, CODE_FOR_spe_evfststeq, "__builtin_spe_evsel_fststeq", SPE_BUILTIN_EVSEL_FSTSTEQ },
};
6646
 
6647
/* ABS* operations.  */

/* Table of AltiVec absolute-value builtins; expanded through
   altivec_expand_abs_builtin, which supplies the scratch operands the
   abs/abss patterns require.  */
static const struct builtin_description bdesc_abs[] =
{
  { MASK_ALTIVEC, CODE_FOR_absv4si2, "__builtin_altivec_abs_v4si", ALTIVEC_BUILTIN_ABS_V4SI },
  { MASK_ALTIVEC, CODE_FOR_absv8hi2, "__builtin_altivec_abs_v8hi", ALTIVEC_BUILTIN_ABS_V8HI },
  { MASK_ALTIVEC, CODE_FOR_absv4sf2, "__builtin_altivec_abs_v4sf", ALTIVEC_BUILTIN_ABS_V4SF },
  { MASK_ALTIVEC, CODE_FOR_absv16qi2, "__builtin_altivec_abs_v16qi", ALTIVEC_BUILTIN_ABS_V16QI },
  /* Saturating variants.  */
  { MASK_ALTIVEC, CODE_FOR_altivec_abss_v4si, "__builtin_altivec_abss_v4si", ALTIVEC_BUILTIN_ABSS_V4SI },
  { MASK_ALTIVEC, CODE_FOR_altivec_abss_v8hi, "__builtin_altivec_abss_v8hi", ALTIVEC_BUILTIN_ABSS_V8HI },
  { MASK_ALTIVEC, CODE_FOR_altivec_abss_v16qi, "__builtin_altivec_abss_v16qi", ALTIVEC_BUILTIN_ABSS_V16QI }
};
6659
 
6660
/* Simple unary operations: VECb = foo (unsigned literal) or VECb =
   foo (VECa).  */

/* Table of one-operand builtins, expanded via
   rs6000_expand_unop_builtin.  The AltiVec entries come first, then
   the overloaded vec_* names (CODE_FOR_nothing; resolved during
   folding), then the SPE entries, whose first and last members are
   significant (see the comments below).  */
static struct builtin_description bdesc_1arg[] =
{
  { MASK_ALTIVEC, CODE_FOR_altivec_vexptefp, "__builtin_altivec_vexptefp", ALTIVEC_BUILTIN_VEXPTEFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vlogefp, "__builtin_altivec_vlogefp", ALTIVEC_BUILTIN_VLOGEFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vrefp, "__builtin_altivec_vrefp", ALTIVEC_BUILTIN_VREFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vrfim, "__builtin_altivec_vrfim", ALTIVEC_BUILTIN_VRFIM },
  { MASK_ALTIVEC, CODE_FOR_altivec_vrfin, "__builtin_altivec_vrfin", ALTIVEC_BUILTIN_VRFIN },
  { MASK_ALTIVEC, CODE_FOR_altivec_vrfip, "__builtin_altivec_vrfip", ALTIVEC_BUILTIN_VRFIP },
  { MASK_ALTIVEC, CODE_FOR_ftruncv4sf2, "__builtin_altivec_vrfiz", ALTIVEC_BUILTIN_VRFIZ },
  { MASK_ALTIVEC, CODE_FOR_altivec_vrsqrtefp, "__builtin_altivec_vrsqrtefp", ALTIVEC_BUILTIN_VRSQRTEFP },
  /* The vsplti* builtins take a 5-bit signed literal, checked in
     rs6000_expand_unop_builtin.  */
  { MASK_ALTIVEC, CODE_FOR_altivec_vspltisb, "__builtin_altivec_vspltisb", ALTIVEC_BUILTIN_VSPLTISB },
  { MASK_ALTIVEC, CODE_FOR_altivec_vspltish, "__builtin_altivec_vspltish", ALTIVEC_BUILTIN_VSPLTISH },
  { MASK_ALTIVEC, CODE_FOR_altivec_vspltisw, "__builtin_altivec_vspltisw", ALTIVEC_BUILTIN_VSPLTISW },
  { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsb, "__builtin_altivec_vupkhsb", ALTIVEC_BUILTIN_VUPKHSB },
  { MASK_ALTIVEC, CODE_FOR_altivec_vupkhpx, "__builtin_altivec_vupkhpx", ALTIVEC_BUILTIN_VUPKHPX },
  { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsh, "__builtin_altivec_vupkhsh", ALTIVEC_BUILTIN_VUPKHSH },
  { MASK_ALTIVEC, CODE_FOR_altivec_vupklsb, "__builtin_altivec_vupklsb", ALTIVEC_BUILTIN_VUPKLSB },
  { MASK_ALTIVEC, CODE_FOR_altivec_vupklpx, "__builtin_altivec_vupklpx", ALTIVEC_BUILTIN_VUPKLPX },
  { MASK_ALTIVEC, CODE_FOR_altivec_vupklsh, "__builtin_altivec_vupklsh", ALTIVEC_BUILTIN_VUPKLSH },

  /* Overloaded vec_* builtins; no direct insn pattern.  */
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_abs", ALTIVEC_BUILTIN_VEC_ABS },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_abss", ALTIVEC_BUILTIN_VEC_ABSS },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_ceil", ALTIVEC_BUILTIN_VEC_CEIL },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_expte", ALTIVEC_BUILTIN_VEC_EXPTE },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_floor", ALTIVEC_BUILTIN_VEC_FLOOR },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_loge", ALTIVEC_BUILTIN_VEC_LOGE },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mtvscr", ALTIVEC_BUILTIN_VEC_MTVSCR },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_re", ALTIVEC_BUILTIN_VEC_RE },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_round", ALTIVEC_BUILTIN_VEC_ROUND },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_rsqrte", ALTIVEC_BUILTIN_VEC_RSQRTE },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_trunc", ALTIVEC_BUILTIN_VEC_TRUNC },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_unpackh", ALTIVEC_BUILTIN_VEC_UNPACKH },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupkhsh", ALTIVEC_BUILTIN_VEC_VUPKHSH },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupkhpx", ALTIVEC_BUILTIN_VEC_VUPKHPX },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupkhsb", ALTIVEC_BUILTIN_VEC_VUPKHSB },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_unpackl", ALTIVEC_BUILTIN_VEC_UNPACKL },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupklpx", ALTIVEC_BUILTIN_VEC_VUPKLPX },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupklsh", ALTIVEC_BUILTIN_VEC_VUPKLSH },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupklsb", ALTIVEC_BUILTIN_VEC_VUPKLSB },

  /* The SPE unary builtins must start with SPE_BUILTIN_EVABS and
     end with SPE_BUILTIN_EVSUBFUSIAAW.  */
  { 0, CODE_FOR_spe_evabs, "__builtin_spe_evabs", SPE_BUILTIN_EVABS },
  { 0, CODE_FOR_spe_evaddsmiaaw, "__builtin_spe_evaddsmiaaw", SPE_BUILTIN_EVADDSMIAAW },
  { 0, CODE_FOR_spe_evaddssiaaw, "__builtin_spe_evaddssiaaw", SPE_BUILTIN_EVADDSSIAAW },
  { 0, CODE_FOR_spe_evaddumiaaw, "__builtin_spe_evaddumiaaw", SPE_BUILTIN_EVADDUMIAAW },
  { 0, CODE_FOR_spe_evaddusiaaw, "__builtin_spe_evaddusiaaw", SPE_BUILTIN_EVADDUSIAAW },
  { 0, CODE_FOR_spe_evcntlsw, "__builtin_spe_evcntlsw", SPE_BUILTIN_EVCNTLSW },
  { 0, CODE_FOR_spe_evcntlzw, "__builtin_spe_evcntlzw", SPE_BUILTIN_EVCNTLZW },
  { 0, CODE_FOR_spe_evextsb, "__builtin_spe_evextsb", SPE_BUILTIN_EVEXTSB },
  { 0, CODE_FOR_spe_evextsh, "__builtin_spe_evextsh", SPE_BUILTIN_EVEXTSH },
  { 0, CODE_FOR_spe_evfsabs, "__builtin_spe_evfsabs", SPE_BUILTIN_EVFSABS },
  { 0, CODE_FOR_spe_evfscfsf, "__builtin_spe_evfscfsf", SPE_BUILTIN_EVFSCFSF },
  { 0, CODE_FOR_spe_evfscfsi, "__builtin_spe_evfscfsi", SPE_BUILTIN_EVFSCFSI },
  { 0, CODE_FOR_spe_evfscfuf, "__builtin_spe_evfscfuf", SPE_BUILTIN_EVFSCFUF },
  { 0, CODE_FOR_spe_evfscfui, "__builtin_spe_evfscfui", SPE_BUILTIN_EVFSCFUI },
  { 0, CODE_FOR_spe_evfsctsf, "__builtin_spe_evfsctsf", SPE_BUILTIN_EVFSCTSF },
  { 0, CODE_FOR_spe_evfsctsi, "__builtin_spe_evfsctsi", SPE_BUILTIN_EVFSCTSI },
  { 0, CODE_FOR_spe_evfsctsiz, "__builtin_spe_evfsctsiz", SPE_BUILTIN_EVFSCTSIZ },
  { 0, CODE_FOR_spe_evfsctuf, "__builtin_spe_evfsctuf", SPE_BUILTIN_EVFSCTUF },
  { 0, CODE_FOR_spe_evfsctui, "__builtin_spe_evfsctui", SPE_BUILTIN_EVFSCTUI },
  { 0, CODE_FOR_spe_evfsctuiz, "__builtin_spe_evfsctuiz", SPE_BUILTIN_EVFSCTUIZ },
  { 0, CODE_FOR_spe_evfsnabs, "__builtin_spe_evfsnabs", SPE_BUILTIN_EVFSNABS },
  { 0, CODE_FOR_spe_evfsneg, "__builtin_spe_evfsneg", SPE_BUILTIN_EVFSNEG },
  { 0, CODE_FOR_spe_evmra, "__builtin_spe_evmra", SPE_BUILTIN_EVMRA },
  { 0, CODE_FOR_negv2si2, "__builtin_spe_evneg", SPE_BUILTIN_EVNEG },
  { 0, CODE_FOR_spe_evrndw, "__builtin_spe_evrndw", SPE_BUILTIN_EVRNDW },
  { 0, CODE_FOR_spe_evsubfsmiaaw, "__builtin_spe_evsubfsmiaaw", SPE_BUILTIN_EVSUBFSMIAAW },
  { 0, CODE_FOR_spe_evsubfssiaaw, "__builtin_spe_evsubfssiaaw", SPE_BUILTIN_EVSUBFSSIAAW },
  { 0, CODE_FOR_spe_evsubfumiaaw, "__builtin_spe_evsubfumiaaw", SPE_BUILTIN_EVSUBFUMIAAW },

  /* Place-holder.  Leave as last unary SPE builtin.  */
  { 0, CODE_FOR_spe_evsubfusiaaw, "__builtin_spe_evsubfusiaaw", SPE_BUILTIN_EVSUBFUSIAAW }
};
6737
 
6738
static rtx
6739
rs6000_expand_unop_builtin (enum insn_code icode, tree arglist, rtx target)
6740
{
6741
  rtx pat;
6742
  tree arg0 = TREE_VALUE (arglist);
6743
  rtx op0 = expand_normal (arg0);
6744
  enum machine_mode tmode = insn_data[icode].operand[0].mode;
6745
  enum machine_mode mode0 = insn_data[icode].operand[1].mode;
6746
 
6747
  if (icode == CODE_FOR_nothing)
6748
    /* Builtin not supported on this processor.  */
6749
    return 0;
6750
 
6751
  /* If we got invalid arguments bail out before generating bad rtl.  */
6752
  if (arg0 == error_mark_node)
6753
    return const0_rtx;
6754
 
6755
  if (icode == CODE_FOR_altivec_vspltisb
6756
      || icode == CODE_FOR_altivec_vspltish
6757
      || icode == CODE_FOR_altivec_vspltisw
6758
      || icode == CODE_FOR_spe_evsplatfi
6759
      || icode == CODE_FOR_spe_evsplati)
6760
    {
6761
      /* Only allow 5-bit *signed* literals.  */
6762
      if (GET_CODE (op0) != CONST_INT
6763
          || INTVAL (op0) > 15
6764
          || INTVAL (op0) < -16)
6765
        {
6766
          error ("argument 1 must be a 5-bit signed literal");
6767
          return const0_rtx;
6768
        }
6769
    }
6770
 
6771
  if (target == 0
6772
      || GET_MODE (target) != tmode
6773
      || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
6774
    target = gen_reg_rtx (tmode);
6775
 
6776
  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
6777
    op0 = copy_to_mode_reg (mode0, op0);
6778
 
6779
  pat = GEN_FCN (icode) (target, op0);
6780
  if (! pat)
6781
    return 0;
6782
  emit_insn (pat);
6783
 
6784
  return target;
6785
}
6786
 
6787
static rtx
6788
altivec_expand_abs_builtin (enum insn_code icode, tree arglist, rtx target)
6789
{
6790
  rtx pat, scratch1, scratch2;
6791
  tree arg0 = TREE_VALUE (arglist);
6792
  rtx op0 = expand_normal (arg0);
6793
  enum machine_mode tmode = insn_data[icode].operand[0].mode;
6794
  enum machine_mode mode0 = insn_data[icode].operand[1].mode;
6795
 
6796
  /* If we have invalid arguments, bail out before generating bad rtl.  */
6797
  if (arg0 == error_mark_node)
6798
    return const0_rtx;
6799
 
6800
  if (target == 0
6801
      || GET_MODE (target) != tmode
6802
      || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
6803
    target = gen_reg_rtx (tmode);
6804
 
6805
  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
6806
    op0 = copy_to_mode_reg (mode0, op0);
6807
 
6808
  scratch1 = gen_reg_rtx (mode0);
6809
  scratch2 = gen_reg_rtx (mode0);
6810
 
6811
  pat = GEN_FCN (icode) (target, op0, scratch1, scratch2);
6812
  if (! pat)
6813
    return 0;
6814
  emit_insn (pat);
6815
 
6816
  return target;
6817
}
6818
 
6819
static rtx
6820
rs6000_expand_binop_builtin (enum insn_code icode, tree arglist, rtx target)
6821
{
6822
  rtx pat;
6823
  tree arg0 = TREE_VALUE (arglist);
6824
  tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
6825
  rtx op0 = expand_normal (arg0);
6826
  rtx op1 = expand_normal (arg1);
6827
  enum machine_mode tmode = insn_data[icode].operand[0].mode;
6828
  enum machine_mode mode0 = insn_data[icode].operand[1].mode;
6829
  enum machine_mode mode1 = insn_data[icode].operand[2].mode;
6830
 
6831
  if (icode == CODE_FOR_nothing)
6832
    /* Builtin not supported on this processor.  */
6833
    return 0;
6834
 
6835
  /* If we got invalid arguments bail out before generating bad rtl.  */
6836
  if (arg0 == error_mark_node || arg1 == error_mark_node)
6837
    return const0_rtx;
6838
 
6839
  if (icode == CODE_FOR_altivec_vcfux
6840
      || icode == CODE_FOR_altivec_vcfsx
6841
      || icode == CODE_FOR_altivec_vctsxs
6842
      || icode == CODE_FOR_altivec_vctuxs
6843
      || icode == CODE_FOR_altivec_vspltb
6844
      || icode == CODE_FOR_altivec_vsplth
6845
      || icode == CODE_FOR_altivec_vspltw
6846
      || icode == CODE_FOR_spe_evaddiw
6847
      || icode == CODE_FOR_spe_evldd
6848
      || icode == CODE_FOR_spe_evldh
6849
      || icode == CODE_FOR_spe_evldw
6850
      || icode == CODE_FOR_spe_evlhhesplat
6851
      || icode == CODE_FOR_spe_evlhhossplat
6852
      || icode == CODE_FOR_spe_evlhhousplat
6853
      || icode == CODE_FOR_spe_evlwhe
6854
      || icode == CODE_FOR_spe_evlwhos
6855
      || icode == CODE_FOR_spe_evlwhou
6856
      || icode == CODE_FOR_spe_evlwhsplat
6857
      || icode == CODE_FOR_spe_evlwwsplat
6858
      || icode == CODE_FOR_spe_evrlwi
6859
      || icode == CODE_FOR_spe_evslwi
6860
      || icode == CODE_FOR_spe_evsrwis
6861
      || icode == CODE_FOR_spe_evsubifw
6862
      || icode == CODE_FOR_spe_evsrwiu)
6863
    {
6864
      /* Only allow 5-bit unsigned literals.  */
6865
      STRIP_NOPS (arg1);
6866
      if (TREE_CODE (arg1) != INTEGER_CST
6867
          || TREE_INT_CST_LOW (arg1) & ~0x1f)
6868
        {
6869
          error ("argument 2 must be a 5-bit unsigned literal");
6870
          return const0_rtx;
6871
        }
6872
    }
6873
 
6874
  if (target == 0
6875
      || GET_MODE (target) != tmode
6876
      || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
6877
    target = gen_reg_rtx (tmode);
6878
 
6879
  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
6880
    op0 = copy_to_mode_reg (mode0, op0);
6881
  if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
6882
    op1 = copy_to_mode_reg (mode1, op1);
6883
 
6884
  pat = GEN_FCN (icode) (target, op0, op1);
6885
  if (! pat)
6886
    return 0;
6887
  emit_insn (pat);
6888
 
6889
  return target;
6890
}
6891
 
6892
/* Expand an AltiVec predicate builtin (vec_all_* / vec_any_*).
   ICODE is the comparison pattern, OPCODE the record-form assembler
   mnemonic to emit (passed to the pattern as a SYMBOL_REF).  ARGLIST
   is (cr6_form, vecA, vecB) where cr6_form selects which CR6 bit
   combination the caller wants (0..3).  TARGET receives the 0/1 result
   in SImode.  Returns TARGET, 0 on pattern failure, or const0_rtx
   after diagnosing bad arguments.  */
static rtx
altivec_expand_predicate_builtin (enum insn_code icode, const char *opcode,
                                  tree arglist, rtx target)
{
  rtx pat, scratch;
  tree cr6_form = TREE_VALUE (arglist);
  tree arg0 = TREE_VALUE (TREE_CHAIN (arglist));
  tree arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
  rtx op0 = expand_normal (arg0);
  rtx op1 = expand_normal (arg1);
  /* The predicate result is always a scalar int, regardless of the
     vector modes being compared.  */
  enum machine_mode tmode = SImode;
  enum machine_mode mode0 = insn_data[icode].operand[1].mode;
  enum machine_mode mode1 = insn_data[icode].operand[2].mode;
  int cr6_form_int;

  /* The CR6 selector must be a compile-time constant.  */
  if (TREE_CODE (cr6_form) != INTEGER_CST)
    {
      error ("argument 1 of __builtin_altivec_predicate must be a constant");
      return const0_rtx;
    }
  else
    cr6_form_int = TREE_INT_CST_LOW (cr6_form);

  gcc_assert (mode0 == mode1);

  /* If we have invalid arguments, bail out before generating bad rtl.  */
  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return const0_rtx;

  if (target == 0
      || GET_MODE (target) != tmode
      || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
    target = gen_reg_rtx (tmode);

  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
    op0 = copy_to_mode_reg (mode0, op0);
  if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
    op1 = copy_to_mode_reg (mode1, op1);

  /* The vector comparison result itself is discarded; only the CR6
     side effect is used.  */
  scratch = gen_reg_rtx (mode0);

  pat = GEN_FCN (icode) (scratch, op0, op1,
                         gen_rtx_SYMBOL_REF (Pmode, opcode));
  if (! pat)
    return 0;
  emit_insn (pat);

  /* The vec_any* and vec_all* predicates use the same opcodes for two
     different operations, but the bits in CR6 will be different
     depending on what information we want.  So we have to play tricks
     with CR6 to get the right bits out.

     If you think this is disgusting, look at the specs for the
     AltiVec predicates.  */

  switch (cr6_form_int)
    {
    case 0:
      emit_insn (gen_cr6_test_for_zero (target));
      break;
    case 1:
      emit_insn (gen_cr6_test_for_zero_reverse (target));
      break;
    case 2:
      emit_insn (gen_cr6_test_for_lt (target));
      break;
    case 3:
      emit_insn (gen_cr6_test_for_lt_reverse (target));
      break;
    default:
      error ("argument 1 of __builtin_altivec_predicate is out of range");
      break;
    }

  return target;
}
6968
 
6969
static rtx
6970
altivec_expand_lv_builtin (enum insn_code icode, tree arglist, rtx target)
6971
{
6972
  rtx pat, addr;
6973
  tree arg0 = TREE_VALUE (arglist);
6974
  tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
6975
  enum machine_mode tmode = insn_data[icode].operand[0].mode;
6976
  enum machine_mode mode0 = Pmode;
6977
  enum machine_mode mode1 = Pmode;
6978
  rtx op0 = expand_normal (arg0);
6979
  rtx op1 = expand_normal (arg1);
6980
 
6981
  if (icode == CODE_FOR_nothing)
6982
    /* Builtin not supported on this processor.  */
6983
    return 0;
6984
 
6985
  /* If we got invalid arguments bail out before generating bad rtl.  */
6986
  if (arg0 == error_mark_node || arg1 == error_mark_node)
6987
    return const0_rtx;
6988
 
6989
  if (target == 0
6990
      || GET_MODE (target) != tmode
6991
      || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
6992
    target = gen_reg_rtx (tmode);
6993
 
6994
  op1 = copy_to_mode_reg (mode1, op1);
6995
 
6996
  if (op0 == const0_rtx)
6997
    {
6998
      addr = gen_rtx_MEM (tmode, op1);
6999
    }
7000
  else
7001
    {
7002
      op0 = copy_to_mode_reg (mode0, op0);
7003
      addr = gen_rtx_MEM (tmode, gen_rtx_PLUS (Pmode, op0, op1));
7004
    }
7005
 
7006
  pat = GEN_FCN (icode) (target, addr);
7007
 
7008
  if (! pat)
7009
    return 0;
7010
  emit_insn (pat);
7011
 
7012
  return target;
7013
}
7014
 
7015
static rtx
7016
spe_expand_stv_builtin (enum insn_code icode, tree arglist)
7017
{
7018
  tree arg0 = TREE_VALUE (arglist);
7019
  tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
7020
  tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
7021
  rtx op0 = expand_normal (arg0);
7022
  rtx op1 = expand_normal (arg1);
7023
  rtx op2 = expand_normal (arg2);
7024
  rtx pat;
7025
  enum machine_mode mode0 = insn_data[icode].operand[0].mode;
7026
  enum machine_mode mode1 = insn_data[icode].operand[1].mode;
7027
  enum machine_mode mode2 = insn_data[icode].operand[2].mode;
7028
 
7029
  /* Invalid arguments.  Bail before doing anything stoopid!  */
7030
  if (arg0 == error_mark_node
7031
      || arg1 == error_mark_node
7032
      || arg2 == error_mark_node)
7033
    return const0_rtx;
7034
 
7035
  if (! (*insn_data[icode].operand[2].predicate) (op0, mode2))
7036
    op0 = copy_to_mode_reg (mode2, op0);
7037
  if (! (*insn_data[icode].operand[0].predicate) (op1, mode0))
7038
    op1 = copy_to_mode_reg (mode0, op1);
7039
  if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
7040
    op2 = copy_to_mode_reg (mode1, op2);
7041
 
7042
  pat = GEN_FCN (icode) (op1, op2, op0);
7043
  if (pat)
7044
    emit_insn (pat);
7045
  return NULL_RTX;
7046
}
7047
 
7048
static rtx
7049
altivec_expand_stv_builtin (enum insn_code icode, tree arglist)
7050
{
7051
  tree arg0 = TREE_VALUE (arglist);
7052
  tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
7053
  tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
7054
  rtx op0 = expand_normal (arg0);
7055
  rtx op1 = expand_normal (arg1);
7056
  rtx op2 = expand_normal (arg2);
7057
  rtx pat, addr;
7058
  enum machine_mode tmode = insn_data[icode].operand[0].mode;
7059
  enum machine_mode mode1 = Pmode;
7060
  enum machine_mode mode2 = Pmode;
7061
 
7062
  /* Invalid arguments.  Bail before doing anything stoopid!  */
7063
  if (arg0 == error_mark_node
7064
      || arg1 == error_mark_node
7065
      || arg2 == error_mark_node)
7066
    return const0_rtx;
7067
 
7068
  if (! (*insn_data[icode].operand[1].predicate) (op0, tmode))
7069
    op0 = copy_to_mode_reg (tmode, op0);
7070
 
7071
  op2 = copy_to_mode_reg (mode2, op2);
7072
 
7073
  if (op1 == const0_rtx)
7074
    {
7075
      addr = gen_rtx_MEM (tmode, op2);
7076
    }
7077
  else
7078
    {
7079
      op1 = copy_to_mode_reg (mode1, op1);
7080
      addr = gen_rtx_MEM (tmode, gen_rtx_PLUS (Pmode, op1, op2));
7081
    }
7082
 
7083
  pat = GEN_FCN (icode) (addr, op0);
7084
  if (pat)
7085
    emit_insn (pat);
7086
  return NULL_RTX;
7087
}
7088
 
7089
static rtx
7090
rs6000_expand_ternop_builtin (enum insn_code icode, tree arglist, rtx target)
7091
{
7092
  rtx pat;
7093
  tree arg0 = TREE_VALUE (arglist);
7094
  tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
7095
  tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
7096
  rtx op0 = expand_normal (arg0);
7097
  rtx op1 = expand_normal (arg1);
7098
  rtx op2 = expand_normal (arg2);
7099
  enum machine_mode tmode = insn_data[icode].operand[0].mode;
7100
  enum machine_mode mode0 = insn_data[icode].operand[1].mode;
7101
  enum machine_mode mode1 = insn_data[icode].operand[2].mode;
7102
  enum machine_mode mode2 = insn_data[icode].operand[3].mode;
7103
 
7104
  if (icode == CODE_FOR_nothing)
7105
    /* Builtin not supported on this processor.  */
7106
    return 0;
7107
 
7108
  /* If we got invalid arguments bail out before generating bad rtl.  */
7109
  if (arg0 == error_mark_node
7110
      || arg1 == error_mark_node
7111
      || arg2 == error_mark_node)
7112
    return const0_rtx;
7113
 
7114
  if (icode == CODE_FOR_altivec_vsldoi_v4sf
7115
      || icode == CODE_FOR_altivec_vsldoi_v4si
7116
      || icode == CODE_FOR_altivec_vsldoi_v8hi
7117
      || icode == CODE_FOR_altivec_vsldoi_v16qi)
7118
    {
7119
      /* Only allow 4-bit unsigned literals.  */
7120
      STRIP_NOPS (arg2);
7121
      if (TREE_CODE (arg2) != INTEGER_CST
7122
          || TREE_INT_CST_LOW (arg2) & ~0xf)
7123
        {
7124
          error ("argument 3 must be a 4-bit unsigned literal");
7125
          return const0_rtx;
7126
        }
7127
    }
7128
 
7129
  if (target == 0
7130
      || GET_MODE (target) != tmode
7131
      || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7132
    target = gen_reg_rtx (tmode);
7133
 
7134
  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
7135
    op0 = copy_to_mode_reg (mode0, op0);
7136
  if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
7137
    op1 = copy_to_mode_reg (mode1, op1);
7138
  if (! (*insn_data[icode].operand[3].predicate) (op2, mode2))
7139
    op2 = copy_to_mode_reg (mode2, op2);
7140
 
7141
  pat = GEN_FCN (icode) (target, op0, op1, op2);
7142
  if (! pat)
7143
    return 0;
7144
  emit_insn (pat);
7145
 
7146
  return target;
7147
}
7148
 
7149
/* Expand the lvx builtins.  */
7150
static rtx
7151
altivec_expand_ld_builtin (tree exp, rtx target, bool *expandedp)
7152
{
7153
  tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7154
  tree arglist = TREE_OPERAND (exp, 1);
7155
  unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
7156
  tree arg0;
7157
  enum machine_mode tmode, mode0;
7158
  rtx pat, op0;
7159
  enum insn_code icode;
7160
 
7161
  switch (fcode)
7162
    {
7163
    case ALTIVEC_BUILTIN_LD_INTERNAL_16qi:
7164
      icode = CODE_FOR_altivec_lvx_v16qi;
7165
      break;
7166
    case ALTIVEC_BUILTIN_LD_INTERNAL_8hi:
7167
      icode = CODE_FOR_altivec_lvx_v8hi;
7168
      break;
7169
    case ALTIVEC_BUILTIN_LD_INTERNAL_4si:
7170
      icode = CODE_FOR_altivec_lvx_v4si;
7171
      break;
7172
    case ALTIVEC_BUILTIN_LD_INTERNAL_4sf:
7173
      icode = CODE_FOR_altivec_lvx_v4sf;
7174
      break;
7175
    default:
7176
      *expandedp = false;
7177
      return NULL_RTX;
7178
    }
7179
 
7180
  *expandedp = true;
7181
 
7182
  arg0 = TREE_VALUE (arglist);
7183
  op0 = expand_normal (arg0);
7184
  tmode = insn_data[icode].operand[0].mode;
7185
  mode0 = insn_data[icode].operand[1].mode;
7186
 
7187
  if (target == 0
7188
      || GET_MODE (target) != tmode
7189
      || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7190
    target = gen_reg_rtx (tmode);
7191
 
7192
  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
7193
    op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
7194
 
7195
  pat = GEN_FCN (icode) (target, op0);
7196
  if (! pat)
7197
    return 0;
7198
  emit_insn (pat);
7199
  return target;
7200
}
7201
 
7202
/* Expand the stvx builtins.  */
7203
static rtx
7204
altivec_expand_st_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
7205
                           bool *expandedp)
7206
{
7207
  tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7208
  tree arglist = TREE_OPERAND (exp, 1);
7209
  unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
7210
  tree arg0, arg1;
7211
  enum machine_mode mode0, mode1;
7212
  rtx pat, op0, op1;
7213
  enum insn_code icode;
7214
 
7215
  switch (fcode)
7216
    {
7217
    case ALTIVEC_BUILTIN_ST_INTERNAL_16qi:
7218
      icode = CODE_FOR_altivec_stvx_v16qi;
7219
      break;
7220
    case ALTIVEC_BUILTIN_ST_INTERNAL_8hi:
7221
      icode = CODE_FOR_altivec_stvx_v8hi;
7222
      break;
7223
    case ALTIVEC_BUILTIN_ST_INTERNAL_4si:
7224
      icode = CODE_FOR_altivec_stvx_v4si;
7225
      break;
7226
    case ALTIVEC_BUILTIN_ST_INTERNAL_4sf:
7227
      icode = CODE_FOR_altivec_stvx_v4sf;
7228
      break;
7229
    default:
7230
      *expandedp = false;
7231
      return NULL_RTX;
7232
    }
7233
 
7234
  arg0 = TREE_VALUE (arglist);
7235
  arg1 = TREE_VALUE (TREE_CHAIN (arglist));
7236
  op0 = expand_normal (arg0);
7237
  op1 = expand_normal (arg1);
7238
  mode0 = insn_data[icode].operand[0].mode;
7239
  mode1 = insn_data[icode].operand[1].mode;
7240
 
7241
  if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
7242
    op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
7243
  if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
7244
    op1 = copy_to_mode_reg (mode1, op1);
7245
 
7246
  pat = GEN_FCN (icode) (op0, op1);
7247
  if (pat)
7248
    emit_insn (pat);
7249
 
7250
  *expandedp = true;
7251
  return NULL_RTX;
7252
}
7253
 
7254
/* Expand the dst builtins.  */
/* EXP is the CALL_EXPR for a data-stream touch builtin (dst/dstt/
   dstst/dststt).  Scans bdesc_dst for a matching builtin code; on a
   match sets *EXPANDEDP, validates the 2-bit stream-selector literal,
   emits the insn, and returns.  Always returns NULL_RTX or const0_rtx:
   these builtins produce no value.  */
static rtx
altivec_expand_dst_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
                            bool *expandedp)
{
  tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
  tree arglist = TREE_OPERAND (exp, 1);
  unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
  tree arg0, arg1, arg2;
  enum machine_mode mode0, mode1, mode2;
  rtx pat, op0, op1, op2;
  struct builtin_description *d;
  size_t i;

  *expandedp = false;

  /* Handle DST variants.  */
  d = (struct builtin_description *) bdesc_dst;
  for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
    if (d->code == fcode)
      {
        arg0 = TREE_VALUE (arglist);
        arg1 = TREE_VALUE (TREE_CHAIN (arglist));
        arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
        op0 = expand_normal (arg0);
        op1 = expand_normal (arg1);
        op2 = expand_normal (arg2);
        mode0 = insn_data[d->icode].operand[0].mode;
        mode1 = insn_data[d->icode].operand[1].mode;
        mode2 = insn_data[d->icode].operand[2].mode;

        /* Invalid arguments, bail out before generating bad rtl.  */
        if (arg0 == error_mark_node
            || arg1 == error_mark_node
            || arg2 == error_mark_node)
          return const0_rtx;

        *expandedp = true;
        STRIP_NOPS (arg2);
        /* The stream selector must be a 2-bit unsigned literal.  */
        if (TREE_CODE (arg2) != INTEGER_CST
            || TREE_INT_CST_LOW (arg2) & ~0x3)
          {
            error ("argument to %qs must be a 2-bit unsigned literal", d->name);
            return const0_rtx;
          }

        if (! (*insn_data[d->icode].operand[0].predicate) (op0, mode0))
          op0 = copy_to_mode_reg (Pmode, op0);
        if (! (*insn_data[d->icode].operand[1].predicate) (op1, mode1))
          op1 = copy_to_mode_reg (mode1, op1);
        /* op2 is the validated literal and is used directly; mode2 is
           unused beyond its initialization above.  */

        pat = GEN_FCN (d->icode) (op0, op1, op2);
        if (pat != 0)
          emit_insn (pat);

        return NULL_RTX;
      }

  return NULL_RTX;
}
7314
 
7315
/* Expand vec_init builtin.  Build a CONST-like PARALLEL of the argument
   rtxes and hand it to rs6000_expand_vector_init, which chooses the
   actual instruction sequence.  Returns the register holding the
   initialized vector.  */
static rtx
altivec_expand_vec_init_builtin (tree type, tree arglist, rtx target)
{
  enum machine_mode tmode = TYPE_MODE (type);
  enum machine_mode inner_mode = GET_MODE_INNER (tmode);
  int i, n_elt = GET_MODE_NUNITS (tmode);
  rtvec v = rtvec_alloc (n_elt);

  gcc_assert (VECTOR_MODE_P (tmode));

  /* One argument per vector element; coerce each to the element mode.  */
  for (i = 0; i < n_elt; ++i, arglist = TREE_CHAIN (arglist))
    {
      rtx x = expand_normal (TREE_VALUE (arglist));
      RTVEC_ELT (v, i) = gen_lowpart (inner_mode, x);
    }

  /* The front end guarantees exactly n_elt arguments.  */
  gcc_assert (arglist == NULL);

  if (!target || !register_operand (target, tmode))
    target = gen_reg_rtx (tmode);

  rs6000_expand_vector_init (target, gen_rtx_PARALLEL (tmode, v));
  return target;
}
7340
 
7341
/* Return the integer constant in ARG.  Constrain it to be in the range
7342
   of the subparts of VEC_TYPE; issue an error if not.  */
7343
 
7344
static int
7345
get_element_number (tree vec_type, tree arg)
7346
{
7347
  unsigned HOST_WIDE_INT elt, max = TYPE_VECTOR_SUBPARTS (vec_type) - 1;
7348
 
7349
  if (!host_integerp (arg, 1)
7350
      || (elt = tree_low_cst (arg, 1), elt > max))
7351
    {
7352
      error ("selector must be an integer constant in the range 0..%wi", max);
7353
      return 0;
7354
    }
7355
 
7356
  return elt;
7357
}
7358
 
7359
/* Expand vec_set builtin: store scalar ARG1 into element ARG2 of
   vector ARG0 and return the updated vector (in a register).  */
static rtx
altivec_expand_vec_set_builtin (tree arglist)
{
  enum machine_mode tmode, mode1;
  tree arg0, arg1, arg2;
  int elt;
  rtx op0, op1;

  arg0 = TREE_VALUE (arglist);                           /* the vector */
  arg1 = TREE_VALUE (TREE_CHAIN (arglist));              /* the new element */
  arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))); /* element index */

  tmode = TYPE_MODE (TREE_TYPE (arg0));
  mode1 = TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0)));
  gcc_assert (VECTOR_MODE_P (tmode));

  op0 = expand_expr (arg0, NULL_RTX, tmode, 0);
  op1 = expand_expr (arg1, NULL_RTX, mode1, 0);
  /* Emits an error and yields 0 if ARG2 is not a valid constant index.  */
  elt = get_element_number (TREE_TYPE (arg0), arg2);

  if (GET_MODE (op1) != mode1 && GET_MODE (op1) != VOIDmode)
    op1 = convert_modes (mode1, GET_MODE (op1), op1, true);

  op0 = force_reg (tmode, op0);
  op1 = force_reg (mode1, op1);

  rs6000_expand_vector_set (op0, op1, elt);

  return op0;
}
7390
 
7391
/* Expand vec_ext builtin: extract element ARG1 from vector ARG0
   and return it in TARGET (or a fresh register).  */
static rtx
altivec_expand_vec_ext_builtin (tree arglist, rtx target)
{
  enum machine_mode tmode, mode0;
  tree arg0, arg1;
  int elt;
  rtx op0;

  arg0 = TREE_VALUE (arglist);              /* the vector */
  arg1 = TREE_VALUE (TREE_CHAIN (arglist)); /* element index */

  op0 = expand_normal (arg0);
  /* Emits an error and yields 0 if ARG1 is not a valid constant index.  */
  elt = get_element_number (TREE_TYPE (arg0), arg1);

  tmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0)));
  mode0 = TYPE_MODE (TREE_TYPE (arg0));
  gcc_assert (VECTOR_MODE_P (mode0));

  op0 = force_reg (mode0, op0);

  if (optimize || !target || !register_operand (target, tmode))
    target = gen_reg_rtx (tmode);

  rs6000_expand_vector_extract (target, op0, elt);

  return target;
}
7419
 
7420
/* Expand the builtin in EXP and store the result in TARGET.  Store
   true in *EXPANDEDP if we found a builtin to expand.

   Dispatch order matters: overload diagnostics first, then the ld/st/dst
   helper expanders (each sets *EXPANDEDP itself), then the explicit
   switch cases, then the abs and predicate tables, then the LV* loads.
   If nothing matches, *EXPANDEDP is reset to false and NULL_RTX
   returned so the caller can try other builtin families.  */
static rtx
altivec_expand_builtin (tree exp, rtx target, bool *expandedp)
{
  struct builtin_description *d;
  struct builtin_description_predicates *dp;
  size_t i;
  enum insn_code icode;
  tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
  tree arglist = TREE_OPERAND (exp, 1);
  tree arg0;
  rtx op0, pat;
  enum machine_mode tmode, mode0;
  unsigned int fcode = DECL_FUNCTION_CODE (fndecl);

  /* Overloaded (generic "vec_*") builtins should have been resolved to a
     specific variant by the front end; reaching here is a user error.  */
  if (fcode >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
      && fcode <= ALTIVEC_BUILTIN_OVERLOADED_LAST)
    {
      *expandedp = true;
      error ("unresolved overload for Altivec builtin %qF", fndecl);
      return const0_rtx;
    }

  target = altivec_expand_ld_builtin (exp, target, expandedp);
  if (*expandedp)
    return target;

  target = altivec_expand_st_builtin (exp, target, expandedp);
  if (*expandedp)
    return target;

  target = altivec_expand_dst_builtin (exp, target, expandedp);
  if (*expandedp)
    return target;

  *expandedp = true;

  switch (fcode)
    {
    case ALTIVEC_BUILTIN_STVX:
      return altivec_expand_stv_builtin (CODE_FOR_altivec_stvx, arglist);
    case ALTIVEC_BUILTIN_STVEBX:
      return altivec_expand_stv_builtin (CODE_FOR_altivec_stvebx, arglist);
    case ALTIVEC_BUILTIN_STVEHX:
      return altivec_expand_stv_builtin (CODE_FOR_altivec_stvehx, arglist);
    case ALTIVEC_BUILTIN_STVEWX:
      return altivec_expand_stv_builtin (CODE_FOR_altivec_stvewx, arglist);
    case ALTIVEC_BUILTIN_STVXL:
      return altivec_expand_stv_builtin (CODE_FOR_altivec_stvxl, arglist);

    case ALTIVEC_BUILTIN_MFVSCR:
      /* Move-from-VSCR: no operands, result goes to TARGET.  */
      icode = CODE_FOR_altivec_mfvscr;
      tmode = insn_data[icode].operand[0].mode;

      if (target == 0
	  || GET_MODE (target) != tmode
	  || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
	target = gen_reg_rtx (tmode);

      pat = GEN_FCN (icode) (target);
      if (! pat)
	return 0;
      emit_insn (pat);
      return target;

    case ALTIVEC_BUILTIN_MTVSCR:
      /* Move-to-VSCR: one input operand, no result.  */
      icode = CODE_FOR_altivec_mtvscr;
      arg0 = TREE_VALUE (arglist);
      op0 = expand_normal (arg0);
      mode0 = insn_data[icode].operand[0].mode;

      /* If we got invalid arguments bail out before generating bad rtl.  */
      if (arg0 == error_mark_node)
	return const0_rtx;

      if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
	op0 = copy_to_mode_reg (mode0, op0);

      pat = GEN_FCN (icode) (op0);
      if (pat)
	emit_insn (pat);
      return NULL_RTX;

    case ALTIVEC_BUILTIN_DSSALL:
      emit_insn (gen_altivec_dssall ());
      return NULL_RTX;

    case ALTIVEC_BUILTIN_DSS:
      /* dss takes a 2-bit unsigned literal stream id.  */
      icode = CODE_FOR_altivec_dss;
      arg0 = TREE_VALUE (arglist);
      STRIP_NOPS (arg0);
      op0 = expand_normal (arg0);
      mode0 = insn_data[icode].operand[0].mode;

      /* If we got invalid arguments bail out before generating bad rtl.  */
      if (arg0 == error_mark_node)
	return const0_rtx;

      if (TREE_CODE (arg0) != INTEGER_CST
	  || TREE_INT_CST_LOW (arg0) & ~0x3)
	{
	  error ("argument to dss must be a 2-bit unsigned literal");
	  return const0_rtx;
	}

      if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
	op0 = copy_to_mode_reg (mode0, op0);

      emit_insn (gen_altivec_dss (op0));
      return NULL_RTX;

    case ALTIVEC_BUILTIN_VEC_INIT_V4SI:
    case ALTIVEC_BUILTIN_VEC_INIT_V8HI:
    case ALTIVEC_BUILTIN_VEC_INIT_V16QI:
    case ALTIVEC_BUILTIN_VEC_INIT_V4SF:
      return altivec_expand_vec_init_builtin (TREE_TYPE (exp), arglist, target);

    case ALTIVEC_BUILTIN_VEC_SET_V4SI:
    case ALTIVEC_BUILTIN_VEC_SET_V8HI:
    case ALTIVEC_BUILTIN_VEC_SET_V16QI:
    case ALTIVEC_BUILTIN_VEC_SET_V4SF:
      return altivec_expand_vec_set_builtin (arglist);

    case ALTIVEC_BUILTIN_VEC_EXT_V4SI:
    case ALTIVEC_BUILTIN_VEC_EXT_V8HI:
    case ALTIVEC_BUILTIN_VEC_EXT_V16QI:
    case ALTIVEC_BUILTIN_VEC_EXT_V4SF:
      return altivec_expand_vec_ext_builtin (arglist, target);

    default:
      break;
      /* Fall through.  */
    }

  /* Expand abs* operations.  */
  d = (struct builtin_description *) bdesc_abs;
  for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
    if (d->code == fcode)
      return altivec_expand_abs_builtin (d->icode, arglist, target);

  /* Expand the AltiVec predicates.  */
  dp = (struct builtin_description_predicates *) bdesc_altivec_preds;
  for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
    if (dp->code == fcode)
      return altivec_expand_predicate_builtin (dp->icode, dp->opcode,
					       arglist, target);

  /* LV* are funky.  We initialized them differently.  */
  switch (fcode)
    {
    case ALTIVEC_BUILTIN_LVSL:
      return altivec_expand_lv_builtin (CODE_FOR_altivec_lvsl,
					arglist, target);
    case ALTIVEC_BUILTIN_LVSR:
      return altivec_expand_lv_builtin (CODE_FOR_altivec_lvsr,
					arglist, target);
    case ALTIVEC_BUILTIN_LVEBX:
      return altivec_expand_lv_builtin (CODE_FOR_altivec_lvebx,
					arglist, target);
    case ALTIVEC_BUILTIN_LVEHX:
      return altivec_expand_lv_builtin (CODE_FOR_altivec_lvehx,
					arglist, target);
    case ALTIVEC_BUILTIN_LVEWX:
      return altivec_expand_lv_builtin (CODE_FOR_altivec_lvewx,
					arglist, target);
    case ALTIVEC_BUILTIN_LVXL:
      return altivec_expand_lv_builtin (CODE_FOR_altivec_lvxl,
					arglist, target);
    case ALTIVEC_BUILTIN_LVX:
      return altivec_expand_lv_builtin (CODE_FOR_altivec_lvx,
					arglist, target);
    default:
      break;
      /* Fall through.  */
    }

  /* Not an AltiVec builtin after all.  */
  *expandedp = false;
  return NULL_RTX;
}
7600
 
7601
/* Binops that need to be initialized manually, but can be expanded
   automagically by rs6000_expand_binop_builtin.
   Each entry is { mask, icode, name, builtin-code }; the mask field is
   filled in later (see enable_mask_for_builtins).  */
static struct builtin_description bdesc_2arg_spe[] =
{
  { 0, CODE_FOR_spe_evlddx, "__builtin_spe_evlddx", SPE_BUILTIN_EVLDDX },
  { 0, CODE_FOR_spe_evldwx, "__builtin_spe_evldwx", SPE_BUILTIN_EVLDWX },
  { 0, CODE_FOR_spe_evldhx, "__builtin_spe_evldhx", SPE_BUILTIN_EVLDHX },
  { 0, CODE_FOR_spe_evlwhex, "__builtin_spe_evlwhex", SPE_BUILTIN_EVLWHEX },
  { 0, CODE_FOR_spe_evlwhoux, "__builtin_spe_evlwhoux", SPE_BUILTIN_EVLWHOUX },
  { 0, CODE_FOR_spe_evlwhosx, "__builtin_spe_evlwhosx", SPE_BUILTIN_EVLWHOSX },
  { 0, CODE_FOR_spe_evlwwsplatx, "__builtin_spe_evlwwsplatx", SPE_BUILTIN_EVLWWSPLATX },
  { 0, CODE_FOR_spe_evlwhsplatx, "__builtin_spe_evlwhsplatx", SPE_BUILTIN_EVLWHSPLATX },
  { 0, CODE_FOR_spe_evlhhesplatx, "__builtin_spe_evlhhesplatx", SPE_BUILTIN_EVLHHESPLATX },
  { 0, CODE_FOR_spe_evlhhousplatx, "__builtin_spe_evlhhousplatx", SPE_BUILTIN_EVLHHOUSPLATX },
  { 0, CODE_FOR_spe_evlhhossplatx, "__builtin_spe_evlhhossplatx", SPE_BUILTIN_EVLHHOSSPLATX },
  { 0, CODE_FOR_spe_evldd, "__builtin_spe_evldd", SPE_BUILTIN_EVLDD },
  { 0, CODE_FOR_spe_evldw, "__builtin_spe_evldw", SPE_BUILTIN_EVLDW },
  { 0, CODE_FOR_spe_evldh, "__builtin_spe_evldh", SPE_BUILTIN_EVLDH },
  { 0, CODE_FOR_spe_evlwhe, "__builtin_spe_evlwhe", SPE_BUILTIN_EVLWHE },
  { 0, CODE_FOR_spe_evlwhou, "__builtin_spe_evlwhou", SPE_BUILTIN_EVLWHOU },
  { 0, CODE_FOR_spe_evlwhos, "__builtin_spe_evlwhos", SPE_BUILTIN_EVLWHOS },
  { 0, CODE_FOR_spe_evlwwsplat, "__builtin_spe_evlwwsplat", SPE_BUILTIN_EVLWWSPLAT },
  { 0, CODE_FOR_spe_evlwhsplat, "__builtin_spe_evlwhsplat", SPE_BUILTIN_EVLWHSPLAT },
  { 0, CODE_FOR_spe_evlhhesplat, "__builtin_spe_evlhhesplat", SPE_BUILTIN_EVLHHESPLAT },
  { 0, CODE_FOR_spe_evlhhousplat, "__builtin_spe_evlhhousplat", SPE_BUILTIN_EVLHHOUSPLAT },
  { 0, CODE_FOR_spe_evlhhossplat, "__builtin_spe_evlhhossplat", SPE_BUILTIN_EVLHHOSSPLAT }
};
7628
 
7629
/* Expand the builtin in EXP and store the result in TARGET.  Store
   true in *EXPANDEDP if we found a builtin to expand.

   This expands the SPE builtins that are not simple unary and binary
   operations.  If the builtin is not handled here, *EXPANDEDP is reset
   to false and NULL_RTX returned.  */
static rtx
spe_expand_builtin (tree exp, rtx target, bool *expandedp)
{
  tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
  tree arglist = TREE_OPERAND (exp, 1);
  tree arg1, arg0;
  unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
  enum insn_code icode;
  enum machine_mode tmode, mode0;
  rtx pat, op0;
  struct builtin_description *d;
  size_t i;

  *expandedp = true;

  /* Syntax check for a 5-bit unsigned immediate.  */
  switch (fcode)
    {
    case SPE_BUILTIN_EVSTDD:
    case SPE_BUILTIN_EVSTDH:
    case SPE_BUILTIN_EVSTDW:
    case SPE_BUILTIN_EVSTWHE:
    case SPE_BUILTIN_EVSTWHO:
    case SPE_BUILTIN_EVSTWWE:
    case SPE_BUILTIN_EVSTWWO:
      /* The third argument is the offset immediate.  */
      arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
      if (TREE_CODE (arg1) != INTEGER_CST
	  || TREE_INT_CST_LOW (arg1) & ~0x1f)
	{
	  error ("argument 2 must be a 5-bit unsigned literal");
	  return const0_rtx;
	}
      break;
    default:
      break;
    }

  /* The evsplat*i instructions are not quite generic.  */
  switch (fcode)
    {
    case SPE_BUILTIN_EVSPLATFI:
      return rs6000_expand_unop_builtin (CODE_FOR_spe_evsplatfi,
					 arglist, target);
    case SPE_BUILTIN_EVSPLATI:
      return rs6000_expand_unop_builtin (CODE_FOR_spe_evsplati,
					 arglist, target);
    default:
      break;
    }

  /* Generic two-operand SPE builtins.  */
  d = (struct builtin_description *) bdesc_2arg_spe;
  for (i = 0; i < ARRAY_SIZE (bdesc_2arg_spe); ++i, ++d)
    if (d->code == fcode)
      return rs6000_expand_binop_builtin (d->icode, arglist, target);

  /* SPE predicates.  */
  d = (struct builtin_description *) bdesc_spe_predicates;
  for (i = 0; i < ARRAY_SIZE (bdesc_spe_predicates); ++i, ++d)
    if (d->code == fcode)
      return spe_expand_predicate_builtin (d->icode, arglist, target);

  /* SPE evsel (select) builtins.  */
  d = (struct builtin_description *) bdesc_spe_evsel;
  for (i = 0; i < ARRAY_SIZE (bdesc_spe_evsel); ++i, ++d)
    if (d->code == fcode)
      return spe_expand_evsel_builtin (d->icode, arglist, target);

  switch (fcode)
    {
    case SPE_BUILTIN_EVSTDDX:
      return spe_expand_stv_builtin (CODE_FOR_spe_evstddx, arglist);
    case SPE_BUILTIN_EVSTDHX:
      return spe_expand_stv_builtin (CODE_FOR_spe_evstdhx, arglist);
    case SPE_BUILTIN_EVSTDWX:
      return spe_expand_stv_builtin (CODE_FOR_spe_evstdwx, arglist);
    case SPE_BUILTIN_EVSTWHEX:
      return spe_expand_stv_builtin (CODE_FOR_spe_evstwhex, arglist);
    case SPE_BUILTIN_EVSTWHOX:
      return spe_expand_stv_builtin (CODE_FOR_spe_evstwhox, arglist);
    case SPE_BUILTIN_EVSTWWEX:
      return spe_expand_stv_builtin (CODE_FOR_spe_evstwwex, arglist);
    case SPE_BUILTIN_EVSTWWOX:
      return spe_expand_stv_builtin (CODE_FOR_spe_evstwwox, arglist);
    case SPE_BUILTIN_EVSTDD:
      return spe_expand_stv_builtin (CODE_FOR_spe_evstdd, arglist);
    case SPE_BUILTIN_EVSTDH:
      return spe_expand_stv_builtin (CODE_FOR_spe_evstdh, arglist);
    case SPE_BUILTIN_EVSTDW:
      return spe_expand_stv_builtin (CODE_FOR_spe_evstdw, arglist);
    case SPE_BUILTIN_EVSTWHE:
      return spe_expand_stv_builtin (CODE_FOR_spe_evstwhe, arglist);
    case SPE_BUILTIN_EVSTWHO:
      return spe_expand_stv_builtin (CODE_FOR_spe_evstwho, arglist);
    case SPE_BUILTIN_EVSTWWE:
      return spe_expand_stv_builtin (CODE_FOR_spe_evstwwe, arglist);
    case SPE_BUILTIN_EVSTWWO:
      return spe_expand_stv_builtin (CODE_FOR_spe_evstwwo, arglist);
    case SPE_BUILTIN_MFSPEFSCR:
      /* Move-from-SPEFSCR: no operands, result goes to TARGET.  */
      icode = CODE_FOR_spe_mfspefscr;
      tmode = insn_data[icode].operand[0].mode;

      if (target == 0
	  || GET_MODE (target) != tmode
	  || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
	target = gen_reg_rtx (tmode);

      pat = GEN_FCN (icode) (target);
      if (! pat)
	return 0;
      emit_insn (pat);
      return target;
    case SPE_BUILTIN_MTSPEFSCR:
      /* Move-to-SPEFSCR: one input operand, no result.  */
      icode = CODE_FOR_spe_mtspefscr;
      arg0 = TREE_VALUE (arglist);
      op0 = expand_normal (arg0);
      mode0 = insn_data[icode].operand[0].mode;

      if (arg0 == error_mark_node)
	return const0_rtx;

      if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
	op0 = copy_to_mode_reg (mode0, op0);

      pat = GEN_FCN (icode) (op0);
      if (pat)
	emit_insn (pat);
      return NULL_RTX;
    default:
      break;
    }

  /* Not an SPE builtin handled here.  */
  *expandedp = false;
  return NULL_RTX;
}
7766
 
7767
/* Expand an SPE predicate builtin.  ARGLIST is (form, a, b) where FORM
   selects the any/all/upper/lower variant and A, B are the vectors being
   compared.  ICODE is the comparison pattern; the result (0 or 1) is
   returned in TARGET as an SImode value.  */
static rtx
spe_expand_predicate_builtin (enum insn_code icode, tree arglist, rtx target)
{
  rtx pat, scratch, tmp;
  tree form = TREE_VALUE (arglist);
  tree arg0 = TREE_VALUE (TREE_CHAIN (arglist));
  tree arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
  rtx op0 = expand_normal (arg0);
  rtx op1 = expand_normal (arg1);
  enum machine_mode mode0 = insn_data[icode].operand[1].mode;
  enum machine_mode mode1 = insn_data[icode].operand[2].mode;
  int form_int;
  enum rtx_code code;

  if (TREE_CODE (form) != INTEGER_CST)
    {
      error ("argument 1 of __builtin_spe_predicate must be a constant");
      return const0_rtx;
    }
  else
    form_int = TREE_INT_CST_LOW (form);

  gcc_assert (mode0 == mode1);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return const0_rtx;

  if (target == 0
      || GET_MODE (target) != SImode
      || ! (*insn_data[icode].operand[0].predicate) (target, SImode))
    target = gen_reg_rtx (SImode);

  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
    op0 = copy_to_mode_reg (mode0, op0);
  if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
    op1 = copy_to_mode_reg (mode1, op1);

  /* The comparison pattern sets a CC register; emit it once and then
     read the variant-specific bit out of it below.  */
  scratch = gen_reg_rtx (CCmode);

  pat = GEN_FCN (icode) (scratch, op0, op1);
  if (! pat)
    return const0_rtx;
  emit_insn (pat);

  /* There are 4 variants for each predicate: _any_, _all_, _upper_,
     _lower_.  We use one compare, but look in different bits of the
     CR for each variant.

     There are 2 elements in each SPE simd type (upper/lower).  The CR
     bits are set as follows:

     BIT0  | BIT 1  | BIT 2   | BIT 3
     U     |   L    | (U | L) | (U & L)

     So, for an "all" relationship, BIT 3 would be set.
     For an "any" relationship, BIT 2 would be set.  Etc.

     Following traditional nomenclature, these bits map to:

     BIT0  | BIT 1  | BIT 2   | BIT 3
     LT    | GT     | EQ      | OV

     Later, we will generate rtl to look in the LT/EQ/EQ/OV bits.
  */

  switch (form_int)
    {
      /* All variant.  OV bit.  */
    case 0:
      /* We need to get to the OV bit, which is the ORDERED bit.  We
	 could generate (ordered:SI (reg:CC xx) (const_int 0)), but
	 that's ugly and will make validate_condition_mode die.
	 So let's just use another pattern.  */
      emit_insn (gen_move_from_CR_ov_bit (target, scratch));
      return target;
      /* Any variant.  EQ bit.  */
    case 1:
      code = EQ;
      break;
      /* Upper variant.  LT bit.  */
    case 2:
      code = LT;
      break;
      /* Lower variant.  GT bit.  */
    case 3:
      code = GT;
      break;
    default:
      error ("argument 1 of __builtin_spe_predicate is out of range");
      return const0_rtx;
    }

  /* Materialize the selected CR bit as a 0/1 SImode value.  */
  tmp = gen_rtx_fmt_ee (code, SImode, scratch, const0_rtx);
  emit_move_insn (target, tmp);

  return target;
}
7864
 
7865
/* The evsel builtins look like this:

     e = __builtin_spe_evsel_OP (a, b, c, d);

   and work like this:

     e[upper] = a[upper] *OP* b[upper] ? c[upper] : d[upper];
     e[lower] = a[lower] *OP* b[lower] ? c[lower] : d[lower];
*/

static rtx
spe_expand_evsel_builtin (enum insn_code icode, tree arglist, rtx target)
{
  rtx pat, scratch;
  tree arg0 = TREE_VALUE (arglist);
  tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
  tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
  tree arg3 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arglist))));
  rtx op0 = expand_normal (arg0);
  rtx op1 = expand_normal (arg1);
  rtx op2 = expand_normal (arg2);
  rtx op3 = expand_normal (arg3);
  enum machine_mode mode0 = insn_data[icode].operand[1].mode;
  enum machine_mode mode1 = insn_data[icode].operand[2].mode;

  gcc_assert (mode0 == mode1);

  if (arg0 == error_mark_node || arg1 == error_mark_node
      || arg2 == error_mark_node || arg3 == error_mark_node)
    return const0_rtx;

  if (target == 0
      || GET_MODE (target) != mode0
      || ! (*insn_data[icode].operand[0].predicate) (target, mode0))
    target = gen_reg_rtx (mode0);

  /* NOTE(review): all four source operands are validated with
     operand[1]'s predicate; mode0 == mode1 is asserted above, so the
     modes agree, but confirm operand[2]'s predicate is not stricter.  */
  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
    op0 = copy_to_mode_reg (mode0, op0);
  if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
    op1 = copy_to_mode_reg (mode0, op1);
  if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
    op2 = copy_to_mode_reg (mode0, op2);
  if (! (*insn_data[icode].operand[1].predicate) (op3, mode1))
    op3 = copy_to_mode_reg (mode0, op3);

  /* Generate the compare.  */
  scratch = gen_reg_rtx (CCmode);
  pat = GEN_FCN (icode) (scratch, op0, op1);
  if (! pat)
    return const0_rtx;
  emit_insn (pat);

  /* Select between op2 and op3 based on the per-element compare result.  */
  if (mode0 == V2SImode)
    emit_insn (gen_spe_evsel (target, op2, op3, scratch));
  else
    emit_insn (gen_spe_evsel_fs (target, op2, op3, scratch));

  return target;
}
7924
 
7925
/* Expand an expression EXP that calls a built-in function,
7926
   with result going to TARGET if that's convenient
7927
   (and in mode MODE if that's convenient).
7928
   SUBTARGET may be used as the target for computing one of EXP's operands.
7929
   IGNORE is nonzero if the value is to be ignored.  */
7930
 
7931
static rtx
7932
rs6000_expand_builtin (tree exp, rtx target, rtx subtarget ATTRIBUTE_UNUSED,
7933
                       enum machine_mode mode ATTRIBUTE_UNUSED,
7934
                       int ignore ATTRIBUTE_UNUSED)
7935
{
7936
  tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7937
  tree arglist = TREE_OPERAND (exp, 1);
7938
  unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
7939
  struct builtin_description *d;
7940
  size_t i;
7941
  rtx ret;
7942
  bool success;
7943
 
7944
  if (fcode == ALTIVEC_BUILTIN_MASK_FOR_LOAD
7945
      || fcode == ALTIVEC_BUILTIN_MASK_FOR_STORE)
7946
    {
7947
      int icode = (int) CODE_FOR_altivec_lvsr;
7948
      enum machine_mode tmode = insn_data[icode].operand[0].mode;
7949
      enum machine_mode mode = insn_data[icode].operand[1].mode;
7950
      tree arg;
7951
      rtx op, addr, pat;
7952
 
7953
      gcc_assert (TARGET_ALTIVEC);
7954
 
7955
      arg = TREE_VALUE (arglist);
7956
      gcc_assert (TREE_CODE (TREE_TYPE (arg)) == POINTER_TYPE);
7957
      op = expand_expr (arg, NULL_RTX, Pmode, EXPAND_NORMAL);
7958
      addr = memory_address (mode, op);
7959
      if (fcode == ALTIVEC_BUILTIN_MASK_FOR_STORE)
7960
        op = addr;
7961
      else
7962
        {
7963
          /* For the load case need to negate the address.  */
7964
          op = gen_reg_rtx (GET_MODE (addr));
7965
          emit_insn (gen_rtx_SET (VOIDmode, op,
7966
                         gen_rtx_NEG (GET_MODE (addr), addr)));
7967
        }
7968
      op = gen_rtx_MEM (mode, op);
7969
 
7970
      if (target == 0
7971
          || GET_MODE (target) != tmode
7972
          || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7973
        target = gen_reg_rtx (tmode);
7974
 
7975
      /*pat = gen_altivec_lvsr (target, op);*/
7976
      pat = GEN_FCN (icode) (target, op);
7977
      if (!pat)
7978
        return 0;
7979
      emit_insn (pat);
7980
 
7981
      return target;
7982
    }
7983
 
7984
  if (TARGET_ALTIVEC)
7985
    {
7986
      ret = altivec_expand_builtin (exp, target, &success);
7987
 
7988
      if (success)
7989
        return ret;
7990
    }
7991
  if (TARGET_SPE)
7992
    {
7993
      ret = spe_expand_builtin (exp, target, &success);
7994
 
7995
      if (success)
7996
        return ret;
7997
    }
7998
 
7999
  gcc_assert (TARGET_ALTIVEC || TARGET_SPE);
8000
 
8001
  /* Handle simple unary operations.  */
8002
  d = (struct builtin_description *) bdesc_1arg;
8003
  for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
8004
    if (d->code == fcode)
8005
      return rs6000_expand_unop_builtin (d->icode, arglist, target);
8006
 
8007
  /* Handle simple binary operations.  */
8008
  d = (struct builtin_description *) bdesc_2arg;
8009
  for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
8010
    if (d->code == fcode)
8011
      return rs6000_expand_binop_builtin (d->icode, arglist, target);
8012
 
8013
  /* Handle simple ternary operations.  */
8014
  d = (struct builtin_description *) bdesc_3arg;
8015
  for (i = 0; i < ARRAY_SIZE  (bdesc_3arg); i++, d++)
8016
    if (d->code == fcode)
8017
      return rs6000_expand_ternop_builtin (d->icode, arglist, target);
8018
 
8019
  gcc_unreachable ();
8020
}
8021
 
8022
/* Build a vector type whose element type is a fresh variant of NODE,
   made its own main variant so the resulting vector type stays distinct
   ("opaque") from ordinary vector types built on NODE.  */
static tree
build_opaque_vector_type (tree node, int nunits)
{
  node = copy_node (node);
  TYPE_MAIN_VARIANT (node) = node;
  return build_vector_type (node, nunits);
}
8029
 
8030
/* Set up the vector type nodes and register the target-specific type
   names, then initialize the SPE/AltiVec/common builtin tables as
   enabled by the target flags.  */
static void
rs6000_init_builtins (void)
{
  V2SI_type_node = build_vector_type (intSI_type_node, 2);
  V2SF_type_node = build_vector_type (float_type_node, 2);
  V4HI_type_node = build_vector_type (intHI_type_node, 4);
  V4SI_type_node = build_vector_type (intSI_type_node, 4);
  V4SF_type_node = build_vector_type (float_type_node, 4);
  V8HI_type_node = build_vector_type (intHI_type_node, 8);
  V16QI_type_node = build_vector_type (intQI_type_node, 16);

  unsigned_V16QI_type_node = build_vector_type (unsigned_intQI_type_node, 16);
  unsigned_V8HI_type_node = build_vector_type (unsigned_intHI_type_node, 8);
  unsigned_V4SI_type_node = build_vector_type (unsigned_intSI_type_node, 4);

  opaque_V2SF_type_node = build_opaque_vector_type (float_type_node, 2);
  opaque_V2SI_type_node = build_opaque_vector_type (intSI_type_node, 2);
  opaque_p_V2SI_type_node = build_pointer_type (opaque_V2SI_type_node);
  opaque_V4SI_type_node = copy_node (V4SI_type_node);

  /* The 'vector bool ...' types must be kept distinct from 'vector unsigned ...'
     types, especially in C++ land.  Similarly, 'vector pixel' is distinct from
     'vector unsigned short'.  */

  bool_char_type_node = build_distinct_type_copy (unsigned_intQI_type_node);
  bool_short_type_node = build_distinct_type_copy (unsigned_intHI_type_node);
  bool_int_type_node = build_distinct_type_copy (unsigned_intSI_type_node);
  pixel_type_node = build_distinct_type_copy (unsigned_intHI_type_node);

  long_integer_type_internal_node = long_integer_type_node;
  long_unsigned_type_internal_node = long_unsigned_type_node;
  intQI_type_internal_node = intQI_type_node;
  uintQI_type_internal_node = unsigned_intQI_type_node;
  intHI_type_internal_node = intHI_type_node;
  uintHI_type_internal_node = unsigned_intHI_type_node;
  intSI_type_internal_node = intSI_type_node;
  uintSI_type_internal_node = unsigned_intSI_type_node;
  float_type_internal_node = float_type_node;
  void_type_internal_node = void_type_node;

  /* Register the scalar helper type names.  */
  (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
					    get_identifier ("__bool char"),
					    bool_char_type_node));
  (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
					    get_identifier ("__bool short"),
					    bool_short_type_node));
  (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
					    get_identifier ("__bool int"),
					    bool_int_type_node));
  (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
					    get_identifier ("__pixel"),
					    pixel_type_node));

  bool_V16QI_type_node = build_vector_type (bool_char_type_node, 16);
  bool_V8HI_type_node = build_vector_type (bool_short_type_node, 8);
  bool_V4SI_type_node = build_vector_type (bool_int_type_node, 4);
  pixel_V8HI_type_node = build_vector_type (pixel_type_node, 8);

  /* Register the vector type names.  */
  (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
					    get_identifier ("__vector unsigned char"),
					    unsigned_V16QI_type_node));
  (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
					    get_identifier ("__vector signed char"),
					    V16QI_type_node));
  (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
					    get_identifier ("__vector __bool char"),
					    bool_V16QI_type_node));

  (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
					    get_identifier ("__vector unsigned short"),
					    unsigned_V8HI_type_node));
  (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
					    get_identifier ("__vector signed short"),
					    V8HI_type_node));
  (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
					    get_identifier ("__vector __bool short"),
					    bool_V8HI_type_node));

  (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
					    get_identifier ("__vector unsigned int"),
					    unsigned_V4SI_type_node));
  (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
					    get_identifier ("__vector signed int"),
					    V4SI_type_node));
  (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
					    get_identifier ("__vector __bool int"),
					    bool_V4SI_type_node));

  (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
					    get_identifier ("__vector float"),
					    V4SF_type_node));
  (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
					    get_identifier ("__vector __pixel"),
					    pixel_V8HI_type_node));

  if (TARGET_SPE)
    spe_init_builtins ();
  if (TARGET_ALTIVEC)
    altivec_init_builtins ();
  if (TARGET_ALTIVEC || TARGET_SPE)
    rs6000_common_init_builtins ();

#if TARGET_XCOFF
  /* AIX libm provides clog as __clog.  */
  if (built_in_decls [BUILT_IN_CLOG])
    set_user_assembler_name (built_in_decls [BUILT_IN_CLOG], "__clog");
#endif
}
8138
 
8139
/* Search through a set of builtins and enable the mask bits.
8140
   DESC is an array of builtins.
8141
   SIZE is the total number of builtins.
8142
   START is the builtin enum at which to start.
8143
   END is the builtin enum at which to end.  */
8144
static void
8145
enable_mask_for_builtins (struct builtin_description *desc, int size,
8146
                          enum rs6000_builtins start,
8147
                          enum rs6000_builtins end)
8148
{
8149
  int i;
8150
 
8151
  for (i = 0; i < size; ++i)
8152
    if (desc[i].code == start)
8153
      break;
8154
 
8155
  if (i == size)
8156
    return;
8157
 
8158
  for (; i < size; ++i)
8159
    {
8160
      /* Flip all the bits on.  */
8161
      desc[i].mask = target_flags;
8162
      if (desc[i].code == end)
8163
        break;
8164
    }
8165
}
8166
 
8167
/* Register the PowerPC SPE builtin functions.  Builds the tree nodes
   for each SPE function signature (using the opaque V2SI/V2SF vector
   types declared elsewhere in this file), enables the mask bits of the
   SPE entries in the shared builtin tables, and registers the
   irregular (non-table-driven) SPE builtins with def_builtin.  Called
   from rs6000_init_builtins when TARGET_SPE is set.  */
static void
spe_init_builtins (void)
{
  tree endlink = void_list_node;
  tree puint_type_node = build_pointer_type (unsigned_type_node);
  tree pushort_type_node = build_pointer_type (short_unsigned_type_node);
  struct builtin_description *d;
  size_t i;

  /* Function-type nodes for the SPE builtin signatures.  The naming
     convention is RETURN_ftype_ARGS.  */
  tree v2si_ftype_4_v2si
    = build_function_type
    (opaque_V2SI_type_node,
     tree_cons (NULL_TREE, opaque_V2SI_type_node,
                tree_cons (NULL_TREE, opaque_V2SI_type_node,
                           tree_cons (NULL_TREE, opaque_V2SI_type_node,
                                      tree_cons (NULL_TREE, opaque_V2SI_type_node,
                                                 endlink)))));

  tree v2sf_ftype_4_v2sf
    = build_function_type
    (opaque_V2SF_type_node,
     tree_cons (NULL_TREE, opaque_V2SF_type_node,
                tree_cons (NULL_TREE, opaque_V2SF_type_node,
                           tree_cons (NULL_TREE, opaque_V2SF_type_node,
                                      tree_cons (NULL_TREE, opaque_V2SF_type_node,
                                                 endlink)))));

  tree int_ftype_int_v2si_v2si
    = build_function_type
    (integer_type_node,
     tree_cons (NULL_TREE, integer_type_node,
                tree_cons (NULL_TREE, opaque_V2SI_type_node,
                           tree_cons (NULL_TREE, opaque_V2SI_type_node,
                                      endlink))));

  tree int_ftype_int_v2sf_v2sf
    = build_function_type
    (integer_type_node,
     tree_cons (NULL_TREE, integer_type_node,
                tree_cons (NULL_TREE, opaque_V2SF_type_node,
                           tree_cons (NULL_TREE, opaque_V2SF_type_node,
                                      endlink))));

  tree void_ftype_v2si_puint_int
    = build_function_type (void_type_node,
                           tree_cons (NULL_TREE, opaque_V2SI_type_node,
                                      tree_cons (NULL_TREE, puint_type_node,
                                                 tree_cons (NULL_TREE,
                                                            integer_type_node,
                                                            endlink))));

  tree void_ftype_v2si_puint_char
    = build_function_type (void_type_node,
                           tree_cons (NULL_TREE, opaque_V2SI_type_node,
                                      tree_cons (NULL_TREE, puint_type_node,
                                                 tree_cons (NULL_TREE,
                                                            char_type_node,
                                                            endlink))));

  tree void_ftype_v2si_pv2si_int
    = build_function_type (void_type_node,
                           tree_cons (NULL_TREE, opaque_V2SI_type_node,
                                      tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
                                                 tree_cons (NULL_TREE,
                                                            integer_type_node,
                                                            endlink))));

  tree void_ftype_v2si_pv2si_char
    = build_function_type (void_type_node,
                           tree_cons (NULL_TREE, opaque_V2SI_type_node,
                                      tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
                                                 tree_cons (NULL_TREE,
                                                            char_type_node,
                                                            endlink))));

  tree void_ftype_int
    = build_function_type (void_type_node,
                           tree_cons (NULL_TREE, integer_type_node, endlink));

  tree int_ftype_void
    = build_function_type (integer_type_node, endlink);

  tree v2si_ftype_pv2si_int
    = build_function_type (opaque_V2SI_type_node,
                           tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
                                      tree_cons (NULL_TREE, integer_type_node,
                                                 endlink)));

  tree v2si_ftype_puint_int
    = build_function_type (opaque_V2SI_type_node,
                           tree_cons (NULL_TREE, puint_type_node,
                                      tree_cons (NULL_TREE, integer_type_node,
                                                 endlink)));

  tree v2si_ftype_pushort_int
    = build_function_type (opaque_V2SI_type_node,
                           tree_cons (NULL_TREE, pushort_type_node,
                                      tree_cons (NULL_TREE, integer_type_node,
                                                 endlink)));

  tree v2si_ftype_signed_char
    = build_function_type (opaque_V2SI_type_node,
                           tree_cons (NULL_TREE, signed_char_type_node,
                                      endlink));

  /* The initialization of the simple binary and unary builtins is
     done in rs6000_common_init_builtins, but we have to enable the
     mask bits here manually because we have run out of `target_flags'
     bits.  We really need to redesign this mask business.  */

  enable_mask_for_builtins ((struct builtin_description *) bdesc_2arg,
                            ARRAY_SIZE (bdesc_2arg),
                            SPE_BUILTIN_EVADDW,
                            SPE_BUILTIN_EVXOR);
  enable_mask_for_builtins ((struct builtin_description *) bdesc_1arg,
                            ARRAY_SIZE (bdesc_1arg),
                            SPE_BUILTIN_EVABS,
                            SPE_BUILTIN_EVSUBFUSIAAW);
  enable_mask_for_builtins ((struct builtin_description *) bdesc_spe_predicates,
                            ARRAY_SIZE (bdesc_spe_predicates),
                            SPE_BUILTIN_EVCMPEQ,
                            SPE_BUILTIN_EVFSTSTLT);
  enable_mask_for_builtins ((struct builtin_description *) bdesc_spe_evsel,
                            ARRAY_SIZE (bdesc_spe_evsel),
                            SPE_BUILTIN_EVSEL_CMPGTS,
                            SPE_BUILTIN_EVSEL_FSTSTEQ);

  /* Expose the opaque 64-bit vector type to the front end.  */
  (*lang_hooks.decls.pushdecl)
    (build_decl (TYPE_DECL, get_identifier ("__ev64_opaque__"),
                 opaque_V2SI_type_node));

  /* Initialize irregular SPE builtins.  */

  def_builtin (target_flags, "__builtin_spe_mtspefscr", void_ftype_int, SPE_BUILTIN_MTSPEFSCR);
  def_builtin (target_flags, "__builtin_spe_mfspefscr", int_ftype_void, SPE_BUILTIN_MFSPEFSCR);
  def_builtin (target_flags, "__builtin_spe_evstddx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDDX);
  def_builtin (target_flags, "__builtin_spe_evstdhx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDHX);
  def_builtin (target_flags, "__builtin_spe_evstdwx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDWX);
  def_builtin (target_flags, "__builtin_spe_evstwhex", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWHEX);
  def_builtin (target_flags, "__builtin_spe_evstwhox", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWHOX);
  def_builtin (target_flags, "__builtin_spe_evstwwex", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWWEX);
  def_builtin (target_flags, "__builtin_spe_evstwwox", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWWOX);
  def_builtin (target_flags, "__builtin_spe_evstdd", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDD);
  def_builtin (target_flags, "__builtin_spe_evstdh", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDH);
  def_builtin (target_flags, "__builtin_spe_evstdw", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDW);
  def_builtin (target_flags, "__builtin_spe_evstwhe", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWHE);
  def_builtin (target_flags, "__builtin_spe_evstwho", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWHO);
  def_builtin (target_flags, "__builtin_spe_evstwwe", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWWE);
  def_builtin (target_flags, "__builtin_spe_evstwwo", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWWO);
  def_builtin (target_flags, "__builtin_spe_evsplatfi", v2si_ftype_signed_char, SPE_BUILTIN_EVSPLATFI);
  def_builtin (target_flags, "__builtin_spe_evsplati", v2si_ftype_signed_char, SPE_BUILTIN_EVSPLATI);

  /* Loads.  */
  def_builtin (target_flags, "__builtin_spe_evlddx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDDX);
  def_builtin (target_flags, "__builtin_spe_evldwx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDWX);
  def_builtin (target_flags, "__builtin_spe_evldhx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDHX);
  def_builtin (target_flags, "__builtin_spe_evlwhex", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHEX);
  def_builtin (target_flags, "__builtin_spe_evlwhoux", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOUX);
  def_builtin (target_flags, "__builtin_spe_evlwhosx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOSX);
  def_builtin (target_flags, "__builtin_spe_evlwwsplatx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWWSPLATX);
  def_builtin (target_flags, "__builtin_spe_evlwhsplatx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHSPLATX);
  def_builtin (target_flags, "__builtin_spe_evlhhesplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHESPLATX);
  def_builtin (target_flags, "__builtin_spe_evlhhousplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOUSPLATX);
  def_builtin (target_flags, "__builtin_spe_evlhhossplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOSSPLATX);
  def_builtin (target_flags, "__builtin_spe_evldd", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDD);
  def_builtin (target_flags, "__builtin_spe_evldw", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDW);
  def_builtin (target_flags, "__builtin_spe_evldh", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDH);
  def_builtin (target_flags, "__builtin_spe_evlhhesplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHESPLAT);
  def_builtin (target_flags, "__builtin_spe_evlhhossplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOSSPLAT);
  def_builtin (target_flags, "__builtin_spe_evlhhousplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOUSPLAT);
  def_builtin (target_flags, "__builtin_spe_evlwhe", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHE);
  def_builtin (target_flags, "__builtin_spe_evlwhos", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOS);
  def_builtin (target_flags, "__builtin_spe_evlwhou", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOU);
  def_builtin (target_flags, "__builtin_spe_evlwhsplat", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHSPLAT);
  def_builtin (target_flags, "__builtin_spe_evlwwsplat", v2si_ftype_puint_int, SPE_BUILTIN_EVLWWSPLAT);

  /* Predicates.  Pick the signature from the mode of the insn's
     second operand (V2SI vs. V2SF).  */
  d = (struct builtin_description *) bdesc_spe_predicates;
  for (i = 0; i < ARRAY_SIZE (bdesc_spe_predicates); ++i, d++)
    {
      tree type;

      switch (insn_data[d->icode].operand[1].mode)
        {
        case V2SImode:
          type = int_ftype_int_v2si_v2si;
          break;
        case V2SFmode:
          type = int_ftype_int_v2sf_v2sf;
          break;
        default:
          gcc_unreachable ();
        }

      def_builtin (d->mask, d->name, type, d->code);
    }

  /* Evsel predicates.  Same mode-driven dispatch as above, but the
     builtins take four vector operands and return a vector.  */
  d = (struct builtin_description *) bdesc_spe_evsel;
  for (i = 0; i < ARRAY_SIZE (bdesc_spe_evsel); ++i, d++)
    {
      tree type;

      switch (insn_data[d->icode].operand[1].mode)
        {
        case V2SImode:
          type = v2si_ftype_4_v2si;
          break;
        case V2SFmode:
          type = v2sf_ftype_4_v2sf;
          break;
        default:
          gcc_unreachable ();
        }

      def_builtin (d->mask, d->name, type, d->code);
    }
}
8385
 
8386
/* Register the AltiVec builtin functions.  Builds the function-type
   nodes used by the AltiVec builtins, registers the internal
   load/store and miscellaneous builtins, the overloaded
   __builtin_vec_* front-end entry points, the table-driven DST,
   predicate and abs* builtins, the mask-for-load helper used by the
   vectorizer, and the vec_init/vec_set/vec_ext patterns.  Called from
   rs6000_init_builtins when TARGET_ALTIVEC is set.  */
static void
altivec_init_builtins (void)
{
  struct builtin_description *d;
  struct builtin_description_predicates *dp;
  size_t i;
  tree ftype;

  /* Pointer types used as builtin parameter types.  The "pc" variants
     point to const-qualified targets.  */
  tree pfloat_type_node = build_pointer_type (float_type_node);
  tree pint_type_node = build_pointer_type (integer_type_node);
  tree pshort_type_node = build_pointer_type (short_integer_type_node);
  tree pchar_type_node = build_pointer_type (char_type_node);

  tree pvoid_type_node = build_pointer_type (void_type_node);

  tree pcfloat_type_node = build_pointer_type (build_qualified_type (float_type_node, TYPE_QUAL_CONST));
  tree pcint_type_node = build_pointer_type (build_qualified_type (integer_type_node, TYPE_QUAL_CONST));
  tree pcshort_type_node = build_pointer_type (build_qualified_type (short_integer_type_node, TYPE_QUAL_CONST));
  tree pcchar_type_node = build_pointer_type (build_qualified_type (char_type_node, TYPE_QUAL_CONST));

  tree pcvoid_type_node = build_pointer_type (build_qualified_type (void_type_node, TYPE_QUAL_CONST));

  /* Function-type nodes, named RETURN_ftype_ARGS.  The "opaque" types
     are used for the overloaded __builtin_vec_* builtins, which accept
     any vector type.  */
  tree int_ftype_opaque
    = build_function_type_list (integer_type_node,
                                opaque_V4SI_type_node, NULL_TREE);

  tree opaque_ftype_opaque_int
    = build_function_type_list (opaque_V4SI_type_node,
                                opaque_V4SI_type_node, integer_type_node, NULL_TREE);
  tree opaque_ftype_opaque_opaque_int
    = build_function_type_list (opaque_V4SI_type_node,
                                opaque_V4SI_type_node, opaque_V4SI_type_node,
                                integer_type_node, NULL_TREE);
  tree int_ftype_int_opaque_opaque
    = build_function_type_list (integer_type_node,
                                integer_type_node, opaque_V4SI_type_node,
                                opaque_V4SI_type_node, NULL_TREE);
  tree int_ftype_int_v4si_v4si
    = build_function_type_list (integer_type_node,
                                integer_type_node, V4SI_type_node,
                                V4SI_type_node, NULL_TREE);
  tree v4sf_ftype_pcfloat
    = build_function_type_list (V4SF_type_node, pcfloat_type_node, NULL_TREE);
  tree void_ftype_pfloat_v4sf
    = build_function_type_list (void_type_node,
                                pfloat_type_node, V4SF_type_node, NULL_TREE);
  tree v4si_ftype_pcint
    = build_function_type_list (V4SI_type_node, pcint_type_node, NULL_TREE);
  tree void_ftype_pint_v4si
    = build_function_type_list (void_type_node,
                                pint_type_node, V4SI_type_node, NULL_TREE);
  tree v8hi_ftype_pcshort
    = build_function_type_list (V8HI_type_node, pcshort_type_node, NULL_TREE);
  tree void_ftype_pshort_v8hi
    = build_function_type_list (void_type_node,
                                pshort_type_node, V8HI_type_node, NULL_TREE);
  tree v16qi_ftype_pcchar
    = build_function_type_list (V16QI_type_node, pcchar_type_node, NULL_TREE);
  tree void_ftype_pchar_v16qi
    = build_function_type_list (void_type_node,
                                pchar_type_node, V16QI_type_node, NULL_TREE);
  tree void_ftype_v4si
    = build_function_type_list (void_type_node, V4SI_type_node, NULL_TREE);
  tree v8hi_ftype_void
    = build_function_type (V8HI_type_node, void_list_node);
  tree void_ftype_void
    = build_function_type (void_type_node, void_list_node);
  tree void_ftype_int
    = build_function_type_list (void_type_node, integer_type_node, NULL_TREE);

  tree opaque_ftype_long_pcvoid
    = build_function_type_list (opaque_V4SI_type_node,
                                long_integer_type_node, pcvoid_type_node, NULL_TREE);
  tree v16qi_ftype_long_pcvoid
    = build_function_type_list (V16QI_type_node,
                                long_integer_type_node, pcvoid_type_node, NULL_TREE);
  tree v8hi_ftype_long_pcvoid
    = build_function_type_list (V8HI_type_node,
                                long_integer_type_node, pcvoid_type_node, NULL_TREE);
  tree v4si_ftype_long_pcvoid
    = build_function_type_list (V4SI_type_node,
                                long_integer_type_node, pcvoid_type_node, NULL_TREE);

  tree void_ftype_opaque_long_pvoid
    = build_function_type_list (void_type_node,
                                opaque_V4SI_type_node, long_integer_type_node,
                                pvoid_type_node, NULL_TREE);
  tree void_ftype_v4si_long_pvoid
    = build_function_type_list (void_type_node,
                                V4SI_type_node, long_integer_type_node,
                                pvoid_type_node, NULL_TREE);
  tree void_ftype_v16qi_long_pvoid
    = build_function_type_list (void_type_node,
                                V16QI_type_node, long_integer_type_node,
                                pvoid_type_node, NULL_TREE);
  tree void_ftype_v8hi_long_pvoid
    = build_function_type_list (void_type_node,
                                V8HI_type_node, long_integer_type_node,
                                pvoid_type_node, NULL_TREE);
  tree int_ftype_int_v8hi_v8hi
    = build_function_type_list (integer_type_node,
                                integer_type_node, V8HI_type_node,
                                V8HI_type_node, NULL_TREE);
  tree int_ftype_int_v16qi_v16qi
    = build_function_type_list (integer_type_node,
                                integer_type_node, V16QI_type_node,
                                V16QI_type_node, NULL_TREE);
  tree int_ftype_int_v4sf_v4sf
    = build_function_type_list (integer_type_node,
                                integer_type_node, V4SF_type_node,
                                V4SF_type_node, NULL_TREE);
  tree v4si_ftype_v4si
    = build_function_type_list (V4SI_type_node, V4SI_type_node, NULL_TREE);
  tree v8hi_ftype_v8hi
    = build_function_type_list (V8HI_type_node, V8HI_type_node, NULL_TREE);
  tree v16qi_ftype_v16qi
    = build_function_type_list (V16QI_type_node, V16QI_type_node, NULL_TREE);
  tree v4sf_ftype_v4sf
    = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);
  tree void_ftype_pcvoid_int_int
    = build_function_type_list (void_type_node,
                                pcvoid_type_node, integer_type_node,
                                integer_type_node, NULL_TREE);

  /* Internal load/store builtins plus VSCR access, data-stream and
     lvsl/lvsr/lve*/stv* element builtins.  */
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4sf", v4sf_ftype_pcfloat,
               ALTIVEC_BUILTIN_LD_INTERNAL_4sf);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4sf", void_ftype_pfloat_v4sf,
               ALTIVEC_BUILTIN_ST_INTERNAL_4sf);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4si", v4si_ftype_pcint,
               ALTIVEC_BUILTIN_LD_INTERNAL_4si);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4si", void_ftype_pint_v4si,
               ALTIVEC_BUILTIN_ST_INTERNAL_4si);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_8hi", v8hi_ftype_pcshort,
               ALTIVEC_BUILTIN_LD_INTERNAL_8hi);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_8hi", void_ftype_pshort_v8hi,
               ALTIVEC_BUILTIN_ST_INTERNAL_8hi);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_16qi", v16qi_ftype_pcchar,
               ALTIVEC_BUILTIN_LD_INTERNAL_16qi);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_16qi", void_ftype_pchar_v16qi,
               ALTIVEC_BUILTIN_ST_INTERNAL_16qi);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_mtvscr", void_ftype_v4si, ALTIVEC_BUILTIN_MTVSCR);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_mfvscr", v8hi_ftype_void, ALTIVEC_BUILTIN_MFVSCR);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_dssall", void_ftype_void, ALTIVEC_BUILTIN_DSSALL);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_dss", void_ftype_int, ALTIVEC_BUILTIN_DSS);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsl", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVSL);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsr", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVSR);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvebx", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVEBX);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvehx", v8hi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVEHX);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvewx", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVEWX);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvxl", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVXL);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvx", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVX);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvx", void_ftype_v4si_long_pvoid, ALTIVEC_BUILTIN_STVX);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvewx", void_ftype_v4si_long_pvoid, ALTIVEC_BUILTIN_STVEWX);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvxl", void_ftype_v4si_long_pvoid, ALTIVEC_BUILTIN_STVXL);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvebx", void_ftype_v16qi_long_pvoid, ALTIVEC_BUILTIN_STVEBX);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvehx", void_ftype_v8hi_long_pvoid, ALTIVEC_BUILTIN_STVEHX);
  /* Overloaded __builtin_vec_* entry points; these use the opaque
     vector signatures.  */
  def_builtin (MASK_ALTIVEC, "__builtin_vec_ld", opaque_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LD);
  def_builtin (MASK_ALTIVEC, "__builtin_vec_lde", opaque_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LDE);
  def_builtin (MASK_ALTIVEC, "__builtin_vec_ldl", opaque_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LDL);
  def_builtin (MASK_ALTIVEC, "__builtin_vec_lvsl", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVSL);
  def_builtin (MASK_ALTIVEC, "__builtin_vec_lvsr", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVSR);
  def_builtin (MASK_ALTIVEC, "__builtin_vec_lvebx", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVEBX);
  def_builtin (MASK_ALTIVEC, "__builtin_vec_lvehx", v8hi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVEHX);
  def_builtin (MASK_ALTIVEC, "__builtin_vec_lvewx", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVEWX);
  def_builtin (MASK_ALTIVEC, "__builtin_vec_st", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_ST);
  def_builtin (MASK_ALTIVEC, "__builtin_vec_ste", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STE);
  def_builtin (MASK_ALTIVEC, "__builtin_vec_stl", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STL);
  def_builtin (MASK_ALTIVEC, "__builtin_vec_stvewx", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STVEWX);
  def_builtin (MASK_ALTIVEC, "__builtin_vec_stvebx", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STVEBX);
  def_builtin (MASK_ALTIVEC, "__builtin_vec_stvehx", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STVEHX);

  def_builtin (MASK_ALTIVEC, "__builtin_vec_step", int_ftype_opaque, ALTIVEC_BUILTIN_VEC_STEP);

  def_builtin (MASK_ALTIVEC, "__builtin_vec_sld", opaque_ftype_opaque_opaque_int, ALTIVEC_BUILTIN_VEC_SLD);
  def_builtin (MASK_ALTIVEC, "__builtin_vec_splat", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_SPLAT);
  def_builtin (MASK_ALTIVEC, "__builtin_vec_vspltw", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VSPLTW);
  def_builtin (MASK_ALTIVEC, "__builtin_vec_vsplth", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VSPLTH);
  def_builtin (MASK_ALTIVEC, "__builtin_vec_vspltb", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VSPLTB);
  def_builtin (MASK_ALTIVEC, "__builtin_vec_ctf", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_CTF);
  def_builtin (MASK_ALTIVEC, "__builtin_vec_vcfsx", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VCFSX);
  def_builtin (MASK_ALTIVEC, "__builtin_vec_vcfux", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VCFUX);
  def_builtin (MASK_ALTIVEC, "__builtin_vec_cts", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_CTS);
  def_builtin (MASK_ALTIVEC, "__builtin_vec_ctu", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_CTU);

  /* Add the DST variants.  */
  d = (struct builtin_description *) bdesc_dst;
  for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
    def_builtin (d->mask, d->name, void_ftype_pcvoid_int_int, d->code);

  /* Initialize the predicates.  Overloaded predicates have no insn
     pattern of their own, so use VOIDmode to select the generic
     opaque-vector signature for them.  */
  dp = (struct builtin_description_predicates *) bdesc_altivec_preds;
  for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
    {
      enum machine_mode mode1;
      tree type;
      bool is_overloaded = dp->code >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
                           && dp->code <= ALTIVEC_BUILTIN_OVERLOADED_LAST;

      if (is_overloaded)
        mode1 = VOIDmode;
      else
        mode1 = insn_data[dp->icode].operand[1].mode;

      switch (mode1)
        {
        case VOIDmode:
          type = int_ftype_int_opaque_opaque;
          break;
        case V4SImode:
          type = int_ftype_int_v4si_v4si;
          break;
        case V8HImode:
          type = int_ftype_int_v8hi_v8hi;
          break;
        case V16QImode:
          type = int_ftype_int_v16qi_v16qi;
          break;
        case V4SFmode:
          type = int_ftype_int_v4sf_v4sf;
          break;
        default:
          gcc_unreachable ();
        }

      def_builtin (dp->mask, dp->name, type, dp->code);
    }

  /* Initialize the abs* operators.  The signature is chosen from the
     mode of the insn's output operand.  */
  d = (struct builtin_description *) bdesc_abs;
  for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
    {
      enum machine_mode mode0;
      tree type;

      mode0 = insn_data[d->icode].operand[0].mode;

      switch (mode0)
        {
        case V4SImode:
          type = v4si_ftype_v4si;
          break;
        case V8HImode:
          type = v8hi_ftype_v8hi;
          break;
        case V16QImode:
          type = v16qi_ftype_v16qi;
          break;
        case V4SFmode:
          type = v4sf_ftype_v4sf;
          break;
        default:
          gcc_unreachable ();
        }

      def_builtin (d->mask, d->name, type, d->code);
    }

  if (TARGET_ALTIVEC)
    {
      tree decl;

      /* Initialize target builtin that implements
         targetm.vectorize.builtin_mask_for_load.  */

      decl = lang_hooks.builtin_function ("__builtin_altivec_mask_for_load",
                               v16qi_ftype_long_pcvoid,
                               ALTIVEC_BUILTIN_MASK_FOR_LOAD,
                               BUILT_IN_MD, NULL,
                               tree_cons (get_identifier ("const"),
                                          NULL_TREE, NULL_TREE));
      /* Record the decl. Will be used by rs6000_builtin_mask_for_load.  */
      altivec_builtin_mask_for_load = decl;
    }

  /* Access to the vec_init patterns.  */
  ftype = build_function_type_list (V4SI_type_node, integer_type_node,
                                    integer_type_node, integer_type_node,
                                    integer_type_node, NULL_TREE);
  def_builtin (MASK_ALTIVEC, "__builtin_vec_init_v4si", ftype,
               ALTIVEC_BUILTIN_VEC_INIT_V4SI);

  ftype = build_function_type_list (V8HI_type_node, short_integer_type_node,
                                    short_integer_type_node,
                                    short_integer_type_node,
                                    short_integer_type_node,
                                    short_integer_type_node,
                                    short_integer_type_node,
                                    short_integer_type_node,
                                    short_integer_type_node, NULL_TREE);
  def_builtin (MASK_ALTIVEC, "__builtin_vec_init_v8hi", ftype,
               ALTIVEC_BUILTIN_VEC_INIT_V8HI);

  ftype = build_function_type_list (V16QI_type_node, char_type_node,
                                    char_type_node, char_type_node,
                                    char_type_node, char_type_node,
                                    char_type_node, char_type_node,
                                    char_type_node, char_type_node,
                                    char_type_node, char_type_node,
                                    char_type_node, char_type_node,
                                    char_type_node, char_type_node,
                                    char_type_node, NULL_TREE);
  def_builtin (MASK_ALTIVEC, "__builtin_vec_init_v16qi", ftype,
               ALTIVEC_BUILTIN_VEC_INIT_V16QI);

  ftype = build_function_type_list (V4SF_type_node, float_type_node,
                                    float_type_node, float_type_node,
                                    float_type_node, NULL_TREE);
  def_builtin (MASK_ALTIVEC, "__builtin_vec_init_v4sf", ftype,
               ALTIVEC_BUILTIN_VEC_INIT_V4SF);

  /* Access to the vec_set patterns.  */
  ftype = build_function_type_list (V4SI_type_node, V4SI_type_node,
                                    intSI_type_node,
                                    integer_type_node, NULL_TREE);
  def_builtin (MASK_ALTIVEC, "__builtin_vec_set_v4si", ftype,
               ALTIVEC_BUILTIN_VEC_SET_V4SI);

  ftype = build_function_type_list (V8HI_type_node, V8HI_type_node,
                                    intHI_type_node,
                                    integer_type_node, NULL_TREE);
  def_builtin (MASK_ALTIVEC, "__builtin_vec_set_v8hi", ftype,
               ALTIVEC_BUILTIN_VEC_SET_V8HI);

  /* NOTE(review): the return type here is V8HI_type_node although the
     vector argument is V16QI; looks inconsistent with the other
     vec_set variants — confirm against the upstream vec_set_v16qi
     pattern before changing.  */
  ftype = build_function_type_list (V8HI_type_node, V16QI_type_node,
                                    intQI_type_node,
                                    integer_type_node, NULL_TREE);
  def_builtin (MASK_ALTIVEC, "__builtin_vec_set_v16qi", ftype,
               ALTIVEC_BUILTIN_VEC_SET_V16QI);

  ftype = build_function_type_list (V4SF_type_node, V4SF_type_node,
                                    float_type_node,
                                    integer_type_node, NULL_TREE);
  def_builtin (MASK_ALTIVEC, "__builtin_vec_set_v4sf", ftype,
               ALTIVEC_BUILTIN_VEC_SET_V4SF);

  /* Access to the vec_extract patterns.  */
  ftype = build_function_type_list (intSI_type_node, V4SI_type_node,
                                    integer_type_node, NULL_TREE);
  def_builtin (MASK_ALTIVEC, "__builtin_vec_ext_v4si", ftype,
               ALTIVEC_BUILTIN_VEC_EXT_V4SI);

  ftype = build_function_type_list (intHI_type_node, V8HI_type_node,
                                    integer_type_node, NULL_TREE);
  def_builtin (MASK_ALTIVEC, "__builtin_vec_ext_v8hi", ftype,
               ALTIVEC_BUILTIN_VEC_EXT_V8HI);

  ftype = build_function_type_list (intQI_type_node, V16QI_type_node,
                                    integer_type_node, NULL_TREE);
  def_builtin (MASK_ALTIVEC, "__builtin_vec_ext_v16qi", ftype,
               ALTIVEC_BUILTIN_VEC_EXT_V16QI);

  ftype = build_function_type_list (float_type_node, V4SF_type_node,
                                    integer_type_node, NULL_TREE);
  def_builtin (MASK_ALTIVEC, "__builtin_vec_ext_v4sf", ftype,
               ALTIVEC_BUILTIN_VEC_EXT_V4SF);
}
8742
 
8743
/* Register the "common" vector builtins shared by the vector ISAs.
   The function walks the bdesc_3arg, bdesc_2arg and bdesc_1arg
   description tables, derives a C prototype for each entry from the
   machine modes of its insn pattern's operands (or uses the opaque
   vector types for overloaded builtins, which have no insn of their
   own), and registers it with def_builtin.  */
static void
rs6000_common_init_builtins (void)
{
  struct builtin_description *d;
  size_t i;

  /* Pre-built prototype nodes, named <return>_ftype_<arg>...  Each is
     a FUNCTION_TYPE built once here and shared by every builtin that
     matches the corresponding operand-mode combination below.  */
  tree v4sf_ftype_v4sf_v4sf_v16qi
    = build_function_type_list (V4SF_type_node,
                                V4SF_type_node, V4SF_type_node,
                                V16QI_type_node, NULL_TREE);
  tree v4si_ftype_v4si_v4si_v16qi
    = build_function_type_list (V4SI_type_node,
                                V4SI_type_node, V4SI_type_node,
                                V16QI_type_node, NULL_TREE);
  tree v8hi_ftype_v8hi_v8hi_v16qi
    = build_function_type_list (V8HI_type_node,
                                V8HI_type_node, V8HI_type_node,
                                V16QI_type_node, NULL_TREE);
  tree v16qi_ftype_v16qi_v16qi_v16qi
    = build_function_type_list (V16QI_type_node,
                                V16QI_type_node, V16QI_type_node,
                                V16QI_type_node, NULL_TREE);
  tree v4si_ftype_int
    = build_function_type_list (V4SI_type_node, integer_type_node, NULL_TREE);
  tree v8hi_ftype_int
    = build_function_type_list (V8HI_type_node, integer_type_node, NULL_TREE);
  tree v16qi_ftype_int
    = build_function_type_list (V16QI_type_node, integer_type_node, NULL_TREE);
  tree v8hi_ftype_v16qi
    = build_function_type_list (V8HI_type_node, V16QI_type_node, NULL_TREE);
  tree v4sf_ftype_v4sf
    = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);

  /* Prototypes using the opaque V2SI/V2SF types (2-element vectors).  */
  tree v2si_ftype_v2si_v2si
    = build_function_type_list (opaque_V2SI_type_node,
                                opaque_V2SI_type_node,
                                opaque_V2SI_type_node, NULL_TREE);

  tree v2sf_ftype_v2sf_v2sf
    = build_function_type_list (opaque_V2SF_type_node,
                                opaque_V2SF_type_node,
                                opaque_V2SF_type_node, NULL_TREE);

  tree v2si_ftype_int_int
    = build_function_type_list (opaque_V2SI_type_node,
                                integer_type_node, integer_type_node,
                                NULL_TREE);

  tree opaque_ftype_opaque
    = build_function_type_list (opaque_V4SI_type_node,
                                opaque_V4SI_type_node, NULL_TREE);

  tree v2si_ftype_v2si
    = build_function_type_list (opaque_V2SI_type_node,
                                opaque_V2SI_type_node, NULL_TREE);

  tree v2sf_ftype_v2sf
    = build_function_type_list (opaque_V2SF_type_node,
                                opaque_V2SF_type_node, NULL_TREE);

  tree v2sf_ftype_v2si
    = build_function_type_list (opaque_V2SF_type_node,
                                opaque_V2SI_type_node, NULL_TREE);

  tree v2si_ftype_v2sf
    = build_function_type_list (opaque_V2SI_type_node,
                                opaque_V2SF_type_node, NULL_TREE);

  tree v2si_ftype_v2si_char
    = build_function_type_list (opaque_V2SI_type_node,
                                opaque_V2SI_type_node,
                                char_type_node, NULL_TREE);

  tree v2si_ftype_int_char
    = build_function_type_list (opaque_V2SI_type_node,
                                integer_type_node, char_type_node, NULL_TREE);

  tree v2si_ftype_char
    = build_function_type_list (opaque_V2SI_type_node,
                                char_type_node, NULL_TREE);

  tree int_ftype_int_int
    = build_function_type_list (integer_type_node,
                                integer_type_node, integer_type_node,
                                NULL_TREE);

  tree opaque_ftype_opaque_opaque
    = build_function_type_list (opaque_V4SI_type_node,
                                opaque_V4SI_type_node, opaque_V4SI_type_node, NULL_TREE);
  tree v4si_ftype_v4si_v4si
    = build_function_type_list (V4SI_type_node,
                                V4SI_type_node, V4SI_type_node, NULL_TREE);
  tree v4sf_ftype_v4si_int
    = build_function_type_list (V4SF_type_node,
                                V4SI_type_node, integer_type_node, NULL_TREE);
  tree v4si_ftype_v4sf_int
    = build_function_type_list (V4SI_type_node,
                                V4SF_type_node, integer_type_node, NULL_TREE);
  tree v4si_ftype_v4si_int
    = build_function_type_list (V4SI_type_node,
                                V4SI_type_node, integer_type_node, NULL_TREE);
  tree v8hi_ftype_v8hi_int
    = build_function_type_list (V8HI_type_node,
                                V8HI_type_node, integer_type_node, NULL_TREE);
  tree v16qi_ftype_v16qi_int
    = build_function_type_list (V16QI_type_node,
                                V16QI_type_node, integer_type_node, NULL_TREE);
  tree v16qi_ftype_v16qi_v16qi_int
    = build_function_type_list (V16QI_type_node,
                                V16QI_type_node, V16QI_type_node,
                                integer_type_node, NULL_TREE);
  tree v8hi_ftype_v8hi_v8hi_int
    = build_function_type_list (V8HI_type_node,
                                V8HI_type_node, V8HI_type_node,
                                integer_type_node, NULL_TREE);
  tree v4si_ftype_v4si_v4si_int
    = build_function_type_list (V4SI_type_node,
                                V4SI_type_node, V4SI_type_node,
                                integer_type_node, NULL_TREE);
  tree v4sf_ftype_v4sf_v4sf_int
    = build_function_type_list (V4SF_type_node,
                                V4SF_type_node, V4SF_type_node,
                                integer_type_node, NULL_TREE);
  tree v4sf_ftype_v4sf_v4sf
    = build_function_type_list (V4SF_type_node,
                                V4SF_type_node, V4SF_type_node, NULL_TREE);
  tree opaque_ftype_opaque_opaque_opaque
    = build_function_type_list (opaque_V4SI_type_node,
                                opaque_V4SI_type_node, opaque_V4SI_type_node,
                                opaque_V4SI_type_node, NULL_TREE);
  tree v4sf_ftype_v4sf_v4sf_v4si
    = build_function_type_list (V4SF_type_node,
                                V4SF_type_node, V4SF_type_node,
                                V4SI_type_node, NULL_TREE);
  tree v4sf_ftype_v4sf_v4sf_v4sf
    = build_function_type_list (V4SF_type_node,
                                V4SF_type_node, V4SF_type_node,
                                V4SF_type_node, NULL_TREE);
  tree v4si_ftype_v4si_v4si_v4si
    = build_function_type_list (V4SI_type_node,
                                V4SI_type_node, V4SI_type_node,
                                V4SI_type_node, NULL_TREE);
  tree v8hi_ftype_v8hi_v8hi
    = build_function_type_list (V8HI_type_node,
                                V8HI_type_node, V8HI_type_node, NULL_TREE);
  tree v8hi_ftype_v8hi_v8hi_v8hi
    = build_function_type_list (V8HI_type_node,
                                V8HI_type_node, V8HI_type_node,
                                V8HI_type_node, NULL_TREE);
  tree v4si_ftype_v8hi_v8hi_v4si
    = build_function_type_list (V4SI_type_node,
                                V8HI_type_node, V8HI_type_node,
                                V4SI_type_node, NULL_TREE);
  tree v4si_ftype_v16qi_v16qi_v4si
    = build_function_type_list (V4SI_type_node,
                                V16QI_type_node, V16QI_type_node,
                                V4SI_type_node, NULL_TREE);
  tree v16qi_ftype_v16qi_v16qi
    = build_function_type_list (V16QI_type_node,
                                V16QI_type_node, V16QI_type_node, NULL_TREE);
  tree v4si_ftype_v4sf_v4sf
    = build_function_type_list (V4SI_type_node,
                                V4SF_type_node, V4SF_type_node, NULL_TREE);
  tree v8hi_ftype_v16qi_v16qi
    = build_function_type_list (V8HI_type_node,
                                V16QI_type_node, V16QI_type_node, NULL_TREE);
  tree v4si_ftype_v8hi_v8hi
    = build_function_type_list (V4SI_type_node,
                                V8HI_type_node, V8HI_type_node, NULL_TREE);
  tree v8hi_ftype_v4si_v4si
    = build_function_type_list (V8HI_type_node,
                                V4SI_type_node, V4SI_type_node, NULL_TREE);
  tree v16qi_ftype_v8hi_v8hi
    = build_function_type_list (V16QI_type_node,
                                V8HI_type_node, V8HI_type_node, NULL_TREE);
  tree v4si_ftype_v16qi_v4si
    = build_function_type_list (V4SI_type_node,
                                V16QI_type_node, V4SI_type_node, NULL_TREE);
  tree v4si_ftype_v16qi_v16qi
    = build_function_type_list (V4SI_type_node,
                                V16QI_type_node, V16QI_type_node, NULL_TREE);
  tree v4si_ftype_v8hi_v4si
    = build_function_type_list (V4SI_type_node,
                                V8HI_type_node, V4SI_type_node, NULL_TREE);
  tree v4si_ftype_v8hi
    = build_function_type_list (V4SI_type_node, V8HI_type_node, NULL_TREE);
  tree int_ftype_v4si_v4si
    = build_function_type_list (integer_type_node,
                                V4SI_type_node, V4SI_type_node, NULL_TREE);
  tree int_ftype_v4sf_v4sf
    = build_function_type_list (integer_type_node,
                                V4SF_type_node, V4SF_type_node, NULL_TREE);
  tree int_ftype_v16qi_v16qi
    = build_function_type_list (integer_type_node,
                                V16QI_type_node, V16QI_type_node, NULL_TREE);
  tree int_ftype_v8hi_v8hi
    = build_function_type_list (integer_type_node,
                                V8HI_type_node, V8HI_type_node, NULL_TREE);

  /* Add the simple ternary operators.  */
  d = (struct builtin_description *) bdesc_3arg;
  for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
    {
      enum machine_mode mode0, mode1, mode2, mode3;
      tree type;
      bool is_overloaded = d->code >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
                           && d->code <= ALTIVEC_BUILTIN_OVERLOADED_LAST;

      if (is_overloaded)
        {
          /* Overloaded builtins have no insn of their own; VOIDmode
             selects the opaque prototypes in the dispatch below.  */
          mode0 = VOIDmode;
          mode1 = VOIDmode;
          mode2 = VOIDmode;
          mode3 = VOIDmode;
        }
      else
        {
          if (d->name == 0 || d->icode == CODE_FOR_nothing)
            continue;

          /* Operand 0 is the result; 1..3 are the three inputs.  */
          mode0 = insn_data[d->icode].operand[0].mode;
          mode1 = insn_data[d->icode].operand[1].mode;
          mode2 = insn_data[d->icode].operand[2].mode;
          mode3 = insn_data[d->icode].operand[3].mode;
        }

      /* When all four are of the same mode.  */
      if (mode0 == mode1 && mode1 == mode2 && mode2 == mode3)
        {
          switch (mode0)
            {
            case VOIDmode:
              type = opaque_ftype_opaque_opaque_opaque;
              break;
            case V4SImode:
              type = v4si_ftype_v4si_v4si_v4si;
              break;
            case V4SFmode:
              type = v4sf_ftype_v4sf_v4sf_v4sf;
              break;
            case V8HImode:
              type = v8hi_ftype_v8hi_v8hi_v8hi;
              break;
            case V16QImode:
              type = v16qi_ftype_v16qi_v16qi_v16qi;
              break;
            default:
              gcc_unreachable ();
            }
        }
      else if (mode0 == mode1 && mode1 == mode2 && mode3 == V16QImode)
        {
          switch (mode0)
            {
            case V4SImode:
              type = v4si_ftype_v4si_v4si_v16qi;
              break;
            case V4SFmode:
              type = v4sf_ftype_v4sf_v4sf_v16qi;
              break;
            case V8HImode:
              type = v8hi_ftype_v8hi_v8hi_v16qi;
              break;
            case V16QImode:
              type = v16qi_ftype_v16qi_v16qi_v16qi;
              break;
            default:
              gcc_unreachable ();
            }
        }
      else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode
               && mode3 == V4SImode)
        type = v4si_ftype_v16qi_v16qi_v4si;
      else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode
               && mode3 == V4SImode)
        type = v4si_ftype_v8hi_v8hi_v4si;
      else if (mode0 == V4SFmode && mode1 == V4SFmode && mode2 == V4SFmode
               && mode3 == V4SImode)
        type = v4sf_ftype_v4sf_v4sf_v4si;

      /* vchar, vchar, vchar, 4 bit literal.  */
      else if (mode0 == V16QImode && mode1 == mode0 && mode2 == mode0
               && mode3 == QImode)
        type = v16qi_ftype_v16qi_v16qi_int;

      /* vshort, vshort, vshort, 4 bit literal.  */
      else if (mode0 == V8HImode && mode1 == mode0 && mode2 == mode0
               && mode3 == QImode)
        type = v8hi_ftype_v8hi_v8hi_int;

      /* vint, vint, vint, 4 bit literal.  */
      else if (mode0 == V4SImode && mode1 == mode0 && mode2 == mode0
               && mode3 == QImode)
        type = v4si_ftype_v4si_v4si_int;

      /* vfloat, vfloat, vfloat, 4 bit literal.  */
      else if (mode0 == V4SFmode && mode1 == mode0 && mode2 == mode0
               && mode3 == QImode)
        type = v4sf_ftype_v4sf_v4sf_int;

      else
        gcc_unreachable ();

      def_builtin (d->mask, d->name, type, d->code);
    }

  /* Add the simple binary operators.  */
  d = (struct builtin_description *) bdesc_2arg;
  for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
    {
      enum machine_mode mode0, mode1, mode2;
      tree type;
      bool is_overloaded = d->code >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
                           && d->code <= ALTIVEC_BUILTIN_OVERLOADED_LAST;

      if (is_overloaded)
        {
          mode0 = VOIDmode;
          mode1 = VOIDmode;
          mode2 = VOIDmode;
        }
      else
        {
          if (d->name == 0 || d->icode == CODE_FOR_nothing)
            continue;

          mode0 = insn_data[d->icode].operand[0].mode;
          mode1 = insn_data[d->icode].operand[1].mode;
          mode2 = insn_data[d->icode].operand[2].mode;
        }

      /* When all three operands are of the same mode.  */
      if (mode0 == mode1 && mode1 == mode2)
        {
          switch (mode0)
            {
            case VOIDmode:
              type = opaque_ftype_opaque_opaque;
              break;
            case V4SFmode:
              type = v4sf_ftype_v4sf_v4sf;
              break;
            case V4SImode:
              type = v4si_ftype_v4si_v4si;
              break;
            case V16QImode:
              type = v16qi_ftype_v16qi_v16qi;
              break;
            case V8HImode:
              type = v8hi_ftype_v8hi_v8hi;
              break;
            case V2SImode:
              type = v2si_ftype_v2si_v2si;
              break;
            case V2SFmode:
              type = v2sf_ftype_v2sf_v2sf;
              break;
            case SImode:
              type = int_ftype_int_int;
              break;
            default:
              gcc_unreachable ();
            }
        }

      /* A few other combos we really don't want to do manually.  */

      /* vint, vfloat, vfloat.  */
      else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == V4SFmode)
        type = v4si_ftype_v4sf_v4sf;

      /* vshort, vchar, vchar.  */
      else if (mode0 == V8HImode && mode1 == V16QImode && mode2 == V16QImode)
        type = v8hi_ftype_v16qi_v16qi;

      /* vint, vshort, vshort.  */
      else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode)
        type = v4si_ftype_v8hi_v8hi;

      /* vshort, vint, vint.  */
      else if (mode0 == V8HImode && mode1 == V4SImode && mode2 == V4SImode)
        type = v8hi_ftype_v4si_v4si;

      /* vchar, vshort, vshort.  */
      else if (mode0 == V16QImode && mode1 == V8HImode && mode2 == V8HImode)
        type = v16qi_ftype_v8hi_v8hi;

      /* vint, vchar, vint.  */
      else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V4SImode)
        type = v4si_ftype_v16qi_v4si;

      /* vint, vchar, vchar.  */
      else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode)
        type = v4si_ftype_v16qi_v16qi;

      /* vint, vshort, vint.  */
      else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V4SImode)
        type = v4si_ftype_v8hi_v4si;

      /* vint, vint, 5 bit literal.  */
      else if (mode0 == V4SImode && mode1 == V4SImode && mode2 == QImode)
        type = v4si_ftype_v4si_int;

      /* vshort, vshort, 5 bit literal.  */
      else if (mode0 == V8HImode && mode1 == V8HImode && mode2 == QImode)
        type = v8hi_ftype_v8hi_int;

      /* vchar, vchar, 5 bit literal.  */
      else if (mode0 == V16QImode && mode1 == V16QImode && mode2 == QImode)
        type = v16qi_ftype_v16qi_int;

      /* vfloat, vint, 5 bit literal.  */
      else if (mode0 == V4SFmode && mode1 == V4SImode && mode2 == QImode)
        type = v4sf_ftype_v4si_int;

      /* vint, vfloat, 5 bit literal.  */
      else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == QImode)
        type = v4si_ftype_v4sf_int;

      else if (mode0 == V2SImode && mode1 == SImode && mode2 == SImode)
        type = v2si_ftype_int_int;

      else if (mode0 == V2SImode && mode1 == V2SImode && mode2 == QImode)
        type = v2si_ftype_v2si_char;

      else if (mode0 == V2SImode && mode1 == SImode && mode2 == QImode)
        type = v2si_ftype_int_char;

      else
        {
          /* int, x, x.  */
          gcc_assert (mode0 == SImode);
          switch (mode1)
            {
            case V4SImode:
              type = int_ftype_v4si_v4si;
              break;
            case V4SFmode:
              type = int_ftype_v4sf_v4sf;
              break;
            case V16QImode:
              type = int_ftype_v16qi_v16qi;
              break;
            case V8HImode:
              type = int_ftype_v8hi_v8hi;
              break;
            default:
              gcc_unreachable ();
            }
        }

      def_builtin (d->mask, d->name, type, d->code);
    }

  /* Add the simple unary operators.  */
  d = (struct builtin_description *) bdesc_1arg;
  for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
    {
      enum machine_mode mode0, mode1;
      tree type;
      bool is_overloaded = d->code >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
                           && d->code <= ALTIVEC_BUILTIN_OVERLOADED_LAST;

      if (is_overloaded)
        {
          mode0 = VOIDmode;
          mode1 = VOIDmode;
        }
      else
        {
          if (d->name == 0 || d->icode == CODE_FOR_nothing)
            continue;

          mode0 = insn_data[d->icode].operand[0].mode;
          mode1 = insn_data[d->icode].operand[1].mode;
        }

      /* QImode operand 1 here means the builtin takes a small literal,
         exposed to the user as a plain int parameter.  */
      if (mode0 == V4SImode && mode1 == QImode)
        type = v4si_ftype_int;
      else if (mode0 == V8HImode && mode1 == QImode)
        type = v8hi_ftype_int;
      else if (mode0 == V16QImode && mode1 == QImode)
        type = v16qi_ftype_int;
      else if (mode0 == VOIDmode && mode1 == VOIDmode)
        type = opaque_ftype_opaque;
      else if (mode0 == V4SFmode && mode1 == V4SFmode)
        type = v4sf_ftype_v4sf;
      else if (mode0 == V8HImode && mode1 == V16QImode)
        type = v8hi_ftype_v16qi;
      else if (mode0 == V4SImode && mode1 == V8HImode)
        type = v4si_ftype_v8hi;
      else if (mode0 == V2SImode && mode1 == V2SImode)
        type = v2si_ftype_v2si;
      else if (mode0 == V2SFmode && mode1 == V2SFmode)
        type = v2sf_ftype_v2sf;
      else if (mode0 == V2SFmode && mode1 == V2SImode)
        type = v2sf_ftype_v2si;
      else if (mode0 == V2SImode && mode1 == V2SFmode)
        type = v2si_ftype_v2sf;
      else if (mode0 == V2SImode && mode1 == QImode)
        type = v2si_ftype_char;
      else
        gcc_unreachable ();

      def_builtin (d->mask, d->name, type, d->code);
    }
}
9250
 
9251
static void
9252
rs6000_init_libfuncs (void)
9253
{
9254
  if (DEFAULT_ABI != ABI_V4 && TARGET_XCOFF
9255
      && !TARGET_POWER2 && !TARGET_POWERPC)
9256
    {
9257
      /* AIX library routines for float->int conversion.  */
9258
      set_conv_libfunc (sfix_optab, SImode, DFmode, "__itrunc");
9259
      set_conv_libfunc (ufix_optab, SImode, DFmode, "__uitrunc");
9260
      set_conv_libfunc (sfix_optab, SImode, TFmode, "_qitrunc");
9261
      set_conv_libfunc (ufix_optab, SImode, TFmode, "_quitrunc");
9262
    }
9263
 
9264
  if (!TARGET_IEEEQUAD)
9265
      /* AIX/Darwin/64-bit Linux quad floating point routines.  */
9266
    if (!TARGET_XL_COMPAT)
9267
      {
9268
        set_optab_libfunc (add_optab, TFmode, "__gcc_qadd");
9269
        set_optab_libfunc (sub_optab, TFmode, "__gcc_qsub");
9270
        set_optab_libfunc (smul_optab, TFmode, "__gcc_qmul");
9271
        set_optab_libfunc (sdiv_optab, TFmode, "__gcc_qdiv");
9272
 
9273
        if (TARGET_SOFT_FLOAT)
9274
          {
9275
            set_optab_libfunc (neg_optab, TFmode, "__gcc_qneg");
9276
            set_optab_libfunc (eq_optab, TFmode, "__gcc_qeq");
9277
            set_optab_libfunc (ne_optab, TFmode, "__gcc_qne");
9278
            set_optab_libfunc (gt_optab, TFmode, "__gcc_qgt");
9279
            set_optab_libfunc (ge_optab, TFmode, "__gcc_qge");
9280
            set_optab_libfunc (lt_optab, TFmode, "__gcc_qlt");
9281
            set_optab_libfunc (le_optab, TFmode, "__gcc_qle");
9282
            set_optab_libfunc (unord_optab, TFmode, "__gcc_qunord");
9283
 
9284
            set_conv_libfunc (sext_optab, TFmode, SFmode, "__gcc_stoq");
9285
            set_conv_libfunc (sext_optab, TFmode, DFmode, "__gcc_dtoq");
9286
            set_conv_libfunc (trunc_optab, SFmode, TFmode, "__gcc_qtos");
9287
            set_conv_libfunc (trunc_optab, DFmode, TFmode, "__gcc_qtod");
9288
            set_conv_libfunc (sfix_optab, SImode, TFmode, "__gcc_qtoi");
9289
            set_conv_libfunc (ufix_optab, SImode, TFmode, "__gcc_qtou");
9290
            set_conv_libfunc (sfloat_optab, TFmode, SImode, "__gcc_itoq");
9291
            set_conv_libfunc (ufloat_optab, TFmode, SImode, "__gcc_utoq");
9292
          }
9293
      }
9294
    else
9295
      {
9296
        set_optab_libfunc (add_optab, TFmode, "_xlqadd");
9297
        set_optab_libfunc (sub_optab, TFmode, "_xlqsub");
9298
        set_optab_libfunc (smul_optab, TFmode, "_xlqmul");
9299
        set_optab_libfunc (sdiv_optab, TFmode, "_xlqdiv");
9300
      }
9301
  else
9302
    {
9303
      /* 32-bit SVR4 quad floating point routines.  */
9304
 
9305
      set_optab_libfunc (add_optab, TFmode, "_q_add");
9306
      set_optab_libfunc (sub_optab, TFmode, "_q_sub");
9307
      set_optab_libfunc (neg_optab, TFmode, "_q_neg");
9308
      set_optab_libfunc (smul_optab, TFmode, "_q_mul");
9309
      set_optab_libfunc (sdiv_optab, TFmode, "_q_div");
9310
      if (TARGET_PPC_GPOPT || TARGET_POWER2)
9311
        set_optab_libfunc (sqrt_optab, TFmode, "_q_sqrt");
9312
 
9313
      set_optab_libfunc (eq_optab, TFmode, "_q_feq");
9314
      set_optab_libfunc (ne_optab, TFmode, "_q_fne");
9315
      set_optab_libfunc (gt_optab, TFmode, "_q_fgt");
9316
      set_optab_libfunc (ge_optab, TFmode, "_q_fge");
9317
      set_optab_libfunc (lt_optab, TFmode, "_q_flt");
9318
      set_optab_libfunc (le_optab, TFmode, "_q_fle");
9319
 
9320
      set_conv_libfunc (sext_optab, TFmode, SFmode, "_q_stoq");
9321
      set_conv_libfunc (sext_optab, TFmode, DFmode, "_q_dtoq");
9322
      set_conv_libfunc (trunc_optab, SFmode, TFmode, "_q_qtos");
9323
      set_conv_libfunc (trunc_optab, DFmode, TFmode, "_q_qtod");
9324
      set_conv_libfunc (sfix_optab, SImode, TFmode, "_q_qtoi");
9325
      set_conv_libfunc (ufix_optab, SImode, TFmode, "_q_qtou");
9326
      set_conv_libfunc (sfloat_optab, TFmode, SImode, "_q_itoq");
9327
      set_conv_libfunc (ufloat_optab, TFmode, SImode, "_q_utoq");
9328
    }
9329
}
9330
 
9331
 
9332
/* Expand a block clear operation, and return 1 if successful.  Return 0
9333
   if we should let the compiler generate normal code.
9334
 
9335
   operands[0] is the destination
9336
   operands[1] is the length
9337
   operands[3] is the alignment */
9338
 
9339
int
9340
expand_block_clear (rtx operands[])
9341
{
9342
  rtx orig_dest = operands[0];
9343
  rtx bytes_rtx = operands[1];
9344
  rtx align_rtx = operands[3];
9345
  bool constp   = (GET_CODE (bytes_rtx) == CONST_INT);
9346
  HOST_WIDE_INT align;
9347
  HOST_WIDE_INT bytes;
9348
  int offset;
9349
  int clear_bytes;
9350
  int clear_step;
9351
 
9352
  /* If this is not a fixed size move, just call memcpy */
9353
  if (! constp)
9354
    return 0;
9355
 
9356
  /* This must be a fixed size alignment  */
9357
  gcc_assert (GET_CODE (align_rtx) == CONST_INT);
9358
  align = INTVAL (align_rtx) * BITS_PER_UNIT;
9359
 
9360
  /* Anything to clear? */
9361
  bytes = INTVAL (bytes_rtx);
9362
  if (bytes <= 0)
9363
    return 1;
9364
 
9365
  /* Use the builtin memset after a point, to avoid huge code bloat.
9366
     When optimize_size, avoid any significant code bloat; calling
9367
     memset is about 4 instructions, so allow for one instruction to
9368
     load zero and three to do clearing.  */
9369
  if (TARGET_ALTIVEC && align >= 128)
9370
    clear_step = 16;
9371
  else if (TARGET_POWERPC64 && align >= 32)
9372
    clear_step = 8;
9373
  else
9374
    clear_step = 4;
9375
 
9376
  if (optimize_size && bytes > 3 * clear_step)
9377
    return 0;
9378
  if (! optimize_size && bytes > 8 * clear_step)
9379
    return 0;
9380
 
9381
  for (offset = 0; bytes > 0; offset += clear_bytes, bytes -= clear_bytes)
9382
    {
9383
      enum machine_mode mode = BLKmode;
9384
      rtx dest;
9385
 
9386
      if (bytes >= 16 && TARGET_ALTIVEC && align >= 128)
9387
        {
9388
          clear_bytes = 16;
9389
          mode = V4SImode;
9390
        }
9391
      else if (bytes >= 8 && TARGET_POWERPC64
9392
          /* 64-bit loads and stores require word-aligned
9393
             displacements.  */
9394
          && (align >= 64 || (!STRICT_ALIGNMENT && align >= 32)))
9395
        {
9396
          clear_bytes = 8;
9397
          mode = DImode;
9398
        }
9399
      else if (bytes >= 4 && (align >= 32 || !STRICT_ALIGNMENT))
9400
        {                       /* move 4 bytes */
9401
          clear_bytes = 4;
9402
          mode = SImode;
9403
        }
9404
      else if (bytes >= 2 && (align >= 16 || !STRICT_ALIGNMENT))
9405
        {                       /* move 2 bytes */
9406
          clear_bytes = 2;
9407
          mode = HImode;
9408
        }
9409
      else /* move 1 byte at a time */
9410
        {
9411
          clear_bytes = 1;
9412
          mode = QImode;
9413
        }
9414
 
9415
      dest = adjust_address (orig_dest, mode, offset);
9416
 
9417
      emit_move_insn (dest, CONST0_RTX (mode));
9418
    }
9419
 
9420
  return 1;
9421
}
9422
 
9423
 
9424
/* Expand a block move operation, and return 1 if successful.  Return 0
9425
   if we should let the compiler generate normal code.
9426
 
9427
   operands[0] is the destination
9428
   operands[1] is the source
9429
   operands[2] is the length
9430
   operands[3] is the alignment */
9431
 
9432
/* Number of register-to-register stores we buffer before emitting them,
   so several loads can be scheduled ahead of their stores.  */
#define MAX_MOVE_REG 4

int
expand_block_move (rtx operands[])
{
  rtx orig_dest = operands[0];
  rtx orig_src  = operands[1];
  rtx bytes_rtx = operands[2];
  rtx align_rtx = operands[3];
  int constp    = (GET_CODE (bytes_rtx) == CONST_INT);
  int align;
  int bytes;
  int offset;
  int move_bytes;
  /* Pending store insns, flushed in batches (see MAX_MOVE_REG above).  */
  rtx stores[MAX_MOVE_REG];
  int num_reg = 0;

  /* If this is not a fixed size move, just call memcpy */
  if (! constp)
    return 0;

  /* This must be a fixed size alignment */
  gcc_assert (GET_CODE (align_rtx) == CONST_INT);
  /* ALIGN is kept in bits from here on.  */
  align = INTVAL (align_rtx) * BITS_PER_UNIT;

  /* Anything to move? */
  bytes = INTVAL (bytes_rtx);
  if (bytes <= 0)
    return 1;

  /* store_one_arg depends on expand_block_move to handle at least the size of
     reg_parm_stack_space.  */
  if (bytes > (TARGET_POWERPC64 ? 64 : 32))
    return 0;

  /* Peel off the largest chunk we can move at each iteration; MODE stays
     BLKmode when a string (movmemsi) pattern is used instead of a plain
     load/store pair.  */
  for (offset = 0; bytes > 0; offset += move_bytes, bytes -= move_bytes)
    {
      union {
        rtx (*movmemsi) (rtx, rtx, rtx, rtx);
        rtx (*mov) (rtx, rtx);
      } gen_func;
      enum machine_mode mode = BLKmode;
      rtx src, dest;

      /* Altivec first, since it will be faster than a string move
         when it applies, and usually not significantly larger.  */
      if (TARGET_ALTIVEC && bytes >= 16 && align >= 128)
        {
          move_bytes = 16;
          mode = V4SImode;
          gen_func.mov = gen_movv4si;
        }
      else if (TARGET_STRING
          && bytes > 24         /* move up to 32 bytes at a time */
          && ! fixed_regs[5]
          && ! fixed_regs[6]
          && ! fixed_regs[7]
          && ! fixed_regs[8]
          && ! fixed_regs[9]
          && ! fixed_regs[10]
          && ! fixed_regs[11]
          && ! fixed_regs[12])
        {
          /* The 8-register string move clobbers r5..r12, so all of them
             must be available.  */
          move_bytes = (bytes > 32) ? 32 : bytes;
          gen_func.movmemsi = gen_movmemsi_8reg;
        }
      else if (TARGET_STRING
               && bytes > 16    /* move up to 24 bytes at a time */
               && ! fixed_regs[5]
               && ! fixed_regs[6]
               && ! fixed_regs[7]
               && ! fixed_regs[8]
               && ! fixed_regs[9]
               && ! fixed_regs[10])
        {
          move_bytes = (bytes > 24) ? 24 : bytes;
          gen_func.movmemsi = gen_movmemsi_6reg;
        }
      else if (TARGET_STRING
               && bytes > 8     /* move up to 16 bytes at a time */
               && ! fixed_regs[5]
               && ! fixed_regs[6]
               && ! fixed_regs[7]
               && ! fixed_regs[8])
        {
          move_bytes = (bytes > 16) ? 16 : bytes;
          gen_func.movmemsi = gen_movmemsi_4reg;
        }
      else if (bytes >= 8 && TARGET_POWERPC64
               /* 64-bit loads and stores require word-aligned
                  displacements.  */
               && (align >= 64 || (!STRICT_ALIGNMENT && align >= 32)))
        {
          move_bytes = 8;
          mode = DImode;
          gen_func.mov = gen_movdi;
        }
      else if (TARGET_STRING && bytes > 4 && !TARGET_POWERPC64)
        {                       /* move up to 8 bytes at a time */
          move_bytes = (bytes > 8) ? 8 : bytes;
          gen_func.movmemsi = gen_movmemsi_2reg;
        }
      else if (bytes >= 4 && (align >= 32 || !STRICT_ALIGNMENT))
        {                       /* move 4 bytes */
          move_bytes = 4;
          mode = SImode;
          gen_func.mov = gen_movsi;
        }
      else if (bytes >= 2 && (align >= 16 || !STRICT_ALIGNMENT))
        {                       /* move 2 bytes */
          move_bytes = 2;
          mode = HImode;
          gen_func.mov = gen_movhi;
        }
      else if (TARGET_STRING && bytes > 1)
        {                       /* move up to 4 bytes at a time */
          move_bytes = (bytes > 4) ? 4 : bytes;
          gen_func.movmemsi = gen_movmemsi_1reg;
        }
      else /* move 1 byte at a time */
        {
          move_bytes = 1;
          mode = QImode;
          gen_func.mov = gen_movqi;
        }

      src = adjust_address (orig_src, mode, offset);
      dest = adjust_address (orig_dest, mode, offset);

      if (mode != BLKmode)
        {
          /* Emit the load now, but queue the matching store so a group
             of loads can issue before any store.  */
          rtx tmp_reg = gen_reg_rtx (mode);

          emit_insn ((*gen_func.mov) (tmp_reg, src));
          stores[num_reg++] = (*gen_func.mov) (dest, tmp_reg);
        }

      /* Flush queued stores when switching to a string move, when the
         buffer is full, or on the final chunk.  */
      if (mode == BLKmode || num_reg >= MAX_MOVE_REG || bytes == move_bytes)
        {
          int i;
          for (i = 0; i < num_reg; i++)
            emit_insn (stores[i]);
          num_reg = 0;
        }

      if (mode == BLKmode)
        {
          /* Move the address into scratch registers.  The movmemsi
             patterns require zero offset.  */
          if (!REG_P (XEXP (src, 0)))
            {
              rtx src_reg = copy_addr_to_reg (XEXP (src, 0));
              src = replace_equiv_address (src, src_reg);
            }
          set_mem_size (src, GEN_INT (move_bytes));

          if (!REG_P (XEXP (dest, 0)))
            {
              rtx dest_reg = copy_addr_to_reg (XEXP (dest, 0));
              dest = replace_equiv_address (dest, dest_reg);
            }
          set_mem_size (dest, GEN_INT (move_bytes));

          emit_insn ((*gen_func.movmemsi) (dest, src,
                                           GEN_INT (move_bytes & 31),
                                           align_rtx));
        }
    }

  return 1;
}
9603
 
9604
 
9605
/* Return a string to perform a load_multiple operation.
9606
   operands[0] is the vector.
9607
   operands[1] is the source address.
9608
   operands[2] is the first destination register.  */
9609
 
9610
const char *
rs6000_output_load_multiple (rtx operands[3])
{
  /* We have to handle the case where the pseudo used to contain the address
     is assigned to one of the output registers.  */
  int i, j;
  int words = XVECLEN (operands[0], 0);
  rtx xop[10];

  /* A one-word "multiple" is just a single lwz.  */
  if (XVECLEN (operands[0], 0) == 1)
    return "{l|lwz} %2,0(%1)";

  /* Look for an output register that the address register overlaps.  */
  for (i = 0; i < words; i++)
    if (refers_to_regno_p (REGNO (operands[2]) + i,
                           REGNO (operands[2]) + i + 1, operands[1], 0))
      {
        if (i == words-1)
          {
            /* Address register is the last destination: load the first
               words - 1 registers with lswi, then load the last word
               (which overwrites the address) separately.  */
            xop[0] = GEN_INT (4 * (words-1));
            xop[1] = operands[1];
            xop[2] = operands[2];
            output_asm_insn ("{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,%0(%1)", xop);
            return "";
          }
        else if (i == 0)
          {
            /* Address register is the first destination: bump the address
               by 4, lswi into the remaining registers, then load the
               first word last from the original address (now at -4).  */
            xop[0] = GEN_INT (4 * (words-1));
            xop[1] = operands[1];
            xop[2] = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
            output_asm_insn ("{cal %1,4(%1)|addi %1,%1,4}\n\t{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,-4(%1)", xop);
            return "";
          }
        else
          {
            /* Address register lands in the middle: emit individual lwz
               insns for every other word, and load the overlapping word
               last so the address survives until then.  */
            for (j = 0; j < words; j++)
              if (j != i)
                {
                  xop[0] = GEN_INT (j * 4);
                  xop[1] = operands[1];
                  xop[2] = gen_rtx_REG (SImode, REGNO (operands[2]) + j);
                  output_asm_insn ("{l|lwz} %2,%0(%1)", xop);
                }
            xop[0] = GEN_INT (i * 4);
            xop[1] = operands[1];
            output_asm_insn ("{l|lwz} %1,%0(%1)", xop);
            return "";
          }
      }

  /* No overlap: a plain load-string-immediate does the whole job.  */
  return "{lsi|lswi} %2,%1,%N0";
}
9661
 
9662
 
9663
/* A validation routine: say whether CODE, a condition code, and MODE
9664
   match.  The other alternatives either don't make sense or should
9665
   never be generated.  */
9666
 
9667
void
validate_condition_mode (enum rtx_code code, enum machine_mode mode)
{
  /* CODE must be a comparison and MODE a condition-code mode.  */
  gcc_assert ((GET_RTX_CLASS (code) == RTX_COMPARE
               || GET_RTX_CLASS (code) == RTX_COMM_COMPARE)
              && GET_MODE_CLASS (mode) == MODE_CC);

  /* These don't make sense.  Signed orderings never pair with the
     unsigned CC mode.  */
  gcc_assert ((code != GT && code != LT && code != GE && code != LE)
              || mode != CCUNSmode);

  /* Conversely, unsigned orderings require CCUNSmode.  */
  gcc_assert ((code != GTU && code != LTU && code != GEU && code != LEU)
              || mode == CCUNSmode);

  /* Ordered/unordered codes only exist for floating-point compares.  */
  gcc_assert (mode == CCFPmode
              || (code != ORDERED && code != UNORDERED
                  && code != UNEQ && code != LTGT
                  && code != UNGT && code != UNLT
                  && code != UNGE && code != UNLE));

  /* These should never be generated except for
     flag_finite_math_only.  */
  gcc_assert (mode != CCFPmode
              || flag_finite_math_only
              || (code != LE && code != GE
                  && code != UNEQ && code != LTGT
                  && code != UNGT && code != UNLT));

  /* These are invalid; the information is not there.  CCEQmode only
     records equality.  */
  gcc_assert (mode != CCEQmode || code == EQ || code == NE);
}
9698
 
9699
 
9700
/* Return 1 if ANDOP is a mask that has no bits on that are not in the
9701
   mask required to convert the result of a rotate insn into a shift
9702
   left insn of SHIFTOP bits.  Both are known to be SImode CONST_INT.  */
9703
 
9704
int
9705
includes_lshift_p (rtx shiftop, rtx andop)
9706
{
9707
  unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
9708
 
9709
  shift_mask <<= INTVAL (shiftop);
9710
 
9711
  return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
9712
}
9713
 
9714
/* Similar, but for right shift.  */
9715
 
9716
int
9717
includes_rshift_p (rtx shiftop, rtx andop)
9718
{
9719
  unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
9720
 
9721
  shift_mask >>= INTVAL (shiftop);
9722
 
9723
  return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
9724
}
9725
 
9726
/* Return 1 if ANDOP is a mask suitable for use with an rldic insn
9727
   to perform a left shift.  It must have exactly SHIFTOP least
9728
   significant 0's, then one or more 1's, then zero or more 0's.  */
9729
 
9730
int
includes_rldic_lshift_p (rtx shiftop, rtx andop)
{
  if (GET_CODE (andop) == CONST_INT)
    {
      /* Host word is at least 64 bits wide here, so the whole mask
         fits in one HOST_WIDE_INT.  */
      HOST_WIDE_INT c, lsb, shift_mask;

      c = INTVAL (andop);
      /* All-zeros or all-ones is not a usable rldic mask.  */
      if (c == 0 || c == ~0)
        return 0;

      shift_mask = ~0;
      shift_mask <<= INTVAL (shiftop);

      /* Find the least significant one bit.  */
      lsb = c & -c;

      /* It must coincide with the LSB of the shift mask.  */
      if (-lsb != shift_mask)
        return 0;

      /* Invert to look for the next transition (if any).  */
      c = ~c;

      /* Remove the low group of ones (originally low group of zeros).  */
      c &= -lsb;

      /* Again find the lsb, and check we have all 1's above.  */
      lsb = c & -c;
      return c == -lsb;
    }
  else if (GET_CODE (andop) == CONST_DOUBLE
           && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
    {
      /* 32-bit host: the 64-bit mask is split across two words.
         Run the same transition check across the low/high pair.  */
      HOST_WIDE_INT low, high, lsb;
      HOST_WIDE_INT shift_mask_low, shift_mask_high;

      low = CONST_DOUBLE_LOW (andop);
      if (HOST_BITS_PER_WIDE_INT < 64)
        high = CONST_DOUBLE_HIGH (andop);

      /* Reject all-zeros and all-ones masks.  */
      if ((low == 0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == 0))
          || (low == ~0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0)))
        return 0;

      if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
        {
          /* Mask lives entirely in the high word; the shift must reach
             past the low word.  */
          shift_mask_high = ~0;
          if (INTVAL (shiftop) > 32)
            shift_mask_high <<= INTVAL (shiftop) - 32;

          lsb = high & -high;

          if (-lsb != shift_mask_high || INTVAL (shiftop) < 32)
            return 0;

          high = ~high;
          high &= -lsb;

          lsb = high & -high;
          return high == -lsb;
        }

      /* Mask's low transition is in the low word.  */
      shift_mask_low = ~0;
      shift_mask_low <<= INTVAL (shiftop);

      lsb = low & -low;

      if (-lsb != shift_mask_low)
        return 0;

      if (HOST_BITS_PER_WIDE_INT < 64)
        high = ~high;
      low = ~low;
      low &= -lsb;

      if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
        {
          /* Upper transition (if any) is in the high word.  */
          lsb = high & -high;
          return high == -lsb;
        }

      lsb = low & -low;
      /* Upper transition in the low word; high word must then be
         all ones.  */
      return low == -lsb && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0);
    }
  else
    return 0;
}
9818
 
9819
/* Return 1 if ANDOP is a mask suitable for use with an rldicr insn
9820
   to perform a left shift.  It must have SHIFTOP or more least
9821
   significant 0's, with the remainder of the word 1's.  */
9822
 
9823
int
includes_rldicr_lshift_p (rtx shiftop, rtx andop)
{
  if (GET_CODE (andop) == CONST_INT)
    {
      /* Host word is at least 64 bits wide here, so the whole mask
         fits in one HOST_WIDE_INT.  */
      HOST_WIDE_INT c, lsb, shift_mask;

      shift_mask = ~0;
      shift_mask <<= INTVAL (shiftop);
      c = INTVAL (andop);

      /* Find the least significant one bit.  */
      lsb = c & -c;

      /* It must be covered by the shift mask.
         This test also rejects c == 0.  */
      if ((lsb & shift_mask) == 0)
        return 0;

      /* Check we have all 1's above the transition, and reject all 1's.  */
      return c == -lsb && lsb != 1;
    }
  else if (GET_CODE (andop) == CONST_DOUBLE
           && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
    {
      /* 32-bit host: the 64-bit mask is split across two words.  */
      HOST_WIDE_INT low, lsb, shift_mask_low;

      low = CONST_DOUBLE_LOW (andop);

      if (HOST_BITS_PER_WIDE_INT < 64)
        {
          HOST_WIDE_INT high, shift_mask_high;

          high = CONST_DOUBLE_HIGH (andop);

          if (low == 0)
            {
              /* Mask lives entirely in the high word; redo the check
                 against the upper half of the shift mask.  */
              shift_mask_high = ~0;
              if (INTVAL (shiftop) > 32)
                shift_mask_high <<= INTVAL (shiftop) - 32;

              lsb = high & -high;

              if ((lsb & shift_mask_high) == 0)
                return 0;

              return high == -lsb;
            }
          /* Low word has bits, so the high word must already be all
             ones for the "remainder of the word 1's" shape.  */
          if (high != ~0)
            return 0;
        }

      shift_mask_low = ~0;
      shift_mask_low <<= INTVAL (shiftop);

      lsb = low & -low;

      /* Lowest set bit must be within reach of the shift.  */
      if ((lsb & shift_mask_low) == 0)
        return 0;

      /* All ones above the transition; reject an all-ones mask.  */
      return low == -lsb && lsb != 1;
    }
  else
    return 0;
}
9888
 
9889
/* Return 1 if operands will generate a valid arguments to rlwimi
9890
instruction for insert with right shift in 64-bit mode.  The mask may
9891
not start on the first bit or stop on the last bit because wrap-around
9892
effects of instruction do not correspond to semantics of RTL insn.  */
9893
 
9894
int
9895
insvdi_rshift_rlwimi_p (rtx sizeop, rtx startop, rtx shiftop)
9896
{
9897
  if (INTVAL (startop) > 32
9898
      && INTVAL (startop) < 64
9899
      && INTVAL (sizeop) > 1
9900
      && INTVAL (sizeop) + INTVAL (startop) < 64
9901
      && INTVAL (shiftop) > 0
9902
      && INTVAL (sizeop) + INTVAL (shiftop) < 32
9903
      && (64 - (INTVAL (shiftop) & 63)) >= INTVAL (sizeop))
9904
    return 1;
9905
 
9906
  return 0;
9907
}
9908
 
9909
/* Return 1 if REGNO (reg1) == REGNO (reg2) - 1 making them candidates
9910
   for lfq and stfq insns iff the registers are hard registers.   */
9911
 
9912
int
9913
registers_ok_for_quad_peep (rtx reg1, rtx reg2)
9914
{
9915
  /* We might have been passed a SUBREG.  */
9916
  if (GET_CODE (reg1) != REG || GET_CODE (reg2) != REG)
9917
    return 0;
9918
 
9919
  /* We might have been passed non floating point registers.  */
9920
  if (!FP_REGNO_P (REGNO (reg1))
9921
      || !FP_REGNO_P (REGNO (reg2)))
9922
    return 0;
9923
 
9924
  return (REGNO (reg1) == REGNO (reg2) - 1);
9925
}
9926
 
9927
/* Return 1 if addr1 and addr2 are suitable for lfq or stfq insn.
9928
   addr1 and addr2 must be in consecutive memory locations
9929
   (addr2 == addr1 + 8).  */
9930
 
9931
int
9932
mems_ok_for_quad_peep (rtx mem1, rtx mem2)
9933
{
9934
  rtx addr1, addr2;
9935
  unsigned int reg1, reg2;
9936
  int offset1, offset2;
9937
 
9938
  /* The mems cannot be volatile.  */
9939
  if (MEM_VOLATILE_P (mem1) || MEM_VOLATILE_P (mem2))
9940
    return 0;
9941
 
9942
  addr1 = XEXP (mem1, 0);
9943
  addr2 = XEXP (mem2, 0);
9944
 
9945
  /* Extract an offset (if used) from the first addr.  */
9946
  if (GET_CODE (addr1) == PLUS)
9947
    {
9948
      /* If not a REG, return zero.  */
9949
      if (GET_CODE (XEXP (addr1, 0)) != REG)
9950
        return 0;
9951
      else
9952
        {
9953
          reg1 = REGNO (XEXP (addr1, 0));
9954
          /* The offset must be constant!  */
9955
          if (GET_CODE (XEXP (addr1, 1)) != CONST_INT)
9956
            return 0;
9957
          offset1 = INTVAL (XEXP (addr1, 1));
9958
        }
9959
    }
9960
  else if (GET_CODE (addr1) != REG)
9961
    return 0;
9962
  else
9963
    {
9964
      reg1 = REGNO (addr1);
9965
      /* This was a simple (mem (reg)) expression.  Offset is 0.  */
9966
      offset1 = 0;
9967
    }
9968
 
9969
  /* And now for the second addr.  */
9970
  if (GET_CODE (addr2) == PLUS)
9971
    {
9972
      /* If not a REG, return zero.  */
9973
      if (GET_CODE (XEXP (addr2, 0)) != REG)
9974
        return 0;
9975
      else
9976
        {
9977
          reg2 = REGNO (XEXP (addr2, 0));
9978
          /* The offset must be constant. */
9979
          if (GET_CODE (XEXP (addr2, 1)) != CONST_INT)
9980
            return 0;
9981
          offset2 = INTVAL (XEXP (addr2, 1));
9982
        }
9983
    }
9984
  else if (GET_CODE (addr2) != REG)
9985
    return 0;
9986
  else
9987
    {
9988
      reg2 = REGNO (addr2);
9989
      /* This was a simple (mem (reg)) expression.  Offset is 0.  */
9990
      offset2 = 0;
9991
    }
9992
 
9993
  /* Both of these must have the same base register.  */
9994
  if (reg1 != reg2)
9995
    return 0;
9996
 
9997
  /* The offset for the second addr must be 8 more than the first addr.  */
9998
  if (offset2 != offset1 + 8)
9999
    return 0;
10000
 
10001
  /* All the tests passed.  addr1 and addr2 are valid for lfq or stfq
10002
     instructions.  */
10003
  return 1;
10004
}
10005
 
10006
/* Return the register class of a scratch register needed to copy IN into
10007
   or out of a register in CLASS in MODE.  If it can be done directly,
10008
   NO_REGS is returned.  */
10009
 
10010
enum reg_class
rs6000_secondary_reload_class (enum reg_class class,
                               enum machine_mode mode ATTRIBUTE_UNUSED,
                               rtx in)
{
  int regno;

  if (TARGET_ELF || (DEFAULT_ABI == ABI_DARWIN
#if TARGET_MACHO
                     && MACHOPIC_INDIRECT
#endif
                     ))
    {
      /* We cannot copy a symbolic operand directly into anything
         other than BASE_REGS for TARGET_ELF.  So indicate that a
         register from BASE_REGS is needed as an intermediate
         register.

         On Darwin, pic addresses require a load from memory, which
         needs a base register.  */
      if (class != BASE_REGS
          && (GET_CODE (in) == SYMBOL_REF
              || GET_CODE (in) == HIGH
              || GET_CODE (in) == LABEL_REF
              || GET_CODE (in) == CONST))
        return BASE_REGS;
    }

  /* Resolve IN to a hard register number, or -1 when it is not a
     (resolvable) register — e.g. a constant, memory, or an
     unallocated pseudo.  */
  if (GET_CODE (in) == REG)
    {
      regno = REGNO (in);
      if (regno >= FIRST_PSEUDO_REGISTER)
        {
          regno = true_regnum (in);
          if (regno >= FIRST_PSEUDO_REGISTER)
            regno = -1;
        }
    }
  else if (GET_CODE (in) == SUBREG)
    {
      regno = true_regnum (in);
      if (regno >= FIRST_PSEUDO_REGISTER)
        regno = -1;
    }
  else
    regno = -1;

  /* We can place anything into GENERAL_REGS and can put GENERAL_REGS
     into anything.  */
  if (class == GENERAL_REGS || class == BASE_REGS
      || (regno >= 0 && INT_REGNO_P (regno)))
    return NO_REGS;

  /* Constants, memory, and FP registers can go into FP registers.  */
  if ((regno == -1 || FP_REGNO_P (regno))
      && (class == FLOAT_REGS || class == NON_SPECIAL_REGS))
    return NO_REGS;

  /* Memory, and AltiVec registers can go into AltiVec registers.  */
  if ((regno == -1 || ALTIVEC_REGNO_P (regno))
      && class == ALTIVEC_REGS)
    return NO_REGS;

  /* We can copy among the CR registers.  */
  if ((class == CR_REGS || class == CR0_REGS)
      && regno >= 0 && CR_REGNO_P (regno))
    return NO_REGS;

  /* Otherwise, we need GENERAL_REGS.  */
  return GENERAL_REGS;
}
10081
 
10082
/* Given a comparison operation, return the bit number in CCR to test.  We
10083
   know this is a valid comparison.
10084
 
10085
   SCC_P is 1 if this is for an scc.  That means that %D will have been
10086
   used instead of %C, so the bits will be in different places.
10087
 
10088
   Return -1 if OP isn't a valid comparison for some reason.  */
10089
 
10090
int
ccr_bit (rtx op, int scc_p)
{
  enum rtx_code code = GET_CODE (op);
  enum machine_mode cc_mode;
  int cc_regnum;
  int base_bit;
  rtx reg;

  if (!COMPARISON_P (op))
    return -1;

  reg = XEXP (op, 0);

  /* The first operand must be a condition register.  */
  gcc_assert (GET_CODE (reg) == REG && CR_REGNO_P (REGNO (reg)));

  cc_mode = GET_MODE (reg);
  cc_regnum = REGNO (reg);
  /* Each CR field contributes 4 bits; BASE_BIT is the field's first.  */
  base_bit = 4 * (cc_regnum - CR0_REGNO);

  validate_condition_mode (code, cc_mode);

  /* When generating a sCOND operation, only positive conditions are
     allowed.  */
  gcc_assert (!scc_p
              || code == EQ || code == GT || code == LT || code == UNORDERED
              || code == GTU || code == LTU);

  /* Within a CR field: bit 0 = LT, bit 1 = GT, bit 2 = EQ,
     bit 3 = SO/unordered.  */
  switch (code)
    {
    case NE:
      return scc_p ? base_bit + 3 : base_bit + 2;
    case EQ:
      return base_bit + 2;
    case GT:  case GTU:  case UNLE:
      return base_bit + 1;
    case LT:  case LTU:  case UNGE:
      return base_bit;
    case ORDERED:  case UNORDERED:
      return base_bit + 3;

    case GE:  case GEU:
      /* If scc, we will have done a cror to put the bit in the
         unordered position.  So test that bit.  For integer, this is ! LT
         unless this is an scc insn.  */
      return scc_p ? base_bit + 3 : base_bit;

    case LE:  case LEU:
      return scc_p ? base_bit + 3 : base_bit + 1;

    default:
      gcc_unreachable ();
    }
}
10144
 
10145
/* Return the GOT register.  */
10146
 
10147
rtx
rs6000_got_register (rtx value ATTRIBUTE_UNUSED)
{
  /* The second flow pass currently (June 1999) can't update
     regs_ever_live without disturbing other parts of the compiler, so
     update it here to make the prolog/epilogue code happy.  */
  if (no_new_pseudos && ! regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM])
    regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;

  /* Record that this function needs the PIC offset table at all.  */
  current_function_uses_pic_offset_table = 1;

  return pic_offset_table_rtx;
}
10160
 
10161
/* Function to init struct machine_function.
10162
   This will be called, via a pointer variable,
10163
   from push_function_context.  */
10164
 
10165
static struct machine_function *
rs6000_init_machine_status (void)
{
  /* Allocate a zero-initialized per-function machine_function in GC
     memory; all fields start out cleared.  */
  return ggc_alloc_cleared (sizeof (machine_function));
}
10170
 
10171
/* These macros test for integers and extract the low-order bits.
   INT_P accepts both CONST_INT and a VOIDmode CONST_DOUBLE (used for
   64-bit constants on 32-bit hosts); INT_LOWPART yields the low
   HOST_WIDE_INT of either.  */
#define INT_P(X)  \
((GET_CODE (X) == CONST_INT || GET_CODE (X) == CONST_DOUBLE)    \
 && GET_MODE (X) == VOIDmode)

#define INT_LOWPART(X) \
  (GET_CODE (X) == CONST_INT ? INTVAL (X) : CONST_DOUBLE_LOW (X))
10178
 
10179
/* Extract the mask-begin (MB) field from a 32-bit mask constant OP:
   the index, counting from the most significant bit, of the first 1
   bit of the (possibly wrap-around) mask.  */

int
extract_MB (rtx op)
{
  int i;
  unsigned long val = INT_LOWPART (op);

  /* If the high bit is zero, the value is the first 1 bit we find
     from the left.  */
  if ((val & 0x80000000) == 0)
    {
      /* An all-zero mask has no MB; must have at least one set bit.  */
      gcc_assert (val & 0xffffffff);

      i = 1;
      while (((val <<= 1) & 0x80000000) == 0)
        ++i;
      return i;
    }

  /* If the high bit is set and the low bit is not, or the mask is all
     1's, the value is zero.  */
  if ((val & 1) == 0 || (val & 0xffffffff) == 0xffffffff)
    return 0;

  /* Otherwise we have a wrap-around mask.  Look for the first 0 bit
     from the right.  */
  i = 31;
  while (((val >>= 1) & 1) != 0)
    --i;

  return i;
}
10210
 
10211
/* Extract the mask-end (ME) field from a 32-bit mask constant OP:
   the index, counting from the most significant bit, of the last 1
   bit of the (possibly wrap-around) mask.  */

int
extract_ME (rtx op)
{
  int i;
  unsigned long val = INT_LOWPART (op);

  /* If the low bit is zero, the value is the first 1 bit we find from
     the right.  */
  if ((val & 1) == 0)
    {
      /* An all-zero mask has no ME; must have at least one set bit.  */
      gcc_assert (val & 0xffffffff);

      i = 30;
      while (((val >>= 1) & 1) == 0)
        --i;

      return i;
    }

  /* If the low bit is set and the high bit is not, or the mask is all
     1's, the value is 31.  */
  if ((val & 0x80000000) == 0 || (val & 0xffffffff) == 0xffffffff)
    return 31;

  /* Otherwise we have a wrap-around mask.  Look for the first 0 bit
     from the left.  */
  i = 0;
  while (((val <<= 1) & 0x80000000) != 0)
    ++i;

  return i;
}
10243
 
10244
/* Locate some local-dynamic symbol still in use by this function
10245
   so that we can print its name in some tls_ld pattern.  */
10246
 
10247
static const char *
rs6000_get_some_local_dynamic_name (void)
{
  rtx insn;

  /* Reuse a previously discovered name, if any.  */
  if (cfun->machine->some_ld_name)
    return cfun->machine->some_ld_name;

  /* Scan every insn of the current function; the helper stores the
     first local-dynamic TLS symbol name it sees into
     cfun->machine->some_ld_name and returns nonzero.  */
  for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
    if (INSN_P (insn)
        && for_each_rtx (&PATTERN (insn),
                         rs6000_get_some_local_dynamic_name_1, 0))
      return cfun->machine->some_ld_name;

  /* Callers only ask when such a symbol must exist.  */
  gcc_unreachable ();
}
10263
 
10264
/* Helper function for rs6000_get_some_local_dynamic_name.  */
10265
 
10266
static int
10267
rs6000_get_some_local_dynamic_name_1 (rtx *px, void *data ATTRIBUTE_UNUSED)
10268
{
10269
  rtx x = *px;
10270
 
10271
  if (GET_CODE (x) == SYMBOL_REF)
10272
    {
10273
      const char *str = XSTR (x, 0);
10274
      if (SYMBOL_REF_TLS_MODEL (x) == TLS_MODEL_LOCAL_DYNAMIC)
10275
        {
10276
          cfun->machine->some_ld_name = str;
10277
          return 1;
10278
        }
10279
    }
10280
 
10281
  return 0;
10282
}
10283
 
10284
/* Write out a function code label.  */
10285
 
10286
void
rs6000_output_function_entry (FILE *file, const char *fname)
{
  /* Names already starting with '.' need no ABI-specific prefix.  */
  if (fname[0] != '.')
    {
      switch (DEFAULT_ABI)
        {
        default:
          gcc_unreachable ();

        case ABI_AIX:
          /* AIX function-code labels are dot-prefixed; without dot
             symbols, use an internal "L." label prefix instead.  */
          if (DOT_SYMBOLS)
            putc ('.', file);
          else
            ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "L.");
          break;

        case ABI_V4:
        case ABI_DARWIN:
          break;
        }
    }
  if (TARGET_AIX)
    RS6000_OUTPUT_BASENAME (file, fname);
  else
    assemble_name (file, fname);
}
10313
 
10314
/* Print an operand.  Recognize special options, documented below.  */
10315
 
10316
#if TARGET_ELF
/* Small-data relocation name and base register for print_operand:
   EABI uses the sda21 relocation with r0; otherwise sdarel with r13.  */
#define SMALL_DATA_RELOC ((rs6000_sdata == SDATA_EABI) ? "sda21" : "sdarel")
#define SMALL_DATA_REG ((rs6000_sdata == SDATA_EABI) ? 0 : 13)
#else
#define SMALL_DATA_RELOC "sda21"
#define SMALL_DATA_REG 0
#endif
10323
 
10324
void
10325
print_operand (FILE *file, rtx x, int code)
10326
{
10327
  int i;
10328
  HOST_WIDE_INT val;
10329
  unsigned HOST_WIDE_INT uval;
10330
 
10331
  switch (code)
10332
    {
10333
    case '.':
10334
      /* Write out an instruction after the call which may be replaced
10335
         with glue code by the loader.  This depends on the AIX version.  */
10336
      asm_fprintf (file, RS6000_CALL_GLUE);
10337
      return;
10338
 
10339
      /* %a is output_address.  */
10340
 
10341
    case 'A':
10342
      /* If X is a constant integer whose low-order 5 bits are zero,
10343
         write 'l'.  Otherwise, write 'r'.  This is a kludge to fix a bug
10344
         in the AIX assembler where "sri" with a zero shift count
10345
         writes a trash instruction.  */
10346
      if (GET_CODE (x) == CONST_INT && (INTVAL (x) & 31) == 0)
10347
        putc ('l', file);
10348
      else
10349
        putc ('r', file);
10350
      return;
10351
 
10352
    case 'b':
10353
      /* If constant, low-order 16 bits of constant, unsigned.
10354
         Otherwise, write normally.  */
10355
      if (INT_P (x))
10356
        fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 0xffff);
10357
      else
10358
        print_operand (file, x, 0);
10359
      return;
10360
 
10361
    case 'B':
10362
      /* If the low-order bit is zero, write 'r'; otherwise, write 'l'
10363
         for 64-bit mask direction.  */
10364
      putc (((INT_LOWPART (x) & 1) == 0 ? 'r' : 'l'), file);
10365
      return;
10366
 
10367
      /* %c is output_addr_const if a CONSTANT_ADDRESS_P, otherwise
10368
         output_operand.  */
10369
 
10370
    case 'c':
10371
      /* X is a CR register.  Print the number of the GT bit of the CR.  */
10372
      if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
10373
        output_operand_lossage ("invalid %%E value");
10374
      else
10375
        fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO) + 1);
10376
      return;
10377
 
10378
    case 'D':
10379
      /* Like 'J' but get to the GT bit only.  */
10380
      gcc_assert (GET_CODE (x) == REG);
10381
 
10382
      /* Bit 1 is GT bit.  */
10383
      i = 4 * (REGNO (x) - CR0_REGNO) + 1;
10384
 
10385
      /* Add one for shift count in rlinm for scc.  */
10386
      fprintf (file, "%d", i + 1);
10387
      return;
10388
 
10389
    case 'E':
10390
      /* X is a CR register.  Print the number of the EQ bit of the CR */
10391
      if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
10392
        output_operand_lossage ("invalid %%E value");
10393
      else
10394
        fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO) + 2);
10395
      return;
10396
 
10397
    case 'f':
10398
      /* X is a CR register.  Print the shift count needed to move it
10399
         to the high-order four bits.  */
10400
      if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
10401
        output_operand_lossage ("invalid %%f value");
10402
      else
10403
        fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO));
10404
      return;
10405
 
10406
    case 'F':
10407
      /* Similar, but print the count for the rotate in the opposite
10408
         direction.  */
10409
      if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
10410
        output_operand_lossage ("invalid %%F value");
10411
      else
10412
        fprintf (file, "%d", 32 - 4 * (REGNO (x) - CR0_REGNO));
10413
      return;
10414
 
10415
    case 'G':
10416
      /* X is a constant integer.  If it is negative, print "m",
10417
         otherwise print "z".  This is to make an aze or ame insn.  */
10418
      if (GET_CODE (x) != CONST_INT)
10419
        output_operand_lossage ("invalid %%G value");
10420
      else if (INTVAL (x) >= 0)
10421
        putc ('z', file);
10422
      else
10423
        putc ('m', file);
10424
      return;
10425
 
10426
    case 'h':
10427
      /* If constant, output low-order five bits.  Otherwise, write
10428
         normally.  */
10429
      if (INT_P (x))
10430
        fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 31);
10431
      else
10432
        print_operand (file, x, 0);
10433
      return;
10434
 
10435
    case 'H':
10436
      /* If constant, output low-order six bits.  Otherwise, write
10437
         normally.  */
10438
      if (INT_P (x))
10439
        fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 63);
10440
      else
10441
        print_operand (file, x, 0);
10442
      return;
10443
 
10444
    case 'I':
10445
      /* Print `i' if this is a constant, else nothing.  */
10446
      if (INT_P (x))
10447
        putc ('i', file);
10448
      return;
10449
 
10450
    case 'j':
10451
      /* Write the bit number in CCR for jump.  */
10452
      i = ccr_bit (x, 0);
10453
      if (i == -1)
10454
        output_operand_lossage ("invalid %%j code");
10455
      else
10456
        fprintf (file, "%d", i);
10457
      return;
10458
 
10459
    case 'J':
10460
      /* Similar, but add one for shift count in rlinm for scc and pass
10461
         scc flag to `ccr_bit'.  */
10462
      i = ccr_bit (x, 1);
10463
      if (i == -1)
10464
        output_operand_lossage ("invalid %%J code");
10465
      else
10466
        /* If we want bit 31, write a shift count of zero, not 32.  */
10467
        fprintf (file, "%d", i == 31 ? 0 : i + 1);
10468
      return;
10469
 
10470
    case 'k':
10471
      /* X must be a constant.  Write the 1's complement of the
10472
         constant.  */
10473
      if (! INT_P (x))
10474
        output_operand_lossage ("invalid %%k value");
10475
      else
10476
        fprintf (file, HOST_WIDE_INT_PRINT_DEC, ~ INT_LOWPART (x));
10477
      return;
10478
 
10479
    case 'K':
10480
      /* X must be a symbolic constant on ELF.  Write an
10481
         expression suitable for an 'addi' that adds in the low 16
10482
         bits of the MEM.  */
10483
      if (GET_CODE (x) != CONST)
10484
        {
10485
          print_operand_address (file, x);
10486
          fputs ("@l", file);
10487
        }
10488
      else
10489
        {
10490
          if (GET_CODE (XEXP (x, 0)) != PLUS
10491
              || (GET_CODE (XEXP (XEXP (x, 0), 0)) != SYMBOL_REF
10492
                  && GET_CODE (XEXP (XEXP (x, 0), 0)) != LABEL_REF)
10493
              || GET_CODE (XEXP (XEXP (x, 0), 1)) != CONST_INT)
10494
            output_operand_lossage ("invalid %%K value");
10495
          print_operand_address (file, XEXP (XEXP (x, 0), 0));
10496
          fputs ("@l", file);
10497
          /* For GNU as, there must be a non-alphanumeric character
10498
             between 'l' and the number.  The '-' is added by
10499
             print_operand() already.  */
10500
          if (INTVAL (XEXP (XEXP (x, 0), 1)) >= 0)
10501
            fputs ("+", file);
10502
          print_operand (file, XEXP (XEXP (x, 0), 1), 0);
10503
        }
10504
      return;
10505
 
10506
      /* %l is output_asm_label.  */
10507
 
10508
    case 'L':
10509
      /* Write second word of DImode or DFmode reference.  Works on register
10510
         or non-indexed memory only.  */
10511
      if (GET_CODE (x) == REG)
10512
        fputs (reg_names[REGNO (x) + 1], file);
10513
      else if (GET_CODE (x) == MEM)
10514
        {
10515
          /* Handle possible auto-increment.  Since it is pre-increment and
10516
             we have already done it, we can just use an offset of word.  */
10517
          if (GET_CODE (XEXP (x, 0)) == PRE_INC
10518
              || GET_CODE (XEXP (x, 0)) == PRE_DEC)
10519
            output_address (plus_constant (XEXP (XEXP (x, 0), 0),
10520
                                           UNITS_PER_WORD));
10521
          else
10522
            output_address (XEXP (adjust_address_nv (x, SImode,
10523
                                                     UNITS_PER_WORD),
10524
                                  0));
10525
 
10526
          if (small_data_operand (x, GET_MODE (x)))
10527
            fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
10528
                     reg_names[SMALL_DATA_REG]);
10529
        }
10530
      return;
10531
 
10532
    case 'm':
10533
      /* MB value for a mask operand.  */
10534
      if (! mask_operand (x, SImode))
10535
        output_operand_lossage ("invalid %%m value");
10536
 
10537
      fprintf (file, "%d", extract_MB (x));
10538
      return;
10539
 
10540
    case 'M':
10541
      /* ME value for a mask operand.  */
10542
      if (! mask_operand (x, SImode))
10543
        output_operand_lossage ("invalid %%M value");
10544
 
10545
      fprintf (file, "%d", extract_ME (x));
10546
      return;
10547
 
10548
      /* %n outputs the negative of its operand.  */
10549
 
10550
    case 'N':
10551
      /* Write the number of elements in the vector times 4.  */
10552
      if (GET_CODE (x) != PARALLEL)
10553
        output_operand_lossage ("invalid %%N value");
10554
      else
10555
        fprintf (file, "%d", XVECLEN (x, 0) * 4);
10556
      return;
10557
 
10558
    case 'O':
10559
      /* Similar, but subtract 1 first.  */
10560
      if (GET_CODE (x) != PARALLEL)
10561
        output_operand_lossage ("invalid %%O value");
10562
      else
10563
        fprintf (file, "%d", (XVECLEN (x, 0) - 1) * 4);
10564
      return;
10565
 
10566
    case 'p':
10567
      /* X is a CONST_INT that is a power of two.  Output the logarithm.  */
10568
      if (! INT_P (x)
10569
          || INT_LOWPART (x) < 0
10570
          || (i = exact_log2 (INT_LOWPART (x))) < 0)
10571
        output_operand_lossage ("invalid %%p value");
10572
      else
10573
        fprintf (file, "%d", i);
10574
      return;
10575
 
10576
    case 'P':
10577
      /* The operand must be an indirect memory reference.  The result
10578
         is the register name.  */
10579
      if (GET_CODE (x) != MEM || GET_CODE (XEXP (x, 0)) != REG
10580
          || REGNO (XEXP (x, 0)) >= 32)
10581
        output_operand_lossage ("invalid %%P value");
10582
      else
10583
        fputs (reg_names[REGNO (XEXP (x, 0))], file);
10584
      return;
10585
 
10586
    case 'q':
10587
      /* This outputs the logical code corresponding to a boolean
10588
         expression.  The expression may have one or both operands
10589
         negated (if one, only the first one).  For condition register
10590
         logical operations, it will also treat the negated
10591
         CR codes as NOTs, but not handle NOTs of them.  */
10592
      {
10593
        const char *const *t = 0;
10594
        const char *s;
10595
        enum rtx_code code = GET_CODE (x);
10596
        static const char * const tbl[3][3] = {
10597
          { "and", "andc", "nor" },
10598
          { "or", "orc", "nand" },
10599
          { "xor", "eqv", "xor" } };
10600
 
10601
        if (code == AND)
10602
          t = tbl[0];
10603
        else if (code == IOR)
10604
          t = tbl[1];
10605
        else if (code == XOR)
10606
          t = tbl[2];
10607
        else
10608
          output_operand_lossage ("invalid %%q value");
10609
 
10610
        if (GET_CODE (XEXP (x, 0)) != NOT)
10611
          s = t[0];
10612
        else
10613
          {
10614
            if (GET_CODE (XEXP (x, 1)) == NOT)
10615
              s = t[2];
10616
            else
10617
              s = t[1];
10618
          }
10619
 
10620
        fputs (s, file);
10621
      }
10622
      return;
10623
 
10624
    case 'Q':
10625
      if (TARGET_MFCRF)
10626
        fputc (',', file);
10627
        /* FALLTHRU */
10628
      else
10629
        return;
10630
 
10631
    case 'R':
10632
      /* X is a CR register.  Print the mask for `mtcrf'.  */
10633
      if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
10634
        output_operand_lossage ("invalid %%R value");
10635
      else
10636
        fprintf (file, "%d", 128 >> (REGNO (x) - CR0_REGNO));
10637
      return;
10638
 
10639
    case 's':
10640
      /* Low 5 bits of 32 - value */
10641
      if (! INT_P (x))
10642
        output_operand_lossage ("invalid %%s value");
10643
      else
10644
        fprintf (file, HOST_WIDE_INT_PRINT_DEC, (32 - INT_LOWPART (x)) & 31);
10645
      return;
10646
 
10647
    case 'S':
10648
      /* PowerPC64 mask position.  All 0's is excluded.
10649
         CONST_INT 32-bit mask is considered sign-extended so any
10650
         transition must occur within the CONST_INT, not on the boundary.  */
10651
      if (! mask64_operand (x, DImode))
10652
        output_operand_lossage ("invalid %%S value");
10653
 
10654
      uval = INT_LOWPART (x);
10655
 
10656
      if (uval & 1)     /* Clear Left */
10657
        {
10658
#if HOST_BITS_PER_WIDE_INT > 64
10659
          uval &= ((unsigned HOST_WIDE_INT) 1 << 64) - 1;
10660
#endif
10661
          i = 64;
10662
        }
10663
      else              /* Clear Right */
10664
        {
10665
          uval = ~uval;
10666
#if HOST_BITS_PER_WIDE_INT > 64
10667
          uval &= ((unsigned HOST_WIDE_INT) 1 << 64) - 1;
10668
#endif
10669
          i = 63;
10670
        }
10671
      while (uval != 0)
10672
        --i, uval >>= 1;
10673
      gcc_assert (i >= 0);
10674
      fprintf (file, "%d", i);
10675
      return;
10676
 
10677
    case 't':
10678
      /* Like 'J' but get to the OVERFLOW/UNORDERED bit.  */
10679
      gcc_assert (GET_CODE (x) == REG && GET_MODE (x) == CCmode);
10680
 
10681
      /* Bit 3 is OV bit.  */
10682
      i = 4 * (REGNO (x) - CR0_REGNO) + 3;
10683
 
10684
      /* If we want bit 31, write a shift count of zero, not 32.  */
10685
      fprintf (file, "%d", i == 31 ? 0 : i + 1);
10686
      return;
10687
 
10688
    case 'T':
10689
      /* Print the symbolic name of a branch target register.  */
10690
      if (GET_CODE (x) != REG || (REGNO (x) != LINK_REGISTER_REGNUM
10691
                                  && REGNO (x) != COUNT_REGISTER_REGNUM))
10692
        output_operand_lossage ("invalid %%T value");
10693
      else if (REGNO (x) == LINK_REGISTER_REGNUM)
10694
        fputs (TARGET_NEW_MNEMONICS ? "lr" : "r", file);
10695
      else
10696
        fputs ("ctr", file);
10697
      return;
10698
 
10699
    case 'u':
10700
      /* High-order 16 bits of constant for use in unsigned operand.  */
10701
      if (! INT_P (x))
10702
        output_operand_lossage ("invalid %%u value");
10703
      else
10704
        fprintf (file, HOST_WIDE_INT_PRINT_HEX,
10705
                 (INT_LOWPART (x) >> 16) & 0xffff);
10706
      return;
10707
 
10708
    case 'v':
10709
      /* High-order 16 bits of constant for use in signed operand.  */
10710
      if (! INT_P (x))
10711
        output_operand_lossage ("invalid %%v value");
10712
      else
10713
        fprintf (file, HOST_WIDE_INT_PRINT_HEX,
10714
                 (INT_LOWPART (x) >> 16) & 0xffff);
10715
      return;
10716
 
10717
    case 'U':
10718
      /* Print `u' if this has an auto-increment or auto-decrement.  */
10719
      if (GET_CODE (x) == MEM
10720
          && (GET_CODE (XEXP (x, 0)) == PRE_INC
10721
              || GET_CODE (XEXP (x, 0)) == PRE_DEC))
10722
        putc ('u', file);
10723
      return;
10724
 
10725
    case 'V':
10726
      /* Print the trap code for this operand.  */
10727
      switch (GET_CODE (x))
10728
        {
10729
        case EQ:
10730
          fputs ("eq", file);   /* 4 */
10731
          break;
10732
        case NE:
10733
          fputs ("ne", file);   /* 24 */
10734
          break;
10735
        case LT:
10736
          fputs ("lt", file);   /* 16 */
10737
          break;
10738
        case LE:
10739
          fputs ("le", file);   /* 20 */
10740
          break;
10741
        case GT:
10742
          fputs ("gt", file);   /* 8 */
10743
          break;
10744
        case GE:
10745
          fputs ("ge", file);   /* 12 */
10746
          break;
10747
        case LTU:
10748
          fputs ("llt", file);  /* 2 */
10749
          break;
10750
        case LEU:
10751
          fputs ("lle", file);  /* 6 */
10752
          break;
10753
        case GTU:
10754
          fputs ("lgt", file);  /* 1 */
10755
          break;
10756
        case GEU:
10757
          fputs ("lge", file);  /* 5 */
10758
          break;
10759
        default:
10760
          gcc_unreachable ();
10761
        }
10762
      break;
10763
 
10764
    case 'w':
10765
      /* If constant, low-order 16 bits of constant, signed.  Otherwise, write
10766
         normally.  */
10767
      if (INT_P (x))
10768
        fprintf (file, HOST_WIDE_INT_PRINT_DEC,
10769
                 ((INT_LOWPART (x) & 0xffff) ^ 0x8000) - 0x8000);
10770
      else
10771
        print_operand (file, x, 0);
10772
      return;
10773
 
10774
    case 'W':
10775
      /* MB value for a PowerPC64 rldic operand.  */
10776
      val = (GET_CODE (x) == CONST_INT
10777
             ? INTVAL (x) : CONST_DOUBLE_HIGH (x));
10778
 
10779
      if (val < 0)
10780
        i = -1;
10781
      else
10782
        for (i = 0; i < HOST_BITS_PER_WIDE_INT; i++)
10783
          if ((val <<= 1) < 0)
10784
            break;
10785
 
10786
#if HOST_BITS_PER_WIDE_INT == 32
10787
      if (GET_CODE (x) == CONST_INT && i >= 0)
10788
        i += 32;  /* zero-extend high-part was all 0's */
10789
      else if (GET_CODE (x) == CONST_DOUBLE && i == 32)
10790
        {
10791
          val = CONST_DOUBLE_LOW (x);
10792
 
10793
          gcc_assert (val);
10794
          if (val < 0)
10795
            --i;
10796
          else
10797
            for ( ; i < 64; i++)
10798
              if ((val <<= 1) < 0)
10799
                break;
10800
        }
10801
#endif
10802
 
10803
      fprintf (file, "%d", i + 1);
10804
      return;
10805
 
10806
    case 'X':
10807
      if (GET_CODE (x) == MEM
10808
          && legitimate_indexed_address_p (XEXP (x, 0), 0))
10809
        putc ('x', file);
10810
      return;
10811
 
10812
    case 'Y':
10813
      /* Like 'L', for third word of TImode  */
10814
      if (GET_CODE (x) == REG)
10815
        fputs (reg_names[REGNO (x) + 2], file);
10816
      else if (GET_CODE (x) == MEM)
10817
        {
10818
          if (GET_CODE (XEXP (x, 0)) == PRE_INC
10819
              || GET_CODE (XEXP (x, 0)) == PRE_DEC)
10820
            output_address (plus_constant (XEXP (XEXP (x, 0), 0), 8));
10821
          else
10822
            output_address (XEXP (adjust_address_nv (x, SImode, 8), 0));
10823
          if (small_data_operand (x, GET_MODE (x)))
10824
            fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
10825
                     reg_names[SMALL_DATA_REG]);
10826
        }
10827
      return;
10828
 
10829
    case 'z':
10830
      /* X is a SYMBOL_REF.  Write out the name preceded by a
10831
         period and without any trailing data in brackets.  Used for function
10832
         names.  If we are configured for System V (or the embedded ABI) on
10833
         the PowerPC, do not emit the period, since those systems do not use
10834
         TOCs and the like.  */
10835
      gcc_assert (GET_CODE (x) == SYMBOL_REF);
10836
 
10837
      /* Mark the decl as referenced so that cgraph will output the
10838
         function.  */
10839
      if (SYMBOL_REF_DECL (x))
10840
        mark_decl_referenced (SYMBOL_REF_DECL (x));
10841
 
10842
      /* For macho, check to see if we need a stub.  */
10843
      if (TARGET_MACHO)
10844
        {
10845
          const char *name = XSTR (x, 0);
10846
#if TARGET_MACHO
10847
          if (MACHOPIC_INDIRECT
10848
              && machopic_classify_symbol (x) == MACHOPIC_UNDEFINED_FUNCTION)
10849
            name = machopic_indirection_name (x, /*stub_p=*/true);
10850
#endif
10851
          assemble_name (file, name);
10852
        }
10853
      else if (!DOT_SYMBOLS)
10854
        assemble_name (file, XSTR (x, 0));
10855
      else
10856
        rs6000_output_function_entry (file, XSTR (x, 0));
10857
      return;
10858
 
10859
    case 'Z':
10860
      /* Like 'L', for last word of TImode.  */
10861
      if (GET_CODE (x) == REG)
10862
        fputs (reg_names[REGNO (x) + 3], file);
10863
      else if (GET_CODE (x) == MEM)
10864
        {
10865
          if (GET_CODE (XEXP (x, 0)) == PRE_INC
10866
              || GET_CODE (XEXP (x, 0)) == PRE_DEC)
10867
            output_address (plus_constant (XEXP (XEXP (x, 0), 0), 12));
10868
          else
10869
            output_address (XEXP (adjust_address_nv (x, SImode, 12), 0));
10870
          if (small_data_operand (x, GET_MODE (x)))
10871
            fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
10872
                     reg_names[SMALL_DATA_REG]);
10873
        }
10874
      return;
10875
 
10876
      /* Print AltiVec or SPE memory operand.  */
10877
    case 'y':
10878
      {
10879
        rtx tmp;
10880
 
10881
        gcc_assert (GET_CODE (x) == MEM);
10882
 
10883
        tmp = XEXP (x, 0);
10884
 
10885
        /* Ugly hack because %y is overloaded.  */
10886
        if (TARGET_E500 && GET_MODE_SIZE (GET_MODE (x)) == 8)
10887
          {
10888
            /* Handle [reg].  */
10889
            if (GET_CODE (tmp) == REG)
10890
              {
10891
                fprintf (file, "0(%s)", reg_names[REGNO (tmp)]);
10892
                break;
10893
              }
10894
            /* Handle [reg+UIMM].  */
10895
            else if (GET_CODE (tmp) == PLUS &&
10896
                     GET_CODE (XEXP (tmp, 1)) == CONST_INT)
10897
              {
10898
                int x;
10899
 
10900
                gcc_assert (GET_CODE (XEXP (tmp, 0)) == REG);
10901
 
10902
                x = INTVAL (XEXP (tmp, 1));
10903
                fprintf (file, "%d(%s)", x, reg_names[REGNO (XEXP (tmp, 0))]);
10904
                break;
10905
              }
10906
 
10907
            /* Fall through.  Must be [reg+reg].  */
10908
          }
10909
        if (TARGET_ALTIVEC
10910
            && GET_CODE (tmp) == AND
10911
            && GET_CODE (XEXP (tmp, 1)) == CONST_INT
10912
            && INTVAL (XEXP (tmp, 1)) == -16)
10913
          tmp = XEXP (tmp, 0);
10914
        if (GET_CODE (tmp) == REG)
10915
          fprintf (file, "0,%s", reg_names[REGNO (tmp)]);
10916
        else
10917
          {
10918
            gcc_assert (GET_CODE (tmp) == PLUS
10919
                        && REG_P (XEXP (tmp, 0))
10920
                        && REG_P (XEXP (tmp, 1)));
10921
 
10922
            if (REGNO (XEXP (tmp, 0)) == 0)
10923
              fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 1)) ],
10924
                       reg_names[ REGNO (XEXP (tmp, 0)) ]);
10925
            else
10926
              fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 0)) ],
10927
                       reg_names[ REGNO (XEXP (tmp, 1)) ]);
10928
          }
10929
        break;
10930
      }
10931
 
10932
    case 0:
10933
      if (GET_CODE (x) == REG)
10934
        fprintf (file, "%s", reg_names[REGNO (x)]);
10935
      else if (GET_CODE (x) == MEM)
10936
        {
10937
          /* We need to handle PRE_INC and PRE_DEC here, since we need to
10938
             know the width from the mode.  */
10939
          if (GET_CODE (XEXP (x, 0)) == PRE_INC)
10940
            fprintf (file, "%d(%s)", GET_MODE_SIZE (GET_MODE (x)),
10941
                     reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
10942
          else if (GET_CODE (XEXP (x, 0)) == PRE_DEC)
10943
            fprintf (file, "%d(%s)", - GET_MODE_SIZE (GET_MODE (x)),
10944
                     reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
10945
          else
10946
            output_address (XEXP (x, 0));
10947
        }
10948
      else
10949
        output_addr_const (file, x);
10950
      return;
10951
 
10952
    case '&':
10953
      assemble_name (file, rs6000_get_some_local_dynamic_name ());
10954
      return;
10955
 
10956
    default:
10957
      output_operand_lossage ("invalid %%xn code");
10958
    }
10959
}
10960
 
10961
/* Print the address of an operand.  */

void
print_operand_address (FILE *file, rtx x)
{
  /* Plain register: register-indirect with a zero displacement.  */
  if (GET_CODE (x) == REG)
    fprintf (file, "0(%s)", reg_names[ REGNO (x) ]);
  /* Bare symbolic address.  Small-data symbols get the small-data
     relocation suffix and base register appended; otherwise a bare
     symbol must not appear here when a TOC is in use.  */
  else if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST
	   || GET_CODE (x) == LABEL_REF)
    {
      output_addr_const (file, x);
      if (small_data_operand (x, GET_MODE (x)))
	fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
		 reg_names[SMALL_DATA_REG]);
      else
	gcc_assert (!TARGET_TOC);
    }
  /* reg+reg indexed form.  When the first register is r0, print the
     registers swapped; NOTE(review): presumably because r0 in the base
     slot of an indexed address reads as literal zero -- confirm against
     the ISA.  */
  else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == REG)
    {
      gcc_assert (REG_P (XEXP (x, 0)));
      if (REGNO (XEXP (x, 0)) == 0)
	fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 1)) ],
		 reg_names[ REGNO (XEXP (x, 0)) ]);
      else
	fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 0)) ],
		 reg_names[ REGNO (XEXP (x, 1)) ]);
    }
  /* reg+constant displacement form: "disp(reg)".  */
  else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == CONST_INT)
    fprintf (file, HOST_WIDE_INT_PRINT_DEC "(%s)",
	     INTVAL (XEXP (x, 1)), reg_names[ REGNO (XEXP (x, 0)) ]);
#if TARGET_ELF
  /* ELF lo-sum: the low part of the constant, "sym@l(reg)".  */
  else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
	   && CONSTANT_P (XEXP (x, 1)))
    {
      output_addr_const (file, XEXP (x, 1));
      fprintf (file, "@l(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
    }
#endif
#if TARGET_MACHO
  /* Mach-O spelling of the same lo-sum form: "lo16(sym)(reg)".  */
  else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
	   && CONSTANT_P (XEXP (x, 1)))
    {
      fprintf (file, "lo16(");
      output_addr_const (file, XEXP (x, 1));
      fprintf (file, ")(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
    }
#endif
  /* TOC-relative constant-pool reference.  */
  else if (legitimate_constant_pool_address_p (x))
    {
      if (TARGET_AIX && (!TARGET_ELF || !TARGET_MINIMAL_TOC))
	{
	  rtx contains_minus = XEXP (x, 1);
	  rtx minus, symref;
	  const char *name;

	  /* Find the (minus (sym) (toc)) buried in X, and temporarily
	     turn it into (sym) for output_addr_const.  */
	  while (GET_CODE (XEXP (contains_minus, 0)) != MINUS)
	    contains_minus = XEXP (contains_minus, 0);

	  minus = XEXP (contains_minus, 0);
	  symref = XEXP (minus, 0);
	  XEXP (contains_minus, 0) = symref;
	  if (TARGET_ELF)
	    {
	      char *newname;

	      /* On ELF, temporarily splice "@toc" onto the symbol's
		 name for the output_addr_const call; the original
		 name is restored just below.  */
	      name = XSTR (symref, 0);
	      newname = alloca (strlen (name) + sizeof ("@toc"));
	      strcpy (newname, name);
	      strcat (newname, "@toc");
	      XSTR (symref, 0) = newname;
	    }
	  output_addr_const (file, XEXP (x, 1));
	  if (TARGET_ELF)
	    XSTR (symref, 0) = name;
	  /* Undo the temporary rewrite of the MINUS done above.  */
	  XEXP (contains_minus, 0) = minus;
	}
      else
	output_addr_const (file, XEXP (x, 1));

      fprintf (file, "(%s)", reg_names[REGNO (XEXP (x, 0))]);
    }
  else
    gcc_unreachable ();
}
11047
 
11048
/* Target hook for assembling integer objects.  The PowerPC version has
   to handle fixup entries for relocatable code if RELOCATABLE_NEEDS_FIXUP
   is defined.  It also needs to handle DI-mode objects on 64-bit
   targets.  */

static bool
rs6000_assemble_integer (rtx x, unsigned int size, int aligned_p)
{
#ifdef RELOCATABLE_NEEDS_FIXUP
  /* Special handling for SI values.  */
  if (RELOCATABLE_NEEDS_FIXUP && size == 4 && aligned_p)
    {
      /* Guards against re-entering this hook from the
	 output_addr_const call below.  */
      static int recurse = 0;

      /* For -mrelocatable, we mark all addresses that need to be fixed up
	 in the .fixup section.  */
      if (TARGET_RELOCATABLE
	  && in_section != toc_section
	  && in_section != text_section
	  && !unlikely_text_section_p (in_section)
	  && !recurse
	  && GET_CODE (x) != CONST_INT
	  && GET_CODE (x) != CONST_DOUBLE
	  && CONSTANT_P (x))
	{
	  char buf[256];

	  /* Emit the word itself under an internal "LCP" label, then
	     record that label's address in the .fixup section
	     (presumably consumed by the -mrelocatable startup code --
	     confirm against the runtime support).  */
	  recurse = 1;
	  ASM_GENERATE_INTERNAL_LABEL (buf, "LCP", fixuplabelno);
	  fixuplabelno++;
	  ASM_OUTPUT_LABEL (asm_out_file, buf);
	  fprintf (asm_out_file, "\t.long\t(");
	  output_addr_const (asm_out_file, x);
	  fprintf (asm_out_file, ")@fixup\n");
	  fprintf (asm_out_file, "\t.section\t\".fixup\",\"aw\"\n");
	  ASM_OUTPUT_ALIGN (asm_out_file, 2);
	  fprintf (asm_out_file, "\t.long\t");
	  assemble_name (asm_out_file, buf);
	  fprintf (asm_out_file, "\n\t.previous\n");
	  recurse = 0;
	  return true;
	}
      /* Remove initial .'s to turn a -mcall-aixdesc function
	 address into the address of the descriptor, not the function
	 itself.  */
      else if (GET_CODE (x) == SYMBOL_REF
	       && XSTR (x, 0)[0] == '.'
	       && DEFAULT_ABI == ABI_AIX)
	{
	  const char *name = XSTR (x, 0);
	  while (*name == '.')
	    name++;

	  fprintf (asm_out_file, "\t.long\t%s\n", name);
	  return true;
	}
    }
#endif /* RELOCATABLE_NEEDS_FIXUP */
  /* Everything else gets the generic treatment.  */
  return default_assemble_integer (x, size, aligned_p);
}
11108
 
11109
#ifdef HAVE_GAS_HIDDEN
/* Emit an assembler directive to set symbol visibility for DECL to
   VISIBILITY_TYPE.  */

static void
rs6000_assemble_visibility (tree decl, int vis)
{
  /* Directive names indexed by the visibility value; index 0
     (default visibility) has no directive.  */
  static const char * const vis_directive[] = {
    NULL, "internal", "hidden", "protected"
  };

  /* On AIX with dot symbols a function has both a descriptor symbol
     and a '.'-prefixed entry point symbol, and both need the
     visibility directive; everything else takes the generic path.  */
  if (DEFAULT_ABI != ABI_AIX
      || !DOT_SYMBOLS
      || TREE_CODE (decl) != FUNCTION_DECL)
    default_assemble_visibility (decl, vis);
  else
    {
      const char *name = ((* targetm.strip_name_encoding)
			  (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl))));
      const char *type = vis_directive[vis];

      fprintf (asm_out_file, "\t.%s\t%s\n", type, name);
      fprintf (asm_out_file, "\t.%s\t.%s\n", type, name);
    }
}
#endif
11139
 
11140
enum rtx_code
11141
rs6000_reverse_condition (enum machine_mode mode, enum rtx_code code)
11142
{
11143
  /* Reversal of FP compares takes care -- an ordered compare
11144
     becomes an unordered compare and vice versa.  */
11145
  if (mode == CCFPmode
11146
      && (!flag_finite_math_only
11147
          || code == UNLT || code == UNLE || code == UNGT || code == UNGE
11148
          || code == UNEQ || code == LTGT))
11149
    return reverse_condition_maybe_unordered (code);
11150
  else
11151
    return reverse_condition (code);
11152
}
11153
 
11154
/* Generate a compare for CODE.  Return a brand-new rtx that
   represents the result of the compare.  The operands compared are
   the globals rs6000_compare_op0 and rs6000_compare_op1, and
   rs6000_compare_fp_p is nonzero for a floating-point comparison.
   The result has the form (CODE cc-register const0_rtx), ready for
   use as a branch or select condition.  */

static rtx
rs6000_generate_compare (enum rtx_code code)
{
  enum machine_mode comp_mode;
  rtx compare_result;

  /* Pick the condition-register mode: CCFP for FP compares, CCUNS
     for unsigned integer compares, plain CC otherwise.  */
  if (rs6000_compare_fp_p)
    comp_mode = CCFPmode;
  else if (code == GTU || code == LTU
           || code == GEU || code == LEU)
    comp_mode = CCUNSmode;
  else if ((code == EQ || code == NE)
           && GET_CODE (rs6000_compare_op0) == SUBREG
           && GET_CODE (rs6000_compare_op1) == SUBREG
           && SUBREG_PROMOTED_UNSIGNED_P (rs6000_compare_op0)
           && SUBREG_PROMOTED_UNSIGNED_P (rs6000_compare_op1))
    /* These are unsigned values, perhaps there will be a later
       ordering compare that can be shared with this one.
       Unfortunately we cannot detect the signedness of the operands
       for non-subregs.  */
    comp_mode = CCUNSmode;
  else
    comp_mode = CCmode;

  /* First, the compare.  */
  compare_result = gen_reg_rtx (comp_mode);

  /* E500 FP compare instructions on the GPRs.  Yuck!  */
  if ((TARGET_E500 && !TARGET_FPRS && TARGET_HARD_FLOAT)
      && rs6000_compare_fp_p)
    {
      rtx cmp, or_result, compare_result2;
      enum machine_mode op_mode = GET_MODE (rs6000_compare_op0);

      /* Comparisons against a constant may leave op0 without a mode;
         fall back to op1's mode.  */
      if (op_mode == VOIDmode)
        op_mode = GET_MODE (rs6000_compare_op1);

      /* The E500 FP compare instructions toggle the GT bit (CR bit 1) only.
         This explains the following mess.  */

      /* The tst* variants are used under -funsafe-math-optimizations,
         the cmp* variants otherwise.  */
      switch (code)
        {
        case EQ: case UNEQ: case NE: case LTGT:
          switch (op_mode)
            {
            case SFmode:
              cmp = flag_unsafe_math_optimizations
                ? gen_tstsfeq_gpr (compare_result, rs6000_compare_op0,
                                   rs6000_compare_op1)
                : gen_cmpsfeq_gpr (compare_result, rs6000_compare_op0,
                                   rs6000_compare_op1);
              break;

            case DFmode:
              cmp = flag_unsafe_math_optimizations
                ? gen_tstdfeq_gpr (compare_result, rs6000_compare_op0,
                                   rs6000_compare_op1)
                : gen_cmpdfeq_gpr (compare_result, rs6000_compare_op0,
                                   rs6000_compare_op1);
              break;

            default:
              gcc_unreachable ();
            }
          break;

        case GT: case GTU: case UNGT: case UNGE: case GE: case GEU:
          switch (op_mode)
            {
            case SFmode:
              cmp = flag_unsafe_math_optimizations
                ? gen_tstsfgt_gpr (compare_result, rs6000_compare_op0,
                                   rs6000_compare_op1)
                : gen_cmpsfgt_gpr (compare_result, rs6000_compare_op0,
                                   rs6000_compare_op1);
              break;

            case DFmode:
              cmp = flag_unsafe_math_optimizations
                ? gen_tstdfgt_gpr (compare_result, rs6000_compare_op0,
                                   rs6000_compare_op1)
                : gen_cmpdfgt_gpr (compare_result, rs6000_compare_op0,
                                   rs6000_compare_op1);
              break;

            default:
              gcc_unreachable ();
            }
          break;

        case LT: case LTU: case UNLT: case UNLE: case LE: case LEU:
          switch (op_mode)
            {
            case SFmode:
              cmp = flag_unsafe_math_optimizations
                ? gen_tstsflt_gpr (compare_result, rs6000_compare_op0,
                                   rs6000_compare_op1)
                : gen_cmpsflt_gpr (compare_result, rs6000_compare_op0,
                                   rs6000_compare_op1);
              break;

            case DFmode:
              cmp = flag_unsafe_math_optimizations
                ? gen_tstdflt_gpr (compare_result, rs6000_compare_op0,
                                   rs6000_compare_op1)
                : gen_cmpdflt_gpr (compare_result, rs6000_compare_op0,
                                   rs6000_compare_op1);
              break;

            default:
              gcc_unreachable ();
            }
          break;
        default:
          gcc_unreachable ();
        }

      /* Synthesize LE and GE from LT/GT || EQ.  */
      if (code == LE || code == GE || code == LEU || code == GEU)
        {
          /* Emit the LT/GT half first...  */
          emit_insn (cmp);

          switch (code)
            {
            case LE: code = LT; break;
            case GE: code = GT; break;
            case LEU: code = LT; break;
            case GEU: code = GT; break;
            default: gcc_unreachable ();
            }

          compare_result2 = gen_reg_rtx (CCFPmode);

          /* Do the EQ.  */
          switch (op_mode)
            {
            case SFmode:
              cmp = flag_unsafe_math_optimizations
                ? gen_tstsfeq_gpr (compare_result2, rs6000_compare_op0,
                                   rs6000_compare_op1)
                : gen_cmpsfeq_gpr (compare_result2, rs6000_compare_op0,
                                   rs6000_compare_op1);
              break;

            case DFmode:
              cmp = flag_unsafe_math_optimizations
                ? gen_tstdfeq_gpr (compare_result2, rs6000_compare_op0,
                                   rs6000_compare_op1)
                : gen_cmpdfeq_gpr (compare_result2, rs6000_compare_op0,
                                   rs6000_compare_op1);
              break;

            default:
              gcc_unreachable ();
            }
          emit_insn (cmp);

          /* OR them together.  */
          or_result = gen_reg_rtx (CCFPmode);
          cmp = gen_e500_cr_ior_compare (or_result, compare_result,
                                           compare_result2);
          compare_result = or_result;
          code = EQ;
        }
      else
        {
          /* All remaining codes reduce to a test of the GT bit:
             NE/LTGT check it one way, everything else the other.  */
          if (code == NE || code == LTGT)
            code = NE;
          else
            code = EQ;
        }

      emit_insn (cmp);
    }
  else
    {
      /* Generate XLC-compatible TFmode compare as PARALLEL with extra
         CLOBBERs to match cmptf_internal2 pattern.  */
      if (comp_mode == CCFPmode && TARGET_XL_COMPAT
          && GET_MODE (rs6000_compare_op0) == TFmode
          && !TARGET_IEEEQUAD
          && TARGET_HARD_FLOAT && TARGET_FPRS && TARGET_LONG_DOUBLE_128)
        emit_insn (gen_rtx_PARALLEL (VOIDmode,
          gen_rtvec (9,
                     gen_rtx_SET (VOIDmode,
                                  compare_result,
                                  gen_rtx_COMPARE (comp_mode,
                                                   rs6000_compare_op0,
                                                   rs6000_compare_op1)),
                     gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
                     gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
                     gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
                     gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
                     gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
                     gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
                     gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
                     gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)))));
      /* A stack-protector canary test is wrapped in an UNSPEC_SP_TEST;
         use the dedicated stack_protect_test* patterns for it.  */
      else if (GET_CODE (rs6000_compare_op1) == UNSPEC
               && XINT (rs6000_compare_op1, 1) == UNSPEC_SP_TEST)
        {
          rtx op1 = XVECEXP (rs6000_compare_op1, 0, 0);
          comp_mode = CCEQmode;
          compare_result = gen_reg_rtx (CCEQmode);
          if (TARGET_64BIT)
            emit_insn (gen_stack_protect_testdi (compare_result,
                                                 rs6000_compare_op0, op1));
          else
            emit_insn (gen_stack_protect_testsi (compare_result,
                                                 rs6000_compare_op0, op1));
        }
      else
        /* The common case: a plain COMPARE into the CC register.  */
        emit_insn (gen_rtx_SET (VOIDmode, compare_result,
                                gen_rtx_COMPARE (comp_mode,
                                                 rs6000_compare_op0,
                                                 rs6000_compare_op1)));
    }

  /* Some kinds of FP comparisons need an OR operation;
     under flag_finite_math_only we don't bother.  */
  if (rs6000_compare_fp_p
      && !flag_finite_math_only
      && !(TARGET_HARD_FLOAT && TARGET_E500 && !TARGET_FPRS)
      && (code == LE || code == GE
          || code == UNEQ || code == LTGT
          || code == UNGT || code == UNLT))
    {
      enum rtx_code or1, or2;
      rtx or1_rtx, or2_rtx, compare2_rtx;
      rtx or_result = gen_reg_rtx (CCEQmode);

      /* Decompose CODE into the two CR bits to be ORed together.  */
      switch (code)
        {
        case LE: or1 = LT;  or2 = EQ;  break;
        case GE: or1 = GT;  or2 = EQ;  break;
        case UNEQ: or1 = UNORDERED;  or2 = EQ;  break;
        case LTGT: or1 = LT;  or2 = GT;  break;
        case UNGT: or1 = UNORDERED;  or2 = GT;  break;
        case UNLT: or1 = UNORDERED;  or2 = LT;  break;
        default:  gcc_unreachable ();
        }
      validate_condition_mode (or1, comp_mode);
      validate_condition_mode (or2, comp_mode);
      or1_rtx = gen_rtx_fmt_ee (or1, SImode, compare_result, const0_rtx);
      or2_rtx = gen_rtx_fmt_ee (or2, SImode, compare_result, const0_rtx);
      compare2_rtx = gen_rtx_COMPARE (CCEQmode,
                                      gen_rtx_IOR (SImode, or1_rtx, or2_rtx),
                                      const_true_rtx);
      emit_insn (gen_rtx_SET (VOIDmode, or_result, compare2_rtx));

      /* The caller now only needs to test the EQ bit of the CCEQ
         result.  */
      compare_result = or_result;
      code = EQ;
    }

  validate_condition_mode (code, GET_MODE (compare_result));

  return gen_rtx_fmt_ee (code, VOIDmode, compare_result, const0_rtx);
}
11414
 
11415
 
11416
/* Emit the RTL for an sCOND pattern: store into RESULT the 0/1 value
   of comparison CODE applied to the operands previously stashed in
   rs6000_compare_op0/rs6000_compare_op1.  */

void
rs6000_emit_sCOND (enum rtx_code code, rtx result)
{
  rtx condition_rtx;
  enum machine_mode op_mode;
  enum rtx_code cond_code;

  /* condition_rtx is (cond_code cc-reg const0).  */
  condition_rtx = rs6000_generate_compare (code);
  cond_code = GET_CODE (condition_rtx);

  /* E500 FP compares only set the GT bit of the CR field, so the
     result is read out of that bit directly (inverted for NE).  */
  if (TARGET_E500 && rs6000_compare_fp_p
      && !TARGET_FPRS && TARGET_HARD_FLOAT)
    {
      rtx t;

      PUT_MODE (condition_rtx, SImode);
      t = XEXP (condition_rtx, 0);

      /* rs6000_generate_compare reduces every E500 FP compare to an
         EQ or NE test.  */
      gcc_assert (cond_code == NE || cond_code == EQ);

      if (cond_code == NE)
        emit_insn (gen_e500_flip_gt_bit (t, t));

      emit_insn (gen_move_from_CR_gt_bit (result, t));
      return;
    }

  /* For these codes, first materialize the reversed condition into a
     CCEQ register, then test that register for EQ against zero.  */
  if (cond_code == NE
      || cond_code == GE || cond_code == LE
      || cond_code == GEU || cond_code == LEU
      || cond_code == ORDERED || cond_code == UNGE || cond_code == UNLE)
    {
      rtx not_result = gen_reg_rtx (CCEQmode);
      rtx not_op, rev_cond_rtx;
      enum machine_mode cc_mode;

      cc_mode = GET_MODE (XEXP (condition_rtx, 0));

      rev_cond_rtx = gen_rtx_fmt_ee (rs6000_reverse_condition (cc_mode, cond_code),
                                     SImode, XEXP (condition_rtx, 0), const0_rtx);
      not_op = gen_rtx_COMPARE (CCEQmode, rev_cond_rtx, const0_rtx);
      emit_insn (gen_rtx_SET (VOIDmode, not_result, not_op));
      condition_rtx = gen_rtx_EQ (VOIDmode, not_result, const0_rtx);
    }

  /* Comparisons against a constant may leave op0 without a mode;
     fall back to op1's mode.  */
  op_mode = GET_MODE (rs6000_compare_op0);
  if (op_mode == VOIDmode)
    op_mode = GET_MODE (rs6000_compare_op1);

  /* Produce the 0/1 value in RESULT's mode, widening via convert_move
     on 64-bit when the scc is computed in DImode.  */
  if (TARGET_POWERPC64 && (op_mode == DImode || rs6000_compare_fp_p))
    {
      PUT_MODE (condition_rtx, DImode);
      convert_move (result, condition_rtx, 0);
    }
  else
    {
      PUT_MODE (condition_rtx, SImode);
      emit_insn (gen_rtx_SET (VOIDmode, result, condition_rtx));
    }
}
11478
 
11479
/* Emit a branch of kind CODE to location LOC.  */
11480
 
11481
void
11482
rs6000_emit_cbranch (enum rtx_code code, rtx loc)
11483
{
11484
  rtx condition_rtx, loc_ref;
11485
 
11486
  condition_rtx = rs6000_generate_compare (code);
11487
  loc_ref = gen_rtx_LABEL_REF (VOIDmode, loc);
11488
  emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx,
11489
                               gen_rtx_IF_THEN_ELSE (VOIDmode, condition_rtx,
11490
                                                     loc_ref, pc_rtx)));
11491
}
11492
 
11493
/* Return the string to output a conditional branch to LABEL, which is
   the operand number of the label, or -1 if the branch is really a
   conditional return.

   OP is the conditional expression.  XEXP (OP, 0) is assumed to be a
   condition code register and its mode specifies what kind of
   comparison we made.

   REVERSED is nonzero if we should reverse the sense of the comparison.

   INSN is the insn.

   NB: the result points into a static buffer and is only valid until
   the next call.  */

char *
output_cbranch (rtx op, const char *label, int reversed, rtx insn)
{
  static char string[64];
  enum rtx_code code = GET_CODE (op);
  rtx cc_reg = XEXP (op, 0);
  enum machine_mode mode = GET_MODE (cc_reg);
  int cc_regno = REGNO (cc_reg) - CR0_REGNO;
  /* A length attribute of 8 means the target is out of conditional
     branch range: branch around an unconditional jump instead.  */
  int need_longbranch = label != NULL && get_attr_length (insn) == 8;
  int really_reversed = reversed ^ need_longbranch;
  char *s = string;
  const char *ccode;
  const char *pred;
  rtx note;

  validate_condition_mode (code, mode);

  /* Work out which way this really branches.  We could use
     reverse_condition_maybe_unordered here always but this
     makes the resulting assembler clearer.  */
  if (really_reversed)
    {
      /* Reversal of FP compares takes care -- an ordered compare
         becomes an unordered compare and vice versa.  */
      if (mode == CCFPmode)
        code = reverse_condition_maybe_unordered (code);
      else
        code = reverse_condition (code);
    }

  if ((TARGET_E500 && !TARGET_FPRS && TARGET_HARD_FLOAT) && mode == CCFPmode)
    {
      /* The efscmp/tst* instructions twiddle bit 2, which maps nicely
         to the GT bit.  */
      switch (code)
        {
        case EQ:
          /* Opposite of GT.  */
          code = GT;
          break;

        case NE:
          code = UNLE;
          break;

        default:
          gcc_unreachable ();
        }
    }

  /* Map the rtx comparison code to the assembler condition mnemonic.  */
  switch (code)
    {
      /* Not all of these are actually distinct opcodes, but
         we distinguish them for clarity of the resulting assembler.  */
    case NE: case LTGT:
      ccode = "ne"; break;
    case EQ: case UNEQ:
      ccode = "eq"; break;
    case GE: case GEU:
      ccode = "ge"; break;
    case GT: case GTU: case UNGT:
      ccode = "gt"; break;
    case LE: case LEU:
      ccode = "le"; break;
    case LT: case LTU: case UNLT:
      ccode = "lt"; break;
    case UNORDERED: ccode = "un"; break;
    case ORDERED: ccode = "nu"; break;
    case UNGE: ccode = "nl"; break;
    case UNLE: ccode = "ng"; break;
    default:
      gcc_unreachable ();
    }

  /* Maybe we have a guess as to how likely the branch is.
     The old mnemonics don't have a way to specify this information.  */
  pred = "";
  note = find_reg_note (insn, REG_BR_PROB, NULL_RTX);
  if (note != NULL_RTX)
    {
      /* PROB is the difference from 50%.  */
      int prob = INTVAL (XEXP (note, 0)) - REG_BR_PROB_BASE / 2;

      /* Only hint for highly probable/improbable branches on newer
         cpus as static prediction overrides processor dynamic
         prediction.  For older cpus we may as well always hint, but
         assume not taken for branches that are very close to 50% as a
         mispredicted taken branch is more expensive than a
         mispredicted not-taken branch.  */
      if (rs6000_always_hint
          || (abs (prob) > REG_BR_PROB_BASE / 100 * 48
              && br_prob_note_reliable_p (note)))
        {
          /* "+" marks a predicted-taken branch, "-" predicted
             not-taken; a long branch inverts the sense because the
             emitted conditional jumps around the real target.  */
          if (abs (prob) > REG_BR_PROB_BASE / 20
              && ((prob > 0) ^ need_longbranch))
            pred = "+";
          else
            pred = "-";
        }
    }

  /* Emit the branch opcode ({old|new} mnemonic pairs): a conditional
     return when LABEL is NULL, otherwise a conditional branch.  */
  if (label == NULL)
    s += sprintf (s, "{b%sr|b%slr%s} ", ccode, ccode, pred);
  else
    s += sprintf (s, "{b%s|b%s%s} ", ccode, ccode, pred);

  /* We need to escape any '%' characters in the reg_names string.
     Assume they'd only be the first character....  */
  if (reg_names[cc_regno + CR0_REGNO][0] == '%')
    *s++ = '%';
  s += sprintf (s, "%s", reg_names[cc_regno + CR0_REGNO]);

  if (label != NULL)
    {
      /* If the branch distance was too far, we may have to use an
         unconditional branch to go the distance.  */
      if (need_longbranch)
        s += sprintf (s, ",$+8\n\tb %s", label);
      else
        s += sprintf (s, ",%s", label);
    }

  return string;
}
11629
 
11630
/* Return the string to flip the GT bit on a CR.  */
11631
char *
11632
output_e500_flip_gt_bit (rtx dst, rtx src)
11633
{
11634
  static char string[64];
11635
  int a, b;
11636
 
11637
  gcc_assert (GET_CODE (dst) == REG && CR_REGNO_P (REGNO (dst))
11638
              && GET_CODE (src) == REG && CR_REGNO_P (REGNO (src)));
11639
 
11640
  /* GT bit.  */
11641
  a = 4 * (REGNO (dst) - CR0_REGNO) + 1;
11642
  b = 4 * (REGNO (src) - CR0_REGNO) + 1;
11643
 
11644
  sprintf (string, "crnot %d,%d", a, b);
11645
  return string;
11646
}
11647
 
11648
/* Return insn index for the vector compare instruction for given CODE,
11649
   and DEST_MODE, OP_MODE. Return INSN_NOT_AVAILABLE if valid insn is
11650
   not available.  */
11651
 
11652
static int
11653
get_vec_cmp_insn (enum rtx_code code,
11654
                  enum machine_mode dest_mode,
11655
                  enum machine_mode op_mode)
11656
{
11657
  if (!TARGET_ALTIVEC)
11658
    return INSN_NOT_AVAILABLE;
11659
 
11660
  switch (code)
11661
    {
11662
    case EQ:
11663
      if (dest_mode == V16QImode && op_mode == V16QImode)
11664
        return UNSPEC_VCMPEQUB;
11665
      if (dest_mode == V8HImode && op_mode == V8HImode)
11666
        return UNSPEC_VCMPEQUH;
11667
      if (dest_mode == V4SImode && op_mode == V4SImode)
11668
        return UNSPEC_VCMPEQUW;
11669
      if (dest_mode == V4SImode && op_mode == V4SFmode)
11670
        return UNSPEC_VCMPEQFP;
11671
      break;
11672
    case GE:
11673
      if (dest_mode == V4SImode && op_mode == V4SFmode)
11674
        return UNSPEC_VCMPGEFP;
11675
    case GT:
11676
      if (dest_mode == V16QImode && op_mode == V16QImode)
11677
        return UNSPEC_VCMPGTSB;
11678
      if (dest_mode == V8HImode && op_mode == V8HImode)
11679
        return UNSPEC_VCMPGTSH;
11680
      if (dest_mode == V4SImode && op_mode == V4SImode)
11681
        return UNSPEC_VCMPGTSW;
11682
      if (dest_mode == V4SImode && op_mode == V4SFmode)
11683
        return UNSPEC_VCMPGTFP;
11684
      break;
11685
    case GTU:
11686
      if (dest_mode == V16QImode && op_mode == V16QImode)
11687
        return UNSPEC_VCMPGTUB;
11688
      if (dest_mode == V8HImode && op_mode == V8HImode)
11689
        return UNSPEC_VCMPGTUH;
11690
      if (dest_mode == V4SImode && op_mode == V4SImode)
11691
        return UNSPEC_VCMPGTUW;
11692
      break;
11693
    default:
11694
      break;
11695
    }
11696
  return INSN_NOT_AVAILABLE;
11697
}
11698
 
11699
/* Emit vector compare for operands OP0 and OP1 using code RCODE.
   DMODE is expected destination mode. This is a recursive function.
   Returns a register holding the element-wise comparison mask.  */

static rtx
rs6000_emit_vector_compare (enum rtx_code rcode,
                            rtx op0, rtx op1,
                            enum machine_mode dmode)
{
  int vec_cmp_insn;
  rtx mask;
  enum machine_mode dest_mode;
  enum machine_mode op_mode = GET_MODE (op1);

  gcc_assert (TARGET_ALTIVEC);
  gcc_assert (GET_MODE (op0) == GET_MODE (op1));

  /* Floating point vector compare instructions uses destination V4SImode.
     Move destination to appropriate mode later.  */
  if (dmode == V4SFmode)
    dest_mode = V4SImode;
  else
    dest_mode = dmode;

  mask = gen_reg_rtx (dest_mode);
  vec_cmp_insn = get_vec_cmp_insn (rcode, dest_mode, op_mode);

  /* No single instruction for this comparison: synthesize it from the
     ones that exist.  */
  if (vec_cmp_insn == INSN_NOT_AVAILABLE)
    {
      bool swap_operands = false;
      bool try_again = false;
      switch (rcode)
        {
        case LT:
          /* a < b  ==  b > a.  */
          rcode = GT;
          swap_operands = true;
          try_again = true;
          break;
        case LTU:
          rcode = GTU;
          swap_operands = true;
          try_again = true;
          break;
        case NE:
          /* Treat A != B as ~(A==B).  */
          {
            enum insn_code nor_code;
            rtx eq_rtx = rs6000_emit_vector_compare (EQ, op0, op1,
                                                     dest_mode);

            nor_code = one_cmpl_optab->handlers[(int)dest_mode].insn_code;
            gcc_assert (nor_code != CODE_FOR_nothing);
            emit_insn (GEN_FCN (nor_code) (mask, eq_rtx));

            if (dmode != dest_mode)
              {
                /* NOTE(review): temp is allocated in dest_mode -- the
                   same mode MASK already has -- so this convert_move
                   is effectively a copy and the returned rtx is not in
                   DMODE as the comment at the top suggests; verify
                   whether gen_reg_rtx (dmode) was intended.  */
                rtx temp = gen_reg_rtx (dest_mode);
                convert_move (temp, mask, 0);
                return temp;
              }
            return mask;
          }
          break;
        case GE:
        case GEU:
        case LE:
        case LEU:
          /* Try GT/GTU/LT/LTU OR EQ */
          {
            rtx c_rtx, eq_rtx;
            enum insn_code ior_code;
            enum rtx_code new_code;

            switch (rcode)
              {
              case  GE:
                new_code = GT;
                break;

              case GEU:
                new_code = GTU;
                break;

              case LE:
                new_code = LT;
                break;

              case LEU:
                new_code = LTU;
                break;

              default:
                gcc_unreachable ();
              }

            c_rtx = rs6000_emit_vector_compare (new_code,
                                                op0, op1, dest_mode);
            eq_rtx = rs6000_emit_vector_compare (EQ, op0, op1,
                                                 dest_mode);

            ior_code = ior_optab->handlers[(int)dest_mode].insn_code;
            gcc_assert (ior_code != CODE_FOR_nothing);
            emit_insn (GEN_FCN (ior_code) (mask, c_rtx, eq_rtx));
            if (dmode != dest_mode)
              {
                /* NOTE(review): same dest_mode/dmode question as in
                   the NE case above.  */
                rtx temp = gen_reg_rtx (dest_mode);
                convert_move (temp, mask, 0);
                return temp;
              }
            return mask;
          }
          break;
        default:
          gcc_unreachable ();
        }

      if (try_again)
        {
          vec_cmp_insn = get_vec_cmp_insn (rcode, dest_mode, op_mode);
          /* You only get two chances.  */
          gcc_assert (vec_cmp_insn != INSN_NOT_AVAILABLE);
        }

      if (swap_operands)
        {
          rtx tmp;
          tmp = op0;
          op0 = op1;
          op1 = tmp;
        }
    }

  /* Emit the compare as (set mask (unspec [op0 op1] vec_cmp_insn)).  */
  emit_insn (gen_rtx_SET (VOIDmode, mask,
                          gen_rtx_UNSPEC (dest_mode,
                                          gen_rtvec (2, op0, op1),
                                          vec_cmp_insn)));
  if (dmode != dest_mode)
    {
      /* NOTE(review): same dest_mode/dmode question as above.  */
      rtx temp = gen_reg_rtx (dest_mode);
      convert_move (temp, mask, 0);
      return temp;
    }
  return mask;
}
11842
 
11843
/* Return vector select instruction for MODE. Return INSN_NOT_AVAILABLE, if
11844
   valid insn doesn exist for given mode.  */
11845
 
11846
static int
11847
get_vsel_insn (enum machine_mode mode)
11848
{
11849
  switch (mode)
11850
    {
11851
    case V4SImode:
11852
      return UNSPEC_VSEL4SI;
11853
      break;
11854
    case V4SFmode:
11855
      return UNSPEC_VSEL4SF;
11856
      break;
11857
    case V8HImode:
11858
      return UNSPEC_VSEL8HI;
11859
      break;
11860
    case V16QImode:
11861
      return UNSPEC_VSEL16QI;
11862
      break;
11863
    default:
11864
      return INSN_NOT_AVAILABLE;
11865
      break;
11866
    }
11867
  return INSN_NOT_AVAILABLE;
11868
}
11869
 
11870
/* Emit vector select insn where DEST is destination using
11871
   operands OP1, OP2 and MASK.  */
11872
 
11873
static void
11874
rs6000_emit_vector_select (rtx dest, rtx op1, rtx op2, rtx mask)
11875
{
11876
  rtx t, temp;
11877
  enum machine_mode dest_mode = GET_MODE (dest);
11878
  int vsel_insn_index  = get_vsel_insn (GET_MODE (dest));
11879
 
11880
  temp = gen_reg_rtx (dest_mode);
11881
 
11882
  /* For each vector element, select op1 when mask is 1 otherwise
11883
     select op2.  */
11884
  t = gen_rtx_SET (VOIDmode, temp,
11885
                   gen_rtx_UNSPEC (dest_mode,
11886
                                   gen_rtvec (3, op2, op1, mask),
11887
                                   vsel_insn_index));
11888
  emit_insn (t);
11889
  emit_move_insn (dest, temp);
11890
  return;
11891
}
11892
 
11893
/* Emit vector conditional expression.
11894
   DEST is destination. OP1 and OP2 are two VEC_COND_EXPR operands.
11895
   CC_OP0 and CC_OP1 are the two operands for the relation operation COND.  */
11896
 
11897
int
11898
rs6000_emit_vector_cond_expr (rtx dest, rtx op1, rtx op2,
11899
                              rtx cond, rtx cc_op0, rtx cc_op1)
11900
{
11901
  enum machine_mode dest_mode = GET_MODE (dest);
11902
  enum rtx_code rcode = GET_CODE (cond);
11903
  rtx mask;
11904
 
11905
  if (!TARGET_ALTIVEC)
11906
    return 0;
11907
 
11908
  /* Get the vector mask for the given relational operations.  */
11909
  mask = rs6000_emit_vector_compare (rcode, cc_op0, cc_op1, dest_mode);
11910
 
11911
  rs6000_emit_vector_select (dest, op1, op2, mask);
11912
 
11913
  return 1;
11914
}
11915
 
11916
/* Emit a conditional move: move TRUE_COND to DEST if OP of the
11917
   operands of the last comparison is nonzero/true, FALSE_COND if it
11918
   is zero/false.  Return 0 if the hardware has no such operation.  */
11919
 
11920
int
11921
rs6000_emit_cmove (rtx dest, rtx op, rtx true_cond, rtx false_cond)
11922
{
11923
  enum rtx_code code = GET_CODE (op);
11924
  rtx op0 = rs6000_compare_op0;
11925
  rtx op1 = rs6000_compare_op1;
11926
  REAL_VALUE_TYPE c1;
11927
  enum machine_mode compare_mode = GET_MODE (op0);
11928
  enum machine_mode result_mode = GET_MODE (dest);
11929
  rtx temp;
11930
  bool is_against_zero;
11931
 
11932
  /* These modes should always match.  */
11933
  if (GET_MODE (op1) != compare_mode
11934
      /* In the isel case however, we can use a compare immediate, so
11935
         op1 may be a small constant.  */
11936
      && (!TARGET_ISEL || !short_cint_operand (op1, VOIDmode)))
11937
    return 0;
11938
  if (GET_MODE (true_cond) != result_mode)
11939
    return 0;
11940
  if (GET_MODE (false_cond) != result_mode)
11941
    return 0;
11942
 
11943
  /* First, work out if the hardware can do this at all, or
11944
     if it's too slow....  */
11945
  if (! rs6000_compare_fp_p)
11946
    {
11947
      if (TARGET_ISEL)
11948
        return rs6000_emit_int_cmove (dest, op, true_cond, false_cond);
11949
      return 0;
11950
    }
11951
  else if (TARGET_E500 && TARGET_HARD_FLOAT && !TARGET_FPRS
11952
           && SCALAR_FLOAT_MODE_P (compare_mode))
11953
    return 0;
11954
 
11955
  is_against_zero = op1 == CONST0_RTX (compare_mode);
11956
 
11957
  /* A floating-point subtract might overflow, underflow, or produce
11958
     an inexact result, thus changing the floating-point flags, so it
11959
     can't be generated if we care about that.  It's safe if one side
11960
     of the construct is zero, since then no subtract will be
11961
     generated.  */
11962
  if (SCALAR_FLOAT_MODE_P (compare_mode)
11963
      && flag_trapping_math && ! is_against_zero)
11964
    return 0;
11965
 
11966
  /* Eliminate half of the comparisons by switching operands, this
11967
     makes the remaining code simpler.  */
11968
  if (code == UNLT || code == UNGT || code == UNORDERED || code == NE
11969
      || code == LTGT || code == LT || code == UNLE)
11970
    {
11971
      code = reverse_condition_maybe_unordered (code);
11972
      temp = true_cond;
11973
      true_cond = false_cond;
11974
      false_cond = temp;
11975
    }
11976
 
11977
  /* UNEQ and LTGT take four instructions for a comparison with zero,
11978
     it'll probably be faster to use a branch here too.  */
11979
  if (code == UNEQ && HONOR_NANS (compare_mode))
11980
    return 0;
11981
 
11982
  if (GET_CODE (op1) == CONST_DOUBLE)
11983
    REAL_VALUE_FROM_CONST_DOUBLE (c1, op1);
11984
 
11985
  /* We're going to try to implement comparisons by performing
11986
     a subtract, then comparing against zero.  Unfortunately,
11987
     Inf - Inf is NaN which is not zero, and so if we don't
11988
     know that the operand is finite and the comparison
11989
     would treat EQ different to UNORDERED, we can't do it.  */
11990
  if (HONOR_INFINITIES (compare_mode)
11991
      && code != GT && code != UNGE
11992
      && (GET_CODE (op1) != CONST_DOUBLE || real_isinf (&c1))
11993
      /* Constructs of the form (a OP b ? a : b) are safe.  */
11994
      && ((! rtx_equal_p (op0, false_cond) && ! rtx_equal_p (op1, false_cond))
11995
          || (! rtx_equal_p (op0, true_cond)
11996
              && ! rtx_equal_p (op1, true_cond))))
11997
    return 0;
11998
 
11999
  /* At this point we know we can use fsel.  */
12000
 
12001
  /* Reduce the comparison to a comparison against zero.  */
12002
  if (! is_against_zero)
12003
    {
12004
      temp = gen_reg_rtx (compare_mode);
12005
      emit_insn (gen_rtx_SET (VOIDmode, temp,
12006
                              gen_rtx_MINUS (compare_mode, op0, op1)));
12007
      op0 = temp;
12008
      op1 = CONST0_RTX (compare_mode);
12009
    }
12010
 
12011
  /* If we don't care about NaNs we can reduce some of the comparisons
12012
     down to faster ones.  */
12013
  if (! HONOR_NANS (compare_mode))
12014
    switch (code)
12015
      {
12016
      case GT:
12017
        code = LE;
12018
        temp = true_cond;
12019
        true_cond = false_cond;
12020
        false_cond = temp;
12021
        break;
12022
      case UNGE:
12023
        code = GE;
12024
        break;
12025
      case UNEQ:
12026
        code = EQ;
12027
        break;
12028
      default:
12029
        break;
12030
      }
12031
 
12032
  /* Now, reduce everything down to a GE.  */
12033
  switch (code)
12034
    {
12035
    case GE:
12036
      break;
12037
 
12038
    case LE:
12039
      temp = gen_reg_rtx (compare_mode);
12040
      emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
12041
      op0 = temp;
12042
      break;
12043
 
12044
    case ORDERED:
12045
      temp = gen_reg_rtx (compare_mode);
12046
      emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_ABS (compare_mode, op0)));
12047
      op0 = temp;
12048
      break;
12049
 
12050
    case EQ:
12051
      temp = gen_reg_rtx (compare_mode);
12052
      emit_insn (gen_rtx_SET (VOIDmode, temp,
12053
                              gen_rtx_NEG (compare_mode,
12054
                                           gen_rtx_ABS (compare_mode, op0))));
12055
      op0 = temp;
12056
      break;
12057
 
12058
    case UNGE:
12059
      /* a UNGE 0 <-> (a GE 0 || -a UNLT 0) */
12060
      temp = gen_reg_rtx (result_mode);
12061
      emit_insn (gen_rtx_SET (VOIDmode, temp,
12062
                              gen_rtx_IF_THEN_ELSE (result_mode,
12063
                                                    gen_rtx_GE (VOIDmode,
12064
                                                                op0, op1),
12065
                                                    true_cond, false_cond)));
12066
      false_cond = true_cond;
12067
      true_cond = temp;
12068
 
12069
      temp = gen_reg_rtx (compare_mode);
12070
      emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
12071
      op0 = temp;
12072
      break;
12073
 
12074
    case GT:
12075
      /* a GT 0 <-> (a GE 0 && -a UNLT 0) */
12076
      temp = gen_reg_rtx (result_mode);
12077
      emit_insn (gen_rtx_SET (VOIDmode, temp,
12078
                              gen_rtx_IF_THEN_ELSE (result_mode,
12079
                                                    gen_rtx_GE (VOIDmode,
12080
                                                                op0, op1),
12081
                                                    true_cond, false_cond)));
12082
      true_cond = false_cond;
12083
      false_cond = temp;
12084
 
12085
      temp = gen_reg_rtx (compare_mode);
12086
      emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
12087
      op0 = temp;
12088
      break;
12089
 
12090
    default:
12091
      gcc_unreachable ();
12092
    }
12093
 
12094
  emit_insn (gen_rtx_SET (VOIDmode, dest,
12095
                          gen_rtx_IF_THEN_ELSE (result_mode,
12096
                                                gen_rtx_GE (VOIDmode,
12097
                                                            op0, op1),
12098
                                                true_cond, false_cond)));
12099
  return 1;
12100
}
12101
 
12102
/* Same as above, but for ints (isel).  Emit DEST = (OP ? TRUE_COND :
   FALSE_COND) using the isel instruction, where OP is a comparison
   involving rs6000_compare_op0/op1.  Return 1 on success, 0 if isel
   cannot be used (caller must then fall back to another strategy).  */

static int
rs6000_emit_int_cmove (rtx dest, rtx op, rtx true_cond, rtx false_cond)
{
  rtx condition_rtx, cr;

  /* All isel implementations thus far are 32-bits.  */
  if (GET_MODE (rs6000_compare_op0) != SImode)
    return 0;

  /* We still have to do the compare, because isel doesn't do a
     compare, it just looks at the CRx bits set by a previous compare
     instruction.  */
  condition_rtx = rs6000_generate_compare (GET_CODE (op));
  cr = XEXP (condition_rtx, 0);

  /* Pick the signed or unsigned isel pattern based on the mode of the
     condition register produced by the compare (CCmode = signed;
     anything else — presumably CCUNSmode — is treated as unsigned).  */
  if (GET_MODE (cr) == CCmode)
    emit_insn (gen_isel_signed (dest, condition_rtx,
                                true_cond, false_cond, cr));
  else
    emit_insn (gen_isel_unsigned (dest, condition_rtx,
                                  true_cond, false_cond, cr));

  return 1;
}
12128
 
12129
const char *
12130
output_isel (rtx *operands)
12131
{
12132
  enum rtx_code code;
12133
 
12134
  code = GET_CODE (operands[1]);
12135
  if (code == GE || code == GEU || code == LE || code == LEU || code == NE)
12136
    {
12137
      PUT_CODE (operands[1], reverse_condition (code));
12138
      return "isel %0,%3,%2,%j1";
12139
    }
12140
  else
12141
    return "isel %0,%2,%3,%j1";
12142
}
12143
 
12144
void
12145
rs6000_emit_minmax (rtx dest, enum rtx_code code, rtx op0, rtx op1)
12146
{
12147
  enum machine_mode mode = GET_MODE (op0);
12148
  enum rtx_code c;
12149
  rtx target;
12150
 
12151
  if (code == SMAX || code == SMIN)
12152
    c = GE;
12153
  else
12154
    c = GEU;
12155
 
12156
  if (code == SMAX || code == UMAX)
12157
    target = emit_conditional_move (dest, c, op0, op1, mode,
12158
                                    op0, op1, mode, 0);
12159
  else
12160
    target = emit_conditional_move (dest, c, op0, op1, mode,
12161
                                    op1, op0, mode, 0);
12162
  gcc_assert (target);
12163
  if (target != dest)
12164
    emit_move_insn (dest, target);
12165
}
12166
 
12167
/* Emit instructions to perform a load-reserved/store-conditional operation.
   The operation performed is an atomic
   (set M (CODE:MODE M OP))
   If not NULL, BEFORE is atomically set to M before the operation, and
   AFTER is set to M after the operation (that is, (CODE:MODE M OP)).
   If SYNC_P then a memory barrier is emitted before the operation.
   Either OP or M may be wrapped in a NOT operation.  */

void
rs6000_emit_sync (enum rtx_code code, enum machine_mode mode,
                  rtx m, rtx op, rtx before_param, rtx after_param,
                  bool sync_p)
{
  enum machine_mode used_mode;
  rtx the_op, set_before, set_after, set_atomic, cc_scratch, before, after;
  rtx used_m;
  rtvec vec;
  HOST_WIDE_INT imask = GET_MODE_MASK (mode);
  rtx shift = NULL_RTX;

  if (sync_p)
    emit_insn (gen_memory_barrier ());

  /* Strip an outer NOT from M to get at the actual memory operand.  */
  if (GET_CODE (m) == NOT)
    used_m = XEXP (m, 0);
  else
    used_m = m;

  /* If this is smaller than SImode, we'll have to use SImode with
     adjustments.  The reservation granule is a word, so the subword is
     positioned within an aligned SImode word and the operation is
     rewritten to leave the other bytes of that word unchanged.  */
  if (mode == QImode || mode == HImode)
    {
      rtx newop, oldop;

      if (MEM_ALIGN (used_m) >= 32)
        {
          /* Already word-aligned: the shift to the subword's position
             is a compile-time constant (0 on little-endian, the
             distance from the top of the word on big-endian).  */
          int ishift = 0;
          if (BYTES_BIG_ENDIAN)
            ishift = GET_MODE_BITSIZE (SImode) - GET_MODE_BITSIZE (mode);

          shift = GEN_INT (ishift);
        }
      else
        {
          /* Not known to be aligned: compute the shift at run time
             from the low address bits, and redirect the access to the
             enclosing aligned word (address & -4).  */
          rtx addrSI, aligned_addr;
          int shift_mask = mode == QImode ? 0x18 : 0x10;

          addrSI = force_reg (SImode, gen_lowpart_common (SImode,
                                                          XEXP (used_m, 0)));
          shift = gen_reg_rtx (SImode);

          emit_insn (gen_rlwinm (shift, addrSI, GEN_INT (3),
                                 GEN_INT (shift_mask)));
          emit_insn (gen_xorsi3 (shift, shift, GEN_INT (shift_mask)));

          aligned_addr = expand_binop (Pmode, and_optab,
                                       XEXP (used_m, 0),
                                       GEN_INT (-4), NULL_RTX,
                                       1, OPTAB_LIB_WIDEN);
          used_m = change_address (used_m, SImode, aligned_addr);
          set_mem_align (used_m, 32);
          /* It's safe to keep the old alias set of USED_M, because
             the operation is atomic and only affects the original
             USED_M.  */
          if (GET_CODE (m) == NOT)
            m = gen_rtx_NOT (SImode, used_m);
          else
            m = used_m;
        }

      /* Widen OP to SImode, preserving an outer NOT if present.  */
      if (GET_CODE (op) == NOT)
        {
          oldop = lowpart_subreg (SImode, XEXP (op, 0), mode);
          oldop = gen_rtx_NOT (SImode, oldop);
        }
      else
        oldop = lowpart_subreg (SImode, op, mode);

      /* Build the SImode operand NEWOP so the full-word operation has
         the same effect on the subword as the narrow operation would,
         and leaves the rest of the word alone.  */
      switch (code)
        {
        case IOR:
        case XOR:
          /* OR/XOR with zeros outside the subword is a no-op there.  */
          newop = expand_binop (SImode, and_optab,
                                oldop, GEN_INT (imask), NULL_RTX,
                                1, OPTAB_LIB_WIDEN);
          emit_insn (gen_ashlsi3 (newop, newop, shift));
          break;

        case AND:
          /* AND with ones outside the subword is a no-op there.  */
          newop = expand_binop (SImode, ior_optab,
                                oldop, GEN_INT (~imask), NULL_RTX,
                                1, OPTAB_LIB_WIDEN);
          emit_insn (gen_rotlsi3 (newop, newop, shift));
          break;

        case PLUS:
        case MINUS:
          {
            /* Add/subtract can carry/borrow across the subword
               boundary, so compute the full-word result and then merge
               only the subword bits back into M under MASK.  */
            rtx mask;

            newop = expand_binop (SImode, and_optab,
                                  oldop, GEN_INT (imask), NULL_RTX,
                                  1, OPTAB_LIB_WIDEN);
            emit_insn (gen_ashlsi3 (newop, newop, shift));

            mask = gen_reg_rtx (SImode);
            emit_move_insn (mask, GEN_INT (imask));
            emit_insn (gen_ashlsi3 (mask, mask, shift));

            if (code == PLUS)
              newop = gen_rtx_PLUS (SImode, m, newop);
            else
              newop = gen_rtx_MINUS (SImode, m, newop);
            newop = gen_rtx_AND (SImode, newop, mask);
            newop = gen_rtx_IOR (SImode, newop,
                                 gen_rtx_AND (SImode,
                                              gen_rtx_NOT (SImode, mask),
                                              m));
            break;
          }

        default:
          gcc_unreachable ();
        }

      if (GET_CODE (m) == NOT)
        {
          rtx mask, xorm;

          mask = gen_reg_rtx (SImode);
          emit_move_insn (mask, GEN_INT (imask));
          emit_insn (gen_ashlsi3 (mask, mask, shift));

          xorm = gen_rtx_XOR (SImode, used_m, mask);
          /* Depending on the value of 'op', the XOR or the operation might
             be able to be simplified away.  */
          newop = simplify_gen_binary (code, SImode, xorm, newop);
        }
      op = newop;
      used_mode = SImode;
      before = gen_reg_rtx (used_mode);
      after = gen_reg_rtx (used_mode);
    }
  else
    {
      /* Word-sized (or wider) operation: work in the requested mode
         and use the caller's result registers when provided.  */
      used_mode = mode;
      before = before_param;
      after = after_param;

      if (before == NULL_RTX)
        before = gen_reg_rtx (used_mode);
      if (after == NULL_RTX)
        after = gen_reg_rtx (used_mode);
    }

  /* Assemble the RTL for the new value stored back to memory.  For the
     subword PLUS/MINUS/NOT cases it was fully built above.  */
  if ((code == PLUS || code == MINUS || GET_CODE (m) == NOT)
      && used_mode != mode)
    the_op = op;  /* Computed above.  */
  else if (GET_CODE (op) == NOT && GET_CODE (m) != NOT)
    the_op = gen_rtx_fmt_ee (code, used_mode, op, m);
  else
    the_op = gen_rtx_fmt_ee (code, used_mode, m, op);

  /* Describe the whole atomic sequence as one PARALLEL so the
     splitter patterns see the BEFORE/AFTER values and the clobbers.  */
  set_after = gen_rtx_SET (VOIDmode, after, the_op);
  set_before = gen_rtx_SET (VOIDmode, before, used_m);
  set_atomic = gen_rtx_SET (VOIDmode, used_m,
                            gen_rtx_UNSPEC (used_mode,
                                            gen_rtvec (1, the_op),
                                            UNSPEC_SYNC_OP));
  cc_scratch = gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (CCmode));

  /* Subword PLUS/MINUS needs an extra GPR scratch.  */
  if ((code == PLUS || code == MINUS) && used_mode != mode)
    vec = gen_rtvec (5, set_after, set_before, set_atomic, cc_scratch,
                     gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (SImode)));
  else
    vec = gen_rtvec (4, set_after, set_before, set_atomic, cc_scratch);
  emit_insn (gen_rtx_PARALLEL (VOIDmode, vec));

  /* Shift and mask the return values properly.  */
  if (used_mode != mode && before_param)
    {
      emit_insn (gen_lshrsi3 (before, before, shift));
      convert_move (before_param, before, 1);
    }

  if (used_mode != mode && after_param)
    {
      emit_insn (gen_lshrsi3 (after, after, shift));
      convert_move (after_param, after, 1);
    }

  /* The previous sequence will end with a branch that's dependent on
     the conditional store, so placing an isync will ensure that no
     other instructions (especially, no load or store instructions)
     can start before the atomic operation completes.  */
  if (sync_p)
    emit_insn (gen_isync ());
}
12365
 
12366
/* A subroutine of the atomic operation splitters.  Jump to LABEL if
   COND is true.  Mark the jump as unlikely to be taken, so the
   fall-through (success) path of the load-locked/store-conditional
   retry loop is laid out straight.  */

static void
emit_unlikely_jump (rtx cond, rtx label)
{
  /* Probability note of ~1% that the branch is taken.  */
  rtx very_unlikely = GEN_INT (REG_BR_PROB_BASE / 100 - 1);
  rtx x;

  x = gen_rtx_IF_THEN_ELSE (VOIDmode, cond, label, pc_rtx);
  x = emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx, x));
  REG_NOTES (x) = gen_rtx_EXPR_LIST (REG_BR_PROB, very_unlikely, NULL_RTX);
}
12379
 
12380
/* A subroutine of the atomic operation splitters.  Emit a load-locked
12381
   instruction in MODE.  */
12382
 
12383
static void
12384
emit_load_locked (enum machine_mode mode, rtx reg, rtx mem)
12385
{
12386
  rtx (*fn) (rtx, rtx) = NULL;
12387
  if (mode == SImode)
12388
    fn = gen_load_locked_si;
12389
  else if (mode == DImode)
12390
    fn = gen_load_locked_di;
12391
  emit_insn (fn (reg, mem));
12392
}
12393
 
12394
/* A subroutine of the atomic operation splitters.  Emit a store-conditional
12395
   instruction in MODE.  */
12396
 
12397
static void
12398
emit_store_conditional (enum machine_mode mode, rtx res, rtx mem, rtx val)
12399
{
12400
  rtx (*fn) (rtx, rtx, rtx) = NULL;
12401
  if (mode == SImode)
12402
    fn = gen_store_conditional_si;
12403
  else if (mode == DImode)
12404
    fn = gen_store_conditional_di;
12405
 
12406
  /* Emit sync before stwcx. to address PPC405 Erratum.  */
12407
  if (PPC405_ERRATUM77)
12408
    emit_insn (gen_memory_barrier ());
12409
 
12410
  emit_insn (fn (res, mem, val));
12411
}
12412
 
12413
/* Expand an atomic fetch-and-operate pattern.  CODE is the binary operation
   to perform.  MEM is the memory on which to operate.  VAL is the second
   operand of the binary operator.  BEFORE and AFTER are optional locations to
   return the value of MEM either before or after the operation.  SCRATCH is
   a scratch register.  */

void
rs6000_split_atomic_op (enum rtx_code code, rtx mem, rtx val,
                       rtx before, rtx after, rtx scratch)
{
  enum machine_mode mode = GET_MODE (mem);
  rtx label, x, cond = gen_rtx_REG (CCmode, CR0_REGNO);

  /* Full barrier before entering the retry loop.  */
  emit_insn (gen_memory_barrier ());

  /* Top of the load-locked/store-conditional retry loop.  */
  label = gen_label_rtx ();
  emit_label (label);
  label = gen_rtx_LABEL_REF (VOIDmode, label);

  /* If the caller doesn't want the old value, load it into SCRATCH.  */
  if (before == NULL_RTX)
    before = scratch;
  emit_load_locked (mode, before, mem);

  /* Build the new value.  NOT is expanded as AND-with-complement;
     AND goes through UNSPEC_AND (see the split_compare_and_swapqhi
     comment: this avoids an unwanted clobber in the and pattern).  */
  if (code == NOT)
    x = gen_rtx_AND (mode, gen_rtx_NOT (mode, before), val);
  else if (code == AND)
    x = gen_rtx_UNSPEC (mode, gen_rtvec (2, before, val), UNSPEC_AND);
  else
    x = gen_rtx_fmt_ee (code, mode, before, val);

  if (after != NULL_RTX)
    emit_insn (gen_rtx_SET (VOIDmode, after, copy_rtx (x)));
  emit_insn (gen_rtx_SET (VOIDmode, scratch, x));

  /* Try to commit; on failure (reservation lost) loop back.  */
  emit_store_conditional (mode, cond, mem, scratch);

  x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
  emit_unlikely_jump (x, label);

  emit_insn (gen_isync ());
}
12454
 
12455
/* Expand an atomic compare and swap operation.  MEM is the memory on which
   to operate.  OLDVAL is the old value to be compared.  NEWVAL is the new
   value to be stored.  SCRATCH is a scratch GPR.  RETVAL receives the
   value found in MEM (equal to OLDVAL iff the swap succeeded).  */

void
rs6000_split_compare_and_swap (rtx retval, rtx mem, rtx oldval, rtx newval,
                               rtx scratch)
{
  enum machine_mode mode = GET_MODE (mem);
  rtx label1, label2, x, cond = gen_rtx_REG (CCmode, CR0_REGNO);

  emit_insn (gen_memory_barrier ());

  /* LABEL1: top of the retry loop; LABEL2: exit on compare failure.  */
  label1 = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
  label2 = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
  emit_label (XEXP (label1, 0));

  emit_load_locked (mode, retval, mem);

  /* Compare the loaded value against OLDVAL; bail out if different.  */
  x = gen_rtx_COMPARE (CCmode, retval, oldval);
  emit_insn (gen_rtx_SET (VOIDmode, cond, x));

  x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
  emit_unlikely_jump (x, label2);

  /* Values matched: attempt to store NEWVAL; retry if the
     reservation was lost.  */
  emit_move_insn (scratch, newval);
  emit_store_conditional (mode, cond, mem, scratch);

  x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
  emit_unlikely_jump (x, label1);

  emit_insn (gen_isync ());
  emit_label (XEXP (label2, 0));
}
12489
 
12490
/* Expand an atomic test and set operation.  MEM is the memory on which
   to operate.  VAL is the value set.  SCRATCH is a scratch GPR.
   RETVAL receives the previous contents of MEM.  */

void
rs6000_split_lock_test_and_set (rtx retval, rtx mem, rtx val, rtx scratch)
{
  enum machine_mode mode = GET_MODE (mem);
  rtx label, x, cond = gen_rtx_REG (CCmode, CR0_REGNO);

  emit_insn (gen_memory_barrier ());

  /* Top of the load-locked/store-conditional retry loop.  */
  label = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
  emit_label (XEXP (label, 0));

  /* Unconditionally exchange: load old value, store VAL, retry if the
     reservation was lost.  */
  emit_load_locked (mode, retval, mem);
  emit_move_insn (scratch, val);
  emit_store_conditional (mode, cond, mem, scratch);

  x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
  emit_unlikely_jump (x, label);

  emit_insn (gen_isync ());
}
12513
 
12514
/* Expand a compare-and-swap on a QImode or HImode MEM.  The subword is
   accessed through the enclosing aligned SImode word, so OLDVAL and
   NEWVAL are shifted/masked into their position within that word and
   the real work is done by sync_compare_and_swapqhi_internal (split by
   rs6000_split_compare_and_swapqhi below).  DST receives the previous
   subword value.  */

void
rs6000_expand_compare_and_swapqhi (rtx dst, rtx mem, rtx oldval, rtx newval)
{
  enum machine_mode mode = GET_MODE (mem);
  rtx addrSI, align, wdst, shift, mask;
  HOST_WIDE_INT shift_mask = mode == QImode ? 0x18 : 0x10;
  HOST_WIDE_INT imask = GET_MODE_MASK (mode);

  /* Shift amount for subword relative to aligned word.  */
  addrSI = force_reg (SImode, gen_lowpart_common (SImode, XEXP (mem, 0)));
  shift = gen_reg_rtx (SImode);
  emit_insn (gen_rlwinm (shift, addrSI, GEN_INT (3),
                         GEN_INT (shift_mask)));
  emit_insn (gen_xorsi3 (shift, shift, GEN_INT (shift_mask)));

  /* Shift and mask old value into position within word.  */
  oldval = convert_modes (SImode, mode, oldval, 1);
  oldval = expand_binop (SImode, and_optab,
                         oldval, GEN_INT (imask), NULL_RTX,
                         1, OPTAB_LIB_WIDEN);
  emit_insn (gen_ashlsi3 (oldval, oldval, shift));

  /* Shift and mask new value into position within word.  */
  newval = convert_modes (SImode, mode, newval, 1);
  newval = expand_binop (SImode, and_optab,
                         newval, GEN_INT (imask), NULL_RTX,
                         1, OPTAB_LIB_WIDEN);
  emit_insn (gen_ashlsi3 (newval, newval, shift));

  /* Mask for insertion.  */
  mask = gen_reg_rtx (SImode);
  emit_move_insn (mask, GEN_INT (imask));
  emit_insn (gen_ashlsi3 (mask, mask, shift));

  /* Address of aligned word containing subword.  */
  align = expand_binop (Pmode, and_optab, XEXP (mem, 0), GEN_INT (-4),
                        NULL_RTX, 1, OPTAB_LIB_WIDEN);
  mem = change_address (mem, SImode, align);
  set_mem_align (mem, 32);
  MEM_VOLATILE_P (mem) = 1;

  wdst = gen_reg_rtx (SImode);
  emit_insn (gen_sync_compare_and_swapqhi_internal (wdst, mask,
                                                    oldval, newval, mem));

  /* The old subword is already shifted into the low part of WDST
     by the internal pattern's splitter; narrow it back to MODE.  */
  emit_move_insn (dst, gen_lowpart (mode, wdst));
}
12561
 
12562
/* Split the subword compare-and-swap emitted by
   rs6000_expand_compare_and_swapqhi above into a
   load-locked/store-conditional loop on the aligned SImode MEM.
   MASK/OLDVAL/NEWVAL are already shifted into subword position within
   the word; DEST receives the masked old word, SCRATCH is a GPR.  */

void
rs6000_split_compare_and_swapqhi (rtx dest, rtx mask,
                                  rtx oldval, rtx newval, rtx mem,
                                  rtx scratch)
{
  rtx label1, label2, x, cond = gen_rtx_REG (CCmode, CR0_REGNO);

  emit_insn (gen_memory_barrier ());
  /* LABEL1: top of the retry loop; LABEL2: exit on compare failure.  */
  label1 = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
  label2 = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
  emit_label (XEXP (label1, 0));

  emit_load_locked (SImode, scratch, mem);

  /* Mask subword within loaded value for comparison with oldval.
     Use UNSPEC_AND to avoid clobber.  */
  emit_insn (gen_rtx_SET (SImode, dest,
                          gen_rtx_UNSPEC (SImode,
                                          gen_rtvec (2, scratch, mask),
                                          UNSPEC_AND)));

  x = gen_rtx_COMPARE (CCmode, dest, oldval);
  emit_insn (gen_rtx_SET (VOIDmode, cond, x));

  x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
  emit_unlikely_jump (x, label2);

  /* Clear subword within loaded value for insertion of new value.  */
  emit_insn (gen_rtx_SET (SImode, scratch,
                          gen_rtx_AND (SImode,
                                       gen_rtx_NOT (SImode, mask), scratch)));
  emit_insn (gen_iorsi3 (scratch, scratch, newval));
  emit_store_conditional (SImode, cond, mem, scratch);

  /* Retry if the reservation was lost.  */
  x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
  emit_unlikely_jump (x, label1);

  emit_insn (gen_isync ());
  emit_label (XEXP (label2, 0));
}
12602
 
12603
 
12604
/* Emit instructions to move SRC to DST.  Called by splitters for
   multi-register moves.  It will emit at most one instruction for
   each register that is accessed; that is, it won't emit li/lis pairs
   (or equivalent for 64-bit code).  One of SRC or DST must be a hard
   register.  */

void
rs6000_split_multireg_move (rtx dst, rtx src)
{
  /* The register number of the first register being moved.  */
  int reg;
  /* The mode that is to be moved.  */
  enum machine_mode mode;
  /* The mode that the move is being done in, and its size.  */
  enum machine_mode reg_mode;
  int reg_mode_size;
  /* The number of registers that will be moved.  */
  int nregs;

  reg = REG_P (dst) ? REGNO (dst) : REGNO (src);
  mode = GET_MODE (dst);
  nregs = hard_regno_nregs[reg][mode];
  /* Choose the per-register chunk mode for the register file involved.  */
  if (FP_REGNO_P (reg))
    reg_mode = DFmode;
  else if (ALTIVEC_REGNO_P (reg))
    reg_mode = V16QImode;
  else if (TARGET_E500_DOUBLE && mode == TFmode)
    reg_mode = DFmode;
  else
    reg_mode = word_mode;
  reg_mode_size = GET_MODE_SIZE (reg_mode);

  gcc_assert (reg_mode_size * nregs == GET_MODE_SIZE (mode));

  if (REG_P (src) && REG_P (dst) && (REGNO (src) < REGNO (dst)))
    {
      /* Move register range backwards, if we might have destructive
         overlap.  */
      int i;
      for (i = nregs - 1; i >= 0; i--)
        emit_insn (gen_rtx_SET (VOIDmode,
                                simplify_gen_subreg (reg_mode, dst, mode,
                                                     i * reg_mode_size),
                                simplify_gen_subreg (reg_mode, src, mode,
                                                     i * reg_mode_size)));
    }
  else
    {
      int i;
      /* J is the subword index to move next; -1 means "start at 0"
         after the pre-increment in the loop below.  It is set to the
         base register's position when that register is also a
         destination, so the base is overwritten last.  */
      int j = -1;
      bool used_update = false;

      if (MEM_P (src) && INT_REGNO_P (reg))
        {
          rtx breg;

          if (GET_CODE (XEXP (src, 0)) == PRE_INC
              || GET_CODE (XEXP (src, 0)) == PRE_DEC)
            {
              /* Materialize the pre-modify explicitly, then address
                 the memory through the updated base register.  */
              rtx delta_rtx;
              breg = XEXP (XEXP (src, 0), 0);
              delta_rtx = (GET_CODE (XEXP (src, 0)) == PRE_INC
                           ? GEN_INT (GET_MODE_SIZE (GET_MODE (src)))
                           : GEN_INT (-GET_MODE_SIZE (GET_MODE (src))));
              emit_insn (TARGET_32BIT
                         ? gen_addsi3 (breg, breg, delta_rtx)
                         : gen_adddi3 (breg, breg, delta_rtx));
              src = replace_equiv_address (src, breg);
            }
          else if (! rs6000_offsettable_memref_p (src))
            {
              /* Address isn't offsettable: compute it into the first
                 destination register (free at this point) and use
                 reg+offset addressing from there.  */
              rtx basereg;
              basereg = gen_rtx_REG (Pmode, reg);
              emit_insn (gen_rtx_SET (VOIDmode, basereg, XEXP (src, 0)));
              src = replace_equiv_address (src, basereg);
            }

          breg = XEXP (src, 0);
          if (GET_CODE (breg) == PLUS || GET_CODE (breg) == LO_SUM)
            breg = XEXP (breg, 0);

          /* If the base register we are using to address memory is
             also a destination reg, then change that register last.  */
          if (REG_P (breg)
              && REGNO (breg) >= REGNO (dst)
              && REGNO (breg) < REGNO (dst) + nregs)
            j = REGNO (breg) - REGNO (dst);
        }

      if (GET_CODE (dst) == MEM && INT_REGNO_P (reg))
        {
          rtx breg;

          if (GET_CODE (XEXP (dst, 0)) == PRE_INC
              || GET_CODE (XEXP (dst, 0)) == PRE_DEC)
            {
              rtx delta_rtx;
              breg = XEXP (XEXP (dst, 0), 0);
              delta_rtx = (GET_CODE (XEXP (dst, 0)) == PRE_INC
                           ? GEN_INT (GET_MODE_SIZE (GET_MODE (dst)))
                           : GEN_INT (-GET_MODE_SIZE (GET_MODE (dst))));

              /* We have to update the breg before doing the store.
                 Use store with update, if available.  */

              if (TARGET_UPDATE)
                {
                  rtx nsrc = simplify_gen_subreg (reg_mode, src, mode, 0);
                  emit_insn (TARGET_32BIT
                             ? (TARGET_POWERPC64
                                ? gen_movdi_si_update (breg, breg, delta_rtx, nsrc)
                                : gen_movsi_update (breg, breg, delta_rtx, nsrc))
                             : gen_movdi_di_update (breg, breg, delta_rtx, nsrc));
                  /* The first subword has now been stored.  */
                  used_update = true;
                }
              else
                emit_insn (TARGET_32BIT
                           ? gen_addsi3 (breg, breg, delta_rtx)
                           : gen_adddi3 (breg, breg, delta_rtx));
              dst = replace_equiv_address (dst, breg);
            }
          else
            gcc_assert (rs6000_offsettable_memref_p (dst));
        }

      /* Move the subwords one at a time, starting after J so the base
         register (if it is a destination) is written last.  */
      for (i = 0; i < nregs; i++)
        {
          /* Calculate index to next subword.  */
          ++j;
          if (j == nregs)
            j = 0;

          /* If compiler already emitted move of first word by
             store with update, no need to do anything.  */
          if (j == 0 && used_update)
            continue;

          emit_insn (gen_rtx_SET (VOIDmode,
                                  simplify_gen_subreg (reg_mode, dst, mode,
                                                       j * reg_mode_size),
                                  simplify_gen_subreg (reg_mode, src, mode,
                                                       j * reg_mode_size)));
        }
    }
}
12749
 
12750
 
12751
/* This page contains routines that are used to determine what the
12752
   function prologue and epilogue code will do and write them out.  */
12753
 
12754
/* Return the first fixed-point register that is required to be
   saved. 32 if none.  */

int
first_reg_to_save (void)
{
  int first_reg;

  /* Find lowest numbered live register.  GPRs below 13 are never
     saved here; the PIC offset table register must also be saved when
     it is implicitly used for PIC or minimal-TOC addressing even
     though it is call-used.  */
  for (first_reg = 13; first_reg <= 31; first_reg++)
    if (regs_ever_live[first_reg]
        && (! call_used_regs[first_reg]
            || (first_reg == RS6000_PIC_OFFSET_TABLE_REGNUM
                && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
                    || (DEFAULT_ABI == ABI_DARWIN && flag_pic)
                    || (TARGET_TOC && TARGET_MINIMAL_TOC)))))
      break;

#if TARGET_MACHO
  /* On Darwin the PIC base register may be used without showing up in
     regs_ever_live; force it into the saved range in that case.  */
  if (flag_pic
      && current_function_uses_pic_offset_table
      && first_reg > RS6000_PIC_OFFSET_TABLE_REGNUM)
    return RS6000_PIC_OFFSET_TABLE_REGNUM;
#endif

  return first_reg;
}
12781
 
12782
/* Similar, for FP regs.  */
12783
 
12784
int
12785
first_fp_reg_to_save (void)
12786
{
12787
  int first_reg;
12788
 
12789
  /* Find lowest numbered live register.  */
12790
  for (first_reg = 14 + 32; first_reg <= 63; first_reg++)
12791
    if (regs_ever_live[first_reg])
12792
      break;
12793
 
12794
  return first_reg;
12795
}
12796
 
12797
/* Similar, for AltiVec regs.  Returns LAST_ALTIVEC_REGNO + 1 when no
   AltiVec register needs saving.  */

static int
first_altivec_reg_to_save (void)
{
  int i;

  /* Stack frame remains as is unless we are in AltiVec ABI.  */
  if (! TARGET_ALTIVEC_ABI)
    return LAST_ALTIVEC_REGNO + 1;

  /* On Darwin, the unwind routines are compiled without
     TARGET_ALTIVEC, and use save_world to save/restore the
     altivec registers when necessary.  */
  if (DEFAULT_ABI == ABI_DARWIN && current_function_calls_eh_return
      && ! TARGET_ALTIVEC)
    return FIRST_ALTIVEC_REGNO + 20;

  /* Find lowest numbered live register.  Only v20..v31
     (FIRST_ALTIVEC_REGNO + 20 onward) are considered for saving.  */
  for (i = FIRST_ALTIVEC_REGNO + 20; i <= LAST_ALTIVEC_REGNO; ++i)
    if (regs_ever_live[i])
      break;

  return i;
}
12822
 
12823
/* Return a 32-bit mask of the AltiVec registers we need to set in
12824
   VRSAVE.  Bit n of the return value is 1 if Vn is live.  The MSB in
12825
   the 32-bit word is 0.  */
12826
 
12827
static unsigned int
compute_vrsave_mask (void)
{
  unsigned int i, mask = 0;

  /* On Darwin, the unwind routines are compiled without
     TARGET_ALTIVEC, and use save_world to save/restore the
     call-saved altivec registers when necessary.  */
  if (DEFAULT_ABI == ABI_DARWIN && current_function_calls_eh_return
      && ! TARGET_ALTIVEC)
    mask |= 0xFFF;

  /* First, find out if we use _any_ altivec registers.  */
  for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
    if (regs_ever_live[i])
      mask |= ALTIVEC_REG_BIT (i);

  /* No AltiVec registers live at all: VRSAVE need not be touched.  */
  if (mask == 0)
    return mask;

  /* Next, remove the argument registers from the set.  These must
     be in the VRSAVE mask set by the caller, so we don't need to add
     them in again.  More importantly, the mask we compute here is
     used to generate CLOBBERs in the set_vrsave insn, and we do not
     wish the argument registers to die.  */
  /* NOTE(review): I is unsigned, so this countdown loop relies on
     cfun->args_info.vregno being >= ALTIVEC_ARG_MIN_REG (vregno - 1
     would otherwise wrap to a huge value, though the >= test would
     still terminate the loop immediately only if it wrapped below
     ALTIVEC_ARG_MIN_REG) -- confirm vregno's initialization against
     init_cumulative_args.  */
  for (i = cfun->args_info.vregno - 1; i >= ALTIVEC_ARG_MIN_REG; --i)
    mask &= ~ALTIVEC_REG_BIT (i);

  /* Similarly, remove the return value from the set.  */
  {
    bool yes = false;
    diddle_return_value (is_altivec_return_reg, &yes);
    if (yes)
      mask &= ~ALTIVEC_REG_BIT (ALTIVEC_ARG_RETURN);
  }

  return mask;
}
12865
 
12866
/* For a very restricted set of circumstances, we can cut down the
12867
   size of prologues/epilogues by calling our own save/restore-the-world
12868
   routines.  */
12869
 
12870
static void
compute_save_world_info (rs6000_stack_t *info_ptr)
{
  info_ptr->world_save_p = 1;
  /* save_world/rest_world are only usable when: we are on Darwin; the
     function cannot return abnormally via setjmp while exceptions are
     enabled; each register class is saved starting from exactly the
     register the routines expect (F14/V20 and the first saved GPR);
     and CR is being saved.  WORLD_SAVE_P re-reads the flag set just
     above, so the conjunction below refines the initial "yes".  */
  info_ptr->world_save_p
    = (WORLD_SAVE_P (info_ptr)
       && DEFAULT_ABI == ABI_DARWIN
       && ! (current_function_calls_setjmp && flag_exceptions)
       && info_ptr->first_fp_reg_save == FIRST_SAVED_FP_REGNO
       && info_ptr->first_gp_reg_save == FIRST_SAVED_GP_REGNO
       && info_ptr->first_altivec_reg_save == FIRST_SAVED_ALTIVEC_REGNO
       && info_ptr->cr_save_p);

  /* This will not work in conjunction with sibcalls.  Make sure there
     are none.  (This check is expensive, but seldom executed.) */
  if (WORLD_SAVE_P (info_ptr))
    {
      rtx insn;
      for ( insn = get_last_insn_anywhere (); insn; insn = PREV_INSN (insn))
        if ( GET_CODE (insn) == CALL_INSN
             && SIBLING_CALL_P (insn))
          {
            info_ptr->world_save_p = 0;
            break;
          }
    }

  if (WORLD_SAVE_P (info_ptr))
    {
      /* Even if we're not touching VRsave, make sure there's room on the
         stack for it, if it looks like we're calling SAVE_WORLD, which
         will attempt to save it. */
      info_ptr->vrsave_size  = 4;

      /* "Save" the VRsave register too if we're saving the world.  */
      if (info_ptr->vrsave_mask == 0)
        info_ptr->vrsave_mask = compute_vrsave_mask ();

      /* Because the Darwin register save/restore routines only handle
         F14 .. F31 and V20 .. V31 as per the ABI, perform a consistency
         check.  */
      gcc_assert (info_ptr->first_fp_reg_save >= FIRST_SAVED_FP_REGNO
                  && (info_ptr->first_altivec_reg_save
                      >= FIRST_SAVED_ALTIVEC_REGNO));
    }
  return;
}
12917
 
12918
 
12919
static void
12920
is_altivec_return_reg (rtx reg, void *xyes)
12921
{
12922
  bool *yes = (bool *) xyes;
12923
  if (REGNO (reg) == ALTIVEC_ARG_RETURN)
12924
    *yes = true;
12925
}
12926
 
12927
 
12928
/* Calculate the stack information for the current function.  This is
12929
   complicated by having two separate calling sequences, the AIX calling
12930
   sequence and the V.4 calling sequence.
12931
 
12932
   AIX (and Darwin/Mac OS X) stack frames look like:
12933
                                                          32-bit  64-bit
12934
        SP----> +---------------------------------------+
12935
                | back chain to caller                  | 0       0
12936
                +---------------------------------------+
12937
                | saved CR                              | 4       8 (8-11)
12938
                +---------------------------------------+
12939
                | saved LR                              | 8       16
12940
                +---------------------------------------+
12941
                | reserved for compilers                | 12      24
12942
                +---------------------------------------+
12943
                | reserved for binders                  | 16      32
12944
                +---------------------------------------+
12945
                | saved TOC pointer                     | 20      40
12946
                +---------------------------------------+
12947
                | Parameter save area (P)               | 24      48
12948
                +---------------------------------------+
12949
                | Alloca space (A)                      | 24+P    etc.
12950
                +---------------------------------------+
12951
                | Local variable space (L)              | 24+P+A
12952
                +---------------------------------------+
12953
                | Float/int conversion temporary (X)    | 24+P+A+L
12954
                +---------------------------------------+
12955
                | Save area for AltiVec registers (W)   | 24+P+A+L+X
12956
                +---------------------------------------+
12957
                | AltiVec alignment padding (Y)         | 24+P+A+L+X+W
12958
                +---------------------------------------+
12959
                | Save area for VRSAVE register (Z)     | 24+P+A+L+X+W+Y
12960
                +---------------------------------------+
12961
                | Save area for GP registers (G)        | 24+P+A+L+X+W+Y+Z
                +---------------------------------------+
                | Save area for FP registers (F)        | 24+P+A+L+X+W+Y+Z+G
12964
                +---------------------------------------+
12965
        old SP->| back chain to caller's caller         |
12966
                +---------------------------------------+
12967
 
12968
   The required alignment for AIX configurations is two words (i.e., 8
12969
   or 16 bytes).
12970
 
12971
 
12972
   V.4 stack frames look like:
12973
 
12974
        SP----> +---------------------------------------+
12975
                | back chain to caller                  | 0
12976
                +---------------------------------------+
12977
                | caller's saved LR                     | 4
12978
                +---------------------------------------+
12979
                | Parameter save area (P)               | 8
12980
                +---------------------------------------+
12981
                | Alloca space (A)                      | 8+P
12982
                +---------------------------------------+
12983
                | Varargs save area (V)                 | 8+P+A
12984
                +---------------------------------------+
12985
                | Local variable space (L)              | 8+P+A+V
12986
                +---------------------------------------+
12987
                | Float/int conversion temporary (X)    | 8+P+A+V+L
12988
                +---------------------------------------+
12989
                | Save area for AltiVec registers (W)   | 8+P+A+V+L+X
12990
                +---------------------------------------+
12991
                | AltiVec alignment padding (Y)         | 8+P+A+V+L+X+W
12992
                +---------------------------------------+
12993
                | Save area for VRSAVE register (Z)     | 8+P+A+V+L+X+W+Y
12994
                +---------------------------------------+
12995
                | SPE: area for 64-bit GP registers     |
12996
                +---------------------------------------+
12997
                | SPE alignment padding                 |
12998
                +---------------------------------------+
12999
                | saved CR (C)                          | 8+P+A+V+L+X+W+Y+Z
13000
                +---------------------------------------+
13001
                | Save area for GP registers (G)        | 8+P+A+V+L+X+W+Y+Z+C
13002
                +---------------------------------------+
13003
                | Save area for FP registers (F)        | 8+P+A+V+L+X+W+Y+Z+C+G
13004
                +---------------------------------------+
13005
        old SP->| back chain to caller's caller         |
13006
                +---------------------------------------+
13007
 
13008
   The required alignment for V.4 is 16 bytes, or 8 bytes if -meabi is
13009
   given.  (But note below and in sysv4.h that we require only 8 and
13010
   may round up the size of our stack frame anyways.  The historical
13011
   reason is early versions of powerpc-linux which didn't properly
13012
   align the stack at program startup.  A happy side-effect is that
13013
   -mno-eabi libraries can be used with -meabi programs.)
13014
 
13015
   The EABI configuration defaults to the V.4 layout.  However,
13016
   the stack alignment requirements may differ.  If -mno-eabi is not
13017
   given, the required stack alignment is 8 bytes; if -mno-eabi is
13018
   given, the required alignment is 16 bytes.  (But see V.4 comment
13019
   above.)  */
13020
 
13021
#ifndef ABI_STACK_BOUNDARY
13022
#define ABI_STACK_BOUNDARY STACK_BOUNDARY
13023
#endif
13024
 
13025
/* Compute the stack frame layout of the current function and return a
   pointer to it.  The result lives in a function-local static that is
   cleared and recomputed on every call, so the returned pointer is
   only valid until the next call.  */
static rs6000_stack_t *
rs6000_stack_info (void)
{
  static rs6000_stack_t info;
  rs6000_stack_t *info_ptr = &info;
  int reg_size = TARGET_32BIT ? 4 : 8;
  int ehrd_size;
  int save_align;
  HOST_WIDE_INT non_fixed_size;

  memset (&info, 0, sizeof (info));

  if (TARGET_SPE)
    {
      /* Cache value so we don't rescan instruction chain over and over.  */
      if (cfun->machine->insn_chain_scanned_p == 0)
        cfun->machine->insn_chain_scanned_p
          = spe_func_has_64bit_regs_p () + 1;
      info_ptr->spe_64bit_regs_used = cfun->machine->insn_chain_scanned_p - 1;
    }

  /* Select which calling sequence.  */
  info_ptr->abi = DEFAULT_ABI;

  /* Calculate which registers need to be saved & save area size.  */
  info_ptr->first_gp_reg_save = first_reg_to_save ();
  /* Assume that we will have to save RS6000_PIC_OFFSET_TABLE_REGNUM,
     even if it currently looks like we won't.  */
  if (((TARGET_TOC && TARGET_MINIMAL_TOC)
       || (flag_pic == 1 && DEFAULT_ABI == ABI_V4)
       || (flag_pic && DEFAULT_ABI == ABI_DARWIN))
      && info_ptr->first_gp_reg_save > RS6000_PIC_OFFSET_TABLE_REGNUM)
    info_ptr->gp_size = reg_size * (32 - RS6000_PIC_OFFSET_TABLE_REGNUM);
  else
    info_ptr->gp_size = reg_size * (32 - info_ptr->first_gp_reg_save);

  /* For the SPE, we have an additional upper 32-bits on each GPR.
     Ideally we should save the entire 64-bits only when the upper
     half is used in SIMD instructions.  Since we only record
     registers live (not the size they are used in), this proves
     difficult because we'd have to traverse the instruction chain at
     the right time, taking reload into account.  This is a real pain,
     so we opt to save the GPRs in 64-bits always if but one register
     gets used in 64-bits.  Otherwise, all the registers in the frame
     get saved in 32-bits.

     So... since when we save all GPRs (except the SP) in 64-bits, the
     traditional GP save area will be empty.  */
  if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
    info_ptr->gp_size = 0;

  info_ptr->first_fp_reg_save = first_fp_reg_to_save ();
  info_ptr->fp_size = 8 * (64 - info_ptr->first_fp_reg_save);

  info_ptr->first_altivec_reg_save = first_altivec_reg_to_save ();
  info_ptr->altivec_size = 16 * (LAST_ALTIVEC_REGNO + 1
                                 - info_ptr->first_altivec_reg_save);

  /* Does this function call anything?  */
  info_ptr->calls_p = (! current_function_is_leaf
                       || cfun->machine->ra_needs_full_frame);

  /* Determine if we need to save the link register.  */
  if ((DEFAULT_ABI == ABI_AIX
       && current_function_profile
       && !TARGET_PROFILE_KERNEL)
#ifdef TARGET_RELOCATABLE
      || (TARGET_RELOCATABLE && (get_pool_size () != 0))
#endif
      || (info_ptr->first_fp_reg_save != 64
          && !FP_SAVE_INLINE (info_ptr->first_fp_reg_save))
      || info_ptr->first_altivec_reg_save <= LAST_ALTIVEC_REGNO
      || (DEFAULT_ABI == ABI_V4 && current_function_calls_alloca)
      || info_ptr->calls_p
      || rs6000_ra_ever_killed ())
    {
      info_ptr->lr_save_p = 1;
      regs_ever_live[LINK_REGISTER_REGNUM] = 1;
    }

  /* Determine if we need to save the condition code registers.  */
  if (regs_ever_live[CR2_REGNO]
      || regs_ever_live[CR3_REGNO]
      || regs_ever_live[CR4_REGNO])
    {
      info_ptr->cr_save_p = 1;
      if (DEFAULT_ABI == ABI_V4)
        info_ptr->cr_size = reg_size;
    }

  /* If the current function calls __builtin_eh_return, then we need
     to allocate stack space for registers that will hold data for
     the exception handler.  */
  if (current_function_calls_eh_return)
    {
      unsigned int i;
      /* Count the EH data registers the target defines.  */
      for (i = 0; EH_RETURN_DATA_REGNO (i) != INVALID_REGNUM; ++i)
        continue;

      /* SPE saves EH registers in 64-bits.  */
      ehrd_size = i * (TARGET_SPE_ABI
                       && info_ptr->spe_64bit_regs_used != 0
                       ? UNITS_PER_SPE_WORD : UNITS_PER_WORD);
    }
  else
    ehrd_size = 0;

  /* Determine various sizes.  */
  info_ptr->reg_size     = reg_size;
  info_ptr->fixed_size   = RS6000_SAVE_AREA;
  info_ptr->vars_size    = RS6000_ALIGN (get_frame_size (), 8);
  info_ptr->parm_size    = RS6000_ALIGN (current_function_outgoing_args_size,
                                         TARGET_ALTIVEC ? 16 : 8);
  /* When the frame grows downward, pad vars_size so the whole
     fixed+vars+parm region ends on an ABI stack boundary.  */
  if (FRAME_GROWS_DOWNWARD)
    info_ptr->vars_size
      += RS6000_ALIGN (info_ptr->fixed_size + info_ptr->vars_size
                       + info_ptr->parm_size,
                       ABI_STACK_BOUNDARY / BITS_PER_UNIT)
         - (info_ptr->fixed_size + info_ptr->vars_size
            + info_ptr->parm_size);

  if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
    info_ptr->spe_gp_size = 8 * (32 - info_ptr->first_gp_reg_save);
  else
    info_ptr->spe_gp_size = 0;

  if (TARGET_ALTIVEC_ABI)
    info_ptr->vrsave_mask = compute_vrsave_mask ();
  else
    info_ptr->vrsave_mask = 0;

  if (TARGET_ALTIVEC_VRSAVE && info_ptr->vrsave_mask)
    info_ptr->vrsave_size  = 4;
  else
    info_ptr->vrsave_size  = 0;

  /* May adjust vrsave_size/vrsave_mask when save_world is usable.  */
  compute_save_world_info (info_ptr);

  /* Calculate the offsets.  All save-area offsets below are relative
     to the old stack pointer (negative = below it); see the frame
     diagrams in the comment preceding this function.  */
  switch (DEFAULT_ABI)
    {
    case ABI_NONE:
    default:
      gcc_unreachable ();

    case ABI_AIX:
    case ABI_DARWIN:
      info_ptr->fp_save_offset   = - info_ptr->fp_size;
      info_ptr->gp_save_offset   = info_ptr->fp_save_offset - info_ptr->gp_size;

      if (TARGET_ALTIVEC_ABI)
        {
          info_ptr->vrsave_save_offset
            = info_ptr->gp_save_offset - info_ptr->vrsave_size;

          /* Align stack so vector save area is on a quadword boundary.
             The padding goes above the vectors.  */
          if (info_ptr->altivec_size != 0)
            info_ptr->altivec_padding_size
              = info_ptr->vrsave_save_offset & 0xF;
          else
            info_ptr->altivec_padding_size = 0;

          info_ptr->altivec_save_offset
            = info_ptr->vrsave_save_offset
            - info_ptr->altivec_padding_size
            - info_ptr->altivec_size;
          gcc_assert (info_ptr->altivec_size == 0
                      || info_ptr->altivec_save_offset % 16 == 0);

          /* Adjust for AltiVec case.  */
          info_ptr->ehrd_offset = info_ptr->altivec_save_offset - ehrd_size;
        }
      else
        info_ptr->ehrd_offset      = info_ptr->gp_save_offset - ehrd_size;
      info_ptr->cr_save_offset   = reg_size; /* first word when 64-bit.  */
      info_ptr->lr_save_offset   = 2*reg_size;
      break;

    case ABI_V4:
      info_ptr->fp_save_offset   = - info_ptr->fp_size;
      info_ptr->gp_save_offset   = info_ptr->fp_save_offset - info_ptr->gp_size;
      info_ptr->cr_save_offset   = info_ptr->gp_save_offset - info_ptr->cr_size;

      if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
        {
          /* Align stack so SPE GPR save area is aligned on a
             double-word boundary.  */
          if (info_ptr->spe_gp_size != 0)
            info_ptr->spe_padding_size
              = 8 - (-info_ptr->cr_save_offset % 8);
          else
            info_ptr->spe_padding_size = 0;

          info_ptr->spe_gp_save_offset
            = info_ptr->cr_save_offset
            - info_ptr->spe_padding_size
            - info_ptr->spe_gp_size;

          /* Adjust for SPE case.  */
          info_ptr->ehrd_offset = info_ptr->spe_gp_save_offset;
        }
      else if (TARGET_ALTIVEC_ABI)
        {
          info_ptr->vrsave_save_offset
            = info_ptr->cr_save_offset - info_ptr->vrsave_size;

          /* Align stack so vector save area is on a quadword boundary.  */
          if (info_ptr->altivec_size != 0)
            info_ptr->altivec_padding_size
              = 16 - (-info_ptr->vrsave_save_offset % 16);
          else
            info_ptr->altivec_padding_size = 0;

          info_ptr->altivec_save_offset
            = info_ptr->vrsave_save_offset
            - info_ptr->altivec_padding_size
            - info_ptr->altivec_size;

          /* Adjust for AltiVec case.  */
          info_ptr->ehrd_offset = info_ptr->altivec_save_offset;
        }
      else
        info_ptr->ehrd_offset    = info_ptr->cr_save_offset;
      info_ptr->ehrd_offset      -= ehrd_size;
      info_ptr->lr_save_offset   = reg_size;
      break;
    }

  save_align = (TARGET_ALTIVEC_ABI || DEFAULT_ABI == ABI_DARWIN) ? 16 : 8;
  info_ptr->save_size    = RS6000_ALIGN (info_ptr->fp_size
                                         + info_ptr->gp_size
                                         + info_ptr->altivec_size
                                         + info_ptr->altivec_padding_size
                                         + info_ptr->spe_gp_size
                                         + info_ptr->spe_padding_size
                                         + ehrd_size
                                         + info_ptr->cr_size
                                         + info_ptr->vrsave_size,
                                         save_align);

  non_fixed_size         = (info_ptr->vars_size
                            + info_ptr->parm_size
                            + info_ptr->save_size);

  info_ptr->total_size = RS6000_ALIGN (non_fixed_size + info_ptr->fixed_size,
                                       ABI_STACK_BOUNDARY / BITS_PER_UNIT);

  /* Determine if we need to allocate any stack frame:

     For AIX we need to push the stack if a frame pointer is needed
     (because the stack might be dynamically adjusted), if we are
     debugging, if we make calls, or if the sum of fp_save, gp_save,
     and local variables are more than the space needed to save all
     non-volatile registers: 32-bit: 18*8 + 19*4 = 220 or 64-bit: 18*8
     + 18*8 = 288 (GPR13 reserved).

     For V.4 we don't have the stack cushion that AIX uses, but assume
     that the debugger can handle stackless frames.  */

  if (info_ptr->calls_p)
    info_ptr->push_p = 1;

  else if (DEFAULT_ABI == ABI_V4)
    info_ptr->push_p = non_fixed_size != 0;

  else if (frame_pointer_needed)
    info_ptr->push_p = 1;

  else if (TARGET_XCOFF && write_symbols != NO_DEBUG)
    info_ptr->push_p = 1;

  else
    info_ptr->push_p = non_fixed_size > (TARGET_32BIT ? 220 : 288);

  /* Zero offsets if we're not saving those registers.  */
  if (info_ptr->fp_size == 0)
    info_ptr->fp_save_offset = 0;

  if (info_ptr->gp_size == 0)
    info_ptr->gp_save_offset = 0;

  if (! TARGET_ALTIVEC_ABI || info_ptr->altivec_size == 0)
    info_ptr->altivec_save_offset = 0;

  if (! TARGET_ALTIVEC_ABI || info_ptr->vrsave_mask == 0)
    info_ptr->vrsave_save_offset = 0;

  if (! TARGET_SPE_ABI
      || info_ptr->spe_64bit_regs_used == 0
      || info_ptr->spe_gp_size == 0)
    info_ptr->spe_gp_save_offset = 0;

  if (! info_ptr->lr_save_p)
    info_ptr->lr_save_offset = 0;

  if (! info_ptr->cr_save_p)
    info_ptr->cr_save_offset = 0;

  return info_ptr;
}
13326
 
13327
/* Return true if the current function uses any GPRs in 64-bit SIMD
13328
   mode.  */
13329
 
13330
static bool
13331
spe_func_has_64bit_regs_p (void)
13332
{
13333
  rtx insns, insn;
13334
 
13335
  /* Functions that save and restore all the call-saved registers will
13336
     need to save/restore the registers in 64-bits.  */
13337
  if (current_function_calls_eh_return
13338
      || current_function_calls_setjmp
13339
      || current_function_has_nonlocal_goto)
13340
    return true;
13341
 
13342
  insns = get_insns ();
13343
 
13344
  for (insn = NEXT_INSN (insns); insn != NULL_RTX; insn = NEXT_INSN (insn))
13345
    {
13346
      if (INSN_P (insn))
13347
        {
13348
          rtx i;
13349
 
13350
          /* FIXME: This should be implemented with attributes...
13351
 
13352
                 (set_attr "spe64" "true")....then,
13353
                 if (get_spe64(insn)) return true;
13354
 
13355
             It's the only reliable way to do the stuff below.  */
13356
 
13357
          i = PATTERN (insn);
13358
          if (GET_CODE (i) == SET)
13359
            {
13360
              enum machine_mode mode = GET_MODE (SET_SRC (i));
13361
 
13362
              if (SPE_VECTOR_MODE (mode))
13363
                return true;
13364
              if (TARGET_E500_DOUBLE && mode == DFmode)
13365
                return true;
13366
            }
13367
        }
13368
    }
13369
 
13370
  return false;
13371
}
13372
 
13373
/* Dump the stack-frame layout INFO to stderr for debugging.  If INFO
   is NULL, first compute the current function's layout with
   rs6000_stack_info.  Only fields that differ from their trivial
   defaults are printed.  */
static void
debug_stack_info (rs6000_stack_t *info)
{
  const char *abi_string;

  if (! info)
    info = rs6000_stack_info ();

  fprintf (stderr, "\nStack information for function %s:\n",
           ((current_function_decl && DECL_NAME (current_function_decl))
            ? IDENTIFIER_POINTER (DECL_NAME (current_function_decl))
            : "<unknown>"));

  switch (info->abi)
    {
    default:             abi_string = "Unknown";        break;
    case ABI_NONE:       abi_string = "NONE";           break;
    case ABI_AIX:        abi_string = "AIX";            break;
    case ABI_DARWIN:     abi_string = "Darwin";         break;
    case ABI_V4:         abi_string = "V.4";            break;
    }

  fprintf (stderr, "\tABI                 = %5s\n", abi_string);

  if (TARGET_ALTIVEC_ABI)
    fprintf (stderr, "\tALTIVEC ABI extensions enabled.\n");

  if (TARGET_SPE_ABI)
    fprintf (stderr, "\tSPE ABI extensions enabled.\n");

  /* 32 / 64 are the "no register saved" sentinels for GPRs/FPRs.  */
  if (info->first_gp_reg_save != 32)
    fprintf (stderr, "\tfirst_gp_reg_save   = %5d\n", info->first_gp_reg_save);

  if (info->first_fp_reg_save != 64)
    fprintf (stderr, "\tfirst_fp_reg_save   = %5d\n", info->first_fp_reg_save);

  if (info->first_altivec_reg_save <= LAST_ALTIVEC_REGNO)
    fprintf (stderr, "\tfirst_altivec_reg_save = %5d\n",
             info->first_altivec_reg_save);

  if (info->lr_save_p)
    fprintf (stderr, "\tlr_save_p           = %5d\n", info->lr_save_p);

  if (info->cr_save_p)
    fprintf (stderr, "\tcr_save_p           = %5d\n", info->cr_save_p);

  if (info->vrsave_mask)
    fprintf (stderr, "\tvrsave_mask         = 0x%x\n", info->vrsave_mask);

  if (info->push_p)
    fprintf (stderr, "\tpush_p              = %5d\n", info->push_p);

  if (info->calls_p)
    fprintf (stderr, "\tcalls_p             = %5d\n", info->calls_p);

  if (info->gp_save_offset)
    fprintf (stderr, "\tgp_save_offset      = %5d\n", info->gp_save_offset);

  if (info->fp_save_offset)
    fprintf (stderr, "\tfp_save_offset      = %5d\n", info->fp_save_offset);

  if (info->altivec_save_offset)
    fprintf (stderr, "\taltivec_save_offset = %5d\n",
             info->altivec_save_offset);

  if (info->spe_gp_save_offset)
    fprintf (stderr, "\tspe_gp_save_offset  = %5d\n",
             info->spe_gp_save_offset);

  if (info->vrsave_save_offset)
    fprintf (stderr, "\tvrsave_save_offset  = %5d\n",
             info->vrsave_save_offset);

  if (info->lr_save_offset)
    fprintf (stderr, "\tlr_save_offset      = %5d\n", info->lr_save_offset);

  if (info->cr_save_offset)
    fprintf (stderr, "\tcr_save_offset      = %5d\n", info->cr_save_offset);

  if (info->varargs_save_offset)
    fprintf (stderr, "\tvarargs_save_offset = %5d\n", info->varargs_save_offset);

  if (info->total_size)
    fprintf (stderr, "\ttotal_size          = "HOST_WIDE_INT_PRINT_DEC"\n",
             info->total_size);

  if (info->vars_size)
    fprintf (stderr, "\tvars_size           = "HOST_WIDE_INT_PRINT_DEC"\n",
             info->vars_size);

  if (info->parm_size)
    fprintf (stderr, "\tparm_size           = %5d\n", info->parm_size);

  if (info->fixed_size)
    fprintf (stderr, "\tfixed_size          = %5d\n", info->fixed_size);

  if (info->gp_size)
    fprintf (stderr, "\tgp_size             = %5d\n", info->gp_size);

  if (info->spe_gp_size)
    fprintf (stderr, "\tspe_gp_size         = %5d\n", info->spe_gp_size);

  if (info->fp_size)
    fprintf (stderr, "\tfp_size             = %5d\n", info->fp_size);

  if (info->altivec_size)
    fprintf (stderr, "\taltivec_size        = %5d\n", info->altivec_size);

  if (info->vrsave_size)
    fprintf (stderr, "\tvrsave_size         = %5d\n", info->vrsave_size);

  if (info->altivec_padding_size)
    fprintf (stderr, "\taltivec_padding_size= %5d\n",
             info->altivec_padding_size);

  if (info->spe_padding_size)
    fprintf (stderr, "\tspe_padding_size    = %5d\n",
             info->spe_padding_size);

  if (info->cr_size)
    fprintf (stderr, "\tcr_size             = %5d\n", info->cr_size);

  if (info->save_size)
    fprintf (stderr, "\tsave_size           = %5d\n", info->save_size);

  if (info->reg_size != 4)
    fprintf (stderr, "\treg_size            = %5d\n", info->reg_size);

  fprintf (stderr, "\n");
}
13503
 
13504
rtx
13505
rs6000_return_addr (int count, rtx frame)
13506
{
13507
  /* Currently we don't optimize very well between prolog and body
13508
     code and for PIC code the code can be actually quite bad, so
13509
     don't try to be too clever here.  */
13510
  if (count != 0 || (DEFAULT_ABI != ABI_AIX && flag_pic))
13511
    {
13512
      cfun->machine->ra_needs_full_frame = 1;
13513
 
13514
      return
13515
        gen_rtx_MEM
13516
          (Pmode,
13517
           memory_address
13518
           (Pmode,
13519
            plus_constant (copy_to_reg
13520
                           (gen_rtx_MEM (Pmode,
13521
                                         memory_address (Pmode, frame))),
13522
                           RETURN_ADDRESS_OFFSET)));
13523
    }
13524
 
13525
  cfun->machine->ra_need_lr = 1;
13526
  return get_hard_reg_initial_val (Pmode, LINK_REGISTER_REGNUM);
13527
}
13528
 
13529
/* Say whether a function is a candidate for sibcall handling or not.
13530
   We do not allow indirect calls to be optimized into sibling calls.
13531
   Also, we can't do it if there are any vector parameters; there's
13532
   nowhere to put the VRsave code so it works; note that functions with
13533
   vector parameters are required to have a prototype, so the argument
13534
   type info must be available here.  (The tail recursion case can work
13535
   with vector parameters, but there's no way to distinguish here.) */
13536
static bool
13537
rs6000_function_ok_for_sibcall (tree decl, tree exp ATTRIBUTE_UNUSED)
13538
{
13539
  tree type;
13540
  if (decl)
13541
    {
13542
      if (TARGET_ALTIVEC_VRSAVE)
13543
        {
13544
          for (type = TYPE_ARG_TYPES (TREE_TYPE (decl));
13545
               type; type = TREE_CHAIN (type))
13546
            {
13547
              if (TREE_CODE (TREE_VALUE (type)) == VECTOR_TYPE)
13548
                return false;
13549
            }
13550
        }
13551
      if (DEFAULT_ABI == ABI_DARWIN
13552
          || ((*targetm.binds_local_p) (decl)
13553
              && (DEFAULT_ABI != ABI_AIX || !DECL_EXTERNAL (decl))))
13554
        {
13555
          tree attr_list = TYPE_ATTRIBUTES (TREE_TYPE (decl));
13556
 
13557
          if (!lookup_attribute ("longcall", attr_list)
13558
              || lookup_attribute ("shortcall", attr_list))
13559
            return true;
13560
        }
13561
    }
13562
  return false;
13563
}
13564
 
13565
/* NULL if INSN insn is valid within a low-overhead loop.
13566
   Otherwise return why doloop cannot be applied.
13567
   PowerPC uses the COUNT register for branch on table instructions.  */
13568
 
13569
static const char *
13570
rs6000_invalid_within_doloop (rtx insn)
13571
{
13572
  if (CALL_P (insn))
13573
    return "Function call in the loop.";
13574
 
13575
  if (JUMP_P (insn)
13576
      && (GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC
13577
          || GET_CODE (PATTERN (insn)) == ADDR_VEC))
13578
    return "Computed branch in the loop.";
13579
 
13580
  return NULL;
13581
}
13582
 
13583
static int
13584
rs6000_ra_ever_killed (void)
13585
{
13586
  rtx top;
13587
  rtx reg;
13588
  rtx insn;
13589
 
13590
  if (current_function_is_thunk)
13591
    return 0;
13592
 
13593
  /* regs_ever_live has LR marked as used if any sibcalls are present,
13594
     but this should not force saving and restoring in the
13595
     pro/epilogue.  Likewise, reg_set_between_p thinks a sibcall
13596
     clobbers LR, so that is inappropriate.  */
13597
 
13598
  /* Also, the prologue can generate a store into LR that
13599
     doesn't really count, like this:
13600
 
13601
        move LR->R0
13602
        bcl to set PIC register
13603
        move LR->R31
13604
        move R0->LR
13605
 
13606
     When we're called from the epilogue, we need to avoid counting
13607
     this as a store.  */
13608
 
13609
  push_topmost_sequence ();
13610
  top = get_insns ();
13611
  pop_topmost_sequence ();
13612
  reg = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
13613
 
13614
  for (insn = NEXT_INSN (top); insn != NULL_RTX; insn = NEXT_INSN (insn))
13615
    {
13616
      if (INSN_P (insn))
13617
        {
13618
          if (CALL_P (insn))
13619
            {
13620
              if (!SIBLING_CALL_P (insn))
13621
                return 1;
13622
            }
13623
          else if (find_regno_note (insn, REG_INC, LINK_REGISTER_REGNUM))
13624
            return 1;
13625
          else if (set_of (reg, insn) != NULL_RTX
13626
                   && !prologue_epilogue_contains (insn))
13627
            return 1;
13628
        }
13629
    }
13630
  return 0;
13631
}
13632
 
13633
/* Add a REG_MAYBE_DEAD note to the insn.  */
13634
static void
13635
rs6000_maybe_dead (rtx insn)
13636
{
13637
  REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD,
13638
                                        const0_rtx,
13639
                                        REG_NOTES (insn));
13640
}
13641
 
13642
/* Emit instructions needed to load the TOC register.
   This is only needed when TARGET_TOC, TARGET_MINIMAL_TOC, and there is
   a constant pool; or for SVR4 -fpic.

   FROMPROLOG is nonzero when emitting from the prologue: then we must
   use hard registers (no new pseudos are allowed) and every emitted
   insn is tagged REG_MAYBE_DEAD so flow may delete the sequence if the
   TOC register turns out to be unused.  */

void
rs6000_emit_load_toc_table (int fromprolog)
{
  rtx dest, insn;
  /* DEST is the PIC/TOC base register being initialized.  */
  dest = gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM);

  /* Case 1: 32-bit ELF with a secure PLT — compute the GOT/TOC address
     relative to a local label loaded into (a copy of) LR.  */
  if (TARGET_ELF && TARGET_SECURE_PLT && DEFAULT_ABI != ABI_AIX && flag_pic)
    {
      char buf[30];
      rtx lab, tmp1, tmp2, got, tempLR;

      ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
      lab = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
      /* -fPIC uses the TOC label; -fpic uses _GLOBAL_OFFSET_TABLE_.  */
      if (flag_pic == 2)
        got = gen_rtx_SYMBOL_REF (Pmode, toc_label_name);
      else
        got = rs6000_got_sym ();
      tmp1 = tmp2 = dest;
      if (!fromprolog)
        {
          tmp1 = gen_reg_rtx (Pmode);
          tmp2 = gen_reg_rtx (Pmode);
        }
      tempLR = (fromprolog
                ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
                : gen_reg_rtx (Pmode));
      insn = emit_insn (gen_load_toc_v4_PIC_1 (tempLR, lab));
      if (fromprolog)
        rs6000_maybe_dead (insn);
      insn = emit_move_insn (tmp1, tempLR);
      if (fromprolog)
        rs6000_maybe_dead (insn);
      insn = emit_insn (gen_load_toc_v4_PIC_3b (tmp2, tmp1, got, lab));
      if (fromprolog)
        rs6000_maybe_dead (insn);
      insn = emit_insn (gen_load_toc_v4_PIC_3c (dest, tmp2, got, lab));
      if (fromprolog)
        rs6000_maybe_dead (insn);
    }
  /* Case 2: SVR4 small-model PIC (-fpic) — a single pattern loads the
     GOT pointer through LR.  */
  else if (TARGET_ELF && DEFAULT_ABI == ABI_V4 && flag_pic == 1)
    {
      rtx tempLR = (fromprolog
                    ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
                    : gen_reg_rtx (Pmode));

      insn = emit_insn (gen_load_toc_v4_pic_si (tempLR));
      if (fromprolog)
        rs6000_maybe_dead (insn);
      insn = emit_move_insn (dest, tempLR);
      if (fromprolog)
        rs6000_maybe_dead (insn);
    }
  /* Case 3: SVR4 large-model PIC (-fPIC) — compute the TOC address as
     current-address + (TOC label - here) using r0 as scratch.  */
  else if (TARGET_ELF && DEFAULT_ABI != ABI_AIX && flag_pic == 2)
    {
      char buf[30];
      rtx tempLR = (fromprolog
                    ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
                    : gen_reg_rtx (Pmode));
      rtx temp0 = (fromprolog
                   ? gen_rtx_REG (Pmode, 0)
                   : gen_reg_rtx (Pmode));

      if (fromprolog)
        {
          rtx symF, symL;

          /* LCF marks the address-taking point, LCL the TOC offset word.  */
          ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
          symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));

          ASM_GENERATE_INTERNAL_LABEL (buf, "LCL", rs6000_pic_labelno);
          symL = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));

          rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_1 (tempLR,
                                                               symF)));
          rs6000_maybe_dead (emit_move_insn (dest, tempLR));
          rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_2 (temp0, dest,
                                                               symL,
                                                               symF)));
        }
      else
        {
          rtx tocsym;

          /* Outside the prologue the offset is loaded through memory
             at the TOC label instead of a PIC_2-style difference.  */
          tocsym = gen_rtx_SYMBOL_REF (Pmode, toc_label_name);
          emit_insn (gen_load_toc_v4_PIC_1b (tempLR, tocsym));
          emit_move_insn (dest, tempLR);
          emit_move_insn (temp0, gen_rtx_MEM (Pmode, dest));
        }
      /* dest = base + offset gives the final TOC pointer.  */
      insn = emit_insn (gen_addsi3 (dest, temp0, dest));
      if (fromprolog)
        rs6000_maybe_dead (insn);
    }
  /* Case 4: non-PIC ELF32 with a minimal TOC — materialize the address
     of the LCTOC1 label with a high/low pair.  */
  else if (TARGET_ELF && !TARGET_AIX && flag_pic == 0 && TARGET_MINIMAL_TOC)
    {
      /* This is for AIX code running in non-PIC ELF32.  */
      char buf[30];
      rtx realsym;
      ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
      realsym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));

      insn = emit_insn (gen_elf_high (dest, realsym));
      if (fromprolog)
        rs6000_maybe_dead (insn);
      insn = emit_insn (gen_elf_low (dest, dest, realsym));
      if (fromprolog)
        rs6000_maybe_dead (insn);
    }
  /* Case 5: AIX — reload the TOC pointer from its stack slot.  */
  else
    {
      gcc_assert (DEFAULT_ABI == ABI_AIX);

      if (TARGET_32BIT)
        insn = emit_insn (gen_load_toc_aix_si (dest));
      else
        insn = emit_insn (gen_load_toc_aix_di (dest));
      if (fromprolog)
        rs6000_maybe_dead (insn);
    }
}
13765
 
13766
/* Emit instructions to restore the link register after determining where
   its value has been stored.

   SOURCE holds the value the link register must end up with; SCRATCH is
   a register we may clobber.  If the prologue saved LR on the stack, we
   store SOURCE into that save slot (the epilogue's reload then installs
   it); otherwise we move SOURCE directly into LR.  */

void
rs6000_emit_eh_reg_restore (rtx source, rtx scratch)
{
  rs6000_stack_t *info = rs6000_stack_info ();
  rtx operands[2];

  operands[0] = source;
  operands[1] = scratch;

  if (info->lr_save_p)
    {
      rtx frame_rtx = stack_pointer_rtx;
      HOST_WIDE_INT sp_offset = 0;
      rtx tmp;

      /* If sp does not point at a fixed offset from the save slot
         (frame pointer in use, alloca, or the frame is too large for a
         16-bit displacement), chase the back chain into SCRATCH and
         address the slot relative to the caller's frame instead.  */
      if (frame_pointer_needed
          || current_function_calls_alloca
          || info->total_size > 32767)
        {
          tmp = gen_frame_mem (Pmode, frame_rtx);
          emit_move_insn (operands[1], tmp);
          frame_rtx = operands[1];
        }
      else if (info->push_p)
        sp_offset = info->total_size;

      /* Store SOURCE into LR's stack save slot.  */
      tmp = plus_constant (frame_rtx, info->lr_save_offset + sp_offset);
      tmp = gen_frame_mem (Pmode, tmp);
      emit_move_insn (tmp, operands[0]);
    }
  else
    /* LR was never saved; set it directly.  */
    emit_move_insn (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM), operands[0]);
}
13802
 
13803
static GTY(()) int set = -1;
13804
 
13805
int
13806
get_TOC_alias_set (void)
13807
{
13808
  if (set == -1)
13809
    set = new_alias_set ();
13810
  return set;
13811
}
13812
 
13813
/* This returns nonzero if the current function uses the TOC.  This is
   determined by the presence of (use (unspec ... UNSPEC_TOC)), which
   is generated by the ABI_V4 load_toc_* patterns.  */
#if TARGET_ELF
static int
uses_TOC (void)
{
  rtx insn;

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      rtx body;
      int i;

      if (!INSN_P (insn))
        continue;

      /* The marker only ever appears inside a PARALLEL.  */
      body = PATTERN (insn);
      if (GET_CODE (body) != PARALLEL)
        continue;

      for (i = 0; i < XVECLEN (body, 0); i++)
        {
          rtx elt = XVECEXP (body, 0, i);

          if (GET_CODE (elt) == USE
              && GET_CODE (XEXP (elt, 0)) == UNSPEC
              && XINT (XEXP (elt, 0), 1) == UNSPEC_TOC)
            return 1;
        }
    }

  return 0;
}
#endif
13844
 
13845
rtx
13846
create_TOC_reference (rtx symbol)
13847
{
13848
  if (no_new_pseudos)
13849
    regs_ever_live[TOC_REGISTER] = 1;
13850
  return gen_rtx_PLUS (Pmode,
13851
           gen_rtx_REG (Pmode, TOC_REGISTER),
13852
             gen_rtx_CONST (Pmode,
13853
               gen_rtx_MINUS (Pmode, symbol,
13854
                 gen_rtx_SYMBOL_REF (Pmode, toc_label_name))));
13855
}
13856
 
13857
/* If _Unwind_* has been called from within the same module,
   toc register is not guaranteed to be saved to 40(1) on function
   entry.  Save it there in that case.

   Emits RTL that inspects the instruction at the caller's return
   address: if it is not the canonical TOC-restore instruction, r2 is
   stored into the caller's TOC save slot.  */

void
rs6000_aix_emit_builtin_unwind_init (void)
{
  rtx mem;
  rtx stack_top = gen_reg_rtx (Pmode);       /* caller's stack pointer */
  rtx opcode_addr = gen_reg_rtx (Pmode);     /* caller's return address */
  rtx opcode = gen_reg_rtx (SImode);         /* insn word at that address */
  rtx tocompare = gen_reg_rtx (SImode);      /* expected restore opcode */
  rtx no_toc_save_needed = gen_label_rtx ();

  /* Follow the back chain: stack_top = *frame_pointer.  */
  mem = gen_frame_mem (Pmode, hard_frame_pointer_rtx);
  emit_move_insn (stack_top, mem);

  /* Fetch the instruction at the saved return address, which lives two
     words into the caller's frame.  */
  mem = gen_frame_mem (Pmode,
                       gen_rtx_PLUS (Pmode, stack_top,
                                     GEN_INT (2 * GET_MODE_SIZE (Pmode))));
  emit_move_insn (opcode_addr, mem);
  emit_move_insn (opcode, gen_rtx_MEM (SImode, opcode_addr));
  /* 0x80410014 / 0xE8410028 are the instruction encodings that reload
     r2 from its stack slot (presumably lwz r2,20(r1) for 32-bit and
     ld r2,40(r1) for 64-bit -- verify against the AIX/ELF64 ABI).  */
  emit_move_insn (tocompare, gen_int_mode (TARGET_32BIT ? 0x80410014
                                           : 0xE8410028, SImode));

  /* If the caller already restores the TOC, nothing to do.  */
  do_compare_rtx_and_jump (opcode, tocompare, EQ, 1,
                           SImode, NULL_RTX, NULL_RTX,
                           no_toc_save_needed);

  /* Otherwise store r2 into its save slot, five words into the frame.  */
  mem = gen_frame_mem (Pmode,
                       gen_rtx_PLUS (Pmode, stack_top,
                                     GEN_INT (5 * GET_MODE_SIZE (Pmode))));
  emit_move_insn (mem, gen_rtx_REG (Pmode, 2));
  emit_label (no_toc_save_needed);
}
13892
 
13893
/* This ties together stack memory (MEM with an alias set of frame_alias_set)
13894
   and the change to the stack pointer.  */
13895
 
13896
static void
13897
rs6000_emit_stack_tie (void)
13898
{
13899
  rtx mem = gen_frame_mem (BLKmode,
13900
                           gen_rtx_REG (Pmode, STACK_POINTER_REGNUM));
13901
 
13902
  emit_insn (gen_stack_tie (mem));
13903
}
13904
 
13905
/* Emit the correct code for allocating stack space, as insns.
   If COPY_R12, make sure a copy of the old frame is left in r12.
   The generated code may use hard register 0 as a temporary.

   SIZE is the number of bytes to allocate.  The final insn is marked
   RTX_FRAME_RELATED_P with a REG_FRAME_RELATED_EXPR note describing
   the stack adjustment for the unwinder.  */

static void
rs6000_emit_allocate_stack (HOST_WIDE_INT size, int copy_r12)
{
  rtx insn;
  rtx stack_reg = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
  rtx tmp_reg = gen_rtx_REG (Pmode, 0);
  rtx todec = gen_int_mode (-size, Pmode);

  /* If -size does not survive the round trip through Pmode, the frame
     is too large to represent; trap rather than emit wrong code.  */
  if (INTVAL (todec) != -size)
    {
      warning (0, "stack frame too large");
      emit_insn (gen_trap ());
      return;
    }

  /* -fstack-limit support: trap when sp would drop below the limit.  */
  if (current_function_limit_stack)
    {
      /* Limit in a register (not r0/r1): tmp = limit + size, then
         trap if sp < tmp (unsigned).  */
      if (REG_P (stack_limit_rtx)
          && REGNO (stack_limit_rtx) > 1
          && REGNO (stack_limit_rtx) <= 31)
        {
          emit_insn (TARGET_32BIT
                     ? gen_addsi3 (tmp_reg,
                                   stack_limit_rtx,
                                   GEN_INT (size))
                     : gen_adddi3 (tmp_reg,
                                   stack_limit_rtx,
                                   GEN_INT (size)));

          emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
                                    const0_rtx));
        }
      /* Limit as a symbol (32-bit SVR4 only): materialize limit + size
         with a high/low pair into r0, then trap as above.  */
      else if (GET_CODE (stack_limit_rtx) == SYMBOL_REF
               && TARGET_32BIT
               && DEFAULT_ABI == ABI_V4)
        {
          rtx toload = gen_rtx_CONST (VOIDmode,
                                      gen_rtx_PLUS (Pmode,
                                                    stack_limit_rtx,
                                                    GEN_INT (size)));

          emit_insn (gen_elf_high (tmp_reg, toload));
          emit_insn (gen_elf_low (tmp_reg, tmp_reg, toload));
          emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
                                    const0_rtx));
        }
      else
        warning (0, "stack limit expression is not supported");
    }

  /* Keep the old sp in r12 when requested, or always when we cannot
     use a store-with-update (the back-chain store below needs it).  */
  if (copy_r12 || ! TARGET_UPDATE)
    emit_move_insn (gen_rtx_REG (Pmode, 12), stack_reg);

  if (TARGET_UPDATE)
    {
      /* Large frames: the decrement does not fit in a 16-bit immediate,
         so load it into r0 first (split immediately).  */
      if (size > 32767)
        {
          /* Need a note here so that try_split doesn't get confused.  */
          if (get_last_insn () == NULL_RTX)
            emit_note (NOTE_INSN_DELETED);
          insn = emit_move_insn (tmp_reg, todec);
          try_split (PATTERN (insn), insn, 0);
          todec = tmp_reg;
        }

      /* stwu/stdu: decrement sp and store the back chain atomically.  */
      insn = emit_insn (TARGET_32BIT
                        ? gen_movsi_update (stack_reg, stack_reg,
                                            todec, stack_reg)
                        : gen_movdi_di_update (stack_reg, stack_reg,
                                            todec, stack_reg));
    }
  else
    {
      /* No update forms: adjust sp, then store the saved old sp (r12)
         as the back chain at the new sp.  */
      insn = emit_insn (TARGET_32BIT
                        ? gen_addsi3 (stack_reg, stack_reg, todec)
                        : gen_adddi3 (stack_reg, stack_reg, todec));
      emit_move_insn (gen_rtx_MEM (Pmode, stack_reg),
                      gen_rtx_REG (Pmode, 12));
    }

  /* Describe the adjustment for the DWARF unwinder.  */
  RTX_FRAME_RELATED_P (insn) = 1;
  REG_NOTES (insn) =
    gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
                       gen_rtx_SET (VOIDmode, stack_reg,
                                    gen_rtx_PLUS (Pmode, stack_reg,
                                                  GEN_INT (-size))),
                       REG_NOTES (insn));
}
13997
 
13998
/* Add to 'insn' a note which is PATTERN (INSN) but with REG replaced
13999
   with (plus:P (reg 1) VAL), and with REG2 replaced with RREG if REG2
14000
   is not NULL.  It would be nice if dwarf2out_frame_debug_expr could
14001
   deduce these equivalences by itself so it wasn't necessary to hold
14002
   its hand so much.  */
14003
 
14004
static void
14005
rs6000_frame_related (rtx insn, rtx reg, HOST_WIDE_INT val,
14006
                      rtx reg2, rtx rreg)
14007
{
14008
  rtx real, temp;
14009
 
14010
  /* copy_rtx will not make unique copies of registers, so we need to
14011
     ensure we don't have unwanted sharing here.  */
14012
  if (reg == reg2)
14013
    reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));
14014
 
14015
  if (reg == rreg)
14016
    reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));
14017
 
14018
  real = copy_rtx (PATTERN (insn));
14019
 
14020
  if (reg2 != NULL_RTX)
14021
    real = replace_rtx (real, reg2, rreg);
14022
 
14023
  real = replace_rtx (real, reg,
14024
                      gen_rtx_PLUS (Pmode, gen_rtx_REG (Pmode,
14025
                                                        STACK_POINTER_REGNUM),
14026
                                    GEN_INT (val)));
14027
 
14028
  /* We expect that 'real' is either a SET or a PARALLEL containing
14029
     SETs (and possibly other stuff).  In a PARALLEL, all the SETs
14030
     are important so they all have to be marked RTX_FRAME_RELATED_P.  */
14031
 
14032
  if (GET_CODE (real) == SET)
14033
    {
14034
      rtx set = real;
14035
 
14036
      temp = simplify_rtx (SET_SRC (set));
14037
      if (temp)
14038
        SET_SRC (set) = temp;
14039
      temp = simplify_rtx (SET_DEST (set));
14040
      if (temp)
14041
        SET_DEST (set) = temp;
14042
      if (GET_CODE (SET_DEST (set)) == MEM)
14043
        {
14044
          temp = simplify_rtx (XEXP (SET_DEST (set), 0));
14045
          if (temp)
14046
            XEXP (SET_DEST (set), 0) = temp;
14047
        }
14048
    }
14049
  else
14050
    {
14051
      int i;
14052
 
14053
      gcc_assert (GET_CODE (real) == PARALLEL);
14054
      for (i = 0; i < XVECLEN (real, 0); i++)
14055
        if (GET_CODE (XVECEXP (real, 0, i)) == SET)
14056
          {
14057
            rtx set = XVECEXP (real, 0, i);
14058
 
14059
            temp = simplify_rtx (SET_SRC (set));
14060
            if (temp)
14061
              SET_SRC (set) = temp;
14062
            temp = simplify_rtx (SET_DEST (set));
14063
            if (temp)
14064
              SET_DEST (set) = temp;
14065
            if (GET_CODE (SET_DEST (set)) == MEM)
14066
              {
14067
                temp = simplify_rtx (XEXP (SET_DEST (set), 0));
14068
                if (temp)
14069
                  XEXP (SET_DEST (set), 0) = temp;
14070
              }
14071
            RTX_FRAME_RELATED_P (set) = 1;
14072
          }
14073
    }
14074
 
14075
  if (TARGET_SPE)
14076
    real = spe_synthesize_frame_save (real);
14077
 
14078
  RTX_FRAME_RELATED_P (insn) = 1;
14079
  REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
14080
                                        real,
14081
                                        REG_NOTES (insn));
14082
}
14083
 
14084
/* Given an SPE frame note, return a PARALLEL of SETs with the
   original note, plus a synthetic register save.

   REAL is the candidate frame-related expression; anything that is
   not a V2SImode register store is returned unchanged.  */

static rtx
spe_synthesize_frame_save (rtx real)
{
  rtx synth, offset, reg, real2;

  /* Only 64-bit (V2SImode) register saves need the treatment.  */
  if (GET_CODE (real) != SET
      || GET_MODE (SET_SRC (real)) != V2SImode)
    return real;

  /* For the SPE, registers saved in 64-bits, get a PARALLEL for their
     frame related note.  The parallel contains a set of the register
     being saved, and another set to a synthetic register (n+1200).
     This is so we can differentiate between 64-bit and 32-bit saves.
     Words cannot describe this nastiness.  */

  gcc_assert (GET_CODE (SET_DEST (real)) == MEM
              && GET_CODE (XEXP (SET_DEST (real), 0)) == PLUS
              && GET_CODE (SET_SRC (real)) == REG);

  /* Transform:
       (set (mem (plus (reg x) (const y)))
            (reg z))
     into:
       (set (mem (plus (reg x) (const y+4)))
            (reg z+1200))
  */

  /* REAL2: the same store, narrowed to SImode.  */
  real2 = copy_rtx (real);
  PUT_MODE (SET_DEST (real2), SImode);
  reg = SET_SRC (real2);
  real2 = replace_rtx (real2, reg, gen_rtx_REG (SImode, REGNO (reg)));
  synth = copy_rtx (real2);

  /* On big-endian, the low word sits 4 bytes in; bump REAL2's offset.  */
  if (BYTES_BIG_ENDIAN)
    {
      offset = XEXP (XEXP (SET_DEST (real2), 0), 1);
      real2 = replace_rtx (real2, offset, GEN_INT (INTVAL (offset) + 4));
    }

  reg = SET_SRC (synth);

  /* SYNTH: the save of the synthetic high-part register (n+1200).  */
  synth = replace_rtx (synth, reg,
                       gen_rtx_REG (SImode, REGNO (reg) + 1200));

  offset = XEXP (XEXP (SET_DEST (synth), 0), 1);
  synth = replace_rtx (synth, offset,
                       GEN_INT (INTVAL (offset)
                                + (BYTES_BIG_ENDIAN ? 0 : 4)));

  /* Both halves are frame related; order them by memory offset.  */
  RTX_FRAME_RELATED_P (synth) = 1;
  RTX_FRAME_RELATED_P (real2) = 1;
  if (BYTES_BIG_ENDIAN)
    real = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, synth, real2));
  else
    real = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, real2, synth));

  return real;
}
14145
 
14146
/* Returns an insn that has a vrsave set operation with the
14147
   appropriate CLOBBERs.  */
14148
 
14149
static rtx
14150
generate_set_vrsave (rtx reg, rs6000_stack_t *info, int epiloguep)
14151
{
14152
  int nclobs, i;
14153
  rtx insn, clobs[TOTAL_ALTIVEC_REGS + 1];
14154
  rtx vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
14155
 
14156
  clobs[0]
14157
    = gen_rtx_SET (VOIDmode,
14158
                   vrsave,
14159
                   gen_rtx_UNSPEC_VOLATILE (SImode,
14160
                                            gen_rtvec (2, reg, vrsave),
14161
                                            UNSPECV_SET_VRSAVE));
14162
 
14163
  nclobs = 1;
14164
 
14165
  /* We need to clobber the registers in the mask so the scheduler
14166
     does not move sets to VRSAVE before sets of AltiVec registers.
14167
 
14168
     However, if the function receives nonlocal gotos, reload will set
14169
     all call saved registers live.  We will end up with:
14170
 
14171
        (set (reg 999) (mem))
14172
        (parallel [ (set (reg vrsave) (unspec blah))
14173
                    (clobber (reg 999))])
14174
 
14175
     The clobber will cause the store into reg 999 to be dead, and
14176
     flow will attempt to delete an epilogue insn.  In this case, we
14177
     need an unspec use/set of the register.  */
14178
 
14179
  for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
14180
    if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
14181
      {
14182
        if (!epiloguep || call_used_regs [i])
14183
          clobs[nclobs++] = gen_rtx_CLOBBER (VOIDmode,
14184
                                             gen_rtx_REG (V4SImode, i));
14185
        else
14186
          {
14187
            rtx reg = gen_rtx_REG (V4SImode, i);
14188
 
14189
            clobs[nclobs++]
14190
              = gen_rtx_SET (VOIDmode,
14191
                             reg,
14192
                             gen_rtx_UNSPEC (V4SImode,
14193
                                             gen_rtvec (1, reg), 27));
14194
          }
14195
      }
14196
 
14197
  insn = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nclobs));
14198
 
14199
  for (i = 0; i < nclobs; ++i)
14200
    XVECEXP (insn, 0, i) = clobs[i];
14201
 
14202
  return insn;
14203
}
14204
 
14205
/* Save a register into the frame, and emit RTX_FRAME_RELATED_P notes.
   Save REGNO into [FRAME_REG + OFFSET] in mode MODE.

   FRAME_PTR and TOTAL_SIZE are forwarded to rs6000_frame_related so
   the note can express the save address relative to the stack
   pointer.  */

static void
emit_frame_save (rtx frame_reg, rtx frame_ptr, enum machine_mode mode,
                 unsigned int regno, int offset, HOST_WIDE_INT total_size)
{
  rtx reg, offset_rtx, insn, mem, addr, int_rtx;
  rtx replacea, replaceb;

  int_rtx = GEN_INT (offset);

  /* Some cases that need register indexed addressing.  */
  if ((TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
      || (TARGET_E500_DOUBLE && mode == DFmode)
      || (TARGET_SPE_ABI
          && SPE_VECTOR_MODE (mode)
          && !SPE_CONST_OFFSET_OK (offset)))
    {
      /* Whomever calls us must make sure r11 is available in the
         flow path of instructions in the prologue.  */
      offset_rtx = gen_rtx_REG (Pmode, 11);
      emit_move_insn (offset_rtx, int_rtx);

      /* Tell rs6000_frame_related to substitute the constant offset
         back in place of r11 in the frame note.  */
      replacea = offset_rtx;
      replaceb = int_rtx;
    }
  else
    {
      offset_rtx = int_rtx;
      replacea = NULL_RTX;
      replaceb = NULL_RTX;
    }

  /* Emit the store: mem[frame_reg + offset] = reg.  */
  reg = gen_rtx_REG (mode, regno);
  addr = gen_rtx_PLUS (Pmode, frame_reg, offset_rtx);
  mem = gen_frame_mem (mode, addr);

  insn = emit_move_insn (mem, reg);

  rs6000_frame_related (insn, frame_ptr, total_size, replacea, replaceb);
}
14247
 
14248
/* Emit an offset memory reference suitable for a frame store, while
14249
   converting to a valid addressing mode.  */
14250
 
14251
static rtx
14252
gen_frame_mem_offset (enum machine_mode mode, rtx reg, int offset)
14253
{
14254
  rtx int_rtx, offset_rtx;
14255
 
14256
  int_rtx = GEN_INT (offset);
14257
 
14258
  if ((TARGET_SPE_ABI && SPE_VECTOR_MODE (mode))
14259
      || (TARGET_E500_DOUBLE && mode == DFmode))
14260
    {
14261
      offset_rtx = gen_rtx_REG (Pmode, FIXED_SCRATCH);
14262
      emit_move_insn (offset_rtx, int_rtx);
14263
    }
14264
  else
14265
    offset_rtx = int_rtx;
14266
 
14267
  return gen_frame_mem (mode, gen_rtx_PLUS (Pmode, reg, offset_rtx));
14268
}
14269
 
14270
/* Look for user-defined global regs.  We should not save and restore these,
14271
   and cannot use stmw/lmw if there are any in its range.  */
14272
 
14273
static bool
14274
no_global_regs_above (int first_greg)
14275
{
14276
  int i;
14277
  for (i = 0; i < 32 - first_greg; i++)
14278
    if (global_regs[first_greg + i])
14279
      return false;
14280
  return true;
14281
}
14282
 
14283
/* Default to no fix-and-continue support unless the target header
   overrides it; when nonzero, rs6000_emit_prologue emits five leading
   nops so gdb can redirect the function (see the comment there).  */
#ifndef TARGET_FIX_AND_CONTINUE
#define TARGET_FIX_AND_CONTINUE 0
#endif
14286
 
14287
/* Emit function prologue as insns.

   Emits, in order: optional fix-and-continue nops; the V.4/eh_return
   early stack adjustment; either the Darwin save_world PARALLEL or the
   individual AltiVec/VRSAVE/LR/CR/FPR/GPR saves; eh_return register
   saves; the deferred stack adjustment for other ABIs; frame pointer
   setup; and TOC / PIC register initialization.  Frame-related insns
   are annotated via rs6000_frame_related / RTX_FRAME_RELATED_P so the
   DWARF unwind info is emitted correctly.  */

void
rs6000_emit_prologue (void)
{
  rs6000_stack_t *info = rs6000_stack_info ();
  enum machine_mode reg_mode = Pmode;
  int reg_size = TARGET_32BIT ? 4 : 8;
  rtx sp_reg_rtx = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
  rtx frame_ptr_rtx = gen_rtx_REG (Pmode, 12);
  rtx frame_reg_rtx = sp_reg_rtx;
  rtx cr_save_rtx = NULL_RTX;
  rtx insn;
  int saving_FPRs_inline;
  int using_store_multiple;
  /* Distance from the register-save area addresses used below to the
     current frame_reg_rtx; nonzero once the stack has been adjusted.  */
  HOST_WIDE_INT sp_offset = 0;

  if (TARGET_FIX_AND_CONTINUE)
    {
      /* gdb on darwin arranges to forward a function from the old
         address by modifying the first 5 instructions of the function
         to branch to the overriding function.  This is necessary to
         permit function pointers that point to the old function to
         actually forward to the new function.  */
      emit_insn (gen_nop ());
      emit_insn (gen_nop ());
      emit_insn (gen_nop ());
      emit_insn (gen_nop ());
      emit_insn (gen_nop ());
    }

  /* 64-bit SPE GPRs are saved/restored as V2SI pairs.  */
  if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
    {
      reg_mode = V2SImode;
      reg_size = 8;
    }

  using_store_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
                          && (!TARGET_SPE_ABI
                              || info->spe_64bit_regs_used == 0)
                          && info->first_gp_reg_save < 31
                          && no_global_regs_above (info->first_gp_reg_save));
  saving_FPRs_inline = (info->first_fp_reg_save == 64
                        || FP_SAVE_INLINE (info->first_fp_reg_save)
                        || current_function_calls_eh_return
                        || cfun->machine->ra_need_lr);

  /* For V.4, update stack before we do any saving and set back pointer.  */
  if (! WORLD_SAVE_P (info)
      && info->push_p
      && (DEFAULT_ABI == ABI_V4
          || current_function_calls_eh_return))
    {
      /* Small frames can be addressed off sp via sp_offset; larger
         ones keep the old sp in r12 (frame_ptr_rtx) instead.  */
      if (info->total_size < 32767)
        sp_offset = info->total_size;
      else
        frame_reg_rtx = frame_ptr_rtx;
      rs6000_emit_allocate_stack (info->total_size,
                                  (frame_reg_rtx != sp_reg_rtx
                                   && (info->cr_save_p
                                       || info->lr_save_p
                                       || info->first_fp_reg_save < 64
                                       || info->first_gp_reg_save < 32
                                       )));
      if (frame_reg_rtx != sp_reg_rtx)
        rs6000_emit_stack_tie ();
    }

  /* Handle world saves specially here.  */
  if (WORLD_SAVE_P (info))
    {
      int i, j, sz;
      rtx treg;
      rtvec p;
      rtx reg0;

      /* save_world expects lr in r0. */
      reg0 = gen_rtx_REG (Pmode, 0);
      if (info->lr_save_p)
        {
          insn = emit_move_insn (reg0,
                                 gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
          RTX_FRAME_RELATED_P (insn) = 1;
        }

      /* The SAVE_WORLD and RESTORE_WORLD routines make a number of
         assumptions about the offsets of various bits of the stack
         frame.  */
      gcc_assert (info->gp_save_offset == -220
                  && info->fp_save_offset == -144
                  && info->lr_save_offset == 8
                  && info->cr_save_offset == 4
                  && info->push_p
                  && info->lr_save_p
                  && (!current_function_calls_eh_return
                       || info->ehrd_offset == -432)
                  && info->vrsave_save_offset == -224
                  && info->altivec_save_offset == -416);

      treg = gen_rtx_REG (SImode, 11);
      emit_move_insn (treg, GEN_INT (-info->total_size));

      /* SAVE_WORLD takes the caller's LR in R0 and the frame size
         in R11.  It also clobbers R12, so beware!  */

      /* Preserve CR2 for save_world prologues */
      sz = 5;
      sz += 32 - info->first_gp_reg_save;
      sz += 64 - info->first_fp_reg_save;
      sz += LAST_ALTIVEC_REGNO - info->first_altivec_reg_save + 1;
      p = rtvec_alloc (sz);
      j = 0;
      RTVEC_ELT (p, j++) = gen_rtx_CLOBBER (VOIDmode,
                                            gen_rtx_REG (Pmode,
                                                         LINK_REGISTER_REGNUM));
      RTVEC_ELT (p, j++) = gen_rtx_USE (VOIDmode,
                                        gen_rtx_SYMBOL_REF (Pmode,
                                                            "*save_world"));
      /* We do floats first so that the instruction pattern matches
         properly.  */
      for (i = 0; i < 64 - info->first_fp_reg_save; i++)
        {
          rtx reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
          rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
                                   GEN_INT (info->fp_save_offset
                                            + sp_offset + 8 * i));
          rtx mem = gen_frame_mem (DFmode, addr);

          RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg);
        }
      for (i = 0; info->first_altivec_reg_save + i <= LAST_ALTIVEC_REGNO; i++)
        {
          rtx reg = gen_rtx_REG (V4SImode, info->first_altivec_reg_save + i);
          rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
                                   GEN_INT (info->altivec_save_offset
                                            + sp_offset + 16 * i));
          rtx mem = gen_frame_mem (V4SImode, addr);

          RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg);
        }
      for (i = 0; i < 32 - info->first_gp_reg_save; i++)
        {
          rtx reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
          rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
                                   GEN_INT (info->gp_save_offset
                                            + sp_offset + reg_size * i));
          rtx mem = gen_frame_mem (reg_mode, addr);

          RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg);
        }

      {
        /* CR register traditionally saved as CR2.  */
        rtx reg = gen_rtx_REG (reg_mode, CR2_REGNO);
        rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
                                 GEN_INT (info->cr_save_offset
                                          + sp_offset));
        rtx mem = gen_frame_mem (reg_mode, addr);

        RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg);
      }
      /* Explain about use of R0.  */
      if (info->lr_save_p)
        {
          rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
                                   GEN_INT (info->lr_save_offset
                                            + sp_offset));
          rtx mem = gen_frame_mem (reg_mode, addr);

          RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg0);
        }
      /* Explain what happens to the stack pointer.  */
      {
        rtx newval = gen_rtx_PLUS (Pmode, sp_reg_rtx, treg);
        RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, sp_reg_rtx, newval);
      }

      insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
      rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
                            treg, GEN_INT (-info->total_size));
      sp_offset = info->total_size;
    }

  /* Save AltiVec registers if needed.  */
  if (!WORLD_SAVE_P (info) && TARGET_ALTIVEC_ABI && info->altivec_size != 0)
    {
      int i;

      /* There should be a non inline version of this, for when we
         are saving lots of vector registers.  */
      for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
        if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
          {
            rtx areg, savereg, mem;
            int offset;

            offset = info->altivec_save_offset + sp_offset
              + 16 * (i - info->first_altivec_reg_save);

            savereg = gen_rtx_REG (V4SImode, i);

            areg = gen_rtx_REG (Pmode, 0);
            emit_move_insn (areg, GEN_INT (offset));

            /* AltiVec addressing mode is [reg+reg].  */
            mem = gen_frame_mem (V4SImode,
                                 gen_rtx_PLUS (Pmode, frame_reg_rtx, areg));

            insn = emit_move_insn (mem, savereg);

            rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
                                  areg, GEN_INT (offset));
          }
    }

  /* VRSAVE is a bit vector representing which AltiVec registers
     are used.  The OS uses this to determine which vector
     registers to save on a context switch.  We need to save
     VRSAVE on the stack frame, add whatever AltiVec registers we
     used in this function, and do the corresponding magic in the
     epilogue.  */

  if (TARGET_ALTIVEC && TARGET_ALTIVEC_VRSAVE
      && info->vrsave_mask != 0)
    {
      rtx reg, mem, vrsave;
      int offset;

      /* Get VRSAVE onto a GPR.  Note that ABI_V4 might be using r12
         as frame_reg_rtx and r11 as the static chain pointer for
         nested functions.  */
      reg = gen_rtx_REG (SImode, 0);
      vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
      if (TARGET_MACHO)
        emit_insn (gen_get_vrsave_internal (reg));
      else
        emit_insn (gen_rtx_SET (VOIDmode, reg, vrsave));

      if (!WORLD_SAVE_P (info))
        {
          /* Save VRSAVE.  */
          offset = info->vrsave_save_offset + sp_offset;
          mem = gen_frame_mem (SImode,
                               gen_rtx_PLUS (Pmode, frame_reg_rtx,
                                             GEN_INT (offset)));
          insn = emit_move_insn (mem, reg);
        }

      /* Include the registers in the mask.  */
      emit_insn (gen_iorsi3 (reg, reg, GEN_INT ((int) info->vrsave_mask)));

      insn = emit_insn (generate_set_vrsave (reg, info, 0));
    }

  /* If we use the link register, get it into r0.  */
  if (!WORLD_SAVE_P (info) && info->lr_save_p)
    {
      insn = emit_move_insn (gen_rtx_REG (Pmode, 0),
                             gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
      RTX_FRAME_RELATED_P (insn) = 1;
    }

  /* If we need to save CR, put it into r12.  */
  if (!WORLD_SAVE_P (info) && info->cr_save_p && frame_reg_rtx != frame_ptr_rtx)
    {
      rtx set;

      cr_save_rtx = gen_rtx_REG (SImode, 12);
      insn = emit_insn (gen_movesi_from_cr (cr_save_rtx));
      RTX_FRAME_RELATED_P (insn) = 1;
      /* Now, there's no way that dwarf2out_frame_debug_expr is going
         to understand '(unspec:SI [(reg:CC 68) ...] UNSPEC_MOVESI_FROM_CR)'.
         But that's OK.  All we have to do is specify that _one_ condition
         code register is saved in this stack slot.  The thrower's epilogue
         will then restore all the call-saved registers.
         We use CR2_REGNO (70) to be compatible with gcc-2.95 on Linux.  */
      set = gen_rtx_SET (VOIDmode, cr_save_rtx,
                         gen_rtx_REG (SImode, CR2_REGNO));
      REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
                                            set,
                                            REG_NOTES (insn));
    }

  /* Do any required saving of fpr's.  If only one or two to save, do
     it ourselves.  Otherwise, call function.  */
  if (!WORLD_SAVE_P (info) && saving_FPRs_inline)
    {
      int i;
      for (i = 0; i < 64 - info->first_fp_reg_save; i++)
        if ((regs_ever_live[info->first_fp_reg_save+i]
             && ! call_used_regs[info->first_fp_reg_save+i]))
          emit_frame_save (frame_reg_rtx, frame_ptr_rtx, DFmode,
                           info->first_fp_reg_save + i,
                           info->fp_save_offset + sp_offset + 8 * i,
                           info->total_size);
    }
  else if (!WORLD_SAVE_P (info) && info->first_fp_reg_save != 64)
    {
      /* Out-of-line save: call the _savefN millicode routine and
         describe all the stores in one PARALLEL.  */
      int i;
      char rname[30];
      const char *alloc_rname;
      rtvec p;
      p = rtvec_alloc (2 + 64 - info->first_fp_reg_save);

      RTVEC_ELT (p, 0) = gen_rtx_CLOBBER (VOIDmode,
                                          gen_rtx_REG (Pmode,
                                                       LINK_REGISTER_REGNUM));
      sprintf (rname, "%s%d%s", SAVE_FP_PREFIX,
               info->first_fp_reg_save - 32, SAVE_FP_SUFFIX);
      alloc_rname = ggc_strdup (rname);
      RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
                                      gen_rtx_SYMBOL_REF (Pmode,
                                                          alloc_rname));
      for (i = 0; i < 64 - info->first_fp_reg_save; i++)
        {
          rtx addr, reg, mem;
          reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
          addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
                               GEN_INT (info->fp_save_offset
                                        + sp_offset + 8*i));
          mem = gen_frame_mem (DFmode, addr);

          RTVEC_ELT (p, i + 2) = gen_rtx_SET (VOIDmode, mem, reg);
        }
      insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
      rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
                            NULL_RTX, NULL_RTX);
    }

  /* Save GPRs.  This is done as a PARALLEL if we are using
     the store-multiple instructions.  */
  if (!WORLD_SAVE_P (info) && using_store_multiple)
    {
      rtvec p;
      int i;
      p = rtvec_alloc (32 - info->first_gp_reg_save);
      for (i = 0; i < 32 - info->first_gp_reg_save; i++)
        {
          rtx addr, reg, mem;
          reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
          addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
                               GEN_INT (info->gp_save_offset
                                        + sp_offset
                                        + reg_size * i));
          mem = gen_frame_mem (reg_mode, addr);

          RTVEC_ELT (p, i) = gen_rtx_SET (VOIDmode, mem, reg);
        }
      insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
      rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
                            NULL_RTX, NULL_RTX);
    }
  else if (!WORLD_SAVE_P (info))
    {
      /* Individual stores: save each live, non-call-used GPR, plus the
         PIC register when the TOC/PIC setup below will clobber it.  */
      int i;
      for (i = 0; i < 32 - info->first_gp_reg_save; i++)
        if ((regs_ever_live[info->first_gp_reg_save + i]
             && (!call_used_regs[info->first_gp_reg_save + i]
                 || (i + info->first_gp_reg_save
                     == RS6000_PIC_OFFSET_TABLE_REGNUM
                     && TARGET_TOC && TARGET_MINIMAL_TOC)))
            || (i + info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
                && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
                    || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
          {
            rtx addr, reg, mem;
            reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);

            if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
              {
                int offset = info->spe_gp_save_offset + sp_offset + 8 * i;
                rtx b;

                /* Offsets too large for an SPE store must go through a
                   scratch register.  */
                if (!SPE_CONST_OFFSET_OK (offset))
                  {
                    b = gen_rtx_REG (Pmode, FIXED_SCRATCH);
                    emit_move_insn (b, GEN_INT (offset));
                  }
                else
                  b = GEN_INT (offset);

                addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, b);
                mem = gen_frame_mem (V2SImode, addr);
                insn = emit_move_insn (mem, reg);

                if (GET_CODE (b) == CONST_INT)
                  rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
                                        NULL_RTX, NULL_RTX);
                else
                  rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
                                        b, GEN_INT (offset));
              }
            else
              {
                addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
                                     GEN_INT (info->gp_save_offset
                                              + sp_offset
                                              + reg_size * i));
                mem = gen_frame_mem (reg_mode, addr);

                insn = emit_move_insn (mem, reg);
                rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
                                      NULL_RTX, NULL_RTX);
              }
          }
    }

  /* ??? There's no need to emit actual instructions here, but it's the
     easiest way to get the frame unwind information emitted.  */
  if (current_function_calls_eh_return)
    {
      unsigned int i, regno;

      /* In AIX ABI we need to pretend we save r2 here.  */
      if (TARGET_AIX)
        {
          rtx addr, reg, mem;

          reg = gen_rtx_REG (reg_mode, 2);
          addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
                               GEN_INT (sp_offset + 5 * reg_size));
          mem = gen_frame_mem (reg_mode, addr);

          insn = emit_move_insn (mem, reg);
          rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
                                NULL_RTX, NULL_RTX);
          PATTERN (insn) = gen_blockage ();
        }

      for (i = 0; ; ++i)
        {
          regno = EH_RETURN_DATA_REGNO (i);
          if (regno == INVALID_REGNUM)
            break;

          emit_frame_save (frame_reg_rtx, frame_ptr_rtx, reg_mode, regno,
                           info->ehrd_offset + sp_offset
                           + reg_size * (int) i,
                           info->total_size);
        }
    }

  /* Save lr if we used it.  */
  if (!WORLD_SAVE_P (info) && info->lr_save_p)
    {
      rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
                               GEN_INT (info->lr_save_offset + sp_offset));
      rtx reg = gen_rtx_REG (Pmode, 0);
      rtx mem = gen_rtx_MEM (Pmode, addr);
      /* This should not be of frame_alias_set, because of
         __builtin_return_address.  */

      insn = emit_move_insn (mem, reg);
      rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
                            NULL_RTX, NULL_RTX);
    }

  /* Save CR if we use any that must be preserved.  */
  if (!WORLD_SAVE_P (info) && info->cr_save_p)
    {
      rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
                               GEN_INT (info->cr_save_offset + sp_offset));
      rtx mem = gen_frame_mem (SImode, addr);
      /* See the large comment above about why CR2_REGNO is used.  */
      rtx magic_eh_cr_reg = gen_rtx_REG (SImode, CR2_REGNO);

      /* If r12 was used to hold the original sp, copy cr into r0 now
         that it's free.  */
      if (REGNO (frame_reg_rtx) == 12)
        {
          rtx set;

          cr_save_rtx = gen_rtx_REG (SImode, 0);
          insn = emit_insn (gen_movesi_from_cr (cr_save_rtx));
          RTX_FRAME_RELATED_P (insn) = 1;
          set = gen_rtx_SET (VOIDmode, cr_save_rtx, magic_eh_cr_reg);
          REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
                                                set,
                                                REG_NOTES (insn));

        }
      insn = emit_move_insn (mem, cr_save_rtx);

      rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
                            NULL_RTX, NULL_RTX);
    }

  /* Update stack and set back pointer unless this is V.4,
     for which it was done previously.  */
  if (!WORLD_SAVE_P (info) && info->push_p
      && !(DEFAULT_ABI == ABI_V4 || current_function_calls_eh_return))
    rs6000_emit_allocate_stack (info->total_size, FALSE);

  /* Set frame pointer, if needed.  */
  if (frame_pointer_needed)
    {
      insn = emit_move_insn (gen_rtx_REG (Pmode, HARD_FRAME_POINTER_REGNUM),
                             sp_reg_rtx);
      RTX_FRAME_RELATED_P (insn) = 1;
    }

  /* If we are using RS6000_PIC_OFFSET_TABLE_REGNUM, we need to set it up.  */
  if ((TARGET_TOC && TARGET_MINIMAL_TOC && get_pool_size () != 0)
      || (DEFAULT_ABI == ABI_V4
          && (flag_pic == 1 || (flag_pic && TARGET_SECURE_PLT))
          && regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM]))
    {
      /* If emit_load_toc_table will use the link register, we need to save
         it.  We use R12 for this purpose because emit_load_toc_table
         can use register 0.  This allows us to use a plain 'blr' to return
         from the procedure more often.  */
      int save_LR_around_toc_setup = (TARGET_ELF
                                      && DEFAULT_ABI != ABI_AIX
                                      && flag_pic
                                      && ! info->lr_save_p
                                      && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0);
      if (save_LR_around_toc_setup)
        {
          rtx lr = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);

          insn = emit_move_insn (frame_ptr_rtx, lr);
          rs6000_maybe_dead (insn);
          RTX_FRAME_RELATED_P (insn) = 1;

          rs6000_emit_load_toc_table (TRUE);

          insn = emit_move_insn (lr, frame_ptr_rtx);
          rs6000_maybe_dead (insn);
          RTX_FRAME_RELATED_P (insn) = 1;
        }
      else
        rs6000_emit_load_toc_table (TRUE);
    }

#if TARGET_MACHO
  if (DEFAULT_ABI == ABI_DARWIN
      && flag_pic && current_function_uses_pic_offset_table)
    {
      rtx lr = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
      rtx src = machopic_function_base_sym ();

      /* Save and restore LR locally around this call (in R0).  */
      if (!info->lr_save_p)
        rs6000_maybe_dead (emit_move_insn (gen_rtx_REG (Pmode, 0), lr));

      rs6000_maybe_dead (emit_insn (gen_load_macho_picbase (lr, src)));

      insn = emit_move_insn (gen_rtx_REG (Pmode,
                                          RS6000_PIC_OFFSET_TABLE_REGNUM),
                             lr);
      rs6000_maybe_dead (insn);

      if (!info->lr_save_p)
        rs6000_maybe_dead (emit_move_insn (lr, gen_rtx_REG (Pmode, 0)));
    }
#endif
}
14844
 
14845
/* Write function prologue.

   FILE is the assembly output stream; SIZE is unused.  Emits any
   .extern directives needed for out-of-line FP save/restore routines
   and AIX common-mode arithmetic helpers, and — on targets without a
   prologue insn pattern (! HAVE_prologue) — generates and prints the
   prologue RTL directly as assembly text.  */

static void
rs6000_output_function_prologue (FILE *file,
                                 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
{
  rs6000_stack_t *info = rs6000_stack_info ();

  if (TARGET_DEBUG_STACK)
    debug_stack_info (info);

  /* Write .extern for any function we will call to save and restore
     fp values.  */
  if (info->first_fp_reg_save < 64
      && !FP_SAVE_INLINE (info->first_fp_reg_save))
    fprintf (file, "\t.extern %s%d%s\n\t.extern %s%d%s\n",
             SAVE_FP_PREFIX, info->first_fp_reg_save - 32, SAVE_FP_SUFFIX,
             RESTORE_FP_PREFIX, info->first_fp_reg_save - 32,
             RESTORE_FP_SUFFIX);

  /* Write .extern for AIX common mode routines, if needed.  */
  if (! TARGET_POWER && ! TARGET_POWERPC && ! common_mode_defined)
    {
      fputs ("\t.extern __mulh\n", file);
      fputs ("\t.extern __mull\n", file);
      fputs ("\t.extern __divss\n", file);
      fputs ("\t.extern __divus\n", file);
      fputs ("\t.extern __quoss\n", file);
      fputs ("\t.extern __quous\n", file);
      /* Only emit the .extern block once per translation unit.  */
      common_mode_defined = 1;
    }

  if (! HAVE_prologue)
    {
      /* No prologue insn pattern: build the prologue RTL in a
         temporary sequence and print it as assembly right here.  */
      start_sequence ();

      /* A NOTE_INSN_DELETED is supposed to be at the start and end of
         the "toplevel" insn chain.  */
      emit_note (NOTE_INSN_DELETED);
      rs6000_emit_prologue ();
      emit_note (NOTE_INSN_DELETED);

      /* Expand INSN_ADDRESSES so final() doesn't crash.  */
      {
        rtx insn;
        unsigned addr = 0;
        for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
          {
            INSN_ADDRESSES_NEW (insn, addr);
            addr += 4;
          }
      }

      if (TARGET_DEBUG_STACK)
        debug_rtx_list (get_insns (), 100);
      final (get_insns (), file, FALSE);
      end_sequence ();
    }

  rs6000_pic_labelno++;
}
14906
 
14907
/* Emit function epilogue as insns.
14908
 
14909
   At present, dwarf2out_frame_debug_expr doesn't understand
14910
   register restores, so we don't bother setting RTX_FRAME_RELATED_P
14911
   anywhere in the epilogue.  Most of the insns below would in any case
14912
   need special notes to explain where r11 is in relation to the stack.  */
14913
 
14914
void
14915
rs6000_emit_epilogue (int sibcall)
14916
{
14917
  rs6000_stack_t *info;
14918
  int restoring_FPRs_inline;
14919
  int using_load_multiple;
14920
  int using_mfcr_multiple;
14921
  int use_backchain_to_restore_sp;
14922
  int sp_offset = 0;
14923
  rtx sp_reg_rtx = gen_rtx_REG (Pmode, 1);
14924
  rtx frame_reg_rtx = sp_reg_rtx;
14925
  enum machine_mode reg_mode = Pmode;
14926
  int reg_size = TARGET_32BIT ? 4 : 8;
14927
  int i;
14928
 
14929
  info = rs6000_stack_info ();
14930
 
14931
  if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
14932
    {
14933
      reg_mode = V2SImode;
14934
      reg_size = 8;
14935
    }
14936
 
14937
  using_load_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
14938
                         && (!TARGET_SPE_ABI
14939
                             || info->spe_64bit_regs_used == 0)
14940
                         && info->first_gp_reg_save < 31
14941
                         && no_global_regs_above (info->first_gp_reg_save));
14942
  restoring_FPRs_inline = (sibcall
14943
                           || current_function_calls_eh_return
14944
                           || info->first_fp_reg_save == 64
14945
                           || FP_SAVE_INLINE (info->first_fp_reg_save));
14946
  use_backchain_to_restore_sp = (frame_pointer_needed
14947
                                 || current_function_calls_alloca
14948
                                 || info->total_size > 32767);
14949
  using_mfcr_multiple = (rs6000_cpu == PROCESSOR_PPC601
14950
                         || rs6000_cpu == PROCESSOR_PPC603
14951
                         || rs6000_cpu == PROCESSOR_PPC750
14952
                         || optimize_size);
14953
 
14954
  if (WORLD_SAVE_P (info))
14955
    {
14956
      int i, j;
14957
      char rname[30];
14958
      const char *alloc_rname;
14959
      rtvec p;
14960
 
14961
      /* eh_rest_world_r10 will return to the location saved in the LR
14962
         stack slot (which is not likely to be our caller.)
14963
         Input: R10 -- stack adjustment.  Clobbers R0, R11, R12, R7, R8.
14964
         rest_world is similar, except any R10 parameter is ignored.
14965
         The exception-handling stuff that was here in 2.95 is no
14966
         longer necessary.  */
14967
 
14968
      p = rtvec_alloc (9
14969
                       + 1
14970
                       + 32 - info->first_gp_reg_save
14971
                       + LAST_ALTIVEC_REGNO + 1 - info->first_altivec_reg_save
14972
                       + 63 + 1 - info->first_fp_reg_save);
14973
 
14974
      strcpy (rname, ((current_function_calls_eh_return) ?
14975
                      "*eh_rest_world_r10" : "*rest_world"));
14976
      alloc_rname = ggc_strdup (rname);
14977
 
14978
      j = 0;
14979
      RTVEC_ELT (p, j++) = gen_rtx_RETURN (VOIDmode);
14980
      RTVEC_ELT (p, j++) = gen_rtx_USE (VOIDmode,
14981
                                        gen_rtx_REG (Pmode,
14982
                                                     LINK_REGISTER_REGNUM));
14983
      RTVEC_ELT (p, j++)
14984
        = gen_rtx_USE (VOIDmode, gen_rtx_SYMBOL_REF (Pmode, alloc_rname));
14985
      /* The instruction pattern requires a clobber here;
14986
         it is shared with the restVEC helper. */
14987
      RTVEC_ELT (p, j++)
14988
        = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, 11));
14989
 
14990
      {
14991
        /* CR register traditionally saved as CR2.  */
14992
        rtx reg = gen_rtx_REG (reg_mode, CR2_REGNO);
14993
        rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14994
                                 GEN_INT (info->cr_save_offset));
14995
        rtx mem = gen_frame_mem (reg_mode, addr);
14996
 
14997
        RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, reg, mem);
14998
      }
14999
 
15000
      for (i = 0; i < 32 - info->first_gp_reg_save; i++)
15001
        {
15002
          rtx reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
15003
          rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
15004
                                   GEN_INT (info->gp_save_offset
15005
                                            + reg_size * i));
15006
          rtx mem = gen_frame_mem (reg_mode, addr);
15007
 
15008
          RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, reg, mem);
15009
        }
15010
      for (i = 0; info->first_altivec_reg_save + i <= LAST_ALTIVEC_REGNO; i++)
15011
        {
15012
          rtx reg = gen_rtx_REG (V4SImode, info->first_altivec_reg_save + i);
15013
          rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
15014
                                   GEN_INT (info->altivec_save_offset
15015
                                            + 16 * i));
15016
          rtx mem = gen_frame_mem (V4SImode, addr);
15017
 
15018
          RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, reg, mem);
15019
        }
15020
      for (i = 0; info->first_fp_reg_save + i <= 63; i++)
15021
        {
15022
          rtx reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
15023
          rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
15024
                                   GEN_INT (info->fp_save_offset
15025
                                            + 8 * i));
15026
          rtx mem = gen_frame_mem (DFmode, addr);
15027
 
15028
          RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, reg, mem);
15029
        }
15030
      RTVEC_ELT (p, j++)
15031
        = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, 0));
15032
      RTVEC_ELT (p, j++)
15033
        = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (SImode, 12));
15034
      RTVEC_ELT (p, j++)
15035
        = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (SImode, 7));
15036
      RTVEC_ELT (p, j++)
15037
        = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (SImode, 8));
15038
      RTVEC_ELT (p, j++)
15039
        = gen_rtx_USE (VOIDmode, gen_rtx_REG (SImode, 10));
15040
      emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
15041
 
15042
      return;
15043
    }
15044
 
15045
  /* If we have a frame pointer, a call to alloca,  or a large stack
15046
     frame, restore the old stack pointer using the backchain.  Otherwise,
15047
     we know what size to update it with.  */
15048
  if (use_backchain_to_restore_sp)
15049
    {
15050
      /* Under V.4, don't reset the stack pointer until after we're done
15051
         loading the saved registers.  */
15052
      if (DEFAULT_ABI == ABI_V4)
15053
        frame_reg_rtx = gen_rtx_REG (Pmode, 11);
15054
 
15055
      emit_move_insn (frame_reg_rtx,
15056
                      gen_rtx_MEM (Pmode, sp_reg_rtx));
15057
    }
15058
  else if (info->push_p)
15059
    {
15060
      if (DEFAULT_ABI == ABI_V4
15061
          || current_function_calls_eh_return)
15062
        sp_offset = info->total_size;
15063
      else
15064
        {
15065
          emit_insn (TARGET_32BIT
15066
                     ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
15067
                                   GEN_INT (info->total_size))
15068
                     : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
15069
                                   GEN_INT (info->total_size)));
15070
        }
15071
    }
15072
 
15073
  /* Restore AltiVec registers if needed.  */
15074
  if (TARGET_ALTIVEC_ABI && info->altivec_size != 0)
15075
    {
15076
      int i;
15077
 
15078
      for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
15079
        if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
15080
          {
15081
            rtx addr, areg, mem;
15082
 
15083
            areg = gen_rtx_REG (Pmode, 0);
15084
            emit_move_insn
15085
              (areg, GEN_INT (info->altivec_save_offset
15086
                              + sp_offset
15087
                              + 16 * (i - info->first_altivec_reg_save)));
15088
 
15089
            /* AltiVec addressing mode is [reg+reg].  */
15090
            addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, areg);
15091
            mem = gen_frame_mem (V4SImode, addr);
15092
 
15093
            emit_move_insn (gen_rtx_REG (V4SImode, i), mem);
15094
          }
15095
    }
15096
 
15097
  /* Restore VRSAVE if needed.  */
15098
  if (TARGET_ALTIVEC && TARGET_ALTIVEC_VRSAVE
15099
      && info->vrsave_mask != 0)
15100
    {
15101
      rtx addr, mem, reg;
15102
 
15103
      addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
15104
                           GEN_INT (info->vrsave_save_offset + sp_offset));
15105
      mem = gen_frame_mem (SImode, addr);
15106
      reg = gen_rtx_REG (SImode, 12);
15107
      emit_move_insn (reg, mem);
15108
 
15109
      emit_insn (generate_set_vrsave (reg, info, 1));
15110
    }
15111
 
15112
  /* Get the old lr if we saved it.  */
15113
  if (info->lr_save_p)
15114
    {
15115
      rtx mem = gen_frame_mem_offset (Pmode, frame_reg_rtx,
15116
                                      info->lr_save_offset + sp_offset);
15117
 
15118
      emit_move_insn (gen_rtx_REG (Pmode, 0), mem);
15119
    }
15120
 
15121
  /* Get the old cr if we saved it.  */
15122
  if (info->cr_save_p)
15123
    {
15124
      rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
15125
                               GEN_INT (info->cr_save_offset + sp_offset));
15126
      rtx mem = gen_frame_mem (SImode, addr);
15127
 
15128
      emit_move_insn (gen_rtx_REG (SImode, 12), mem);
15129
    }
15130
 
15131
  /* Set LR here to try to overlap restores below.  */
15132
  if (info->lr_save_p)
15133
    emit_move_insn (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM),
15134
                    gen_rtx_REG (Pmode, 0));
15135
 
15136
  /* Load exception handler data registers, if needed.  */
15137
  if (current_function_calls_eh_return)
15138
    {
15139
      unsigned int i, regno;
15140
 
15141
      if (TARGET_AIX)
15142
        {
15143
          rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
15144
                                   GEN_INT (sp_offset + 5 * reg_size));
15145
          rtx mem = gen_frame_mem (reg_mode, addr);
15146
 
15147
          emit_move_insn (gen_rtx_REG (reg_mode, 2), mem);
15148
        }
15149
 
15150
      for (i = 0; ; ++i)
15151
        {
15152
          rtx mem;
15153
 
15154
          regno = EH_RETURN_DATA_REGNO (i);
15155
          if (regno == INVALID_REGNUM)
15156
            break;
15157
 
15158
          mem = gen_frame_mem_offset (reg_mode, frame_reg_rtx,
15159
                                      info->ehrd_offset + sp_offset
15160
                                      + reg_size * (int) i);
15161
 
15162
          emit_move_insn (gen_rtx_REG (reg_mode, regno), mem);
15163
        }
15164
    }
15165
 
15166
  /* Restore GPRs.  This is done as a PARALLEL if we are using
15167
     the load-multiple instructions.  */
15168
  if (using_load_multiple)
15169
    {
15170
      rtvec p;
15171
      p = rtvec_alloc (32 - info->first_gp_reg_save);
15172
      for (i = 0; i < 32 - info->first_gp_reg_save; i++)
15173
        {
15174
          rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
15175
                                   GEN_INT (info->gp_save_offset
15176
                                            + sp_offset
15177
                                            + reg_size * i));
15178
          rtx mem = gen_frame_mem (reg_mode, addr);
15179
 
15180
          RTVEC_ELT (p, i) =
15181
            gen_rtx_SET (VOIDmode,
15182
                         gen_rtx_REG (reg_mode, info->first_gp_reg_save + i),
15183
                         mem);
15184
        }
15185
      emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
15186
    }
15187
  else
15188
    for (i = 0; i < 32 - info->first_gp_reg_save; i++)
15189
      if ((regs_ever_live[info->first_gp_reg_save + i]
15190
           && (!call_used_regs[info->first_gp_reg_save + i]
15191
               || (i + info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
15192
                   && TARGET_TOC && TARGET_MINIMAL_TOC)))
15193
          || (i + info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
15194
              && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
15195
                  || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
15196
        {
15197
          rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
15198
                                   GEN_INT (info->gp_save_offset
15199
                                            + sp_offset
15200
                                            + reg_size * i));
15201
          rtx mem = gen_frame_mem (reg_mode, addr);
15202
 
15203
          /* Restore 64-bit quantities for SPE.  */
15204
          if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
15205
            {
15206
              int offset = info->spe_gp_save_offset + sp_offset + 8 * i;
15207
              rtx b;
15208
 
15209
              if (!SPE_CONST_OFFSET_OK (offset))
15210
                {
15211
                  b = gen_rtx_REG (Pmode, FIXED_SCRATCH);
15212
                  emit_move_insn (b, GEN_INT (offset));
15213
                }
15214
              else
15215
                b = GEN_INT (offset);
15216
 
15217
              addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, b);
15218
              mem = gen_frame_mem (V2SImode, addr);
15219
            }
15220
 
15221
          emit_move_insn (gen_rtx_REG (reg_mode,
15222
                                       info->first_gp_reg_save + i), mem);
15223
        }
15224
 
15225
  /* Restore fpr's if we need to do it without calling a function.  */
15226
  if (restoring_FPRs_inline)
15227
    for (i = 0; i < 64 - info->first_fp_reg_save; i++)
15228
      if ((regs_ever_live[info->first_fp_reg_save+i]
15229
           && ! call_used_regs[info->first_fp_reg_save+i]))
15230
        {
15231
          rtx addr, mem;
15232
          addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
15233
                               GEN_INT (info->fp_save_offset
15234
                                        + sp_offset
15235
                                        + 8 * i));
15236
          mem = gen_frame_mem (DFmode, addr);
15237
 
15238
          emit_move_insn (gen_rtx_REG (DFmode,
15239
                                       info->first_fp_reg_save + i),
15240
                          mem);
15241
        }
15242
 
15243
  /* If we saved cr, restore it here.  Just those that were used.  */
15244
  if (info->cr_save_p)
15245
    {
15246
      rtx r12_rtx = gen_rtx_REG (SImode, 12);
15247
      int count = 0;
15248
 
15249
      if (using_mfcr_multiple)
15250
        {
15251
          for (i = 0; i < 8; i++)
15252
            if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
15253
              count++;
15254
          gcc_assert (count);
15255
        }
15256
 
15257
      if (using_mfcr_multiple && count > 1)
15258
        {
15259
          rtvec p;
15260
          int ndx;
15261
 
15262
          p = rtvec_alloc (count);
15263
 
15264
          ndx = 0;
15265
          for (i = 0; i < 8; i++)
15266
            if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
15267
              {
15268
                rtvec r = rtvec_alloc (2);
15269
                RTVEC_ELT (r, 0) = r12_rtx;
15270
                RTVEC_ELT (r, 1) = GEN_INT (1 << (7-i));
15271
                RTVEC_ELT (p, ndx) =
15272
                  gen_rtx_SET (VOIDmode, gen_rtx_REG (CCmode, CR0_REGNO+i),
15273
                               gen_rtx_UNSPEC (CCmode, r, UNSPEC_MOVESI_TO_CR));
15274
                ndx++;
15275
              }
15276
          emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
15277
          gcc_assert (ndx == count);
15278
        }
15279
      else
15280
        for (i = 0; i < 8; i++)
15281
          if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
15282
            {
15283
              emit_insn (gen_movsi_to_cr_one (gen_rtx_REG (CCmode,
15284
                                                           CR0_REGNO+i),
15285
                                              r12_rtx));
15286
            }
15287
    }
15288
 
15289
  /* If this is V.4, unwind the stack pointer after all of the loads
15290
     have been done.  */
15291
  if (frame_reg_rtx != sp_reg_rtx)
15292
    {
15293
      /* This blockage is needed so that sched doesn't decide to move
15294
         the sp change before the register restores.  */
15295
      rs6000_emit_stack_tie ();
15296
      emit_move_insn (sp_reg_rtx, frame_reg_rtx);
15297
    }
15298
  else if (sp_offset != 0)
15299
    emit_insn (TARGET_32BIT
15300
               ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
15301
                             GEN_INT (sp_offset))
15302
               : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
15303
                             GEN_INT (sp_offset)));
15304
 
15305
  if (current_function_calls_eh_return)
15306
    {
15307
      rtx sa = EH_RETURN_STACKADJ_RTX;
15308
      emit_insn (TARGET_32BIT
15309
                 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx, sa)
15310
                 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx, sa));
15311
    }
15312
 
15313
  if (!sibcall)
15314
    {
15315
      rtvec p;
15316
      if (! restoring_FPRs_inline)
15317
        p = rtvec_alloc (3 + 64 - info->first_fp_reg_save);
15318
      else
15319
        p = rtvec_alloc (2);
15320
 
15321
      RTVEC_ELT (p, 0) = gen_rtx_RETURN (VOIDmode);
15322
      RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
15323
                                      gen_rtx_REG (Pmode,
15324
                                                   LINK_REGISTER_REGNUM));
15325
 
15326
      /* If we have to restore more than two FP registers, branch to the
15327
         restore function.  It will return to our caller.  */
15328
      if (! restoring_FPRs_inline)
15329
        {
15330
          int i;
15331
          char rname[30];
15332
          const char *alloc_rname;
15333
 
15334
          sprintf (rname, "%s%d%s", RESTORE_FP_PREFIX,
15335
                   info->first_fp_reg_save - 32, RESTORE_FP_SUFFIX);
15336
          alloc_rname = ggc_strdup (rname);
15337
          RTVEC_ELT (p, 2) = gen_rtx_USE (VOIDmode,
15338
                                          gen_rtx_SYMBOL_REF (Pmode,
15339
                                                              alloc_rname));
15340
 
15341
          for (i = 0; i < 64 - info->first_fp_reg_save; i++)
15342
            {
15343
              rtx addr, mem;
15344
              addr = gen_rtx_PLUS (Pmode, sp_reg_rtx,
15345
                                   GEN_INT (info->fp_save_offset + 8*i));
15346
              mem = gen_frame_mem (DFmode, addr);
15347
 
15348
              RTVEC_ELT (p, i+3) =
15349
                gen_rtx_SET (VOIDmode,
15350
                             gen_rtx_REG (DFmode, info->first_fp_reg_save + i),
15351
                             mem);
15352
            }
15353
        }
15354
 
15355
      emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
15356
    }
15357
}
15358
 
15359
/* Write function epilogue.  */
15360
 
15361
static void
15362
rs6000_output_function_epilogue (FILE *file,
15363
                                 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
15364
{
15365
  if (! HAVE_epilogue)
15366
    {
15367
      rtx insn = get_last_insn ();
15368
      /* If the last insn was a BARRIER, we don't have to write anything except
15369
         the trace table.  */
15370
      if (GET_CODE (insn) == NOTE)
15371
        insn = prev_nonnote_insn (insn);
15372
      if (insn == 0 ||  GET_CODE (insn) != BARRIER)
15373
        {
15374
          /* This is slightly ugly, but at least we don't have two
15375
             copies of the epilogue-emitting code.  */
15376
          start_sequence ();
15377
 
15378
          /* A NOTE_INSN_DELETED is supposed to be at the start
15379
             and end of the "toplevel" insn chain.  */
15380
          emit_note (NOTE_INSN_DELETED);
15381
          rs6000_emit_epilogue (FALSE);
15382
          emit_note (NOTE_INSN_DELETED);
15383
 
15384
          /* Expand INSN_ADDRESSES so final() doesn't crash.  */
15385
          {
15386
            rtx insn;
15387
            unsigned addr = 0;
15388
            for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
15389
              {
15390
                INSN_ADDRESSES_NEW (insn, addr);
15391
                addr += 4;
15392
              }
15393
          }
15394
 
15395
          if (TARGET_DEBUG_STACK)
15396
            debug_rtx_list (get_insns (), 100);
15397
          final (get_insns (), file, FALSE);
15398
          end_sequence ();
15399
        }
15400
    }
15401
 
15402
#if TARGET_MACHO
15403
  macho_branch_islands ();
15404
  /* Mach-O doesn't support labels at the end of objects, so if
15405
     it looks like we might want one, insert a NOP.  */
15406
  {
15407
    rtx insn = get_last_insn ();
15408
    while (insn
15409
           && NOTE_P (insn)
15410
           && NOTE_LINE_NUMBER (insn) != NOTE_INSN_DELETED_LABEL)
15411
      insn = PREV_INSN (insn);
15412
    if (insn
15413
        && (LABEL_P (insn)
15414
            || (NOTE_P (insn)
15415
                && NOTE_LINE_NUMBER (insn) == NOTE_INSN_DELETED_LABEL)))
15416
      fputs ("\tnop\n", file);
15417
  }
15418
#endif
15419
 
15420
  /* Output a traceback table here.  See /usr/include/sys/debug.h for info
15421
     on its format.
15422
 
15423
     We don't output a traceback table if -finhibit-size-directive was
15424
     used.  The documentation for -finhibit-size-directive reads
15425
     ``don't output a @code{.size} assembler directive, or anything
15426
     else that would cause trouble if the function is split in the
15427
     middle, and the two halves are placed at locations far apart in
15428
     memory.''  The traceback table has this property, since it
15429
     includes the offset from the start of the function to the
15430
     traceback table itself.
15431
 
15432
     System V.4 Powerpc's (and the embedded ABI derived from it) use a
15433
     different traceback table.  */
15434
  if (DEFAULT_ABI == ABI_AIX && ! flag_inhibit_size_directive
15435
      && rs6000_traceback != traceback_none && !current_function_is_thunk)
15436
    {
15437
      const char *fname = NULL;
15438
      const char *language_string = lang_hooks.name;
15439
      int fixed_parms = 0, float_parms = 0, parm_info = 0;
15440
      int i;
15441
      int optional_tbtab;
15442
      rs6000_stack_t *info = rs6000_stack_info ();
15443
 
15444
      if (rs6000_traceback == traceback_full)
15445
        optional_tbtab = 1;
15446
      else if (rs6000_traceback == traceback_part)
15447
        optional_tbtab = 0;
15448
      else
15449
        optional_tbtab = !optimize_size && !TARGET_ELF;
15450
 
15451
      if (optional_tbtab)
15452
        {
15453
          fname = XSTR (XEXP (DECL_RTL (current_function_decl), 0), 0);
15454
          while (*fname == '.') /* V.4 encodes . in the name */
15455
            fname++;
15456
 
15457
          /* Need label immediately before tbtab, so we can compute
15458
             its offset from the function start.  */
15459
          ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
15460
          ASM_OUTPUT_LABEL (file, fname);
15461
        }
15462
 
15463
      /* The .tbtab pseudo-op can only be used for the first eight
15464
         expressions, since it can't handle the possibly variable
15465
         length fields that follow.  However, if you omit the optional
15466
         fields, the assembler outputs zeros for all optional fields
15467
         anyways, giving each variable length field is minimum length
15468
         (as defined in sys/debug.h).  Thus we can not use the .tbtab
15469
         pseudo-op at all.  */
15470
 
15471
      /* An all-zero word flags the start of the tbtab, for debuggers
15472
         that have to find it by searching forward from the entry
15473
         point or from the current pc.  */
15474
      fputs ("\t.long 0\n", file);
15475
 
15476
      /* Tbtab format type.  Use format type 0.  */
15477
      fputs ("\t.byte 0,", file);
15478
 
15479
      /* Language type.  Unfortunately, there does not seem to be any
15480
         official way to discover the language being compiled, so we
15481
         use language_string.
15482
         C is 0.  Fortran is 1.  Pascal is 2.  Ada is 3.  C++ is 9.
15483
         Java is 13.  Objective-C is 14.  Objective-C++ isn't assigned
15484
         a number, so for now use 9.  */
15485
      if (! strcmp (language_string, "GNU C"))
15486
        i = 0;
15487
      else if (! strcmp (language_string, "GNU F77")
15488
               || ! strcmp (language_string, "GNU F95"))
15489
        i = 1;
15490
      else if (! strcmp (language_string, "GNU Pascal"))
15491
        i = 2;
15492
      else if (! strcmp (language_string, "GNU Ada"))
15493
        i = 3;
15494
      else if (! strcmp (language_string, "GNU C++")
15495
               || ! strcmp (language_string, "GNU Objective-C++"))
15496
        i = 9;
15497
      else if (! strcmp (language_string, "GNU Java"))
15498
        i = 13;
15499
      else if (! strcmp (language_string, "GNU Objective-C"))
15500
        i = 14;
15501
      else
15502
        gcc_unreachable ();
15503
      fprintf (file, "%d,", i);
15504
 
15505
      /* 8 single bit fields: global linkage (not set for C extern linkage,
15506
         apparently a PL/I convention?), out-of-line epilogue/prologue, offset
15507
         from start of procedure stored in tbtab, internal function, function
15508
         has controlled storage, function has no toc, function uses fp,
15509
         function logs/aborts fp operations.  */
15510
      /* Assume that fp operations are used if any fp reg must be saved.  */
15511
      fprintf (file, "%d,",
15512
               (optional_tbtab << 5) | ((info->first_fp_reg_save != 64) << 1));
15513
 
15514
      /* 6 bitfields: function is interrupt handler, name present in
15515
         proc table, function calls alloca, on condition directives
15516
         (controls stack walks, 3 bits), saves condition reg, saves
15517
         link reg.  */
15518
      /* The `function calls alloca' bit seems to be set whenever reg 31 is
15519
         set up as a frame pointer, even when there is no alloca call.  */
15520
      fprintf (file, "%d,",
15521
               ((optional_tbtab << 6)
15522
                | ((optional_tbtab & frame_pointer_needed) << 5)
15523
                | (info->cr_save_p << 1)
15524
                | (info->lr_save_p)));
15525
 
15526
      /* 3 bitfields: saves backchain, fixup code, number of fpr saved
15527
         (6 bits).  */
15528
      fprintf (file, "%d,",
15529
               (info->push_p << 7) | (64 - info->first_fp_reg_save));
15530
 
15531
      /* 2 bitfields: spare bits (2 bits), number of gpr saved (6 bits).  */
15532
      fprintf (file, "%d,", (32 - first_reg_to_save ()));
15533
 
15534
      if (optional_tbtab)
15535
        {
15536
          /* Compute the parameter info from the function decl argument
15537
             list.  */
15538
          tree decl;
15539
          int next_parm_info_bit = 31;
15540
 
15541
          for (decl = DECL_ARGUMENTS (current_function_decl);
15542
               decl; decl = TREE_CHAIN (decl))
15543
            {
15544
              rtx parameter = DECL_INCOMING_RTL (decl);
15545
              enum machine_mode mode = GET_MODE (parameter);
15546
 
15547
              if (GET_CODE (parameter) == REG)
15548
                {
15549
                  if (SCALAR_FLOAT_MODE_P (mode))
15550
                    {
15551
                      int bits;
15552
 
15553
                      float_parms++;
15554
 
15555
                      switch (mode)
15556
                        {
15557
                        case SFmode:
15558
                          bits = 0x2;
15559
                          break;
15560
 
15561
                        case DFmode:
15562
                        case TFmode:
15563
                          bits = 0x3;
15564
                          break;
15565
 
15566
                        default:
15567
                          gcc_unreachable ();
15568
                        }
15569
 
15570
                      /* If only one bit will fit, don't or in this entry.  */
15571
                      if (next_parm_info_bit > 0)
15572
                        parm_info |= (bits << (next_parm_info_bit - 1));
15573
                      next_parm_info_bit -= 2;
15574
                    }
15575
                  else
15576
                    {
15577
                      fixed_parms += ((GET_MODE_SIZE (mode)
15578
                                       + (UNITS_PER_WORD - 1))
15579
                                      / UNITS_PER_WORD);
15580
                      next_parm_info_bit -= 1;
15581
                    }
15582
                }
15583
            }
15584
        }
15585
 
15586
      /* Number of fixed point parameters.  */
15587
      /* This is actually the number of words of fixed point parameters; thus
15588
         an 8 byte struct counts as 2; and thus the maximum value is 8.  */
15589
      fprintf (file, "%d,", fixed_parms);
15590
 
15591
      /* 2 bitfields: number of floating point parameters (7 bits), parameters
15592
         all on stack.  */
15593
      /* This is actually the number of fp registers that hold parameters;
15594
         and thus the maximum value is 13.  */
15595
      /* Set parameters on stack bit if parameters are not in their original
15596
         registers, regardless of whether they are on the stack?  Xlc
15597
         seems to set the bit when not optimizing.  */
15598
      fprintf (file, "%d\n", ((float_parms << 1) | (! optimize)));
15599
 
15600
      if (! optional_tbtab)
15601
        return;
15602
 
15603
      /* Optional fields follow.  Some are variable length.  */
15604
 
15605
      /* Parameter types, left adjusted bit fields: 0 fixed, 10 single float,
15606
         11 double float.  */
15607
      /* There is an entry for each parameter in a register, in the order that
15608
         they occur in the parameter list.  Any intervening arguments on the
15609
         stack are ignored.  If the list overflows a long (max possible length
15610
         34 bits) then completely leave off all elements that don't fit.  */
15611
      /* Only emit this long if there was at least one parameter.  */
15612
      if (fixed_parms || float_parms)
15613
        fprintf (file, "\t.long %d\n", parm_info);
15614
 
15615
      /* Offset from start of code to tb table.  */
15616
      fputs ("\t.long ", file);
15617
      ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
15618
      if (TARGET_AIX)
15619
        RS6000_OUTPUT_BASENAME (file, fname);
15620
      else
15621
        assemble_name (file, fname);
15622
      putc ('-', file);
15623
      rs6000_output_function_entry (file, fname);
15624
      putc ('\n', file);
15625
 
15626
      /* Interrupt handler mask.  */
15627
      /* Omit this long, since we never set the interrupt handler bit
15628
         above.  */
15629
 
15630
      /* Number of CTL (controlled storage) anchors.  */
15631
      /* Omit this long, since the has_ctl bit is never set above.  */
15632
 
15633
      /* Displacement into stack of each CTL anchor.  */
15634
      /* Omit this list of longs, because there are no CTL anchors.  */
15635
 
15636
      /* Length of function name.  */
15637
      if (*fname == '*')
15638
        ++fname;
15639
      fprintf (file, "\t.short %d\n", (int) strlen (fname));
15640
 
15641
      /* Function name.  */
15642
      assemble_string (fname, strlen (fname));
15643
 
15644
      /* Register for alloca automatic storage; this is always reg 31.
15645
         Only emit this if the alloca bit was set above.  */
15646
      if (frame_pointer_needed)
15647
        fputs ("\t.byte 31\n", file);
15648
 
15649
      fputs ("\t.align 2\n", file);
15650
    }
15651
}
15652
 
15653
/* A C compound statement that outputs the assembler code for a thunk
   function, used to implement C++ virtual function calls with
   multiple inheritance.  The thunk acts as a wrapper around a virtual
   function, adjusting the implicit object parameter before handing
   control off to the real function.

   First, emit code to add the integer DELTA to the location that
   contains the incoming first argument.  Assume that this argument
   contains a pointer, and is the one used to pass the `this' pointer
   in C++.  This is the incoming argument *before* the function
   prologue, e.g. `%o0' on a sparc.  The addition must preserve the
   values of all other incoming arguments.

   After the addition, emit code to jump to FUNCTION, which is a
   `FUNCTION_DECL'.  This is a direct pure jump, not a call, and does
   not touch the return address.  Hence returning from FUNCTION will
   return to whoever called the current `thunk'.

   The effect must be as if FUNCTION had been called directly with the
   adjusted first argument.  This macro is responsible for emitting
   all of the code for a thunk function; output_function_prologue()
   and output_function_epilogue() are not invoked.

   The THUNK_FNDECL is redundant.  (DELTA and FUNCTION have already
   been extracted from it.)  It might possibly be useful on some
   targets, but probably not.

   If you do not define this macro, the target-independent code in the
   C++ frontend will generate a less efficient heavyweight thunk that
   calls FUNCTION instead of jumping to it.  The generic approach does
   not support varargs.  */

static void
rs6000_output_mi_thunk (FILE *file, tree thunk_fndecl ATTRIBUTE_UNUSED,
                        HOST_WIDE_INT delta, HOST_WIDE_INT vcall_offset,
                        tree function)
{
  rtx this, insn, funexp;

  /* Pretend register allocation and the epilogue pass are done, so we
     can emit hard-register RTL and run final () on it directly; these
     globals are reset at the end of this function.  */
  reload_completed = 1;
  epilogue_completed = 1;
  no_new_pseudos = 1;
  reset_block_changes ();

  /* Mark the end of the (empty) prologue.  */
  emit_note (NOTE_INSN_PROLOGUE_END);

  /* Find the "this" pointer.  If the function returns a structure,
     the structure return pointer is in r3, so `this' arrives in r4;
     otherwise `this' is in r3.  */
  if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function)), function))
    this = gen_rtx_REG (Pmode, 4);
  else
    this = gen_rtx_REG (Pmode, 3);

  /* Apply the constant offset, if required.  */
  if (delta)
    {
      rtx delta_rtx = GEN_INT (delta);
      emit_insn (TARGET_32BIT
                 ? gen_addsi3 (this, this, delta_rtx)
                 : gen_adddi3 (this, this, delta_rtx));
    }

  /* Apply the offset from the vtable, if required.  */
  if (vcall_offset)
    {
      rtx vcall_offset_rtx = GEN_INT (vcall_offset);
      rtx tmp = gen_rtx_REG (Pmode, 12);

      /* tmp = vtable pointer (loaded from *this).  */
      emit_move_insn (tmp, gen_rtx_MEM (Pmode, this));
      if (((unsigned HOST_WIDE_INT) vcall_offset) + 0x8000 >= 0x10000)
        {
          /* Offset doesn't fit in a signed 16-bit displacement:
             add it into the register, then load with zero offset.  */
          emit_insn (TARGET_32BIT
                     ? gen_addsi3 (tmp, tmp, vcall_offset_rtx)
                     : gen_adddi3 (tmp, tmp, vcall_offset_rtx));
          emit_move_insn (tmp, gen_rtx_MEM (Pmode, tmp));
        }
      else
        {
          /* Offset fits in the displacement field: load directly.  */
          rtx loc = gen_rtx_PLUS (Pmode, tmp, vcall_offset_rtx);

          emit_move_insn (tmp, gen_rtx_MEM (Pmode, loc));
        }
      /* this += *(vtable + vcall_offset).  */
      emit_insn (TARGET_32BIT
                 ? gen_addsi3 (this, this, tmp)
                 : gen_adddi3 (this, this, tmp));
    }

  /* Generate a tail call to the target function.  */
  if (!TREE_USED (function))
    {
      assemble_external (function);
      TREE_USED (function) = 1;
    }
  funexp = XEXP (DECL_RTL (function), 0);
  funexp = gen_rtx_MEM (FUNCTION_MODE, funexp);

#if TARGET_MACHO
  if (MACHOPIC_INDIRECT)
    funexp = machopic_indirect_call_target (funexp);
#endif

  /* gen_sibcall expects reload to convert scratch pseudo to LR so we must
     generate sibcall RTL explicitly.  */
  insn = emit_call_insn (
           gen_rtx_PARALLEL (VOIDmode,
             gen_rtvec (4,
                        gen_rtx_CALL (VOIDmode,
                                      funexp, const0_rtx),
                        gen_rtx_USE (VOIDmode, const0_rtx),
                        gen_rtx_USE (VOIDmode,
                                     gen_rtx_REG (SImode,
                                                  LINK_REGISTER_REGNUM)),
                        gen_rtx_RETURN (VOIDmode))));
  SIBLING_CALL_P (insn) = 1;
  emit_barrier ();

  /* Run just enough of rest_of_compilation to get the insns emitted.
     There's not really enough bulk here to make other passes such as
     instruction scheduling worth while.  Note that use_thunk calls
     assemble_start_function and assemble_end_function.  */
  insn = get_insns ();
  insn_locators_initialize ();
  shorten_branches (insn);
  final_start_function (insn, file, 1);
  final (insn, file, 1);
  final_end_function ();

  /* Undo the "compilation finished" state set up at entry.  */
  reload_completed = 0;
  epilogue_completed = 0;
  no_new_pseudos = 0;
}
15785
 
15786
/* A quick summary of the various types of 'constant-pool tables'
15787
   under PowerPC:
15788
 
15789
   Target       Flags           Name            One table per
15790
   AIX          (none)          AIX TOC         object file
15791
   AIX          -mfull-toc      AIX TOC         object file
15792
   AIX          -mminimal-toc   AIX minimal TOC translation unit
15793
   SVR4/EABI    (none)          SVR4 SDATA      object file
15794
   SVR4/EABI    -fpic           SVR4 pic        object file
15795
   SVR4/EABI    -fPIC           SVR4 PIC        translation unit
15796
   SVR4/EABI    -mrelocatable   EABI TOC        function
15797
   SVR4/EABI    -maix           AIX TOC         object file
15798
   SVR4/EABI    -maix -mminimal-toc
15799
                                AIX minimal TOC translation unit
15800
 
15801
   Name                 Reg.    Set by  entries       contains:
15802
                                        made by  addrs? fp?     sum?
15803
 
15804
   AIX TOC              2       crt0    as       Y      option  option
15805
   AIX minimal TOC      30      prolog  gcc      Y      Y       option
15806
   SVR4 SDATA           13      crt0    gcc      N      Y       N
15807
   SVR4 pic             30      prolog  ld       Y      not yet N
15808
   SVR4 PIC             30      prolog  gcc      Y      option  option
15809
   EABI TOC             30      prolog  gcc      Y      option  option
15810
 
15811
*/
15812
 
15813
/* Hash functions for the hash table.  */
15814
 
15815
static unsigned
15816
rs6000_hash_constant (rtx k)
15817
{
15818
  enum rtx_code code = GET_CODE (k);
15819
  enum machine_mode mode = GET_MODE (k);
15820
  unsigned result = (code << 3) ^ mode;
15821
  const char *format;
15822
  int flen, fidx;
15823
 
15824
  format = GET_RTX_FORMAT (code);
15825
  flen = strlen (format);
15826
  fidx = 0;
15827
 
15828
  switch (code)
15829
    {
15830
    case LABEL_REF:
15831
      return result * 1231 + (unsigned) INSN_UID (XEXP (k, 0));
15832
 
15833
    case CONST_DOUBLE:
15834
      if (mode != VOIDmode)
15835
        return real_hash (CONST_DOUBLE_REAL_VALUE (k)) * result;
15836
      flen = 2;
15837
      break;
15838
 
15839
    case CODE_LABEL:
15840
      fidx = 3;
15841
      break;
15842
 
15843
    default:
15844
      break;
15845
    }
15846
 
15847
  for (; fidx < flen; fidx++)
15848
    switch (format[fidx])
15849
      {
15850
      case 's':
15851
        {
15852
          unsigned i, len;
15853
          const char *str = XSTR (k, fidx);
15854
          len = strlen (str);
15855
          result = result * 613 + len;
15856
          for (i = 0; i < len; i++)
15857
            result = result * 613 + (unsigned) str[i];
15858
          break;
15859
        }
15860
      case 'u':
15861
      case 'e':
15862
        result = result * 1231 + rs6000_hash_constant (XEXP (k, fidx));
15863
        break;
15864
      case 'i':
15865
      case 'n':
15866
        result = result * 613 + (unsigned) XINT (k, fidx);
15867
        break;
15868
      case 'w':
15869
        if (sizeof (unsigned) >= sizeof (HOST_WIDE_INT))
15870
          result = result * 613 + (unsigned) XWINT (k, fidx);
15871
        else
15872
          {
15873
            size_t i;
15874
            for (i = 0; i < sizeof (HOST_WIDE_INT) / sizeof (unsigned); i++)
15875
              result = result * 613 + (unsigned) (XWINT (k, fidx)
15876
                                                  >> CHAR_BIT * i);
15877
          }
15878
        break;
15879
      case '0':
15880
        break;
15881
      default:
15882
        gcc_unreachable ();
15883
      }
15884
 
15885
  return result;
15886
}
15887
 
15888
static unsigned
15889
toc_hash_function (const void *hash_entry)
15890
{
15891
  const struct toc_hash_struct *thc =
15892
    (const struct toc_hash_struct *) hash_entry;
15893
  return rs6000_hash_constant (thc->key) ^ thc->key_mode;
15894
}
15895
 
15896
/* Compare H1 and H2 for equivalence.  */
15897
 
15898
static int
15899
toc_hash_eq (const void *h1, const void *h2)
15900
{
15901
  rtx r1 = ((const struct toc_hash_struct *) h1)->key;
15902
  rtx r2 = ((const struct toc_hash_struct *) h2)->key;
15903
 
15904
  if (((const struct toc_hash_struct *) h1)->key_mode
15905
      != ((const struct toc_hash_struct *) h2)->key_mode)
15906
    return 0;
15907
 
15908
  return rtx_equal_p (r1, r2);
15909
}
15910
 
15911
/* These are the names given by the C++ front-end to vtables, and
15912
   vtable-like objects.  Ideally, this logic should not be here;
15913
   instead, there should be some programmatic way of inquiring as
15914
   to whether or not an object is a vtable.  */
15915
 
15916
/* Nonzero if NAME is one of the names the C++ front-end gives to
   vtables and vtable-like objects (vtables, VTTs, typeinfo,
   construction vtables).

   Fix: the macro declared a parameter NAME but tested the caller's
   local variable `name', silently relying on variable capture.  Both
   call sites in this file pass a variable named `name', so using the
   parameter is behavior-identical while making the macro hygienic.  */

#define VTABLE_NAME_P(NAME)				\
  (strncmp ("_vt.", (NAME), strlen ("_vt.")) == 0	\
  || strncmp ("_ZTV", (NAME), strlen ("_ZTV")) == 0	\
  || strncmp ("_ZTT", (NAME), strlen ("_ZTT")) == 0	\
  || strncmp ("_ZTI", (NAME), strlen ("_ZTI")) == 0	\
  || strncmp ("_ZTC", (NAME), strlen ("_ZTC")) == 0)
15922
 
15923
void
15924
rs6000_output_symbol_ref (FILE *file, rtx x)
15925
{
15926
  /* Currently C++ toc references to vtables can be emitted before it
15927
     is decided whether the vtable is public or private.  If this is
15928
     the case, then the linker will eventually complain that there is
15929
     a reference to an unknown section.  Thus, for vtables only,
15930
     we emit the TOC reference to reference the symbol and not the
15931
     section.  */
15932
  const char *name = XSTR (x, 0);
15933
 
15934
  if (VTABLE_NAME_P (name))
15935
    {
15936
      RS6000_OUTPUT_BASENAME (file, name);
15937
    }
15938
  else
15939
    assemble_name (file, name);
15940
}
15941
 
15942
/* Output a TOC entry.  We derive the entry name from what is being
15943
   written.  */
15944
 
15945
void
output_toc (FILE *file, rtx x, int labelno, enum machine_mode mode)
{
  char buf[256];
  const char *name = buf;
  const char *real_name;
  rtx base = x;
  HOST_WIDE_INT offset = 0;

  gcc_assert (!TARGET_NO_TOC);

  /* When the linker won't eliminate them, don't output duplicate
     TOC entries (this happens on AIX if there is any kind of TOC,
     and on SVR4 under -fPIC or -mrelocatable).  Don't do this for
     CODE_LABELs.  */
  if (TARGET_TOC && GET_CODE (x) != LABEL_REF)
    {
      struct toc_hash_struct *h;
      void * * found;

      /* Create toc_hash_table.  This can't be done at OVERRIDE_OPTIONS
	 time because GGC is not initialized at that point.  */
      if (toc_hash_table == NULL)
	toc_hash_table = htab_create_ggc (1021, toc_hash_function,
					  toc_hash_eq, NULL);

      h = ggc_alloc (sizeof (*h));
      h->key = x;
      h->key_mode = mode;
      h->labelno = labelno;

      found = htab_find_slot (toc_hash_table, h, 1);
      if (*found == NULL)
	*found = h;
      else  /* This is indeed a duplicate.
	       Set this label equal to that label.  */
	{
	  /* Emit ".set LCnew,LCold" so the duplicate label aliases the
	     entry that was emitted first.  */
	  fputs ("\t.set ", file);
	  ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
	  fprintf (file, "%d,", labelno);
	  ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
	  fprintf (file, "%d\n", ((*(const struct toc_hash_struct **)
					      found)->labelno));
	  return;
	}
    }

  /* If we're going to put a double constant in the TOC, make sure it's
     aligned properly when strict alignment is on.  */
  if (GET_CODE (x) == CONST_DOUBLE
      && STRICT_ALIGNMENT
      && GET_MODE_BITSIZE (mode) >= 64
      && ! (TARGET_NO_FP_IN_TOC && ! TARGET_MINIMAL_TOC)) {
    ASM_OUTPUT_ALIGN (file, 3);
  }

  (*targetm.asm_out.internal_label) (file, "LC", labelno);

  /* Handle FP constants specially.  Note that if we have a minimal
     TOC, things we put here aren't actually in the TOC, so we can allow
     FP constants.  */
  if (GET_CODE (x) == CONST_DOUBLE &&
      (GET_MODE (x) == TFmode || GET_MODE (x) == TDmode))
    {
      /* 128-bit binary or decimal float: four 32-bit words k[0..3].  */
      REAL_VALUE_TYPE rv;
      long k[4];

      REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
      if (DECIMAL_FLOAT_MODE_P (GET_MODE (x)))
	REAL_VALUE_TO_TARGET_DECIMAL128 (rv, k);
      else
	REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv, k);

      if (TARGET_64BIT)
	{
	  /* Two doubleword operands.  */
	  if (TARGET_MINIMAL_TOC)
	    fputs (DOUBLE_INT_ASM_OP, file);
	  else
	    fprintf (file, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
		     k[0] & 0xffffffff, k[1] & 0xffffffff,
		     k[2] & 0xffffffff, k[3] & 0xffffffff);
	  fprintf (file, "0x%lx%08lx,0x%lx%08lx\n",
		   k[0] & 0xffffffff, k[1] & 0xffffffff,
		   k[2] & 0xffffffff, k[3] & 0xffffffff);
	  return;
	}
      else
	{
	  /* Four word operands.  */
	  if (TARGET_MINIMAL_TOC)
	    fputs ("\t.long ", file);
	  else
	    fprintf (file, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
		     k[0] & 0xffffffff, k[1] & 0xffffffff,
		     k[2] & 0xffffffff, k[3] & 0xffffffff);
	  fprintf (file, "0x%lx,0x%lx,0x%lx,0x%lx\n",
		   k[0] & 0xffffffff, k[1] & 0xffffffff,
		   k[2] & 0xffffffff, k[3] & 0xffffffff);
	  return;
	}
    }
  else if (GET_CODE (x) == CONST_DOUBLE &&
	   (GET_MODE (x) == DFmode || GET_MODE (x) == DDmode))
    {
      /* 64-bit binary or decimal float: two 32-bit words k[0..1].  */
      REAL_VALUE_TYPE rv;
      long k[2];

      REAL_VALUE_FROM_CONST_DOUBLE (rv, x);

      if (DECIMAL_FLOAT_MODE_P (GET_MODE (x)))
	REAL_VALUE_TO_TARGET_DECIMAL64 (rv, k);
      else
	REAL_VALUE_TO_TARGET_DOUBLE (rv, k);

      if (TARGET_64BIT)
	{
	  if (TARGET_MINIMAL_TOC)
	    fputs (DOUBLE_INT_ASM_OP, file);
	  else
	    fprintf (file, "\t.tc FD_%lx_%lx[TC],",
		     k[0] & 0xffffffff, k[1] & 0xffffffff);
	  fprintf (file, "0x%lx%08lx\n",
		   k[0] & 0xffffffff, k[1] & 0xffffffff);
	  return;
	}
      else
	{
	  if (TARGET_MINIMAL_TOC)
	    fputs ("\t.long ", file);
	  else
	    fprintf (file, "\t.tc FD_%lx_%lx[TC],",
		     k[0] & 0xffffffff, k[1] & 0xffffffff);
	  fprintf (file, "0x%lx,0x%lx\n",
		   k[0] & 0xffffffff, k[1] & 0xffffffff);
	  return;
	}
    }
  else if (GET_CODE (x) == CONST_DOUBLE &&
	   (GET_MODE (x) == SFmode || GET_MODE (x) == SDmode))
    {
      /* 32-bit binary or decimal float: a single word.  */
      REAL_VALUE_TYPE rv;
      long l;

      REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
      if (DECIMAL_FLOAT_MODE_P (GET_MODE (x)))
	REAL_VALUE_TO_TARGET_DECIMAL32 (rv, l);
      else
	REAL_VALUE_TO_TARGET_SINGLE (rv, l);

      if (TARGET_64BIT)
	{
	  /* Pad to a full doubleword; value goes in the high half.  */
	  if (TARGET_MINIMAL_TOC)
	    fputs (DOUBLE_INT_ASM_OP, file);
	  else
	    fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
	  fprintf (file, "0x%lx00000000\n", l & 0xffffffff);
	  return;
	}
      else
	{
	  if (TARGET_MINIMAL_TOC)
	    fputs ("\t.long ", file);
	  else
	    fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
	  fprintf (file, "0x%lx\n", l & 0xffffffff);
	  return;
	}
    }
  else if (GET_MODE (x) == VOIDmode
	   && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE))
    {
      /* Mode-less integer constant: split it into 32-bit halves.  */
      unsigned HOST_WIDE_INT low;
      HOST_WIDE_INT high;

      if (GET_CODE (x) == CONST_DOUBLE)
	{
	  low = CONST_DOUBLE_LOW (x);
	  high = CONST_DOUBLE_HIGH (x);
	}
      else
#if HOST_BITS_PER_WIDE_INT == 32
	{
	  low = INTVAL (x);
	  /* Sign-extend the high half from bit 31 of LOW.  */
	  high = (low & 0x80000000) ? ~0 : 0;
	}
#else
	{
	  low = INTVAL (x) & 0xffffffff;
	  high = (HOST_WIDE_INT) INTVAL (x) >> 32;
	}
#endif

      /* TOC entries are always Pmode-sized, but since this
	 is a bigendian machine then if we're putting smaller
	 integer constants in the TOC we have to pad them.
	 (This is still a win over putting the constants in
	 a separate constant pool, because then we'd have
	 to have both a TOC entry _and_ the actual constant.)

	 For a 32-bit target, CONST_INT values are loaded and shifted
	 entirely within `low' and can be stored in one TOC entry.  */

      /* It would be easy to make this work, but it doesn't now.  */
      gcc_assert (!TARGET_64BIT || POINTER_SIZE >= GET_MODE_BITSIZE (mode));

      if (POINTER_SIZE > GET_MODE_BITSIZE (mode))
	{
	  /* Left-justify the value within the Pmode-sized entry.  */
#if HOST_BITS_PER_WIDE_INT == 32
	  lshift_double (low, high, POINTER_SIZE - GET_MODE_BITSIZE (mode),
			 POINTER_SIZE, &low, &high, 0);
#else
	  low |= high << 32;
	  low <<= POINTER_SIZE - GET_MODE_BITSIZE (mode);
	  high = (HOST_WIDE_INT) low >> 32;
	  low &= 0xffffffff;
#endif
	}

      if (TARGET_64BIT)
	{
	  if (TARGET_MINIMAL_TOC)
	    fputs (DOUBLE_INT_ASM_OP, file);
	  else
	    fprintf (file, "\t.tc ID_%lx_%lx[TC],",
		     (long) high & 0xffffffff, (long) low & 0xffffffff);
	  fprintf (file, "0x%lx%08lx\n",
		   (long) high & 0xffffffff, (long) low & 0xffffffff);
	  return;
	}
      else
	{
	  if (POINTER_SIZE < GET_MODE_BITSIZE (mode))
	    {
	      /* Constant wider than a pointer: emit both halves.  */
	      if (TARGET_MINIMAL_TOC)
		fputs ("\t.long ", file);
	      else
		fprintf (file, "\t.tc ID_%lx_%lx[TC],",
			 (long) high & 0xffffffff, (long) low & 0xffffffff);
	      fprintf (file, "0x%lx,0x%lx\n",
		       (long) high & 0xffffffff, (long) low & 0xffffffff);
	    }
	  else
	    {
	      /* Fits in one word: only LOW is needed.  */
	      if (TARGET_MINIMAL_TOC)
		fputs ("\t.long ", file);
	      else
		fprintf (file, "\t.tc IS_%lx[TC],", (long) low & 0xffffffff);
	      fprintf (file, "0x%lx\n", (long) low & 0xffffffff);
	    }
	  return;
	}
    }

  if (GET_CODE (x) == CONST)
    {
      /* (const (plus SYMBOL OFFSET)): split into BASE and OFFSET so the
	 entry name can encode the offset.  */
      gcc_assert (GET_CODE (XEXP (x, 0)) == PLUS);

      base = XEXP (XEXP (x, 0), 0);
      offset = INTVAL (XEXP (XEXP (x, 0), 1));
    }

  switch (GET_CODE (base))
    {
    case SYMBOL_REF:
      name = XSTR (base, 0);
      break;

    case LABEL_REF:
      ASM_GENERATE_INTERNAL_LABEL (buf, "L",
				   CODE_LABEL_NUMBER (XEXP (base, 0)));
      break;

    case CODE_LABEL:
      ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (base));
      break;

    default:
      gcc_unreachable ();
    }

  real_name = (*targetm.strip_name_encoding) (name);
  if (TARGET_MINIMAL_TOC)
    fputs (TARGET_32BIT ? "\t.long " : DOUBLE_INT_ASM_OP, file);
  else
    {
      fprintf (file, "\t.tc %s", real_name);

      /* Encode a negative offset as ".N<n>" and a positive one as
	 ".P<n>" so the entry name remains a valid identifier.  */
      if (offset < 0)
	fprintf (file, ".N" HOST_WIDE_INT_PRINT_UNSIGNED, - offset);
      else if (offset)
	fprintf (file, ".P" HOST_WIDE_INT_PRINT_UNSIGNED, offset);

      fputs ("[TC],", file);
    }

  /* Currently C++ toc references to vtables can be emitted before it
     is decided whether the vtable is public or private.  If this is
     the case, then the linker will eventually complain that there is
     a TOC reference to an unknown section.  Thus, for vtables only,
     we emit the TOC reference to reference the symbol and not the
     section.  */
  if (VTABLE_NAME_P (name))
    {
      RS6000_OUTPUT_BASENAME (file, name);
      if (offset < 0)
	fprintf (file, HOST_WIDE_INT_PRINT_DEC, offset);
      else if (offset > 0)
	fprintf (file, "+" HOST_WIDE_INT_PRINT_DEC, offset);
    }
  else
    output_addr_const (file, x);
  putc ('\n', file);
}
16257
 
16258
/* Output an assembler pseudo-op to write an ASCII string of N characters
16259
   starting at P to FILE.
16260
 
16261
   On the RS/6000, we have to do this using the .byte operation and
16262
   write out special characters outside the quoted string.
16263
   Also, the assembler is broken; very long strings are truncated,
16264
   so we must artificially break them up early.  */
16265
 
16266
/* Output an assembler pseudo-op to write an ASCII string of N characters
   starting at P to FILE.

   On the RS/6000 this must be done with the .byte operation: printable
   runs are emitted inside a quoted string and everything else as
   decimal byte values.  The assembler also truncates very long
   strings, so a quoted run is closed and restarted every 512
   characters.  */

void
output_ascii (FILE *file, const char *p, int n)
{
  /* OPEN_QUOTE is the text to emit before the next quoted character,
     DECIMAL_SEP the text before the next decimal byte, and
     PENDING_CLOSE what must be written once the current run ends;
     QUOTED_LEN counts characters in the current quoted run.  */
  const char *open_quote = "\t.byte \"";
  const char *decimal_sep = "\t.byte ";
  const char *pending_close = NULL;
  int quoted_len = 0;
  int i;
  char c;

  for (i = 0; i < n; i++)
    {
      c = *p++;

      if (c < ' ' || c >= 0177)
	{
	  /* Non-printable: emit as a decimal .byte operand.  */
	  if (decimal_sep)
	    fputs (decimal_sep, file);
	  fprintf (file, "%d", c);

	  open_quote = "\n\t.byte \"";
	  decimal_sep = ", ";
	  pending_close = "\n";
	  quoted_len = 0;
	}
      else
	{
	  /* Printable: emit inside a quoted string.  */
	  if (open_quote)
	    fputs (open_quote, file);
	  putc (c, file);

	  /* A quote is escaped by writing it twice.  */
	  if (c == '"')
	    {
	      putc (c, file);
	      ++quoted_len;
	    }

	  open_quote = NULL;
	  decimal_sep = "\"\n\t.byte ";
	  pending_close = "\"\n";
	  ++quoted_len;

	  /* The assembler truncates long strings; restart the run.  */
	  if (quoted_len >= 512)
	    {
	      fputs (pending_close, file);

	      open_quote = "\t.byte \"";
	      decimal_sep = "\t.byte ";
	      pending_close = NULL;
	      quoted_len = 0;
	    }
	}
    }

  /* Close any open quoted run and end the line.  */
  if (pending_close)
    fputs (pending_close, file);
}
16324
 
16325
/* Generate a unique section name for FILENAME for a section type
16326
   represented by SECTION_DESC.  Output goes into BUF.
16327
 
16328
   SECTION_DESC can be any string, as long as it is different for each
16329
   possible section type.
16330
 
16331
   We name the section in the same manner as xlc.  The name begins with an
16332
   underscore followed by the filename (after stripping any leading directory
16333
   names) with the last period replaced by the string SECTION_DESC.  If
16334
   FILENAME does not contain a period, SECTION_DESC is appended to the end of
16335
   the name.  */
16336
 
16337
/* Generate a unique section name for FILENAME, for a section type
   represented by SECTION_DESC, storing a newly xmalloc'd string in
   *BUF.

   We name the section the same way xlc does: an underscore followed by
   the basename of FILENAME (leading directories stripped,
   non-alphanumeric characters dropped) with the last period replaced
   by SECTION_DESC; if FILENAME contains no period, SECTION_DESC is
   appended to the end instead.  */

void
rs6000_gen_section_name (char **buf, const char *filename,
			 const char *section_desc)
{
  const char *scan, *basename, *dot = 0;
  char *out;
  int len;

  /* Locate the basename and the last '.' in FILENAME.  */
  basename = filename;
  for (scan = filename; *scan; scan++)
    {
      if (*scan == '/')
	basename = scan + 1;
      else if (*scan == '.')
	dot = scan;
    }

  /* Worst case: '_' + basename + section_desc + NUL.  */
  len = strlen (basename) + strlen (section_desc) + 2;
  *buf = (char *) xmalloc (len);

  out = *buf;
  *out++ = '_';

  for (scan = basename; *scan; scan++)
    {
      if (scan == dot)
	{
	  /* Replace the final period with SECTION_DESC and stop.  */
	  strcpy (out, section_desc);
	  out += strlen (section_desc);
	  break;
	}
      else if (ISALNUM (*scan))
	*out++ = *scan;
    }

  if (dot == 0)
    strcpy (out, section_desc);
  else
    *out = '\0';
}
16378
 
16379
/* Emit profile function.  */
16380
 
16381
void
output_profile_hook (int labelno ATTRIBUTE_UNUSED)
{
  /* Non-standard profiling for kernels, which just saves LR then calls
     _mcount without worrying about arg saves.  The idea is to change
     the function prologue as little as possible as it isn't easy to
     account for arg save/restore code added just for _mcount.  */
  if (TARGET_PROFILE_KERNEL)
    return;

  if (DEFAULT_ABI == ABI_AIX)
    {
#ifndef NO_PROFILE_COUNTERS
# define NO_PROFILE_COUNTERS 0
#endif
      if (NO_PROFILE_COUNTERS)
	emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 0);
      else
	{
	  /* Pass the address of the per-function "LP<labelno>" counter
	     label as the single argument to _mcount.  */
	  char buf[30];
	  const char *label_name;
	  rtx fun;

	  ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
	  label_name = (*targetm.strip_name_encoding) (ggc_strdup (buf));
	  fun = gen_rtx_SYMBOL_REF (Pmode, label_name);

	  emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 1,
			     fun, Pmode);
	}
    }
  else if (DEFAULT_ABI == ABI_DARWIN)
    {
      const char *mcount_name = RS6000_MCOUNT;
      /* By default _mcount receives the caller's address from LR.  */
      int caller_addr_regno = LINK_REGISTER_REGNUM;

      /* Be conservative and always set this, at least for now.  */
      current_function_uses_pic_offset_table = 1;

#if TARGET_MACHO
      /* For PIC code, set up a stub and collect the caller's address
	 from r0, which is where the prologue puts it.  */
      if (MACHOPIC_INDIRECT
	  && current_function_uses_pic_offset_table)
	caller_addr_regno = 0;
#endif
      emit_library_call (gen_rtx_SYMBOL_REF (Pmode, mcount_name),
			 0, VOIDmode, 1,
			 gen_rtx_REG (Pmode, caller_addr_regno), Pmode);
    }
}
16432
 
16433
/* Write function profiler code.  */
16434
 
16435
void
output_function_profiler (FILE *file, int labelno)
{
  char buf[100];

  switch (DEFAULT_ABI)
    {
    default:
      gcc_unreachable ();

    case ABI_V4:
      if (!TARGET_32BIT)
	{
	  warning (0, "no profiling of 64-bit code for this ABI");
	  return;
	}
      /* BUF holds the per-function "LP<labelno>" counter label.  */
      ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
      /* Copy the link register to r0 so it can be saved to the stack.  */
      fprintf (file, "\tmflr %s\n", reg_names[0]);
      if (NO_PROFILE_COUNTERS)
	{
	  /* Just save LR at 4(r1); no counter address is needed.  */
	  asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
		       reg_names[0], reg_names[1]);
	}
      else if (TARGET_SECURE_PLT && flag_pic)
	{
	  /* Compute the counter address PC-relatively via bcl/mflr,
	     using r12 as a scratch register.  */
	  asm_fprintf (file, "\tbcl 20,31,1f\n1:\n\t{st|stw} %s,4(%s)\n",
		       reg_names[0], reg_names[1]);
	  asm_fprintf (file, "\tmflr %s\n", reg_names[12]);
	  asm_fprintf (file, "\t{cau|addis} %s,%s,",
		       reg_names[12], reg_names[12]);
	  assemble_name (file, buf);
	  asm_fprintf (file, "-1b@ha\n\t{cal|la} %s,", reg_names[0]);
	  assemble_name (file, buf);
	  asm_fprintf (file, "-1b@l(%s)\n", reg_names[12]);
	}
      else if (flag_pic == 1)
	{
	  /* Small-model PIC: load the counter address from the GOT.  */
	  fputs ("\tbl _GLOBAL_OFFSET_TABLE_@local-4\n", file);
	  asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
		       reg_names[0], reg_names[1]);
	  asm_fprintf (file, "\tmflr %s\n", reg_names[12]);
	  asm_fprintf (file, "\t{l|lwz} %s,", reg_names[0]);
	  assemble_name (file, buf);
	  asm_fprintf (file, "@got(%s)\n", reg_names[12]);
	}
      else if (flag_pic > 1)
	{
	  asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
		       reg_names[0], reg_names[1]);
	  /* Now, we need to get the address of the label.  */
	  fputs ("\tbcl 20,31,1f\n\t.long ", file);
	  assemble_name (file, buf);
	  fputs ("-.\n1:", file);
	  asm_fprintf (file, "\tmflr %s\n", reg_names[11]);
	  asm_fprintf (file, "\t{l|lwz} %s,0(%s)\n",
		       reg_names[0], reg_names[11]);
	  asm_fprintf (file, "\t{cax|add} %s,%s,%s\n",
		       reg_names[0], reg_names[0], reg_names[11]);
	}
      else
	{
	  /* Non-PIC: materialize the counter address with lis/la.  */
	  asm_fprintf (file, "\t{liu|lis} %s,", reg_names[12]);
	  assemble_name (file, buf);
	  fputs ("@ha\n", file);
	  asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
		       reg_names[0], reg_names[1]);
	  asm_fprintf (file, "\t{cal|la} %s,", reg_names[0]);
	  assemble_name (file, buf);
	  asm_fprintf (file, "@l(%s)\n", reg_names[12]);
	}

      /* ABI_V4 saves the static chain reg with ASM_OUTPUT_REG_PUSH.  */
      fprintf (file, "\tbl %s%s\n",
	       RS6000_MCOUNT, flag_pic ? "@plt" : "");
      break;

    case ABI_AIX:
    case ABI_DARWIN:
      if (!TARGET_PROFILE_KERNEL)
	{
	  /* Don't do anything, done in output_profile_hook ().  */
	}
      else
	{
	  gcc_assert (!TARGET_32BIT);

	  /* Kernel profiling: save LR at 16(r1), and the static chain
	     register at 24(r1) around the _mcount call if one is live.  */
	  asm_fprintf (file, "\tmflr %s\n", reg_names[0]);
	  asm_fprintf (file, "\tstd %s,16(%s)\n", reg_names[0], reg_names[1]);

	  if (cfun->static_chain_decl != NULL)
	    {
	      asm_fprintf (file, "\tstd %s,24(%s)\n",
			   reg_names[STATIC_CHAIN_REGNUM], reg_names[1]);
	      fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
	      asm_fprintf (file, "\tld %s,24(%s)\n",
			   reg_names[STATIC_CHAIN_REGNUM], reg_names[1]);
	    }
	  else
	    fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
	}
      break;
    }
}
16538
 
16539
 
16540
/* Power4 load update and store update instructions are cracked into a
16541
   load or store and an integer insn which are executed in the same cycle.
16542
   Branches have their own dispatch slot which does not count against the
16543
   GCC issue rate, but it changes the program flow so there are no other
16544
   instructions to issue in this cycle.  */
16545
 
16546
static int
16547
rs6000_variable_issue (FILE *stream ATTRIBUTE_UNUSED,
16548
                       int verbose ATTRIBUTE_UNUSED,
16549
                       rtx insn, int more)
16550
{
16551
  if (GET_CODE (PATTERN (insn)) == USE
16552
      || GET_CODE (PATTERN (insn)) == CLOBBER)
16553
    return more;
16554
 
16555
  if (rs6000_sched_groups)
16556
    {
16557
      if (is_microcoded_insn (insn))
16558
        return 0;
16559
      else if (is_cracked_insn (insn))
16560
        return more > 2 ? more - 2 : 0;
16561
    }
16562
 
16563
  return more - 1;
16564
}
16565
 
16566
/* Adjust the cost of a scheduling dependency.  Return the new cost of
16567
   a dependency LINK or INSN on DEP_INSN.  COST is the current cost.  */
16568
 
16569
static int
16570
rs6000_adjust_cost (rtx insn, rtx link, rtx dep_insn, int cost)
16571
{
16572
  if (! recog_memoized (insn))
16573
    return 0;
16574
 
16575
  if (REG_NOTE_KIND (link) != 0)
16576
    return 0;
16577
 
16578
  if (REG_NOTE_KIND (link) == 0)
16579
    {
16580
      /* Data dependency; DEP_INSN writes a register that INSN reads
16581
         some cycles later.  */
16582
 
16583
      /* Separate a load from a narrower, dependent store.  */
16584
      if (rs6000_sched_groups
16585
          && GET_CODE (PATTERN (insn)) == SET
16586
          && GET_CODE (PATTERN (dep_insn)) == SET
16587
          && GET_CODE (XEXP (PATTERN (insn), 1)) == MEM
16588
          && GET_CODE (XEXP (PATTERN (dep_insn), 0)) == MEM
16589
          && (GET_MODE_SIZE (GET_MODE (XEXP (PATTERN (insn), 1)))
16590
              > GET_MODE_SIZE (GET_MODE (XEXP (PATTERN (dep_insn), 0)))))
16591
        return cost + 14;
16592
 
16593
      switch (get_attr_type (insn))
16594
        {
16595
        case TYPE_JMPREG:
16596
          /* Tell the first scheduling pass about the latency between
16597
             a mtctr and bctr (and mtlr and br/blr).  The first
16598
             scheduling pass will not know about this latency since
16599
             the mtctr instruction, which has the latency associated
16600
             to it, will be generated by reload.  */
16601
          return TARGET_POWER ? 5 : 4;
16602
        case TYPE_BRANCH:
16603
          /* Leave some extra cycles between a compare and its
16604
             dependent branch, to inhibit expensive mispredicts.  */
16605
          if ((rs6000_cpu_attr == CPU_PPC603
16606
               || rs6000_cpu_attr == CPU_PPC604
16607
               || rs6000_cpu_attr == CPU_PPC604E
16608
               || rs6000_cpu_attr == CPU_PPC620
16609
               || rs6000_cpu_attr == CPU_PPC630
16610
               || rs6000_cpu_attr == CPU_PPC750
16611
               || rs6000_cpu_attr == CPU_PPC7400
16612
               || rs6000_cpu_attr == CPU_PPC7450
16613
               || rs6000_cpu_attr == CPU_POWER4
16614
               || rs6000_cpu_attr == CPU_POWER5)
16615
              && recog_memoized (dep_insn)
16616
              && (INSN_CODE (dep_insn) >= 0)
16617
              && (get_attr_type (dep_insn) == TYPE_CMP
16618
                  || get_attr_type (dep_insn) == TYPE_COMPARE
16619
                  || get_attr_type (dep_insn) == TYPE_DELAYED_COMPARE
16620
                  || get_attr_type (dep_insn) == TYPE_IMUL_COMPARE
16621
                  || get_attr_type (dep_insn) == TYPE_LMUL_COMPARE
16622
                  || get_attr_type (dep_insn) == TYPE_FPCOMPARE
16623
                  || get_attr_type (dep_insn) == TYPE_CR_LOGICAL
16624
                  || get_attr_type (dep_insn) == TYPE_DELAYED_CR))
16625
            return cost + 2;
16626
        default:
16627
          break;
16628
        }
16629
      /* Fall out to return default cost.  */
16630
    }
16631
 
16632
  return cost;
16633
}
16634
 
16635
/* The function returns a true if INSN is microcoded.
16636
   Return false otherwise.  */
16637
 
16638
static bool
16639
is_microcoded_insn (rtx insn)
16640
{
16641
  if (!insn || !INSN_P (insn)
16642
      || GET_CODE (PATTERN (insn)) == USE
16643
      || GET_CODE (PATTERN (insn)) == CLOBBER)
16644
    return false;
16645
 
16646
  if (rs6000_sched_groups)
16647
    {
16648
      enum attr_type type = get_attr_type (insn);
16649
      if (type == TYPE_LOAD_EXT_U
16650
          || type == TYPE_LOAD_EXT_UX
16651
          || type == TYPE_LOAD_UX
16652
          || type == TYPE_STORE_UX
16653
          || type == TYPE_MFCR)
16654
        return true;
16655
    }
16656
 
16657
  return false;
16658
}
16659
 
16660
/* The function returns a nonzero value if INSN can be scheduled only
16661
   as the first insn in a dispatch group ("dispatch-slot restricted").
16662
   In this case, the returned value indicates how many dispatch slots
16663
   the insn occupies (at the beginning of the group).
16664
   Return 0 otherwise.  */
16665
 
16666
static int
16667
is_dispatch_slot_restricted (rtx insn)
16668
{
16669
  enum attr_type type;
16670
 
16671
  if (!rs6000_sched_groups)
16672
    return 0;
16673
 
16674
  if (!insn
16675
      || insn == NULL_RTX
16676
      || GET_CODE (insn) == NOTE
16677
      || GET_CODE (PATTERN (insn)) == USE
16678
      || GET_CODE (PATTERN (insn)) == CLOBBER)
16679
    return 0;
16680
 
16681
  type = get_attr_type (insn);
16682
 
16683
  switch (type)
16684
    {
16685
    case TYPE_MFCR:
16686
    case TYPE_MFCRF:
16687
    case TYPE_MTCR:
16688
    case TYPE_DELAYED_CR:
16689
    case TYPE_CR_LOGICAL:
16690
    case TYPE_MTJMPR:
16691
    case TYPE_MFJMPR:
16692
      return 1;
16693
    case TYPE_IDIV:
16694
    case TYPE_LDIV:
16695
      return 2;
16696
    case TYPE_LOAD_L:
16697
    case TYPE_STORE_C:
16698
    case TYPE_ISYNC:
16699
    case TYPE_SYNC:
16700
      return 4;
16701
    default:
16702
      if (rs6000_cpu == PROCESSOR_POWER5
16703
          && is_cracked_insn (insn))
16704
        return 2;
16705
      return 0;
16706
    }
16707
}
16708
 
16709
/* The function returns true if INSN is cracked into 2 instructions
16710
   by the processor (and therefore occupies 2 issue slots).  */
16711
 
16712
static bool
16713
is_cracked_insn (rtx insn)
16714
{
16715
  if (!insn || !INSN_P (insn)
16716
      || GET_CODE (PATTERN (insn)) == USE
16717
      || GET_CODE (PATTERN (insn)) == CLOBBER)
16718
    return false;
16719
 
16720
  if (rs6000_sched_groups)
16721
    {
16722
      enum attr_type type = get_attr_type (insn);
16723
      if (type == TYPE_LOAD_U || type == TYPE_STORE_U
16724
          || type == TYPE_FPLOAD_U || type == TYPE_FPSTORE_U
16725
          || type == TYPE_FPLOAD_UX || type == TYPE_FPSTORE_UX
16726
          || type == TYPE_LOAD_EXT || type == TYPE_DELAYED_CR
16727
          || type == TYPE_COMPARE || type == TYPE_DELAYED_COMPARE
16728
          || type == TYPE_IMUL_COMPARE || type == TYPE_LMUL_COMPARE
16729
          || type == TYPE_IDIV || type == TYPE_LDIV
16730
          || type == TYPE_INSERT_WORD)
16731
        return true;
16732
    }
16733
 
16734
  return false;
16735
}
16736
 
16737
/* The function returns true if INSN can be issued only from
16738
   the branch slot.  */
16739
 
16740
static bool
16741
is_branch_slot_insn (rtx insn)
16742
{
16743
  if (!insn || !INSN_P (insn)
16744
      || GET_CODE (PATTERN (insn)) == USE
16745
      || GET_CODE (PATTERN (insn)) == CLOBBER)
16746
    return false;
16747
 
16748
  if (rs6000_sched_groups)
16749
    {
16750
      enum attr_type type = get_attr_type (insn);
16751
      if (type == TYPE_BRANCH || type == TYPE_JMPREG)
16752
        return true;
16753
      return false;
16754
    }
16755
 
16756
  return false;
16757
}
16758
 
16759
/* A C statement (sans semicolon) to update the integer scheduling
16760
   priority INSN_PRIORITY (INSN). Increase the priority to execute the
16761
   INSN earlier, reduce the priority to execute INSN later.  Do not
16762
   define this macro if you do not need to adjust the scheduling
16763
   priorities of insns.  */
16764
 
16765
static int
16766
rs6000_adjust_priority (rtx insn ATTRIBUTE_UNUSED, int priority)
16767
{
16768
  /* On machines (like the 750) which have asymmetric integer units,
16769
     where one integer unit can do multiply and divides and the other
16770
     can't, reduce the priority of multiply/divide so it is scheduled
16771
     before other integer operations.  */
16772
 
16773
#if 0
16774
  if (! INSN_P (insn))
16775
    return priority;
16776
 
16777
  if (GET_CODE (PATTERN (insn)) == USE)
16778
    return priority;
16779
 
16780
  switch (rs6000_cpu_attr) {
16781
  case CPU_PPC750:
16782
    switch (get_attr_type (insn))
16783
      {
16784
      default:
16785
        break;
16786
 
16787
      case TYPE_IMUL:
16788
      case TYPE_IDIV:
16789
        fprintf (stderr, "priority was %#x (%d) before adjustment\n",
16790
                 priority, priority);
16791
        if (priority >= 0 && priority < 0x01000000)
16792
          priority >>= 3;
16793
        break;
16794
      }
16795
  }
16796
#endif
16797
 
16798
  if (is_dispatch_slot_restricted (insn)
16799
      && reload_completed
16800
      && current_sched_info->sched_max_insns_priority
16801
      && rs6000_sched_restricted_insns_priority)
16802
    {
16803
 
16804
      /* Prioritize insns that can be dispatched only in the first
16805
         dispatch slot.  */
16806
      if (rs6000_sched_restricted_insns_priority == 1)
16807
        /* Attach highest priority to insn. This means that in
16808
           haifa-sched.c:ready_sort(), dispatch-slot restriction considerations
16809
           precede 'priority' (critical path) considerations.  */
16810
        return current_sched_info->sched_max_insns_priority;
16811
      else if (rs6000_sched_restricted_insns_priority == 2)
16812
        /* Increase priority of insn by a minimal amount. This means that in
16813
           haifa-sched.c:ready_sort(), only 'priority' (critical path)
16814
           considerations precede dispatch-slot restriction considerations.  */
16815
        return (priority + 1);
16816
    }
16817
 
16818
  return priority;
16819
}
16820
 
16821
/* Return how many instructions the machine can issue per cycle.  */
16822
 
16823
static int
16824
rs6000_issue_rate (void)
16825
{
16826
  /* Use issue rate of 1 for first scheduling pass to decrease degradation.  */
16827
  if (!reload_completed)
16828
    return 1;
16829
 
16830
  switch (rs6000_cpu_attr) {
16831
  case CPU_RIOS1:  /* ? */
16832
  case CPU_RS64A:
16833
  case CPU_PPC601: /* ? */
16834
  case CPU_PPC7450:
16835
    return 3;
16836
  case CPU_PPC440:
16837
  case CPU_PPC603:
16838
  case CPU_PPC750:
16839
  case CPU_PPC7400:
16840
  case CPU_PPC8540:
16841
    return 2;
16842
  case CPU_RIOS2:
16843
  case CPU_PPC604:
16844
  case CPU_PPC604E:
16845
  case CPU_PPC620:
16846
  case CPU_PPC630:
16847
    return 4;
16848
  case CPU_POWER4:
16849
  case CPU_POWER5:
16850
    return 5;
16851
  default:
16852
    return 1;
16853
  }
16854
}
16855
 
16856
/* Return how many instructions to look ahead for better insn
16857
   scheduling.  */
16858
 
16859
static int
16860
rs6000_use_sched_lookahead (void)
16861
{
16862
  if (rs6000_cpu_attr == CPU_PPC8540)
16863
    return 4;
16864
  return 0;
16865
}
16866
 
16867
/* Determine is PAT refers to memory.  */
16868
 
16869
static bool
16870
is_mem_ref (rtx pat)
16871
{
16872
  const char * fmt;
16873
  int i, j;
16874
  bool ret = false;
16875
 
16876
  if (GET_CODE (pat) == MEM)
16877
    return true;
16878
 
16879
  /* Recursively process the pattern.  */
16880
  fmt = GET_RTX_FORMAT (GET_CODE (pat));
16881
 
16882
  for (i = GET_RTX_LENGTH (GET_CODE (pat)) - 1; i >= 0 && !ret; i--)
16883
    {
16884
      if (fmt[i] == 'e')
16885
        ret |= is_mem_ref (XEXP (pat, i));
16886
      else if (fmt[i] == 'E')
16887
        for (j = XVECLEN (pat, i) - 1; j >= 0; j--)
16888
          ret |= is_mem_ref (XVECEXP (pat, i, j));
16889
    }
16890
 
16891
  return ret;
16892
}
16893
 
16894
/* Determine if PAT is a PATTERN of a load insn.  */
16895
 
16896
static bool
16897
is_load_insn1 (rtx pat)
16898
{
16899
  if (!pat || pat == NULL_RTX)
16900
    return false;
16901
 
16902
  if (GET_CODE (pat) == SET)
16903
    return is_mem_ref (SET_SRC (pat));
16904
 
16905
  if (GET_CODE (pat) == PARALLEL)
16906
    {
16907
      int i;
16908
 
16909
      for (i = 0; i < XVECLEN (pat, 0); i++)
16910
        if (is_load_insn1 (XVECEXP (pat, 0, i)))
16911
          return true;
16912
    }
16913
 
16914
  return false;
16915
}
16916
 
16917
/* Determine if INSN loads from memory.  */
16918
 
16919
static bool
16920
is_load_insn (rtx insn)
16921
{
16922
  if (!insn || !INSN_P (insn))
16923
    return false;
16924
 
16925
  if (GET_CODE (insn) == CALL_INSN)
16926
    return false;
16927
 
16928
  return is_load_insn1 (PATTERN (insn));
16929
}
16930
 
16931
/* Determine if PAT is a PATTERN of a store insn.  */
16932
 
16933
static bool
16934
is_store_insn1 (rtx pat)
16935
{
16936
  if (!pat || pat == NULL_RTX)
16937
    return false;
16938
 
16939
  if (GET_CODE (pat) == SET)
16940
    return is_mem_ref (SET_DEST (pat));
16941
 
16942
  if (GET_CODE (pat) == PARALLEL)
16943
    {
16944
      int i;
16945
 
16946
      for (i = 0; i < XVECLEN (pat, 0); i++)
16947
        if (is_store_insn1 (XVECEXP (pat, 0, i)))
16948
          return true;
16949
    }
16950
 
16951
  return false;
16952
}
16953
 
16954
/* Determine if INSN stores to memory.  */
16955
 
16956
static bool
16957
is_store_insn (rtx insn)
16958
{
16959
  if (!insn || !INSN_P (insn))
16960
    return false;
16961
 
16962
  return is_store_insn1 (PATTERN (insn));
16963
}
16964
 
16965
/* Return whether the dependence between INSN and NEXT is considered
   costly by the given target.  LINK is the dependence link (or NULL),
   COST is the scheduler's latency for the dependence, and DISTANCE is
   the issue distance between the two insns.  The policy is selected by
   rs6000_sched_costly_dep (-msched-costly-dep).  */

static bool
rs6000_is_costly_dependence (rtx insn, rtx next, rtx link, int cost,
                             int distance)
{
  /* If the flag is not enabled - no dependence is considered costly;
     allow all dependent insns in the same group.
     This is the most aggressive option.  */
  if (rs6000_sched_costly_dep == no_dep_costly)
    return false;

  /* If the flag is set to 1 - a dependence is always considered costly;
     do not allow dependent instructions in the same group.
     This is the most conservative option.  */
  if (rs6000_sched_costly_dep == all_deps_costly)
    return true;

  if (rs6000_sched_costly_dep == store_to_load_dep_costly
      && is_load_insn (next)
      && is_store_insn (insn))
    /* Prevent load after store in the same group.  */
    return true;

  if (rs6000_sched_costly_dep == true_store_to_load_dep_costly
      && is_load_insn (next)
      && is_store_insn (insn)
      && (!link || (int) REG_NOTE_KIND (link) == 0))
     /* Prevent load after store in the same group if it is a true
        dependence (REG_NOTE_KIND of 0 denotes a true data dependence).  */
     return true;

  /* The flag is set to X; dependences with latency >= X are considered costly,
     and will not be scheduled in the same group.  */
  if (rs6000_sched_costly_dep <= max_dep_latency
      && ((cost - distance) >= (int)rs6000_sched_costly_dep))
    return true;

  return false;
}
17006
 
17007
/* Return the next insn after INSN that is found before TAIL is reached,
17008
   skipping any "non-active" insns - insns that will not actually occupy
17009
   an issue slot.  Return NULL_RTX if such an insn is not found.  */
17010
 
17011
static rtx
17012
get_next_active_insn (rtx insn, rtx tail)
17013
{
17014
  if (insn == NULL_RTX || insn == tail)
17015
    return NULL_RTX;
17016
 
17017
  while (1)
17018
    {
17019
      insn = NEXT_INSN (insn);
17020
      if (insn == NULL_RTX || insn == tail)
17021
        return NULL_RTX;
17022
 
17023
      if (CALL_P (insn)
17024
          || JUMP_P (insn)
17025
          || (NONJUMP_INSN_P (insn)
17026
              && GET_CODE (PATTERN (insn)) != USE
17027
              && GET_CODE (PATTERN (insn)) != CLOBBER
17028
              && INSN_CODE (insn) != CODE_FOR_stack_tie))
17029
        break;
17030
    }
17031
  return insn;
17032
}
17033
 
17034
/* Return whether the presence of INSN causes a dispatch group termination
17035
   of group WHICH_GROUP.
17036
 
17037
   If WHICH_GROUP == current_group, this function will return true if INSN
17038
   causes the termination of the current group (i.e, the dispatch group to
17039
   which INSN belongs). This means that INSN will be the last insn in the
17040
   group it belongs to.
17041
 
17042
   If WHICH_GROUP == previous_group, this function will return true if INSN
17043
   causes the termination of the previous group (i.e, the dispatch group that
17044
   precedes the group to which INSN belongs).  This means that INSN will be
17045
   the first insn in the group it belongs to).  */
17046
 
17047
static bool
17048
insn_terminates_group_p (rtx insn, enum group_termination which_group)
17049
{
17050
  enum attr_type type;
17051
 
17052
  if (! insn)
17053
    return false;
17054
 
17055
  type = get_attr_type (insn);
17056
 
17057
  if (is_microcoded_insn (insn))
17058
    return true;
17059
 
17060
  if (which_group == current_group)
17061
    {
17062
      if (is_branch_slot_insn (insn))
17063
        return true;
17064
      return false;
17065
    }
17066
  else if (which_group == previous_group)
17067
    {
17068
      if (is_dispatch_slot_restricted (insn))
17069
        return true;
17070
      return false;
17071
    }
17072
 
17073
  return false;
17074
}
17075
 
17076
/* Return true if it is recommended to keep NEXT_INSN "far" (in a separate
   dispatch group) from the insns in GROUP_INSNS.  Return false otherwise.
   GROUP_INSNS holds the insns already placed in the current group, one
   entry per issue slot (entries may be NULL).  */

static bool
is_costly_group (rtx *group_insns, rtx next_insn)
{
  int i;
  rtx link;
  int cost;
  int issue_rate = rs6000_issue_rate ();

  /* Check every insn already in the group for a costly dependence
     on NEXT_INSN.  */
  for (i = 0; i < issue_rate; i++)
    {
      rtx insn = group_insns[i];
      if (!insn)
        continue;
      /* Walk the forward-dependence list of INSN looking for NEXT_INSN.  */
      for (link = INSN_DEPEND (insn); link != 0; link = XEXP (link, 1))
        {
          rtx next = XEXP (link, 0);
          if (next == next_insn)
            {
              cost = insn_cost (insn, link, next_insn);
              if (rs6000_is_costly_dependence (insn, next_insn, link, cost, 0))
                return true;
            }
        }
    }

  return false;
}
17106
 
17107
/* Utility of the function redefine_groups.
   Check if it is too costly to schedule NEXT_INSN together with GROUP_INSNS
   in the same dispatch group.  If so, insert nops before NEXT_INSN, in order
   to keep it "far" (in a separate group) from GROUP_INSNS, following
   one of the following schemes, depending on the value of the flag
   -minsert_sched_nops = X:
   (1) X == sched_finish_regroup_exact: insert exactly as many nops as needed
       in order to force NEXT_INSN into a separate group.
   (2) X < sched_finish_regroup_exact: insert exactly X nops.
   GROUP_END, CAN_ISSUE_MORE and GROUP_COUNT record the state after nop
   insertion (has a group just ended, how many vacant issue slots remain in the
   last group, and how many dispatch groups were encountered so far).
   Returns the updated can_issue_more value.  */

static int
force_new_group (int sched_verbose, FILE *dump, rtx *group_insns,
                 rtx next_insn, bool *group_end, int can_issue_more,
                 int *group_count)
{
  rtx nop;
  bool force;
  int issue_rate = rs6000_issue_rate ();
  bool end = *group_end;
  int i;

  if (next_insn == NULL_RTX)
    return can_issue_more;

  /* Values above sched_finish_regroup_exact request no nop insertion.  */
  if (rs6000_sched_insert_nops > sched_finish_regroup_exact)
    return can_issue_more;

  force = is_costly_group (group_insns, next_insn);
  if (!force)
    return can_issue_more;

  if (sched_verbose > 6)
    fprintf (dump,"force: group count = %d, can_issue_more = %d\n",
             *group_count ,can_issue_more);

  /* Scheme (1): fill all remaining slots of the current group with nops
     so that NEXT_INSN starts a fresh group.  */
  if (rs6000_sched_insert_nops == sched_finish_regroup_exact)
    {
      if (*group_end)
        can_issue_more = 0;

      /* Since only a branch can be issued in the last issue_slot, it is
         sufficient to insert 'can_issue_more - 1' nops if next_insn is not
         a branch. If next_insn is a branch, we insert 'can_issue_more' nops;
         in this case the last nop will start a new group and the branch
         will be forced to the new group.  */
      if (can_issue_more && !is_branch_slot_insn (next_insn))
        can_issue_more--;

      while (can_issue_more > 0)
        {
          nop = gen_nop ();
          emit_insn_before (nop, next_insn);
          can_issue_more--;
        }

      *group_end = true;
      return 0;
    }

  /* Scheme (2): insert exactly rs6000_sched_insert_nops nops, tracking
     group boundaries as they fill up.  */
  if (rs6000_sched_insert_nops < sched_finish_regroup_exact)
    {
      int n_nops = rs6000_sched_insert_nops;

      /* Nops can't be issued from the branch slot, so the effective
         issue_rate for nops is 'issue_rate - 1'.  */
      if (can_issue_more == 0)
        can_issue_more = issue_rate;
      can_issue_more--;
      if (can_issue_more == 0)
        {
          can_issue_more = issue_rate - 1;
          (*group_count)++;
          end = true;
          for (i = 0; i < issue_rate; i++)
            {
              group_insns[i] = 0;
            }
        }

      while (n_nops > 0)
        {
          nop = gen_nop ();
          emit_insn_before (nop, next_insn);
          if (can_issue_more == issue_rate - 1) /* new group begins */
            end = false;
          can_issue_more--;
          if (can_issue_more == 0)
            {
              can_issue_more = issue_rate - 1;
              (*group_count)++;
              end = true;
              for (i = 0; i < issue_rate; i++)
                {
                  group_insns[i] = 0;
                }
            }
          n_nops--;
        }

      /* Scale back relative to 'issue_rate' (instead of 'issue_rate - 1').  */
      can_issue_more++;

      /* Is next_insn going to start a new group?  */
      *group_end
        = (end
           || (can_issue_more == 1 && !is_branch_slot_insn (next_insn))
           || (can_issue_more <= 2 && is_cracked_insn (next_insn))
           || (can_issue_more < issue_rate &&
               insn_terminates_group_p (next_insn, previous_group)));
      if (*group_end && end)
        (*group_count)--;

      if (sched_verbose > 6)
        fprintf (dump, "done force: group count = %d, can_issue_more = %d\n",
                 *group_count, can_issue_more);
      return can_issue_more;
    }

  return can_issue_more;
}
17230
 
17231
/* This function tries to synch the dispatch groups that the compiler "sees"
   with the dispatch groups that the processor dispatcher is expected to
   form in practice.  It tries to achieve this synchronization by forcing the
   estimated processor grouping on the compiler (as opposed to the function
   'pad_goups' which tries to force the scheduler's grouping on the processor).

   The function scans the insn sequence between PREV_HEAD_INSN and TAIL and
   examines the (estimated) dispatch groups that will be formed by the processor
   dispatcher.  It marks these group boundaries to reflect the estimated
   processor grouping, overriding the grouping that the scheduler had marked.
   Depending on the value of the flag '-minsert-sched-nops' this function can
   force certain insns into separate groups or force a certain distance between
   them by inserting nops, for example, if there exists a "costly dependence"
   between the insns.

   The function estimates the group boundaries that the processor will form as
   follows:  It keeps track of how many vacant issue slots are available after
   each insn.  A subsequent insn will start a new group if one of the following
   4 cases applies:
   - no more vacant issue slots remain in the current dispatch group.
   - only the last issue slot, which is the branch slot, is vacant, but the next
     insn is not a branch.
   - only the last 2 or less issue slots, including the branch slot, are vacant,
     which means that a cracked insn (which occupies two issue slots) can't be
     issued in this group.
   - less than 'issue_rate' slots are vacant, and the next insn always needs to
     start a new group.
   Returns the number of dispatch groups found.  */

static int
redefine_groups (FILE *dump, int sched_verbose, rtx prev_head_insn, rtx tail)
{
  rtx insn, next_insn;
  int issue_rate;
  int can_issue_more;
  int slot, i;
  bool group_end;
  int group_count = 0;
  rtx *group_insns;

  /* Initialize.  */
  issue_rate = rs6000_issue_rate ();
  group_insns = alloca (issue_rate * sizeof (rtx));
  for (i = 0; i < issue_rate; i++)
    {
      group_insns[i] = 0;
    }
  can_issue_more = issue_rate;
  slot = 0;
  insn = get_next_active_insn (prev_head_insn, tail);
  group_end = false;

  while (insn != NULL_RTX)
    {
      slot = (issue_rate - can_issue_more);
      group_insns[slot] = insn;
      can_issue_more =
        rs6000_variable_issue (dump, sched_verbose, insn, can_issue_more);
      if (insn_terminates_group_p (insn, current_group))
        can_issue_more = 0;

      next_insn = get_next_active_insn (insn, tail);
      if (next_insn == NULL_RTX)
        return group_count + 1;

      /* Is next_insn going to start a new group?  */
      group_end
        = (can_issue_more == 0
           || (can_issue_more == 1 && !is_branch_slot_insn (next_insn))
           || (can_issue_more <= 2 && is_cracked_insn (next_insn))
           || (can_issue_more < issue_rate &&
               insn_terminates_group_p (next_insn, previous_group)));

      /* Insert nops before next_insn if it has a costly dependence on the
         insns already in the group (may also update group_end/group_count).  */
      can_issue_more = force_new_group (sched_verbose, dump, group_insns,
                                        next_insn, &group_end, can_issue_more,
                                        &group_count);

      if (group_end)
        {
          group_count++;
          can_issue_more = 0;
          for (i = 0; i < issue_rate; i++)
            {
              group_insns[i] = 0;
            }
        }

      /* The scheduler marks the first insn of each group with TImode;
         update that marking to match the estimated processor grouping.  */
      if (GET_MODE (next_insn) == TImode && can_issue_more)
        PUT_MODE (next_insn, VOIDmode);
      else if (!can_issue_more && GET_MODE (next_insn) != TImode)
        PUT_MODE (next_insn, TImode);

      insn = next_insn;
      if (can_issue_more == 0)
        can_issue_more = issue_rate;
    } /* while */

  return group_count;
}
17329
 
17330
/* Scan the insn sequence between PREV_HEAD_INSN and TAIL and examine the
   dispatch group boundaries that the scheduler had marked.  Pad with nops
   any dispatch groups which have vacant issue slots, in order to force the
   scheduler's grouping on the processor dispatcher.  The function
   returns the number of dispatch groups found.  */

static int
pad_groups (FILE *dump, int sched_verbose, rtx prev_head_insn, rtx tail)
{
  rtx insn, next_insn;
  rtx nop;
  int issue_rate;
  int can_issue_more;
  int group_end;
  int group_count = 0;

  /* Initialize issue_rate.  */
  issue_rate = rs6000_issue_rate ();
  can_issue_more = issue_rate;

  insn = get_next_active_insn (prev_head_insn, tail);
  next_insn = get_next_active_insn (insn, tail);

  while (insn != NULL_RTX)
    {
      can_issue_more =
        rs6000_variable_issue (dump, sched_verbose, insn, can_issue_more);

      /* TImode on next_insn marks a scheduler-chosen group boundary.  */
      group_end = (next_insn == NULL_RTX || GET_MODE (next_insn) == TImode);

      if (next_insn == NULL_RTX)
        break;

      if (group_end)
        {
          /* If the scheduler had marked group termination at this location
             (between insn and next_insn), and neither insn nor next_insn will
             force group termination, pad the group with nops to force group
             termination.  */
          if (can_issue_more
              && (rs6000_sched_insert_nops == sched_finish_pad_groups)
              && !insn_terminates_group_p (insn, current_group)
              && !insn_terminates_group_p (next_insn, previous_group))
            {
              /* Nops can't go in the branch slot, so leave it for a branch.  */
              if (!is_branch_slot_insn (next_insn))
                can_issue_more--;

              while (can_issue_more)
                {
                  nop = gen_nop ();
                  emit_insn_before (nop, next_insn);
                  can_issue_more--;
                }
            }

          can_issue_more = issue_rate;
          group_count++;
        }

      insn = next_insn;
      next_insn = get_next_active_insn (insn, tail);
    }

  return group_count;
}
17395
 
17396
/* The following function is called at the end of scheduling BB.
17397
   After reload, it inserts nops at insn group bundling.  */
17398
 
17399
static void
17400
rs6000_sched_finish (FILE *dump, int sched_verbose)
17401
{
17402
  int n_groups;
17403
 
17404
  if (sched_verbose)
17405
    fprintf (dump, "=== Finishing schedule.\n");
17406
 
17407
  if (reload_completed && rs6000_sched_groups)
17408
    {
17409
      if (rs6000_sched_insert_nops == sched_finish_none)
17410
        return;
17411
 
17412
      if (rs6000_sched_insert_nops == sched_finish_pad_groups)
17413
        n_groups = pad_groups (dump, sched_verbose,
17414
                               current_sched_info->prev_head,
17415
                               current_sched_info->next_tail);
17416
      else
17417
        n_groups = redefine_groups (dump, sched_verbose,
17418
                                    current_sched_info->prev_head,
17419
                                    current_sched_info->next_tail);
17420
 
17421
      if (sched_verbose >= 6)
17422
        {
17423
          fprintf (dump, "ngroups = %d\n", n_groups);
17424
          print_rtl (dump, current_sched_info->prev_head);
17425
          fprintf (dump, "Done finish_sched\n");
17426
        }
17427
    }
17428
}
17429
 
17430
/* Length in units of the trampoline for entering a nested function.  */
17431
 
17432
int
17433
rs6000_trampoline_size (void)
17434
{
17435
  int ret = 0;
17436
 
17437
  switch (DEFAULT_ABI)
17438
    {
17439
    default:
17440
      gcc_unreachable ();
17441
 
17442
    case ABI_AIX:
17443
      ret = (TARGET_32BIT) ? 12 : 24;
17444
      break;
17445
 
17446
    case ABI_DARWIN:
17447
    case ABI_V4:
17448
      ret = (TARGET_32BIT) ? 40 : 48;
17449
      break;
17450
    }
17451
 
17452
  return ret;
17453
}
17454
 
17455
/* Emit RTL insns to initialize the variable parts of a trampoline.
   ADDR is an RTX for the address of the trampoline itself.
   FNADDR is an RTX for the address of the function's pure code.
   CXT is an RTX for the static chain value for the function.  */

void
rs6000_initialize_trampoline (rtx addr, rtx fnaddr, rtx cxt)
{
  int regsize = (TARGET_32BIT) ? 4 : 8;
  rtx ctx_reg = force_reg (Pmode, cxt);

  switch (DEFAULT_ABI)
    {
    default:
      gcc_unreachable ();

/* Macros to shorten the code expansions below.  */
#define MEM_DEREF(addr) gen_rtx_MEM (Pmode, memory_address (Pmode, addr))
#define MEM_PLUS(addr,offset) \
  gen_rtx_MEM (Pmode, memory_address (Pmode, plus_constant (addr, offset)))

    /* Under AIX, just build the 3 word function descriptor:
       code address, TOC pointer, and static chain.  */
    case ABI_AIX:
      {
        rtx fn_reg = gen_reg_rtx (Pmode);
        rtx toc_reg = gen_reg_rtx (Pmode);
        /* Copy the entry point and TOC from FNADDR's descriptor into
           the trampoline's descriptor, then store the static chain.  */
        emit_move_insn (fn_reg, MEM_DEREF (fnaddr));
        emit_move_insn (toc_reg, MEM_PLUS (fnaddr, regsize));
        emit_move_insn (MEM_DEREF (addr), fn_reg);
        emit_move_insn (MEM_PLUS (addr, regsize), toc_reg);
        emit_move_insn (MEM_PLUS (addr, 2*regsize), ctx_reg);
      }
      break;

    /* Under V.4/eabi/darwin, __trampoline_setup does the real work.  */
    case ABI_DARWIN:
    case ABI_V4:
      emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__trampoline_setup"),
                         FALSE, VOIDmode, 4,
                         addr, Pmode,
                         GEN_INT (rs6000_trampoline_size ()), SImode,
                         fnaddr, Pmode,
                         ctx_reg, Pmode);
      break;
    }

  return;
}
17502
 
17503
 
17504
/* Table of valid machine attributes.  Terminated by a NULL-name entry;
   subtargets may append their own entries via SUBTARGET_ATTRIBUTE_TABLE.  */

const struct attribute_spec rs6000_attribute_table[] =
{
  /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
  { "altivec",   1, 1, false, true,  false, rs6000_handle_altivec_attribute },
  { "longcall",  0, 0, false, true,  true,  rs6000_handle_longcall_attribute },
  /* "shortcall" shares the longcall handler; the handler distinguishes
     the two by attribute name.  */
  { "shortcall", 0, 0, false, true,  true,  rs6000_handle_longcall_attribute },
  { "ms_struct", 0, 0, false, false, false, rs6000_handle_struct_attribute },
  { "gcc_struct", 0, 0, false, false, false, rs6000_handle_struct_attribute },
#ifdef SUBTARGET_ATTRIBUTE_TABLE
  SUBTARGET_ATTRIBUTE_TABLE,
#endif
  { NULL,        0, 0, false, false, false, NULL }
};
17519
 
17520
/* Handle the "altivec" attribute.  The attribute may have
   arguments as follows:

        __attribute__((altivec(vector__)))
        __attribute__((altivec(pixel__)))       (always followed by 'unsigned short')
        __attribute__((altivec(bool__)))        (always followed by 'unsigned')

  and may appear more than once (e.g., 'vector bool char') in a
  given declaration.  */

static tree
rs6000_handle_altivec_attribute (tree *node,
                                 tree name ATTRIBUTE_UNUSED,
                                 tree args,
                                 int flags ATTRIBUTE_UNUSED,
                                 bool *no_add_attrs)
{
  tree type = *node, result = NULL_TREE;
  enum machine_mode mode;
  int unsigned_p;
  /* First character of the attribute argument identifier selects the
     flavor: 'v' (vector__), 'b' (bool__) or 'p' (pixel__); '?' when no
     identifier argument is present.  */
  char altivec_type
    = ((args && TREE_CODE (args) == TREE_LIST && TREE_VALUE (args)
        && TREE_CODE (TREE_VALUE (args)) == IDENTIFIER_NODE)
       ? *IDENTIFIER_POINTER (TREE_VALUE (args))
       : '?');

  /* Strip pointer, function, method and array wrappers to reach the
     underlying element type.  */
  while (POINTER_TYPE_P (type)
         || TREE_CODE (type) == FUNCTION_TYPE
         || TREE_CODE (type) == METHOD_TYPE
         || TREE_CODE (type) == ARRAY_TYPE)
    type = TREE_TYPE (type);

  mode = TYPE_MODE (type);

  /* Check for invalid AltiVec type qualifiers.  */
  if (type == long_unsigned_type_node || type == long_integer_type_node)
    {
    if (TARGET_64BIT)
      error ("use of %<long%> in AltiVec types is invalid for 64-bit code");
    else if (rs6000_warn_altivec_long)
      warning (0, "use of %<long%> in AltiVec types is deprecated; use %<int%>");
    }
  else if (type == long_long_unsigned_type_node
           || type == long_long_integer_type_node)
    error ("use of %<long long%> in AltiVec types is invalid");
  else if (type == double_type_node)
    error ("use of %<double%> in AltiVec types is invalid");
  else if (type == long_double_type_node)
    error ("use of %<long double%> in AltiVec types is invalid");
  else if (type == boolean_type_node)
    error ("use of boolean types in AltiVec types is invalid");
  else if (TREE_CODE (type) == COMPLEX_TYPE)
    error ("use of %<complex%> in AltiVec types is invalid");
  else if (DECIMAL_FLOAT_MODE_P (mode))
    error ("use of decimal floating point types in AltiVec types is invalid");

  /* Select the resulting vector type from the flavor and element mode.  */
  switch (altivec_type)
    {
    case 'v':
      unsigned_p = TYPE_UNSIGNED (type);
      switch (mode)
        {
        case SImode:
          result = (unsigned_p ? unsigned_V4SI_type_node : V4SI_type_node);
          break;
        case HImode:
          result = (unsigned_p ? unsigned_V8HI_type_node : V8HI_type_node);
          break;
        case QImode:
          result = (unsigned_p ? unsigned_V16QI_type_node : V16QI_type_node);
          break;
        case SFmode: result = V4SF_type_node; break;
          /* If the user says 'vector int bool', we may be handed the 'bool'
             attribute _before_ the 'vector' attribute, and so select the
             proper type in the 'b' case below.  */
        case V4SImode: case V8HImode: case V16QImode: case V4SFmode:
          result = type;
          /* Fall through.  */
        default: break;
        }
      break;
    case 'b':
      switch (mode)
        {
        case SImode: case V4SImode: result = bool_V4SI_type_node; break;
        case HImode: case V8HImode: result = bool_V8HI_type_node; break;
        case QImode: case V16QImode: result = bool_V16QI_type_node;
          /* Fall through.  */
        default: break;
        }
      break;
    case 'p':
      switch (mode)
        {
        case V8HImode: result = pixel_V8HI_type_node;
          /* Fall through.  */
        default: break;
        }
      /* Fall through.  */
    default: break;
    }

  /* Propagate const from the scalar element type to the vector type.  */
  if (result && result != type && TYPE_READONLY (type))
    result = build_qualified_type (result, TYPE_QUAL_CONST);

  *no_add_attrs = true;  /* No need to hang on to the attribute.  */

  if (result)
    *node = reconstruct_complex_type (*node, result);

  return NULL_TREE;
}
17628
 
17629
/* AltiVec defines four built-in scalar types that serve as vector
17630
   elements; we must teach the compiler how to mangle them.  */
17631
 
17632
static const char *
17633
rs6000_mangle_fundamental_type (tree type)
17634
{
17635
  if (type == bool_char_type_node) return "U6__boolc";
17636
  if (type == bool_short_type_node) return "U6__bools";
17637
  if (type == pixel_type_node) return "u7__pixel";
17638
  if (type == bool_int_type_node) return "U6__booli";
17639
 
17640
  /* Mangle IBM extended float long double as `g' (__float128) on
17641
     powerpc*-linux where long-double-64 previously was the default.  */
17642
  if (TYPE_MAIN_VARIANT (type) == long_double_type_node
17643
      && TARGET_ELF
17644
      && TARGET_LONG_DOUBLE_128
17645
      && !TARGET_IEEEQUAD)
17646
    return "g";
17647
 
17648
  /* For all other types, use normal C++ mangling.  */
17649
  return NULL;
17650
}
17651
 
17652
/* Handle a "longcall" or "shortcall" attribute; arguments as in
17653
   struct attribute_spec.handler.  */
17654
 
17655
static tree
17656
rs6000_handle_longcall_attribute (tree *node, tree name,
17657
                                  tree args ATTRIBUTE_UNUSED,
17658
                                  int flags ATTRIBUTE_UNUSED,
17659
                                  bool *no_add_attrs)
17660
{
17661
  if (TREE_CODE (*node) != FUNCTION_TYPE
17662
      && TREE_CODE (*node) != FIELD_DECL
17663
      && TREE_CODE (*node) != TYPE_DECL)
17664
    {
17665
      warning (OPT_Wattributes, "%qs attribute only applies to functions",
17666
               IDENTIFIER_POINTER (name));
17667
      *no_add_attrs = true;
17668
    }
17669
 
17670
  return NULL_TREE;
17671
}
17672
 
17673
/* Set longcall attributes on all functions declared when
17674
   rs6000_default_long_calls is true.  */
17675
static void
17676
rs6000_set_default_type_attributes (tree type)
17677
{
17678
  if (rs6000_default_long_calls
17679
      && (TREE_CODE (type) == FUNCTION_TYPE
17680
          || TREE_CODE (type) == METHOD_TYPE))
17681
    TYPE_ATTRIBUTES (type) = tree_cons (get_identifier ("longcall"),
17682
                                        NULL_TREE,
17683
                                        TYPE_ATTRIBUTES (type));
17684
 
17685
#if TARGET_MACHO
17686
  darwin_set_default_type_attributes (type);
17687
#endif
17688
}
17689
 
17690
/* Return a reference suitable for calling a function with the
17691
   longcall attribute.  */
17692
 
17693
rtx
17694
rs6000_longcall_ref (rtx call_ref)
17695
{
17696
  const char *call_name;
17697
  tree node;
17698
 
17699
  if (GET_CODE (call_ref) != SYMBOL_REF)
17700
    return call_ref;
17701
 
17702
  /* System V adds '.' to the internal name, so skip them.  */
17703
  call_name = XSTR (call_ref, 0);
17704
  if (*call_name == '.')
17705
    {
17706
      while (*call_name == '.')
17707
        call_name++;
17708
 
17709
      node = get_identifier (call_name);
17710
      call_ref = gen_rtx_SYMBOL_REF (VOIDmode, IDENTIFIER_POINTER (node));
17711
    }
17712
 
17713
  return force_reg (Pmode, call_ref);
17714
}
17715
 
17716
#ifndef TARGET_USE_MS_BITFIELD_LAYOUT
17717
#define TARGET_USE_MS_BITFIELD_LAYOUT 0
17718
#endif
17719
 
17720
/* Handle a "ms_struct" or "gcc_struct" attribute; arguments as in
17721
   struct attribute_spec.handler.  */
17722
static tree
17723
rs6000_handle_struct_attribute (tree *node, tree name,
17724
                                tree args ATTRIBUTE_UNUSED,
17725
                                int flags ATTRIBUTE_UNUSED, bool *no_add_attrs)
17726
{
17727
  tree *type = NULL;
17728
  if (DECL_P (*node))
17729
    {
17730
      if (TREE_CODE (*node) == TYPE_DECL)
17731
        type = &TREE_TYPE (*node);
17732
    }
17733
  else
17734
    type = node;
17735
 
17736
  if (!(type && (TREE_CODE (*type) == RECORD_TYPE
17737
                 || TREE_CODE (*type) == UNION_TYPE)))
17738
    {
17739
      warning (OPT_Wattributes, "%qs attribute ignored", IDENTIFIER_POINTER (name));
17740
      *no_add_attrs = true;
17741
    }
17742
 
17743
  else if ((is_attribute_p ("ms_struct", name)
17744
            && lookup_attribute ("gcc_struct", TYPE_ATTRIBUTES (*type)))
17745
           || ((is_attribute_p ("gcc_struct", name)
17746
                && lookup_attribute ("ms_struct", TYPE_ATTRIBUTES (*type)))))
17747
    {
17748
      warning (OPT_Wattributes, "%qs incompatible attribute ignored",
17749
               IDENTIFIER_POINTER (name));
17750
      *no_add_attrs = true;
17751
    }
17752
 
17753
  return NULL_TREE;
17754
}
17755
 
17756
static bool
17757
rs6000_ms_bitfield_layout_p (tree record_type)
17758
{
17759
  return (TARGET_USE_MS_BITFIELD_LAYOUT &&
17760
          !lookup_attribute ("gcc_struct", TYPE_ATTRIBUTES (record_type)))
17761
    || lookup_attribute ("ms_struct", TYPE_ATTRIBUTES (record_type));
17762
}
17763
 
17764
#ifdef USING_ELFOS_H
17765
 
17766
/* A get_unnamed_section callback, used for switching to toc_section.  */

static void
rs6000_elf_output_toc_section_asm_op (const void *data ATTRIBUTE_UNUSED)
{
  if (DEFAULT_ABI == ABI_AIX
      && TARGET_MINIMAL_TOC
      && !TARGET_RELOCATABLE)
    {
      if (!toc_initialized)
        {
          /* First switch under -mminimal-toc: emit the real TOC header
             once, then start the minimal-TOC section with LCTOC1 biased
             by 32768 (presumably so 16-bit signed offsets can span the
             whole table).  */
          toc_initialized = 1;
          fprintf (asm_out_file, "%s\n", TOC_SECTION_ASM_OP);
          (*targetm.asm_out.internal_label) (asm_out_file, "LCTOC", 0);
          fprintf (asm_out_file, "\t.tc ");
          ASM_OUTPUT_INTERNAL_LABEL_PREFIX (asm_out_file, "LCTOC1[TC],");
          ASM_OUTPUT_INTERNAL_LABEL_PREFIX (asm_out_file, "LCTOC1");
          fprintf (asm_out_file, "\n");

          fprintf (asm_out_file, "%s\n", MINIMAL_TOC_SECTION_ASM_OP);
          ASM_OUTPUT_INTERNAL_LABEL_PREFIX (asm_out_file, "LCTOC1");
          fprintf (asm_out_file, " = .+32768\n");
        }
      else
        fprintf (asm_out_file, "%s\n", MINIMAL_TOC_SECTION_ASM_OP);
    }
  else if (DEFAULT_ABI == ABI_AIX && !TARGET_RELOCATABLE)
    /* Full AIX-style TOC.  */
    fprintf (asm_out_file, "%s\n", TOC_SECTION_ASM_OP);
  else
    {
      /* V.4/eabi or -mrelocatable: always use the minimal TOC section,
         emitting the biased LCTOC1 label only the first time.  */
      fprintf (asm_out_file, "%s\n", MINIMAL_TOC_SECTION_ASM_OP);
      if (!toc_initialized)
        {
          ASM_OUTPUT_INTERNAL_LABEL_PREFIX (asm_out_file, "LCTOC1");
          fprintf (asm_out_file, " = .+32768\n");
          toc_initialized = 1;
        }
    }
}
17805
 
17806
/* Implement TARGET_ASM_INIT_SECTIONS.  */
17807
 
17808
static void
17809
rs6000_elf_asm_init_sections (void)
17810
{
17811
  toc_section
17812
    = get_unnamed_section (0, rs6000_elf_output_toc_section_asm_op, NULL);
17813
 
17814
  sdata2_section
17815
    = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
17816
                           SDATA2_SECTION_ASM_OP);
17817
}
17818
 
17819
/* Implement TARGET_SELECT_RTX_SECTION.  */
17820
 
17821
static section *
17822
rs6000_elf_select_rtx_section (enum machine_mode mode, rtx x,
17823
                               unsigned HOST_WIDE_INT align)
17824
{
17825
  if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
17826
    return toc_section;
17827
  else
17828
    return default_elf_select_rtx_section (mode, x, align);
17829
}
17830
 
17831
/* For a SYMBOL_REF, set generic flags and then perform some
17832
   target-specific processing.
17833
 
17834
   When the AIX ABI is requested on a non-AIX system, replace the
17835
   function name with the real name (with a leading .) rather than the
17836
   function descriptor name.  This saves a lot of overriding code to
17837
   read the prefixes.  */
17838
 
17839
static void
17840
rs6000_elf_encode_section_info (tree decl, rtx rtl, int first)
17841
{
17842
  default_encode_section_info (decl, rtl, first);
17843
 
17844
  if (first
17845
      && TREE_CODE (decl) == FUNCTION_DECL
17846
      && !TARGET_AIX
17847
      && DEFAULT_ABI == ABI_AIX)
17848
    {
17849
      rtx sym_ref = XEXP (rtl, 0);
17850
      size_t len = strlen (XSTR (sym_ref, 0));
17851
      char *str = alloca (len + 2);
17852
      str[0] = '.';
17853
      memcpy (str + 1, XSTR (sym_ref, 0), len + 1);
17854
      XSTR (sym_ref, 0) = ggc_alloc_string (str, len + 1);
17855
    }
17856
}
17857
 
17858
bool
17859
rs6000_elf_in_small_data_p (tree decl)
17860
{
17861
  if (rs6000_sdata == SDATA_NONE)
17862
    return false;
17863
 
17864
  /* We want to merge strings, so we never consider them small data.  */
17865
  if (TREE_CODE (decl) == STRING_CST)
17866
    return false;
17867
 
17868
  /* Functions are never in the small data area.  */
17869
  if (TREE_CODE (decl) == FUNCTION_DECL)
17870
    return false;
17871
 
17872
  if (TREE_CODE (decl) == VAR_DECL && DECL_SECTION_NAME (decl))
17873
    {
17874
      const char *section = TREE_STRING_POINTER (DECL_SECTION_NAME (decl));
17875
      if (strcmp (section, ".sdata") == 0
17876
          || strcmp (section, ".sdata2") == 0
17877
          || strcmp (section, ".sbss") == 0
17878
          || strcmp (section, ".sbss2") == 0
17879
          || strcmp (section, ".PPC.EMB.sdata0") == 0
17880
          || strcmp (section, ".PPC.EMB.sbss0") == 0)
17881
        return true;
17882
    }
17883
  else
17884
    {
17885
      HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (decl));
17886
 
17887
      if (size > 0
17888
          && (unsigned HOST_WIDE_INT) size <= g_switch_value
17889
          /* If it's not public, and we're not going to reference it there,
17890
             there's no need to put it in the small data section.  */
17891
          && (rs6000_sdata != SDATA_DATA || TREE_PUBLIC (decl)))
17892
        return true;
17893
    }
17894
 
17895
  return false;
17896
}
17897
 
17898
#endif /* USING_ELFOS_H */
17899
 
17900
/* Implement TARGET_USE_BLOCKS_FOR_CONSTANT_P.  */
17901
 
17902
static bool
17903
rs6000_use_blocks_for_constant_p (enum machine_mode mode, rtx x)
17904
{
17905
  return !ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode);
17906
}
17907
 
17908
/* Return a REG that occurs in ADDR with coefficient 1.
17909
   ADDR can be effectively incremented by incrementing REG.
17910
 
17911
   r0 is special and we must not select it as an address
17912
   register by this routine since our caller will try to
17913
   increment the returned register via an "la" instruction.  */
17914
 
17915
rtx
17916
find_addr_reg (rtx addr)
17917
{
17918
  while (GET_CODE (addr) == PLUS)
17919
    {
17920
      if (GET_CODE (XEXP (addr, 0)) == REG
17921
          && REGNO (XEXP (addr, 0)) != 0)
17922
        addr = XEXP (addr, 0);
17923
      else if (GET_CODE (XEXP (addr, 1)) == REG
17924
               && REGNO (XEXP (addr, 1)) != 0)
17925
        addr = XEXP (addr, 1);
17926
      else if (CONSTANT_P (XEXP (addr, 0)))
17927
        addr = XEXP (addr, 1);
17928
      else if (CONSTANT_P (XEXP (addr, 1)))
17929
        addr = XEXP (addr, 0);
17930
      else
17931
        gcc_unreachable ();
17932
    }
17933
  gcc_assert (GET_CODE (addr) == REG && REGNO (addr) != 0);
17934
  return addr;
17935
}
17936
 
17937
/* Report an internal compiler error for a malformed address RTX OP.  */

void
rs6000_fatal_bad_address (rtx op)
{
  fatal_insn ("bad address", op);
}
17942
 
17943
#if TARGET_MACHO
17944
 
17945
static tree branch_island_list = 0;
17946
 
17947
/* Remember to generate a branch island for far calls to the given
17948
   function.  */
17949
 
17950
static void
17951
add_compiler_branch_island (tree label_name, tree function_name,
17952
                            int line_number)
17953
{
17954
  tree branch_island = build_tree_list (function_name, label_name);
17955
  TREE_TYPE (branch_island) = build_int_cst (NULL_TREE, line_number);
17956
  TREE_CHAIN (branch_island) = branch_island_list;
17957
  branch_island_list = branch_island;
17958
}
17959
 
17960
#define BRANCH_ISLAND_LABEL_NAME(BRANCH_ISLAND)     TREE_VALUE (BRANCH_ISLAND)
17961
#define BRANCH_ISLAND_FUNCTION_NAME(BRANCH_ISLAND)  TREE_PURPOSE (BRANCH_ISLAND)
17962
#define BRANCH_ISLAND_LINE_NUMBER(BRANCH_ISLAND)    \
17963
                TREE_INT_CST_LOW (TREE_TYPE (BRANCH_ISLAND))
17964
 
17965
/* Generate far-jump branch islands for everything on the
   branch_island_list.  Invoked immediately after the last instruction
   of the epilogue has been emitted; the branch-islands must be
   appended to, and contiguous with, the function body.  Mach-O stubs
   are generated in machopic_output_stub().  */

static void
macho_branch_islands (void)
{
  char tmp_buf[512];
  tree branch_island;

  for (branch_island = branch_island_list;
       branch_island;
       branch_island = TREE_CHAIN (branch_island))
    {
      const char *label =
        IDENTIFIER_POINTER (BRANCH_ISLAND_LABEL_NAME (branch_island));
      const char *name  =
        IDENTIFIER_POINTER (BRANCH_ISLAND_FUNCTION_NAME (branch_island));
      char name_buf[512];
      /* Cheap copy of the details from the Darwin ASM_OUTPUT_LABELREF().  */
      if (name[0] == '*' || name[0] == '&')
        strcpy (name_buf, name+1);
      else
        {
          name_buf[0] = '_';
          strcpy (name_buf+1, name);
        }
      /* Assemble the island text in tmp_buf: the island's label
         followed by a far jump to NAME through the count register.  */
      strcpy (tmp_buf, "\n");
      strcat (tmp_buf, label);
#if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
      if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
        dbxout_stabd (N_SLINE, BRANCH_ISLAND_LINE_NUMBER (branch_island));
#endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
      if (flag_pic)
        {
          /* PIC island: get the PC via bcl (saving the real LR in r0
             and restoring it afterwards), then form NAME's address
             relative to that PC in r12.  */
          strcat (tmp_buf, ":\n\tmflr r0\n\tbcl 20,31,");
          strcat (tmp_buf, label);
          strcat (tmp_buf, "_pic\n");
          strcat (tmp_buf, label);
          strcat (tmp_buf, "_pic:\n\tmflr r11\n");

          strcat (tmp_buf, "\taddis r11,r11,ha16(");
          strcat (tmp_buf, name_buf);
          strcat (tmp_buf, " - ");
          strcat (tmp_buf, label);
          strcat (tmp_buf, "_pic)\n");

          strcat (tmp_buf, "\tmtlr r0\n");

          strcat (tmp_buf, "\taddi r12,r11,lo16(");
          strcat (tmp_buf, name_buf);
          strcat (tmp_buf, " - ");
          strcat (tmp_buf, label);
          strcat (tmp_buf, "_pic)\n");

          strcat (tmp_buf, "\tmtctr r12\n\tbctr\n");
        }
      else
        {
          /* Non-PIC island: load NAME's absolute address into r12.  */
          strcat (tmp_buf, ":\nlis r12,hi16(");
          strcat (tmp_buf, name_buf);
          strcat (tmp_buf, ")\n\tori r12,r12,lo16(");
          strcat (tmp_buf, name_buf);
          strcat (tmp_buf, ")\n\tmtctr r12\n\tbctr");
        }
      output_asm_insn (tmp_buf, 0);
#if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
      if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
        dbxout_stabd (N_SLINE, BRANCH_ISLAND_LINE_NUMBER (branch_island));
#endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
    }

  /* All recorded islands have been emitted; reset for the next function.  */
  branch_island_list = 0;
}
18041
 
18042
/* NO_PREVIOUS_DEF checks in the link list whether the function name is
18043
   already there or not.  */
18044
 
18045
static int
18046
no_previous_def (tree function_name)
18047
{
18048
  tree branch_island;
18049
  for (branch_island = branch_island_list;
18050
       branch_island;
18051
       branch_island = TREE_CHAIN (branch_island))
18052
    if (function_name == BRANCH_ISLAND_FUNCTION_NAME (branch_island))
18053
      return 0;
18054
  return 1;
18055
}
18056
 
18057
/* GET_PREV_LABEL gets the label name from the previous definition of
18058
   the function.  */
18059
 
18060
static tree
18061
get_prev_label (tree function_name)
18062
{
18063
  tree branch_island;
18064
  for (branch_island = branch_island_list;
18065
       branch_island;
18066
       branch_island = TREE_CHAIN (branch_island))
18067
    if (function_name == BRANCH_ISLAND_FUNCTION_NAME (branch_island))
18068
      return BRANCH_ISLAND_LABEL_NAME (branch_island);
18069
  return 0;
18070
}
18071
 
18072
#ifndef DARWIN_LINKER_GENERATES_ISLANDS
18073
#define DARWIN_LINKER_GENERATES_ISLANDS 0
18074
#endif
18075
 
18076
/* KEXTs still need branch islands.  */
18077
#define DARWIN_GENERATE_ISLANDS (!DARWIN_LINKER_GENERATES_ISLANDS \
18078
                                 || flag_mkernel || flag_apple_kext)
18079
 
18080
/* INSN is either a function call or a millicode call.  It may have an
   unconditional jump in its delay slot.

   CALL_DEST is the routine we are calling.

   Returns the assembler template for the call: a plain "bl", or a
   "jbsr" paired with a branch-island label when a long call is
   required.  */

char *
output_call (rtx insn, rtx *operands, int dest_operand_number,
             int cookie_operand_number)
{
  static char buf[256];
  if (DARWIN_GENERATE_ISLANDS
      && GET_CODE (operands[dest_operand_number]) == SYMBOL_REF
      && (INTVAL (operands[cookie_operand_number]) & CALL_LONG))
    {
      tree labelname;
      tree funname = get_identifier (XSTR (operands[dest_operand_number], 0));

      if (no_previous_def (funname))
        {
          /* First long call to this function: invent a label for its
             branch island and record it for macho_branch_islands().  */
          int line_number = 0;
          rtx label_rtx = gen_label_rtx ();
          char *label_buf, temp_buf[256];
          ASM_GENERATE_INTERNAL_LABEL (temp_buf, "L",
                                       CODE_LABEL_NUMBER (label_rtx));
          /* Strip a leading '*' (assembler-name marker) if present.  */
          label_buf = temp_buf[0] == '*' ? temp_buf + 1 : temp_buf;
          labelname = get_identifier (label_buf);
          /* Walk backwards to the nearest NOTE to pick up a source
             line number for the island's debug stab.  */
          for (; insn && GET_CODE (insn) != NOTE; insn = PREV_INSN (insn));
          if (insn)
            line_number = NOTE_LINE_NUMBER (insn);
          add_compiler_branch_island (labelname, funname, line_number);
        }
      else
        labelname = get_prev_label (funname);

      /* "jbsr foo, L42" is Mach-O for "Link as 'bl foo' if a 'bl'
         instruction will reach 'foo', otherwise link as 'bl L42'".
         "L42" should be a 'branch island', that will do a far jump to
         'foo'.  Branch islands are generated in
         macho_branch_islands().  */
      sprintf (buf, "jbsr %%z%d,%.246s",
               dest_operand_number, IDENTIFIER_POINTER (labelname));
    }
  else
    sprintf (buf, "bl %%z%d", dest_operand_number);
  return buf;
}
18126
 
18127
/* Generate PIC and indirect symbol stubs.  Emits a stub named STUB
   that jumps to SYMB through a lazy pointer, plus the lazy pointer
   itself (initially bound to dyld_stub_binding_helper).  */

void
machopic_output_stub (FILE *file, const char *symb, const char *stub)
{
  unsigned int length;
  char *symbol_name, *lazy_ptr_name;
  char *local_label_0;
  static int label = 0;

  /* Lose our funky encoding stuff so it doesn't contaminate the stub.  */
  symb = (*targetm.strip_name_encoding) (symb);


  length = strlen (symb);
  symbol_name = alloca (length + 32);
  GEN_SYMBOL_NAME_FOR_SYMBOL (symbol_name, symb, length);

  lazy_ptr_name = alloca (length + 32);
  GEN_LAZY_PTR_NAME_FOR_SYMBOL (lazy_ptr_name, symb, length);

  if (flag_pic == 2)
    switch_to_section (darwin_sections[machopic_picsymbol_stub1_section]);
  else
    switch_to_section (darwin_sections[machopic_symbol_stub1_section]);

  if (flag_pic == 2)
    {
      /* PIC stub: obtain the PC with bcl (preserving the caller's LR
         in r0), address the lazy pointer PC-relatively, and jump
         through it via CTR.  */
      fprintf (file, "\t.align 5\n");

      fprintf (file, "%s:\n", stub);
      fprintf (file, "\t.indirect_symbol %s\n", symbol_name);

      label++;
      local_label_0 = alloca (sizeof ("\"L00000000000$spb\""));
      sprintf (local_label_0, "\"L%011d$spb\"", label);

      fprintf (file, "\tmflr r0\n");
      fprintf (file, "\tbcl 20,31,%s\n", local_label_0);
      fprintf (file, "%s:\n\tmflr r11\n", local_label_0);
      fprintf (file, "\taddis r11,r11,ha16(%s-%s)\n",
               lazy_ptr_name, local_label_0);
      fprintf (file, "\tmtlr r0\n");
      fprintf (file, "\t%s r12,lo16(%s-%s)(r11)\n",
               (TARGET_64BIT ? "ldu" : "lwzu"),
               lazy_ptr_name, local_label_0);
      fprintf (file, "\tmtctr r12\n");
      fprintf (file, "\tbctr\n");
    }
  else
    {
      /* Non-PIC stub: address the lazy pointer absolutely.  */
      fprintf (file, "\t.align 4\n");

      fprintf (file, "%s:\n", stub);
      fprintf (file, "\t.indirect_symbol %s\n", symbol_name);

      fprintf (file, "\tlis r11,ha16(%s)\n", lazy_ptr_name);
      fprintf (file, "\t%s r12,lo16(%s)(r11)\n",
               (TARGET_64BIT ? "ldu" : "lwzu"),
               lazy_ptr_name);
      fprintf (file, "\tmtctr r12\n");
      fprintf (file, "\tbctr\n");
    }

  /* Emit the lazy pointer, initially pointing at the dyld binder.  */
  switch_to_section (darwin_sections[machopic_lazy_symbol_ptr_section]);
  fprintf (file, "%s:\n", lazy_ptr_name);
  fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
  fprintf (file, "%sdyld_stub_binding_helper\n",
           (TARGET_64BIT ? DOUBLE_INT_ASM_OP : "\t.long\t"));
}
18197
 
18198
/* Legitimize PIC addresses.  If the address is already
   position-independent, we return ORIG.  Newly generated
   position-independent addresses go into a reg.  This is REG if non
   zero, otherwise we allocate register(s) as necessary.  */

/* True iff X fits in a signed 16-bit immediate field.  */
#define SMALL_INT(X) ((UINTVAL (X) + 0x8000) < 0x10000)

rtx
rs6000_machopic_legitimize_pic_address (rtx orig, enum machine_mode mode,
                                        rtx reg)
{
  rtx base, offset;

  /* Outside reload we may allocate a fresh pseudo as target.  */
  if (reg == NULL && ! reload_in_progress && ! reload_completed)
    reg = gen_reg_rtx (Pmode);

  if (GET_CODE (orig) == CONST)
    {
      rtx reg_temp;

      /* Already legitimized: (const (plus pic_offset_table ...)).  */
      if (GET_CODE (XEXP (orig, 0)) == PLUS
          && XEXP (XEXP (orig, 0), 0) == pic_offset_table_rtx)
        return orig;

      gcc_assert (GET_CODE (XEXP (orig, 0)) == PLUS);

      /* Use a different reg for the intermediate value, as
         it will be marked UNCHANGING.  */
      reg_temp = no_new_pseudos ? reg : gen_reg_rtx (Pmode);
      base = rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 0),
                                                     Pmode, reg_temp);
      offset =
        rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 1),
                                                Pmode, reg);

      if (GET_CODE (offset) == CONST_INT)
        {
          if (SMALL_INT (offset))
            return plus_constant (base, INTVAL (offset));
          else if (! reload_in_progress && ! reload_completed)
            offset = force_reg (Pmode, offset);
          else
            {
              /* During/after reload we cannot create new pseudos;
                 spill the whole constant to the literal pool and
                 legitimize the memory reference instead.  */
              rtx mem = force_const_mem (Pmode, orig);
              return machopic_legitimize_pic_address (mem, Pmode, reg);
            }
        }
      return gen_rtx_PLUS (Pmode, base, offset);
    }

  /* Fall back on generic machopic code.  */
  return machopic_legitimize_pic_address (orig, mode, reg);
}
18251
 
18252
/* Output a .machine directive for the Darwin assembler, and call
   the generic start_file routine.  */

static void
rs6000_darwin_file_start (void)
{
  /* Maps a -mcpu= argument (or a set target-flag bit) to the
     assembler's .machine name; the NULL entry is the fallback.  */
  static const struct
  {
    const char *arg;
    const char *name;
    int if_set;
  } mapping[] = {
    { "ppc64", "ppc64", MASK_64BIT },
    { "970", "ppc970", MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64 },
    { "power4", "ppc970", 0 },
    { "G5", "ppc970", 0 },
    { "7450", "ppc7450", 0 },
    { "7400", "ppc7400", MASK_ALTIVEC },
    { "G4", "ppc7400", 0 },
    { "750", "ppc750", 0 },
    { "740", "ppc750", 0 },
    { "G3", "ppc750", 0 },
    { "604e", "ppc604e", 0 },
    { "604", "ppc604", 0 },
    { "603e", "ppc603", 0 },
    { "603", "ppc603", 0 },
    { "601", "ppc601", 0 },
    { NULL, "ppc", 0 } };
  const char *cpu_id = "";
  size_t i;

  rs6000_file_start ();
  darwin_file_start ();

  /* Determine the argument to -mcpu=.  Default to G3 if not specified.  */
  for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
    if (rs6000_select[i].set_arch_p && rs6000_select[i].string
        && rs6000_select[i].string[0] != '\0')
      cpu_id = rs6000_select[i].string;

  /* Look through the mapping array.  Pick the first name that either
     matches the argument, has a bit set in IF_SET that is also set
     in the target flags, or has a NULL name.  */

  i = 0;
  while (mapping[i].arg != NULL
         && strcmp (mapping[i].arg, cpu_id) != 0
         && (mapping[i].if_set & target_flags) == 0)
    i++;

  fprintf (asm_out_file, "\t.machine %s\n", mapping[i].name);
}
18304
 
18305
#endif /* TARGET_MACHO */
18306
 
18307
#if TARGET_ELF
18308
static int
18309
rs6000_elf_reloc_rw_mask (void)
18310
{
18311
  if (flag_pic)
18312
    return 3;
18313
  else if (DEFAULT_ABI == ABI_AIX)
18314
    return 2;
18315
  else
18316
    return 0;
18317
}
18318
 
18319
/* Record an element in the table of global constructors.  SYMBOL is
18320
   a SYMBOL_REF of the function to be called; PRIORITY is a number
18321
   between 0 and MAX_INIT_PRIORITY.
18322
 
18323
   This differs from default_named_section_asm_out_constructor in
18324
   that we have special handling for -mrelocatable.  */
18325
 
18326
static void
18327
rs6000_elf_asm_out_constructor (rtx symbol, int priority)
18328
{
18329
  const char *section = ".ctors";
18330
  char buf[16];
18331
 
18332
  if (priority != DEFAULT_INIT_PRIORITY)
18333
    {
18334
      sprintf (buf, ".ctors.%.5u",
18335
               /* Invert the numbering so the linker puts us in the proper
18336
                  order; constructors are run from right to left, and the
18337
                  linker sorts in increasing order.  */
18338
               MAX_INIT_PRIORITY - priority);
18339
      section = buf;
18340
    }
18341
 
18342
  switch_to_section (get_section (section, SECTION_WRITE, NULL));
18343
  assemble_align (POINTER_SIZE);
18344
 
18345
  if (TARGET_RELOCATABLE)
18346
    {
18347
      fputs ("\t.long (", asm_out_file);
18348
      output_addr_const (asm_out_file, symbol);
18349
      fputs (")@fixup\n", asm_out_file);
18350
    }
18351
  else
18352
    assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
18353
}
18354
 
18355
/* Likewise for global destructors: record SYMBOL in ".dtors" (or a
   priority-suffixed variant), with the same -mrelocatable @fixup
   handling as the constructor case above.  */
static void
rs6000_elf_asm_out_destructor (rtx symbol, int priority)
{
  const char *section = ".dtors";
  /* ".dtors." (7) + 5 priority digits + NUL = 13 bytes; 16 is ample.  */
  char buf[16];

  if (priority != DEFAULT_INIT_PRIORITY)
    {
      sprintf (buf, ".dtors.%.5u",
               /* Invert the numbering so the linker puts us in the proper
                  order; destructors are run from right to left, and the
                  linker sorts in increasing order.  */
               MAX_INIT_PRIORITY - priority);
      section = buf;
    }

  switch_to_section (get_section (section, SECTION_WRITE, NULL));
  assemble_align (POINTER_SIZE);

  /* Under -mrelocatable the address word must carry an @fixup marker
     so it can be adjusted when the image is relocated at load time.  */
  if (TARGET_RELOCATABLE)
    {
      fputs ("\t.long (", asm_out_file);
      output_addr_const (asm_out_file, symbol);
      fputs (")@fixup\n", asm_out_file);
    }
  else
    assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
}
18383
 
18384
void
rs6000_elf_declare_function_name (FILE *file, const char *name, tree decl)
{
  /* 64-bit ELF: NAME labels a 24-byte function descriptor in .opd
     (entry address, .TOC. base, zero word); the actual code label is
     produced by rs6000_output_function_entry at the end.  */
  if (TARGET_64BIT)
    {
      fputs ("\t.section\t\".opd\",\"aw\"\n\t.align 3\n", file);
      ASM_OUTPUT_LABEL (file, name);
      fputs (DOUBLE_INT_ASM_OP, file);
      rs6000_output_function_entry (file, name);
      fputs (",.TOC.@tocbase,0\n\t.previous\n", file);
      if (DOT_SYMBOLS)
        {
          /* With dot-symbols, size the descriptor and type/globalize
             the ".name" code symbol separately.  */
          fputs ("\t.size\t", file);
          assemble_name (file, name);
          fputs (",24\n\t.type\t.", file);
          assemble_name (file, name);
          fputs (",@function\n", file);
          if (TREE_PUBLIC (decl) && ! DECL_WEAK (decl))
            {
              fputs ("\t.globl\t.", file);
              assemble_name (file, name);
              putc ('\n', file);
            }
        }
      else
        ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
      ASM_DECLARE_RESULT (file, DECL_RESULT (decl));
      rs6000_output_function_entry (file, name);
      fputs (":\n", file);
      return;
    }

  /* 32-bit -mrelocatable (without secure PLT): when the function uses
     the TOC, emit "LCLn:" followed by the word "LCTOC1 - LCFn" giving
     the distance from the function's PIC base label to the TOC table,
     for use by the prologue's TOC setup.  */
  if (TARGET_RELOCATABLE
      && !TARGET_SECURE_PLT
      && (get_pool_size () != 0 || current_function_profile)
      && uses_TOC ())
    {
      char buf[256];

      (*targetm.asm_out.internal_label) (file, "LCL", rs6000_pic_labelno);

      ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
      fprintf (file, "\t.long ");
      assemble_name (file, buf);
      putc ('-', file);
      ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
      assemble_name (file, buf);
      putc ('\n', file);
    }

  ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
  ASM_DECLARE_RESULT (file, DECL_RESULT (decl));

  /* AIX ABI on 32-bit: emit a function descriptor (code address, GOT
     pointer, zero environment word) in the minimal-TOC section.  */
  if (DEFAULT_ABI == ABI_AIX)
    {
      const char *desc_name, *orig_name;

      orig_name = (*targetm.strip_name_encoding) (name);
      desc_name = orig_name;
      while (*desc_name == '.')
        desc_name++;

      if (TREE_PUBLIC (decl))
        fprintf (file, "\t.globl %s\n", desc_name);

      fprintf (file, "%s\n", MINIMAL_TOC_SECTION_ASM_OP);
      fprintf (file, "%s:\n", desc_name);
      fprintf (file, "\t.long %s\n", orig_name);
      fputs ("\t.long _GLOBAL_OFFSET_TABLE_\n", file);
      /* NOTE(review): this inner test is always true inside the
         enclosing DEFAULT_ABI == ABI_AIX arm — presumably a leftover
         from code once shared with other ABIs.  */
      if (DEFAULT_ABI == ABI_AIX)
        fputs ("\t.long 0\n", file);
      fprintf (file, "\t.previous\n");
    }
  ASM_OUTPUT_LABEL (file, name);
}
18459
 
18460
static void
18461
rs6000_elf_end_indicate_exec_stack (void)
18462
{
18463
  if (TARGET_32BIT)
18464
    file_end_indicate_exec_stack ();
18465
}
18466
#endif
18467
 
18468
#if TARGET_XCOFF
18469
static void
rs6000_xcoff_asm_output_anchor (rtx symbol)
{
  /* Define SYMBOL as "$ + offset", i.e. at its recorded block offset
     relative to the current location.  "$ + " (4) plus a decimal
     HOST_WIDE_INT fits comfortably in 100 bytes.  */
  char buffer[100];

  sprintf (buffer, "$ + " HOST_WIDE_INT_PRINT_DEC,
           SYMBOL_REF_BLOCK_OFFSET (symbol));
  ASM_OUTPUT_DEF (asm_out_file, XSTR (symbol, 0), buffer);
}
18478
 
18479
static void
18480
rs6000_xcoff_asm_globalize_label (FILE *stream, const char *name)
18481
{
18482
  fputs (GLOBAL_ASM_OP, stream);
18483
  RS6000_OUTPUT_BASENAME (stream, name);
18484
  putc ('\n', stream);
18485
}
18486
 
18487
/* A get_unnamed_decl callback, used for read-only sections.  PTR
18488
   points to the section string variable.  */
18489
 
18490
static void
rs6000_xcoff_output_readonly_section_asm_op (const void *directive)
{
  /* DIRECTIVE points to the section-name string variable; emit a
     read-only [RO] csect with alignment operand 3 (log2).  */
  fprintf (asm_out_file, "\t.csect %s[RO],3\n",
           *(const char *const *) directive);
}
18496
 
18497
/* Likewise for read-write sections.  */
18498
 
18499
static void
rs6000_xcoff_output_readwrite_section_asm_op (const void *directive)
{
  /* DIRECTIVE points to the section-name string variable; emit a
     read-write [RW] csect with alignment operand 3 (log2).  */
  fprintf (asm_out_file, "\t.csect %s[RW],3\n",
           *(const char *const *) directive);
}
18505
 
18506
/* A get_unnamed_section callback, used for switching to toc_section.  */
18507
 
18508
static void
rs6000_xcoff_output_toc_section_asm_op (const void *data ATTRIBUTE_UNUSED)
{
  /* With -mminimal-toc the TOC entries live in a dedicated toc_table
     csect; otherwise simply switch to the assembler's .toc section.  */
  if (TARGET_MINIMAL_TOC)
    {
      /* toc_section is always selected at least once from
         rs6000_xcoff_file_start, so this is guaranteed to
         always be defined once and only once in each file.  */
      if (!toc_initialized)
        {
          fputs ("\t.toc\nLCTOC..1:\n", asm_out_file);
          fputs ("\t.tc toc_table[TC],toc_table[RW]\n", asm_out_file);
          toc_initialized = 1;
        }
      /* ",3" presumably requests 2^3 csect alignment for 64-bit
         TOC entries — confirm against the AIX assembler manual.  */
      fprintf (asm_out_file, "\t.csect toc_table[RW]%s\n",
               (TARGET_32BIT ? "" : ",3"));
    }
  else
    fputs ("\t.toc\n", asm_out_file);
}
18528
 
18529
/* Implement TARGET_ASM_INIT_SECTIONS.  */
18530
 
18531
static void
rs6000_xcoff_asm_init_sections (void)
{
  /* Public read-only data: an [RO] csect using the per-file
     read-only section name.  */
  read_only_data_section
    = get_unnamed_section (0, rs6000_xcoff_output_readonly_section_asm_op,
                           &xcoff_read_only_section_name);

  /* Writable private data: an [RW] csect using the per-file
     private-data section name.  */
  private_data_section
    = get_unnamed_section (SECTION_WRITE,
                           rs6000_xcoff_output_readwrite_section_asm_op,
                           &xcoff_private_data_section_name);

  /* Read-only private data reuses the private-data name but is
     emitted with the [RO] mapping class.  */
  read_only_private_data_section
    = get_unnamed_section (0, rs6000_xcoff_output_readonly_section_asm_op,
                           &xcoff_private_data_section_name);

  toc_section
    = get_unnamed_section (0, rs6000_xcoff_output_toc_section_asm_op, NULL);

  readonly_data_section = read_only_data_section;
  exception_section = data_section;
}
18553
 
18554
/* Relocation read/write mask for XCOFF: all relocated sections are
   treated as needing writable memory (AIX is always PIC).  */
static int
rs6000_xcoff_reloc_rw_mask (void)
{
  return 3;
}
18559
 
18560
static void
18561
rs6000_xcoff_asm_named_section (const char *name, unsigned int flags,
18562
                                tree decl ATTRIBUTE_UNUSED)
18563
{
18564
  int smclass;
18565
  static const char * const suffix[3] = { "PR", "RO", "RW" };
18566
 
18567
  if (flags & SECTION_CODE)
18568
    smclass = 0;
18569
  else if (flags & SECTION_WRITE)
18570
    smclass = 2;
18571
  else
18572
    smclass = 1;
18573
 
18574
  fprintf (asm_out_file, "\t.csect %s%s[%s],%u\n",
18575
           (flags & SECTION_CODE) ? "." : "",
18576
           name, suffix[smclass], flags & SECTION_ENTSIZE);
18577
}
18578
 
18579
static section *
18580
rs6000_xcoff_select_section (tree decl, int reloc,
18581
                             unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
18582
{
18583
  if (decl_readonly_section (decl, reloc))
18584
    {
18585
      if (TREE_PUBLIC (decl))
18586
        return read_only_data_section;
18587
      else
18588
        return read_only_private_data_section;
18589
    }
18590
  else
18591
    {
18592
      if (TREE_PUBLIC (decl))
18593
        return data_section;
18594
      else
18595
        return private_data_section;
18596
    }
18597
}
18598
 
18599
static void
rs6000_xcoff_unique_section (tree decl, int reloc ATTRIBUTE_UNUSED)
{
  const char *name;

  /* Use select_section for private and uninitialized data.  */
  if (!TREE_PUBLIC (decl)
      || DECL_COMMON (decl)
      || DECL_INITIAL (decl) == NULL_TREE
      || DECL_INITIAL (decl) == error_mark_node
      || (flag_zero_initialized_in_bss
          && initializer_zerop (DECL_INITIAL (decl))))
    return;

  /* Name the unique section after the decl's stripped assembler name.  */
  name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
  name = (*targetm.strip_name_encoding) (name);
  DECL_SECTION_NAME (decl) = build_string (strlen (name), name);
}
18617
 
18618
/* Select section for constant in constant pool.
18619
 
18620
   On RS/6000, all constants are in the private read-only data area.
18621
   However, if this is being placed in the TOC it must be output as a
18622
   toc entry.  */
18623
 
18624
static section *
18625
rs6000_xcoff_select_rtx_section (enum machine_mode mode, rtx x,
18626
                                 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
18627
{
18628
  if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
18629
    return toc_section;
18630
  else
18631
    return read_only_private_data_section;
18632
}
18633
 
18634
/* Remove any trailing [DS] or the like from the symbol name.  */
18635
 
18636
static const char *
rs6000_xcoff_strip_name_encoding (const char *name)
{
  size_t len;

  /* Skip a leading '*' encoding marker, if any.  */
  if (*name == '*')
    name++;
  len = strlen (name);
  /* A trailing ']' marks a four-character mapping-class suffix such
     as "[DS]"; strip it.  Require at least four characters so we
     never read name[-1] on an empty string (e.g. NAME was "*") and
     never strip past the start of the name.  */
  if (len >= 4 && name[len - 1] == ']')
    return ggc_alloc_string (name, len - 4);
  return name;
}
18648
 
18649
/* Section attributes.  AIX is always PIC.  */
18650
 
18651
static unsigned int
rs6000_xcoff_section_type_flags (tree decl, const char *name, int reloc)
{
  unsigned int align;
  unsigned int flags = default_section_type_flags (decl, name, reloc);

  /* Align to at least UNIT size.  */
  if (flags & SECTION_CODE)
    align = MIN_UNITS_PER_WORD;
  else
    /* Increase alignment of large objects if not already stricter.  */
    align = MAX ((DECL_ALIGN (decl) / BITS_PER_UNIT),
                 int_size_in_bytes (TREE_TYPE (decl)) > MIN_UNITS_PER_WORD
                 ? UNITS_PER_FP_WORD : MIN_UNITS_PER_WORD);

  /* Stash log2 of the alignment in the SECTION_ENTSIZE bits;
     rs6000_xcoff_asm_named_section emits it as the csect alignment
     operand.  */
  return flags | (exact_log2 (align) & SECTION_ENTSIZE);
}
18668
 
18669
/* Output at beginning of assembler file.
18670
 
18671
   Initialize the section names for the RS/6000 at this point.
18672
 
18673
   Specify filename, including full path, to assembler.
18674
 
18675
   We want to go into the TOC section so at least one .toc will be emitted.
18676
   Also, in order to output proper .bs/.es pairs, we need at least one static
18677
   [RW] section emitted.
18678
 
18679
   Finally, declare mcount when profiling to make the assembler happy.  */
18680
 
18681
static void
rs6000_xcoff_file_start (void)
{
  /* Derive the per-file bss/read-write/read-only section names from
     the main input file name.  */
  rs6000_gen_section_name (&xcoff_bss_section_name,
                           main_input_filename, ".bss_");
  rs6000_gen_section_name (&xcoff_private_data_section_name,
                           main_input_filename, ".rw_");
  rs6000_gen_section_name (&xcoff_read_only_section_name,
                           main_input_filename, ".ro_");

  fputs ("\t.file\t", asm_out_file);
  output_quoted_string (asm_out_file, main_input_filename);
  fputc ('\n', asm_out_file);
  /* When debugging, emit a static [RW] csect up front so proper
     .bs/.es pairs can be output (see the comment above).  */
  if (write_symbols != NO_DEBUG)
    switch_to_section (private_data_section);
  switch_to_section (text_section);
  if (profile_flag)
    fprintf (asm_out_file, "\t.extern %s\n", RS6000_MCOUNT);
  rs6000_file_start ();
}
18701
 
18702
/* Output at end of assembler file.
18703
   On the RS/6000, referencing data should automatically pull in text.  */
18704
 
18705
static void
rs6000_xcoff_file_end (void)
{
  switch_to_section (text_section);
  fputs ("_section_.text:\n", asm_out_file);
  switch_to_section (data_section);
  /* Emit a pointer-sized reference from data back to _section_.text,
     so that referencing the data section pulls in the text section.  */
  fputs (TARGET_32BIT
         ? "\t.long _section_.text\n" : "\t.llong _section_.text\n",
         asm_out_file);
}
18715
#endif /* TARGET_XCOFF */
18716
 
18717
/* Compute a (partial) cost for rtx X.  Return true if the complete
18718
   cost has been computed, and false if subexpressions should be
18719
   scanned.  In either case, *TOTAL contains the cost result.  */
18720
 
18721
static bool
rs6000_rtx_costs (rtx x, int code, int outer_code, int *total)
{
  enum machine_mode mode = GET_MODE (x);

  switch (code)
    {
      /* On the RS/6000, if it is valid in the insn, it is free.  */
    case CONST_INT:
      if (((outer_code == SET
            || outer_code == PLUS
            || outer_code == MINUS)
           && (satisfies_constraint_I (x)
               || satisfies_constraint_L (x)))
          || (outer_code == AND
              && (satisfies_constraint_K (x)
                  || (mode == SImode
                      ? satisfies_constraint_L (x)
                      : satisfies_constraint_J (x))
                  || mask_operand (x, mode)
                  || (mode == DImode
                      && mask64_operand (x, DImode))))
          || ((outer_code == IOR || outer_code == XOR)
              && (satisfies_constraint_K (x)
                  || (mode == SImode
                      ? satisfies_constraint_L (x)
                      : satisfies_constraint_J (x))))
          || outer_code == ASHIFT
          || outer_code == ASHIFTRT
          || outer_code == LSHIFTRT
          || outer_code == ROTATE
          || outer_code == ROTATERT
          || outer_code == ZERO_EXTRACT
          || (outer_code == MULT
              && satisfies_constraint_I (x))
          || ((outer_code == DIV || outer_code == UDIV
               || outer_code == MOD || outer_code == UMOD)
              && exact_log2 (INTVAL (x)) >= 0)
          || (outer_code == COMPARE
              && (satisfies_constraint_I (x)
                  || satisfies_constraint_K (x)))
          || (outer_code == EQ
              && (satisfies_constraint_I (x)
                  || satisfies_constraint_K (x)
                  || (mode == SImode
                      ? satisfies_constraint_L (x)
                      : satisfies_constraint_J (x))))
          || (outer_code == GTU
              && satisfies_constraint_I (x))
          || (outer_code == LTU
              && satisfies_constraint_P (x)))
        {
          *total = 0;
          return true;
        }
      /* Otherwise, a constant that takes one extra instruction to
         form alongside the outer operation.  */
      else if ((outer_code == PLUS
                && reg_or_add_cint_operand (x, VOIDmode))
               || (outer_code == MINUS
                   && reg_or_sub_cint_operand (x, VOIDmode))
               || ((outer_code == SET
                    || outer_code == IOR
                    || outer_code == XOR)
                   && (INTVAL (x)
                       & ~ (unsigned HOST_WIDE_INT) 0xffffffff) == 0))
        {
          *total = COSTS_N_INSNS (1);
          return true;
        }
      /* FALLTHRU */

    case CONST_DOUBLE:
      if (mode == DImode && code == CONST_DOUBLE)
        {
          if ((outer_code == IOR || outer_code == XOR)
              && CONST_DOUBLE_HIGH (x) == 0
              && (CONST_DOUBLE_LOW (x)
                  & ~ (unsigned HOST_WIDE_INT) 0xffff) == 0)
            {
              *total = 0;
              return true;
            }
          else if ((outer_code == AND && and64_2_operand (x, DImode))
                   || ((outer_code == SET
                        || outer_code == IOR
                        || outer_code == XOR)
                       && CONST_DOUBLE_HIGH (x) == 0))
            {
              *total = COSTS_N_INSNS (1);
              return true;
            }
        }
      /* FALLTHRU */

    case CONST:
    case HIGH:
    case SYMBOL_REF:
    case MEM:
      /* When optimizing for size, MEM should be slightly more expensive
         than generating address, e.g., (plus (reg) (const)).
         L1 cache latency is about two instructions.  */
      *total = optimize_size ? COSTS_N_INSNS (1) + 1 : COSTS_N_INSNS (2);
      return true;

    case LABEL_REF:
      *total = 0;
      return true;

    case PLUS:
      if (mode == DFmode)
        {
          if (GET_CODE (XEXP (x, 0)) == MULT)
            {
              /* FNMA accounted in outer NEG.  */
              if (outer_code == NEG)
                *total = rs6000_cost->dmul - rs6000_cost->fp;
              else
                *total = rs6000_cost->dmul;
            }
          else
            *total = rs6000_cost->fp;
        }
      else if (mode == SFmode)
        {
          /* FNMA accounted in outer NEG.  */
          if (outer_code == NEG && GET_CODE (XEXP (x, 0)) == MULT)
            *total = 0;
          else
            *total = rs6000_cost->fp;
        }
      else
        *total = COSTS_N_INSNS (1);
      return false;

    case MINUS:
      if (mode == DFmode)
        {
          if (GET_CODE (XEXP (x, 0)) == MULT)
            {
              /* FNMA accounted in outer NEG.  */
              if (outer_code == NEG)
                *total = 0;
              else
                *total = rs6000_cost->dmul;
            }
          else
            *total = rs6000_cost->fp;
        }
      else if (mode == SFmode)
        {
          /* FNMA accounted in outer NEG.  */
          if (outer_code == NEG && GET_CODE (XEXP (x, 0)) == MULT)
            *total = 0;
          else
            *total = rs6000_cost->fp;
        }
      else
        *total = COSTS_N_INSNS (1);
      return false;

    case MULT:
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
          && satisfies_constraint_I (XEXP (x, 1)))
        {
          /* Small constants can use the cheaper 9-bit multiply.  */
          if (INTVAL (XEXP (x, 1)) >= -256
              && INTVAL (XEXP (x, 1)) <= 255)
            *total = rs6000_cost->mulsi_const9;
          else
            *total = rs6000_cost->mulsi_const;
        }
      /* FMA accounted in outer PLUS/MINUS.  */
      else if ((mode == DFmode || mode == SFmode)
               && (outer_code == PLUS || outer_code == MINUS))
        *total = 0;
      else if (mode == DFmode)
        *total = rs6000_cost->dmul;
      else if (mode == SFmode)
        *total = rs6000_cost->fp;
      else if (mode == DImode)
        *total = rs6000_cost->muldi;
      else
        *total = rs6000_cost->mulsi;
      return false;

    case DIV:
    case MOD:
      if (FLOAT_MODE_P (mode))
        {
          *total = mode == DFmode ? rs6000_cost->ddiv
                                  : rs6000_cost->sdiv;
          return false;
        }
      /* FALLTHRU */

    case UDIV:
    case UMOD:
      /* Division by a power of two reduces to shifts.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
          && exact_log2 (INTVAL (XEXP (x, 1))) >= 0)
        {
          if (code == DIV || code == MOD)
            /* Shift, addze */
            *total = COSTS_N_INSNS (2);
          else
            /* Shift */
            *total = COSTS_N_INSNS (1);
        }
      else
        {
          if (GET_MODE (XEXP (x, 1)) == DImode)
            *total = rs6000_cost->divdi;
          else
            *total = rs6000_cost->divsi;
        }
      /* Add in shift and subtract for MOD. */
      if (code == MOD || code == UMOD)
        *total += COSTS_N_INSNS (2);
      return false;

    case FFS:
      *total = COSTS_N_INSNS (4);
      return false;

    case NOT:
      /* NOT folds into the logical operation (nand/nor/eqv forms).  */
      if (outer_code == AND || outer_code == IOR || outer_code == XOR)
        {
          *total = 0;
          return false;
        }
      /* FALLTHRU */

    case AND:
    case IOR:
    case XOR:
    case ZERO_EXTRACT:
      *total = COSTS_N_INSNS (1);
      return false;

    case ASHIFT:
    case ASHIFTRT:
    case LSHIFTRT:
    case ROTATE:
    case ROTATERT:
      /* Handle mul_highpart.  */
      if (outer_code == TRUNCATE
          && GET_CODE (XEXP (x, 0)) == MULT)
        {
          if (mode == DImode)
            *total = rs6000_cost->muldi;
          else
            *total = rs6000_cost->mulsi;
          return true;
        }
      else if (outer_code == AND)
        *total = 0;
      else
        *total = COSTS_N_INSNS (1);
      return false;

    case SIGN_EXTEND:
    case ZERO_EXTEND:
      /* Extending loads are free (load with sign/zero extend).  */
      if (GET_CODE (XEXP (x, 0)) == MEM)
        *total = 0;
      else
        *total = COSTS_N_INSNS (1);
      return false;

    case COMPARE:
    case NEG:
    case ABS:
      if (!FLOAT_MODE_P (mode))
        {
          *total = COSTS_N_INSNS (1);
          return false;
        }
      /* FALLTHRU */

    case FLOAT:
    case UNSIGNED_FLOAT:
    case FIX:
    case UNSIGNED_FIX:
    case FLOAT_TRUNCATE:
      *total = rs6000_cost->fp;
      return false;

    case FLOAT_EXTEND:
      if (mode == DFmode)
        *total = 0;
      else
        *total = rs6000_cost->fp;
      return false;

    case UNSPEC:
      switch (XINT (x, 1))
        {
        case UNSPEC_FRSP:
          *total = rs6000_cost->fp;
          return true;

        default:
          break;
        }
      break;

    case CALL:
    case IF_THEN_ELSE:
      if (optimize_size)
        {
          *total = COSTS_N_INSNS (1);
          return true;
        }
      else if (FLOAT_MODE_P (mode)
               && TARGET_PPC_GFXOPT && TARGET_HARD_FLOAT && TARGET_FPRS)
        {
          *total = rs6000_cost->fp;
          return false;
        }
      break;

    case EQ:
    case GTU:
    case LTU:
      /* Carry bit requires mode == Pmode.
         NEG or PLUS already counted so only add one.  */
      if (mode == Pmode
          && (outer_code == NEG || outer_code == PLUS))
        {
          *total = COSTS_N_INSNS (1);
          return true;
        }
      if (outer_code == SET)
        {
          if (XEXP (x, 1) == const0_rtx)
            {
              *total = COSTS_N_INSNS (2);
              return true;
            }
          else if (mode == Pmode)
            {
              *total = COSTS_N_INSNS (3);
              return false;
            }
        }
      /* FALLTHRU */

    case GT:
    case LT:
    case UNORDERED:
      if (outer_code == SET && (XEXP (x, 1) == const0_rtx))
        {
          *total = COSTS_N_INSNS (2);
          return true;
        }
      /* CC COMPARE.  */
      if (outer_code == COMPARE)
        {
          *total = 0;
          return true;
        }
      break;

    default:
      break;
    }

  return false;
}
19086
 
19087
/* A C expression returning the cost of moving data from a register of class
19088
   CLASS1 to one of CLASS2.  */
19089
 
19090
int
rs6000_register_move_cost (enum machine_mode mode,
                           enum reg_class from, enum reg_class to)
{
  /*  Moves from/to GENERAL_REGS.  */
  if (reg_classes_intersect_p (to, GENERAL_REGS)
      || reg_classes_intersect_p (from, GENERAL_REGS))
    {
      /* Normalize so FROM names the non-GPR class, if there is one.  */
      if (! reg_classes_intersect_p (to, GENERAL_REGS))
        from = to;

      /* FPR/AltiVec <-> GPR transfers go through memory.  */
      if (from == FLOAT_REGS || from == ALTIVEC_REGS)
        return (rs6000_memory_move_cost (mode, from, 0)
                + rs6000_memory_move_cost (mode, GENERAL_REGS, 0));

      /* It's more expensive to move CR_REGS than CR0_REGS because of the
         shift.  */
      else if (from == CR_REGS)
        return 4;

      else
        /* A move will cost one instruction per GPR moved.  */
        return 2 * hard_regno_nregs[0][mode];
    }

  /* Moving between two similar registers is just one instruction.  */
  else if (reg_classes_intersect_p (to, from))
    return mode == TFmode ? 4 : 2;

  /* Everything else has to go through GENERAL_REGS.  */
  else
    return (rs6000_register_move_cost (mode, GENERAL_REGS, to)
            + rs6000_register_move_cost (mode, from, GENERAL_REGS));
}
19124
 
19125
/* A C expressions returning the cost of moving data of MODE from a register to
19126
   or from memory.  */
19127
 
19128
int
rs6000_memory_move_cost (enum machine_mode mode, enum reg_class class,
                         int in ATTRIBUTE_UNUSED)
{
  /* Cost is 4 per hard register moved; index 0 gives the GPR count for
     MODE, 32 the FPR count (FPRs start at hard register 32).  */
  if (reg_classes_intersect_p (class, GENERAL_REGS))
    return 4 * hard_regno_nregs[0][mode];
  else if (reg_classes_intersect_p (class, FLOAT_REGS))
    return 4 * hard_regno_nregs[32][mode];
  else if (reg_classes_intersect_p (class, ALTIVEC_REGS))
    return 4 * hard_regno_nregs[FIRST_ALTIVEC_REGNO][mode];
  else
    /* Other classes must be staged through GENERAL_REGS.  */
    return 4 + rs6000_register_move_cost (mode, class, GENERAL_REGS);
}
19141
 
19142
/* Newton-Raphson approximation of single-precision floating point divide n/d.
19143
   Assumes no trapping math and finite arguments.  */
19144
 
19145
void
rs6000_emit_swdivsf (rtx res, rtx n, rtx d)
{
  /* x0: hardware reciprocal estimate; e0/e1: error refinement terms;
     y1: refined reciprocal; u0: quotient estimate; v0: residual used
     for the final correction step.  */
  rtx x0, e0, e1, y1, u0, v0, one;

  x0 = gen_reg_rtx (SFmode);
  e0 = gen_reg_rtx (SFmode);
  e1 = gen_reg_rtx (SFmode);
  y1 = gen_reg_rtx (SFmode);
  u0 = gen_reg_rtx (SFmode);
  v0 = gen_reg_rtx (SFmode);
  one = force_reg (SFmode, CONST_DOUBLE_FROM_REAL_VALUE (dconst1, SFmode));

  /* x0 = 1./d estimate */
  emit_insn (gen_rtx_SET (VOIDmode, x0,
                          gen_rtx_UNSPEC (SFmode, gen_rtvec (1, d),
                                          UNSPEC_FRES)));
  /* e0 = 1. - d * x0 */
  emit_insn (gen_rtx_SET (VOIDmode, e0,
                          gen_rtx_MINUS (SFmode, one,
                                         gen_rtx_MULT (SFmode, d, x0))));
  /* e1 = e0 + e0 * e0 */
  emit_insn (gen_rtx_SET (VOIDmode, e1,
                          gen_rtx_PLUS (SFmode,
                                        gen_rtx_MULT (SFmode, e0, e0), e0)));
  /* y1 = x0 + e1 * x0 */
  emit_insn (gen_rtx_SET (VOIDmode, y1,
                          gen_rtx_PLUS (SFmode,
                                        gen_rtx_MULT (SFmode, e1, x0), x0)));
  /* u0 = n * y1 */
  emit_insn (gen_rtx_SET (VOIDmode, u0,
                          gen_rtx_MULT (SFmode, n, y1)));
  /* v0 = n - d * u0 */
  emit_insn (gen_rtx_SET (VOIDmode, v0,
                          gen_rtx_MINUS (SFmode, n,
                                         gen_rtx_MULT (SFmode, d, u0))));
  /* res = u0 + v0 * y1 */
  emit_insn (gen_rtx_SET (VOIDmode, res,
                          gen_rtx_PLUS (SFmode,
                                        gen_rtx_MULT (SFmode, v0, y1), u0)));
}
19186
 
19187
/* Newton-Raphson approximation of double-precision floating point divide n/d.
19188
   Assumes no trapping math and finite arguments.  */
19189
 
19190
void
19191
rs6000_emit_swdivdf (rtx res, rtx n, rtx d)
19192
{
19193
  rtx x0, e0, e1, e2, y1, y2, y3, u0, v0, one;
19194
 
19195
  x0 = gen_reg_rtx (DFmode);
19196
  e0 = gen_reg_rtx (DFmode);
19197
  e1 = gen_reg_rtx (DFmode);
19198
  e2 = gen_reg_rtx (DFmode);
19199
  y1 = gen_reg_rtx (DFmode);
19200
  y2 = gen_reg_rtx (DFmode);
19201
  y3 = gen_reg_rtx (DFmode);
19202
  u0 = gen_reg_rtx (DFmode);
19203
  v0 = gen_reg_rtx (DFmode);
19204
  one = force_reg (DFmode, CONST_DOUBLE_FROM_REAL_VALUE (dconst1, DFmode));
19205
 
19206
  /* x0 = 1./d estimate */
19207
  emit_insn (gen_rtx_SET (VOIDmode, x0,
19208
                          gen_rtx_UNSPEC (DFmode, gen_rtvec (1, d),
19209
                                          UNSPEC_FRES)));
19210
  /* e0 = 1. - d * x0 */
19211
  emit_insn (gen_rtx_SET (VOIDmode, e0,
19212
                          gen_rtx_MINUS (DFmode, one,
19213
                                         gen_rtx_MULT (SFmode, d, x0))));
19214
  /* y1 = x0 + e0 * x0 */
19215
  emit_insn (gen_rtx_SET (VOIDmode, y1,
19216
                          gen_rtx_PLUS (DFmode,
19217
                                        gen_rtx_MULT (DFmode, e0, x0), x0)));
19218
  /* e1 = e0 * e0 */
19219
  emit_insn (gen_rtx_SET (VOIDmode, e1,
19220
                          gen_rtx_MULT (DFmode, e0, e0)));
19221
  /* y2 = y1 + e1 * y1 */
19222
  emit_insn (gen_rtx_SET (VOIDmode, y2,
19223
                          gen_rtx_PLUS (DFmode,
19224
                                        gen_rtx_MULT (DFmode, e1, y1), y1)));
19225
  /* e2 = e1 * e1 */
19226
  emit_insn (gen_rtx_SET (VOIDmode, e2,
19227
                          gen_rtx_MULT (DFmode, e1, e1)));
19228
  /* y3 = y2 + e2 * y2 */
19229
  emit_insn (gen_rtx_SET (VOIDmode, y3,
19230
                          gen_rtx_PLUS (DFmode,
19231
                                        gen_rtx_MULT (DFmode, e2, y2), y2)));
19232
  /* u0 = n * y3 */
19233
  emit_insn (gen_rtx_SET (VOIDmode, u0,
19234
                          gen_rtx_MULT (DFmode, n, y3)));
19235
  /* v0 = n - d * u0 */
19236
  emit_insn (gen_rtx_SET (VOIDmode, v0,
19237
                          gen_rtx_MINUS (DFmode, n,
19238
                                         gen_rtx_MULT (DFmode, d, u0))));
19239
  /* res = u0 + v0 * y3 */
19240
  emit_insn (gen_rtx_SET (VOIDmode, res,
19241
                          gen_rtx_PLUS (DFmode,
19242
                                        gen_rtx_MULT (DFmode, v0, y3), u0)));
19243
}
19244
 
19245
/* Return an RTX representing where to find the function value of a
19246
   function returning MODE.  */
19247
static rtx
19248
rs6000_complex_function_value (enum machine_mode mode)
19249
{
19250
  unsigned int regno;
19251
  rtx r1, r2;
19252
  enum machine_mode inner = GET_MODE_INNER (mode);
19253
  unsigned int inner_bytes = GET_MODE_SIZE (inner);
19254
 
19255
  if (FLOAT_MODE_P (mode) && TARGET_HARD_FLOAT && TARGET_FPRS)
19256
    regno = FP_ARG_RETURN;
19257
  else
19258
    {
19259
      regno = GP_ARG_RETURN;
19260
 
19261
      /* 32-bit is OK since it'll go in r3/r4.  */
19262
      if (TARGET_32BIT && inner_bytes >= 4)
19263
        return gen_rtx_REG (mode, regno);
19264
    }
19265
 
19266
  if (inner_bytes >= 8)
19267
    return gen_rtx_REG (mode, regno);
19268
 
19269
  r1 = gen_rtx_EXPR_LIST (inner, gen_rtx_REG (inner, regno),
19270
                          const0_rtx);
19271
  r2 = gen_rtx_EXPR_LIST (inner, gen_rtx_REG (inner, regno + 1),
19272
                          GEN_INT (inner_bytes));
19273
  return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r2));
19274
}
19275
 
19276
/* Define how to find the value returned by a function.
19277
   VALTYPE is the data type of the value (as a tree).
19278
   If the precise function being called is known, FUNC is its FUNCTION_DECL;
19279
   otherwise, FUNC is 0.
19280
 
19281
   On the SPE, both FPs and vectors are returned in r3.
19282
 
19283
   On RS/6000 an integer value is in r3 and a floating-point value is in
19284
   fp1, unless -msoft-float.  */
19285
 
19286
rtx
rs6000_function_value (tree valtype, tree func ATTRIBUTE_UNUSED)
{
  enum machine_mode mode;
  unsigned int regno;

  /* Special handling for structs in darwin64.  */
  if (rs6000_darwin64_abi
      && TYPE_MODE (valtype) == BLKmode
      && TREE_CODE (valtype) == RECORD_TYPE
      && int_size_in_bytes (valtype) > 0)
    {
      CUMULATIVE_ARGS valcum;
      rtx valret;

      valcum.words = 0;
      valcum.fregno = FP_ARG_MIN_REG;
      valcum.vregno = ALTIVEC_ARG_MIN_REG;
      /* Do a trial code generation as if this were going to be passed as
	 an argument; if any part goes in memory, we return NULL.  */
      valret = rs6000_darwin64_record_arg (&valcum, valtype, 1, true);
      if (valret)
	return valret;
      /* Otherwise fall through to standard ABI rules.  */
    }

  if (TARGET_32BIT && TARGET_POWERPC64 && TYPE_MODE (valtype) == DImode)
    {
      /* Long long return value need be split in -mpowerpc64, 32bit ABI.
	 Describe the two 4-byte halves in GP_ARG_RETURN and the
	 following register, at byte offsets 0 and 4.  */
      return gen_rtx_PARALLEL (DImode,
	gen_rtvec (2,
		   gen_rtx_EXPR_LIST (VOIDmode,
				      gen_rtx_REG (SImode, GP_ARG_RETURN),
				      const0_rtx),
		   gen_rtx_EXPR_LIST (VOIDmode,
				      gen_rtx_REG (SImode,
						   GP_ARG_RETURN + 1),
				      GEN_INT (4))));
    }
  if (TARGET_32BIT && TARGET_POWERPC64 && TYPE_MODE (valtype) == DCmode)
    {
      /* Likewise for a 16-byte complex double under -mpowerpc64 with a
	 32-bit ABI: four SImode pieces in four consecutive registers
	 starting at GP_ARG_RETURN, at byte offsets 0, 4, 8 and 12.  */
      return gen_rtx_PARALLEL (DCmode,
	gen_rtvec (4,
		   gen_rtx_EXPR_LIST (VOIDmode,
				      gen_rtx_REG (SImode, GP_ARG_RETURN),
				      const0_rtx),
		   gen_rtx_EXPR_LIST (VOIDmode,
				      gen_rtx_REG (SImode,
						   GP_ARG_RETURN + 1),
				      GEN_INT (4)),
		   gen_rtx_EXPR_LIST (VOIDmode,
				      gen_rtx_REG (SImode,
						   GP_ARG_RETURN + 2),
				      GEN_INT (8)),
		   gen_rtx_EXPR_LIST (VOIDmode,
				      gen_rtx_REG (SImode,
						   GP_ARG_RETURN + 3),
				      GEN_INT (12))));
    }

  /* Narrow integral and pointer values are promoted to a full word
     before being returned.  */
  mode = TYPE_MODE (valtype);
  if ((INTEGRAL_TYPE_P (valtype) && GET_MODE_BITSIZE (mode) < BITS_PER_WORD)
      || POINTER_TYPE_P (valtype))
    mode = TARGET_32BIT ? SImode : DImode;

  /* Select the return register class.  NOTE: the order of these tests
     is significant — e.g. decimal float must be checked before the
     generic scalar-float case so it lands in GPRs, not FPRs.  */
  if (DECIMAL_FLOAT_MODE_P (mode))
    regno = GP_ARG_RETURN;
  else if (SCALAR_FLOAT_TYPE_P (valtype) && TARGET_HARD_FLOAT && TARGET_FPRS)
    regno = FP_ARG_RETURN;
  else if (TREE_CODE (valtype) == COMPLEX_TYPE
	   && targetm.calls.split_complex_arg)
    return rs6000_complex_function_value (mode);
  else if (TREE_CODE (valtype) == VECTOR_TYPE
	   && TARGET_ALTIVEC && TARGET_ALTIVEC_ABI
	   && ALTIVEC_VECTOR_MODE (mode))
    regno = ALTIVEC_ARG_RETURN;
  else if (TARGET_E500_DOUBLE && TARGET_HARD_FLOAT
	   && (mode == DFmode || mode == DCmode))
    /* E500 doubles live in GPR pairs; build the matching PARALLEL.  */
    return spe_build_register_parallel (mode, GP_ARG_RETURN);
  else
    regno = GP_ARG_RETURN;

  return gen_rtx_REG (mode, regno);
}
19370
 
19371
/* Define how to find the value returned by a library function
19372
   assuming the value has mode MODE.  */
19373
rtx
19374
rs6000_libcall_value (enum machine_mode mode)
19375
{
19376
  unsigned int regno;
19377
 
19378
  if (TARGET_32BIT && TARGET_POWERPC64 && mode == DImode)
19379
    {
19380
      /* Long long return value need be split in -mpowerpc64, 32bit ABI.  */
19381
      return gen_rtx_PARALLEL (DImode,
19382
        gen_rtvec (2,
19383
                   gen_rtx_EXPR_LIST (VOIDmode,
19384
                                      gen_rtx_REG (SImode, GP_ARG_RETURN),
19385
                                      const0_rtx),
19386
                   gen_rtx_EXPR_LIST (VOIDmode,
19387
                                      gen_rtx_REG (SImode,
19388
                                                   GP_ARG_RETURN + 1),
19389
                                      GEN_INT (4))));
19390
    }
19391
 
19392
  if (DECIMAL_FLOAT_MODE_P (mode))
19393
    regno = GP_ARG_RETURN;
19394
  else if (SCALAR_FLOAT_MODE_P (mode)
19395
           && TARGET_HARD_FLOAT && TARGET_FPRS)
19396
    regno = FP_ARG_RETURN;
19397
  else if (ALTIVEC_VECTOR_MODE (mode)
19398
           && TARGET_ALTIVEC && TARGET_ALTIVEC_ABI)
19399
    regno = ALTIVEC_ARG_RETURN;
19400
  else if (COMPLEX_MODE_P (mode) && targetm.calls.split_complex_arg)
19401
    return rs6000_complex_function_value (mode);
19402
  else if (TARGET_E500_DOUBLE && TARGET_HARD_FLOAT
19403
           && (mode == DFmode || mode == DCmode))
19404
    return spe_build_register_parallel (mode, GP_ARG_RETURN);
19405
  else
19406
    regno = GP_ARG_RETURN;
19407
 
19408
  return gen_rtx_REG (mode, regno);
19409
}
19410
 
19411
/* Define the offset between two registers, FROM to be eliminated and its
19412
   replacement TO, at the start of a routine.  */
19413
HOST_WIDE_INT
19414
rs6000_initial_elimination_offset (int from, int to)
19415
{
19416
  rs6000_stack_t *info = rs6000_stack_info ();
19417
  HOST_WIDE_INT offset;
19418
 
19419
  if (from == HARD_FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
19420
    offset = info->push_p ? 0 : -info->total_size;
19421
  else if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
19422
    {
19423
      offset = info->push_p ? 0 : -info->total_size;
19424
      if (FRAME_GROWS_DOWNWARD)
19425
        offset += info->fixed_size + info->vars_size + info->parm_size;
19426
    }
19427
  else if (from == FRAME_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
19428
    offset = FRAME_GROWS_DOWNWARD
19429
             ? info->fixed_size + info->vars_size + info->parm_size
19430
             : 0;
19431
  else if (from == ARG_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
19432
    offset = info->total_size;
19433
  else if (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
19434
    offset = info->push_p ? info->total_size : 0;
19435
  else if (from == RS6000_PIC_OFFSET_TABLE_REGNUM)
19436
    offset = 0;
19437
  else
19438
    gcc_unreachable ();
19439
 
19440
  return offset;
19441
}
19442
 
19443
/* Return true if TYPE is a SPE or AltiVec opaque type.  */
19444
 
19445
static bool
19446
rs6000_is_opaque_type (tree type)
19447
{
19448
  return (type == opaque_V2SI_type_node
19449
              || type == opaque_V2SF_type_node
19450
              || type == opaque_p_V2SI_type_node
19451
              || type == opaque_V4SI_type_node);
19452
}
19453
 
19454
static rtx
19455
rs6000_dwarf_register_span (rtx reg)
19456
{
19457
  unsigned regno;
19458
 
19459
  if (TARGET_SPE
19460
      && (SPE_VECTOR_MODE (GET_MODE (reg))
19461
          || (TARGET_E500_DOUBLE && GET_MODE (reg) == DFmode)))
19462
    ;
19463
  else
19464
    return NULL_RTX;
19465
 
19466
  regno = REGNO (reg);
19467
 
19468
  /* The duality of the SPE register size wreaks all kinds of havoc.
19469
     This is a way of distinguishing r0 in 32-bits from r0 in
19470
     64-bits.  */
19471
  return
19472
    gen_rtx_PARALLEL (VOIDmode,
19473
                      BYTES_BIG_ENDIAN
19474
                      ? gen_rtvec (2,
19475
                                   gen_rtx_REG (SImode, regno + 1200),
19476
                                   gen_rtx_REG (SImode, regno))
19477
                      : gen_rtvec (2,
19478
                                   gen_rtx_REG (SImode, regno),
19479
                                   gen_rtx_REG (SImode, regno + 1200)));
19480
}
19481
 
19482
/* Map internal gcc register numbers to DWARF2 register numbers.  */
19483
 
19484
unsigned int
19485
rs6000_dbx_register_number (unsigned int regno)
19486
{
19487
  if (regno <= 63 || write_symbols != DWARF2_DEBUG)
19488
    return regno;
19489
  if (regno == MQ_REGNO)
19490
    return 100;
19491
  if (regno == LINK_REGISTER_REGNUM)
19492
    return 108;
19493
  if (regno == COUNT_REGISTER_REGNUM)
19494
    return 109;
19495
  if (CR_REGNO_P (regno))
19496
    return regno - CR0_REGNO + 86;
19497
  if (regno == XER_REGNO)
19498
    return 101;
19499
  if (ALTIVEC_REGNO_P (regno))
19500
    return regno - FIRST_ALTIVEC_REGNO + 1124;
19501
  if (regno == VRSAVE_REGNO)
19502
    return 356;
19503
  if (regno == VSCR_REGNO)
19504
    return 67;
19505
  if (regno == SPE_ACC_REGNO)
19506
    return 99;
19507
  if (regno == SPEFSCR_REGNO)
19508
    return 612;
19509
  /* SPE high reg number.  We get these values of regno from
19510
     rs6000_dwarf_register_span.  */
19511
  gcc_assert (regno >= 1200 && regno < 1232);
19512
  return regno;
19513
}
19514
 
19515
/* target hook eh_return_filter_mode */
19516
static enum machine_mode
19517
rs6000_eh_return_filter_mode (void)
19518
{
19519
  return TARGET_32BIT ? SImode : word_mode;
19520
}
19521
 
19522
/* Target hook for scalar_mode_supported_p.  */
19523
static bool
19524
rs6000_scalar_mode_supported_p (enum machine_mode mode)
19525
{
19526
  if (DECIMAL_FLOAT_MODE_P (mode))
19527
    return true;
19528
  else
19529
    return default_scalar_mode_supported_p (mode);
19530
}
19531
 
19532
/* Target hook for vector_mode_supported_p.  */
19533
static bool
19534
rs6000_vector_mode_supported_p (enum machine_mode mode)
19535
{
19536
 
19537
  if (TARGET_SPE && SPE_VECTOR_MODE (mode))
19538
    return true;
19539
 
19540
  else if (TARGET_ALTIVEC && ALTIVEC_VECTOR_MODE (mode))
19541
    return true;
19542
 
19543
  else
19544
    return false;
19545
}
19546
 
19547
/* Target hook for invalid_arg_for_unprototyped_fn. */
19548
static const char *
19549
invalid_arg_for_unprototyped_fn (tree typelist, tree funcdecl, tree val)
19550
{
19551
  return (!rs6000_darwin64_abi
19552
          && typelist == 0
19553
          && TREE_CODE (TREE_TYPE (val)) == VECTOR_TYPE
19554
          && (funcdecl == NULL_TREE
19555
              || (TREE_CODE (funcdecl) == FUNCTION_DECL
19556
                  && DECL_BUILT_IN_CLASS (funcdecl) != BUILT_IN_MD)))
19557
          ? N_("AltiVec argument passed to unprototyped function")
19558
          : NULL;
19559
}
19560
 
19561
/* For TARGET_SECURE_PLT 32-bit PIC code we can save PIC register
19562
   setup by using __stack_chk_fail_local hidden function instead of
19563
   calling __stack_chk_fail directly.  Otherwise it is better to call
19564
   __stack_chk_fail directly.  */
19565
 
19566
static tree
19567
rs6000_stack_protect_fail (void)
19568
{
19569
  return (DEFAULT_ABI == ABI_V4 && TARGET_SECURE_PLT && flag_pic)
19570
         ? default_hidden_stack_protect_fail ()
19571
         : default_external_stack_protect_fail ();
19572
}
19573
 
19574
#include "gt-rs6000.h"

powered by: WebSVN 2.1.0

© copyright 1999-2024 OpenCores.org, equivalent to Oliscience, all rights reserved. OpenCores®, registered trademark.