/* Copyright (C) 1997, 1998, 1999, 2000, 2001, 2003, 2004, 2005
   Free Software Foundation, Inc.
   Contributed by Red Hat, Inc.

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify
   it under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 2, or (at your option)
   any later version.

   GCC is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
   GNU General Public License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING.  If not, write to
   the Free Software Foundation, 51 Franklin Street, Fifth Floor,
   Boston, MA 02110-1301, USA.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tree.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "real.h"
#include "insn-config.h"
#include "conditions.h"
#include "insn-flags.h"
#include "output.h"
#include "insn-attr.h"
#include "flags.h"
#include "recog.h"
#include "reload.h"
#include "expr.h"
#include "obstack.h"
#include "except.h"
#include "function.h"
#include "optabs.h"
#include "toplev.h"
#include "basic-block.h"
#include "tm_p.h"
#include "ggc.h"
#include <ctype.h>
#include "target.h"
#include "target-def.h"
#include "targhooks.h"
#include "integrate.h"
#include "langhooks.h"

#ifndef FRV_INLINE
#define FRV_INLINE inline
#endif

/* The maximum number of distinct NOP patterns.  There are three:
   nop, fnop and mnop.  */
#define NUM_NOP_PATTERNS 3

/* Classification of instructions and units: integer, floating-point/media,
   branch and control.  */
enum frv_insn_group { GROUP_I, GROUP_FM, GROUP_B, GROUP_C, NUM_GROUPS };

/* The DFA names of the units, in packet order.  */
static const char *const frv_unit_names[] =
{
  "c",
  "i0", "f0",
  "i1", "f1",
  "i2", "f2",
  "i3", "f3",
  "b0", "b1"
};

/* The classification of each unit in frv_unit_names[].  */
static const enum frv_insn_group frv_unit_groups[ARRAY_SIZE (frv_unit_names)] =
{
  GROUP_C,
  GROUP_I, GROUP_FM,
  GROUP_I, GROUP_FM,
  GROUP_I, GROUP_FM,
  GROUP_I, GROUP_FM,
  GROUP_B, GROUP_B
};

/* Return the DFA unit code associated with the Nth unit of integer
   or floating-point group GROUP.  */
#define NTH_UNIT(GROUP, N) frv_unit_codes[(GROUP) + (N) * 2 + 1]

/* Return the number of integer or floating-point unit UNIT
   (1 for I1, 2 for F2, etc.).  */
#define UNIT_NUMBER(UNIT) (((UNIT) - 1) / 2)
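
/* Example (illustrative only, not referenced elsewhere in this file): with
   the unit table above, NTH_UNIT (GROUP_I, 1) is the DFA code of unit "i1"
   and NTH_UNIT (GROUP_FM, 0) the code of "f0", while UNIT_NUMBER applied to
   the frv_unit_names[] index of "f1" (4) evaluates to 1.  */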

/* The DFA unit number for each unit in frv_unit_names[].  */
static int frv_unit_codes[ARRAY_SIZE (frv_unit_names)];

/* FRV_TYPE_TO_UNIT[T] is the last unit in frv_unit_names[] that can issue
   an instruction of type T.  The value is ARRAY_SIZE (frv_unit_names) if
   no instruction of type T has been seen.  */
static unsigned int frv_type_to_unit[TYPE_UNKNOWN + 1];

/* An array of dummy nop INSNs, one for each type of nop that the
   target supports.  */
static GTY(()) rtx frv_nops[NUM_NOP_PATTERNS];

/* The number of nop instructions in frv_nops[].  */
static unsigned int frv_num_nops;

/* Information about one __builtin_read or __builtin_write access, or
   the combination of several such accesses.  The most general value
   is all-zeros (an unknown access to an unknown address).  */
struct frv_io {
  /* The type of access.  FRV_IO_UNKNOWN means the access can be either
     a read or a write.  */
  enum { FRV_IO_UNKNOWN, FRV_IO_READ, FRV_IO_WRITE } type;

  /* The constant address being accessed, or zero if not known.  */
  HOST_WIDE_INT const_address;

  /* The run-time address, as used in operand 0 of the membar pattern.  */
  rtx var_address;
};

/* Return true if instruction INSN should be packed with the following
   instruction.  */
#define PACKING_FLAG_P(INSN) (GET_MODE (INSN) == TImode)

/* Set the value of PACKING_FLAG_P(INSN).  */
#define SET_PACKING_FLAG(INSN) PUT_MODE (INSN, TImode)
#define CLEAR_PACKING_FLAG(INSN) PUT_MODE (INSN, VOIDmode)
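
/* Illustrative sketch (assumed usage, mirroring how the VLIW packing pass
   records its decision on the insn's mode):

       if (frv_pack_insn_p (insn))
         SET_PACKING_FLAG (insn);
       else
         CLEAR_PACKING_FLAG (insn);  */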

/* Loop with REG set to each hard register in rtx X.  */
#define FOR_EACH_REGNO(REG, X) \
  for (REG = REGNO (X); \
       REG < REGNO (X) + HARD_REGNO_NREGS (REGNO (X), GET_MODE (X)); \
       REG++)
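
/* Example (illustrative sketch, not part of the original port): marking
   every hard register occupied by a REG rtx X as live, assuming LIVE is a
   HARD_REG_SET:

       unsigned int regno;
       FOR_EACH_REGNO (regno, x)
         SET_HARD_REG_BIT (live, regno);  */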

/* This structure contains machine specific function data.  */
struct machine_function GTY(())
{
  /* True if we have created an rtx that relies on the stack frame.  */
  int frame_needed;

  /* True if this function contains at least one __builtin_{read,write}*.  */
  bool has_membar_p;
};

/* Temporary register allocation support structure.  */
typedef struct frv_tmp_reg_struct
{
  HARD_REG_SET regs;            /* possible registers to allocate */
  int next_reg[N_REG_CLASSES];  /* next register to allocate per class */
}
frv_tmp_reg_t;

/* Register state information for VLIW re-packing phase.  */
#define REGSTATE_CC_MASK    0x07  /* Mask to isolate CCn for cond exec */
#define REGSTATE_MODIFIED   0x08  /* reg modified in current VLIW insn */
#define REGSTATE_IF_TRUE    0x10  /* reg modified in cond exec true */
#define REGSTATE_IF_FALSE   0x20  /* reg modified in cond exec false */

#define REGSTATE_IF_EITHER  (REGSTATE_IF_TRUE | REGSTATE_IF_FALSE)

typedef unsigned char regstate_t;

/* Used in frv_frame_accessor_t to indicate the direction of a register-to-
   memory move.  */
enum frv_stack_op
{
  FRV_LOAD,
  FRV_STORE
};

/* Information required by frv_frame_access.  */
typedef struct
{
  /* This field is FRV_LOAD if registers are to be loaded from the stack and
     FRV_STORE if they should be stored onto the stack.  FRV_STORE implies
     the move is being done by the prologue code while FRV_LOAD implies it
     is being done by the epilogue.  */
  enum frv_stack_op op;

  /* The base register to use when accessing the stack.  This may be the
     frame pointer, stack pointer, or a temporary.  The choice of register
     depends on which part of the frame is being accessed and how big the
     frame is.  */
  rtx base;

  /* The offset of BASE from the bottom of the current frame, in bytes.  */
  int base_offset;
} frv_frame_accessor_t;

/* Define the information needed to generate branch and scc insns.  This is
   stored from the compare operation.  */
rtx frv_compare_op0;
rtx frv_compare_op1;

/* Conditional execution support gathered together in one structure.  */
typedef struct
{
  /* Linked list of insns to add if the conditional execution conversion was
     successful.  Each link points to an EXPR_LIST which points to the
     pattern of the insn to add, and the insn to be inserted before.  */
  rtx added_insns_list;

  /* Identify which registers are safe to allocate for if conversions to
     conditional execution.  We keep the last allocated register in the
     register classes between COND_EXEC statements.  This will mean we
     allocate different registers for each different COND_EXEC group if we
     can.  This might allow the scheduler to intermix two different COND_EXEC
     sections.  */
  frv_tmp_reg_t tmp_reg;

  /* For nested IFs, identify which CC registers are used outside of setting
     via a compare insn, and used via a check insn.  This will allow us to
     know if we can rewrite the register to use a different register that
     will be paired with the CR register controlling the nested IF-THEN
     blocks.  */
  HARD_REG_SET nested_cc_ok_rewrite;

  /* Temporary registers allocated to hold constants during conditional
     execution.  */
  rtx scratch_regs[FIRST_PSEUDO_REGISTER];

  /* Current number of temp registers available.  */
  int cur_scratch_regs;

  /* Number of nested conditional execution blocks.  */
  int num_nested_cond_exec;

  /* Map of insns that set up constants in scratch registers.  */
  bitmap scratch_insns_bitmap;

  /* Conditional execution test register (CC0..CC7).  */
  rtx cr_reg;

  /* Conditional execution compare register that is paired with cr_reg, so
     that nested compares can be done.  The csubcc and caddcc instructions
     don't have enough bits to specify both a CC register to be set and a CR
     register to do the test on, so the same bit number is used for both.
     Needless to say, this is rather inconvenient for GCC.  */
  rtx nested_cc_reg;

  /* Extra CR registers used for &&, ||.  */
  rtx extra_int_cr;
  rtx extra_fp_cr;

  /* Previous CR used in nested if, to make sure we are dealing with the same
     nested if as the previous statement.  */
  rtx last_nested_if_cr;
}
frv_ifcvt_t;

static /* GTY(()) */ frv_ifcvt_t frv_ifcvt;

/* Map register number to smallest register class.  */
enum reg_class regno_reg_class[FIRST_PSEUDO_REGISTER];

/* Map class letter into register class.  */
enum reg_class reg_class_from_letter[256];

/* Cached value of frv_stack_info.  */
static frv_stack_t *frv_stack_cache = (frv_stack_t *)0;

/* -mcpu= support */
frv_cpu_t frv_cpu_type = CPU_TYPE;  /* value of -mcpu= */

/* Forward references */

static bool frv_handle_option (size_t, const char *, int);
static int frv_default_flags_for_cpu (void);
static int frv_string_begins_with (tree, const char *);
static FRV_INLINE bool frv_small_data_reloc_p (rtx, int);
static void frv_print_operand_memory_reference_reg (FILE *, rtx);
static void frv_print_operand_memory_reference (FILE *, rtx, int);
static int frv_print_operand_jump_hint (rtx);
static const char *comparison_string (enum rtx_code, rtx);
static FRV_INLINE int frv_regno_ok_for_base_p (int, int);
static rtx single_set_pattern (rtx);
static int frv_function_contains_far_jump (void);
static rtx frv_alloc_temp_reg (frv_tmp_reg_t *, enum reg_class,
                               enum machine_mode, int, int);
static rtx frv_frame_offset_rtx (int);
static rtx frv_frame_mem (enum machine_mode, rtx, int);
static rtx frv_dwarf_store (rtx, int);
static void frv_frame_insn (rtx, rtx);
static void frv_frame_access (frv_frame_accessor_t*, rtx, int);
static void frv_frame_access_multi (frv_frame_accessor_t*, frv_stack_t *, int);
static void frv_frame_access_standard_regs (enum frv_stack_op, frv_stack_t *);
static struct machine_function *frv_init_machine_status (void);
static rtx frv_int_to_acc (enum insn_code, int, rtx);
static enum machine_mode frv_matching_accg_mode (enum machine_mode);
static rtx frv_read_argument (tree *);
static rtx frv_read_iacc_argument (enum machine_mode, tree *);
static int frv_check_constant_argument (enum insn_code, int, rtx);
static rtx frv_legitimize_target (enum insn_code, rtx);
static rtx frv_legitimize_argument (enum insn_code, int, rtx);
static rtx frv_legitimize_tls_address (rtx, enum tls_model);
static rtx frv_expand_set_builtin (enum insn_code, tree, rtx);
static rtx frv_expand_unop_builtin (enum insn_code, tree, rtx);
static rtx frv_expand_binop_builtin (enum insn_code, tree, rtx);
static rtx frv_expand_cut_builtin (enum insn_code, tree, rtx);
static rtx frv_expand_binopimm_builtin (enum insn_code, tree, rtx);
static rtx frv_expand_voidbinop_builtin (enum insn_code, tree);
static rtx frv_expand_int_void2arg (enum insn_code, tree);
static rtx frv_expand_prefetches (enum insn_code, tree);
static rtx frv_expand_voidtriop_builtin (enum insn_code, tree);
static rtx frv_expand_voidaccop_builtin (enum insn_code, tree);
static rtx frv_expand_mclracc_builtin (tree);
static rtx frv_expand_mrdacc_builtin (enum insn_code, tree);
static rtx frv_expand_mwtacc_builtin (enum insn_code, tree);
static rtx frv_expand_noargs_builtin (enum insn_code);
static void frv_split_iacc_move (rtx, rtx);
static rtx frv_emit_comparison (enum rtx_code, rtx, rtx);
static int frv_clear_registers_used (rtx *, void *);
static void frv_ifcvt_add_insn (rtx, rtx, int);
static rtx frv_ifcvt_rewrite_mem (rtx, enum machine_mode, rtx);
static rtx frv_ifcvt_load_value (rtx, rtx);
static int frv_acc_group_1 (rtx *, void *);
static unsigned int frv_insn_unit (rtx);
static bool frv_issues_to_branch_unit_p (rtx);
static int frv_cond_flags (rtx);
static bool frv_regstate_conflict_p (regstate_t, regstate_t);
static int frv_registers_conflict_p_1 (rtx *, void *);
static bool frv_registers_conflict_p (rtx);
static void frv_registers_update_1 (rtx, rtx, void *);
static void frv_registers_update (rtx);
static void frv_start_packet (void);
static void frv_start_packet_block (void);
static void frv_finish_packet (void (*) (void));
static bool frv_pack_insn_p (rtx);
static void frv_add_insn_to_packet (rtx);
static void frv_insert_nop_in_packet (rtx);
static bool frv_for_each_packet (void (*) (void));
static bool frv_sort_insn_group_1 (enum frv_insn_group,
                                   unsigned int, unsigned int,
                                   unsigned int, unsigned int,
                                   state_t);
static int frv_compare_insns (const void *, const void *);
static void frv_sort_insn_group (enum frv_insn_group);
static void frv_reorder_packet (void);
static void frv_fill_unused_units (enum frv_insn_group);
static void frv_align_label (void);
static void frv_reorg_packet (void);
static void frv_register_nop (rtx);
static void frv_reorg (void);
static void frv_pack_insns (void);
static void frv_function_prologue (FILE *, HOST_WIDE_INT);
static void frv_function_epilogue (FILE *, HOST_WIDE_INT);
static bool frv_assemble_integer (rtx, unsigned, int);
static void frv_init_builtins (void);
static rtx frv_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
static void frv_init_libfuncs (void);
static bool frv_in_small_data_p (tree);
static void frv_asm_output_mi_thunk (FILE *, tree, HOST_WIDE_INT,
                                     HOST_WIDE_INT, tree);
static void frv_setup_incoming_varargs (CUMULATIVE_ARGS *, enum machine_mode,
                                        tree, int *, int);
static rtx frv_expand_builtin_saveregs (void);
static bool frv_rtx_costs (rtx, int, int, int*);
static void frv_asm_out_constructor (rtx, int);
static void frv_asm_out_destructor (rtx, int);
static bool frv_function_symbol_referenced_p (rtx);
static bool frv_cannot_force_const_mem (rtx);
static const char *unspec_got_name (int);
static void frv_output_const_unspec (FILE *, const struct frv_unspec *);
static bool frv_function_ok_for_sibcall (tree, tree);
static rtx frv_struct_value_rtx (tree, int);
static bool frv_must_pass_in_stack (enum machine_mode mode, tree type);
static int frv_arg_partial_bytes (CUMULATIVE_ARGS *, enum machine_mode,
                                  tree, bool);
static void frv_output_dwarf_dtprel (FILE *, int, rtx) ATTRIBUTE_UNUSED;

/* Allow us to easily change the default for -malloc-cc.  */
#ifndef DEFAULT_NO_ALLOC_CC
#define MASK_DEFAULT_ALLOC_CC MASK_ALLOC_CC
#else
#define MASK_DEFAULT_ALLOC_CC 0
#endif

/* Initialize the GCC target structure.  */
#undef TARGET_ASM_FUNCTION_PROLOGUE
#define TARGET_ASM_FUNCTION_PROLOGUE frv_function_prologue
#undef TARGET_ASM_FUNCTION_EPILOGUE
#define TARGET_ASM_FUNCTION_EPILOGUE frv_function_epilogue
#undef TARGET_ASM_INTEGER
#define TARGET_ASM_INTEGER frv_assemble_integer
#undef TARGET_DEFAULT_TARGET_FLAGS
#define TARGET_DEFAULT_TARGET_FLAGS \
  (MASK_DEFAULT_ALLOC_CC \
   | MASK_COND_MOVE \
   | MASK_SCC \
   | MASK_COND_EXEC \
   | MASK_VLIW_BRANCH \
   | MASK_MULTI_CE \
   | MASK_NESTED_CE)
#undef TARGET_HANDLE_OPTION
#define TARGET_HANDLE_OPTION frv_handle_option
#undef TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS frv_init_builtins
#undef TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN frv_expand_builtin
#undef TARGET_INIT_LIBFUNCS
#define TARGET_INIT_LIBFUNCS frv_init_libfuncs
#undef TARGET_IN_SMALL_DATA_P
#define TARGET_IN_SMALL_DATA_P frv_in_small_data_p
#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS frv_rtx_costs
#undef TARGET_ASM_CONSTRUCTOR
#define TARGET_ASM_CONSTRUCTOR frv_asm_out_constructor
#undef TARGET_ASM_DESTRUCTOR
#define TARGET_ASM_DESTRUCTOR frv_asm_out_destructor

#undef TARGET_ASM_OUTPUT_MI_THUNK
#define TARGET_ASM_OUTPUT_MI_THUNK frv_asm_output_mi_thunk
#undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
#define TARGET_ASM_CAN_OUTPUT_MI_THUNK default_can_output_mi_thunk_no_vcall

#undef TARGET_SCHED_ISSUE_RATE
#define TARGET_SCHED_ISSUE_RATE frv_issue_rate

#undef TARGET_FUNCTION_OK_FOR_SIBCALL
#define TARGET_FUNCTION_OK_FOR_SIBCALL frv_function_ok_for_sibcall
#undef TARGET_CANNOT_FORCE_CONST_MEM
#define TARGET_CANNOT_FORCE_CONST_MEM frv_cannot_force_const_mem

#undef TARGET_HAVE_TLS
#define TARGET_HAVE_TLS HAVE_AS_TLS

#undef TARGET_STRUCT_VALUE_RTX
#define TARGET_STRUCT_VALUE_RTX frv_struct_value_rtx
#undef TARGET_MUST_PASS_IN_STACK
#define TARGET_MUST_PASS_IN_STACK frv_must_pass_in_stack
#undef TARGET_PASS_BY_REFERENCE
#define TARGET_PASS_BY_REFERENCE hook_pass_by_reference_must_pass_in_stack
#undef TARGET_ARG_PARTIAL_BYTES
#define TARGET_ARG_PARTIAL_BYTES frv_arg_partial_bytes

#undef TARGET_EXPAND_BUILTIN_SAVEREGS
#define TARGET_EXPAND_BUILTIN_SAVEREGS frv_expand_builtin_saveregs
#undef TARGET_SETUP_INCOMING_VARARGS
#define TARGET_SETUP_INCOMING_VARARGS frv_setup_incoming_varargs
#undef TARGET_MACHINE_DEPENDENT_REORG
#define TARGET_MACHINE_DEPENDENT_REORG frv_reorg

#if HAVE_AS_TLS
#undef TARGET_ASM_OUTPUT_DWARF_DTPREL
#define TARGET_ASM_OUTPUT_DWARF_DTPREL frv_output_dwarf_dtprel
#endif

struct gcc_target targetm = TARGET_INITIALIZER;

#define FRV_SYMBOL_REF_TLS_P(RTX) \
  (GET_CODE (RTX) == SYMBOL_REF && SYMBOL_REF_TLS_MODEL (RTX) != 0)

/* Any function call that satisfies the machine-independent
   requirements is eligible on FR-V.  */

static bool
frv_function_ok_for_sibcall (tree decl ATTRIBUTE_UNUSED,
                             tree exp ATTRIBUTE_UNUSED)
{
  return true;
}

/* Return true if SYMBOL is a small data symbol and relocation RELOC
   can be used to access it directly in a load or store.  */

static FRV_INLINE bool
frv_small_data_reloc_p (rtx symbol, int reloc)
{
  return (GET_CODE (symbol) == SYMBOL_REF
          && SYMBOL_REF_SMALL_P (symbol)
          && (!TARGET_FDPIC || flag_pic == 1)
          && (reloc == R_FRV_GOTOFF12 || reloc == R_FRV_GPREL12));
}

/* Return true if X is a valid relocation unspec.  If it is, fill in UNSPEC
   appropriately.  */

bool
frv_const_unspec_p (rtx x, struct frv_unspec *unspec)
{
  if (GET_CODE (x) == CONST)
    {
      unspec->offset = 0;
      x = XEXP (x, 0);
      if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == CONST_INT)
        {
          unspec->offset += INTVAL (XEXP (x, 1));
          x = XEXP (x, 0);
        }
      if (GET_CODE (x) == UNSPEC && XINT (x, 1) == UNSPEC_GOT)
        {
          unspec->symbol = XVECEXP (x, 0, 0);
          unspec->reloc = INTVAL (XVECEXP (x, 0, 1));

          if (unspec->offset == 0)
            return true;

          if (frv_small_data_reloc_p (unspec->symbol, unspec->reloc)
              && unspec->offset > 0
              && (unsigned HOST_WIDE_INT) unspec->offset < g_switch_value)
            return true;
        }
    }
  return false;
}
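
/* Example (illustrative): a small-data reference of the form

       (const (plus (unspec [(symbol_ref "x")
                             (const_int R_FRV_GOTOFF12)] UNSPEC_GOT)
                    (const_int 8)))

   is accepted when the offset is positive and below the -G threshold;
   UNSPEC->symbol, UNSPEC->reloc and UNSPEC->offset are filled in from the
   operands shown.  */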

/* Decide whether we can force certain constants to memory.  If we
   decide we can't, the caller should be able to cope with it in
   another way.

   We never allow constants to be forced into memory for TARGET_FDPIC.
   This is necessary for several reasons:

   1. Since LEGITIMATE_CONSTANT_P rejects constant pool addresses, the
      target-independent code will try to force them into the constant
      pool, thus leading to infinite recursion.

   2. We can never introduce new constant pool references during reload.
      Any such reference would require use of the pseudo FDPIC register.

   3. We can't represent a constant added to a function pointer (which is
      not the same as a pointer to a function+constant).

   4. In many cases, it's more efficient to calculate the constant in-line.  */

static bool
frv_cannot_force_const_mem (rtx x ATTRIBUTE_UNUSED)
{
  return TARGET_FDPIC;
}

/* Implement TARGET_HANDLE_OPTION.  */

static bool
frv_handle_option (size_t code, const char *arg, int value ATTRIBUTE_UNUSED)
{
  switch (code)
    {
    case OPT_mcpu_:
      if (strcmp (arg, "simple") == 0)
        frv_cpu_type = FRV_CPU_SIMPLE;
      else if (strcmp (arg, "tomcat") == 0)
        frv_cpu_type = FRV_CPU_TOMCAT;
      else if (strcmp (arg, "fr550") == 0)
        frv_cpu_type = FRV_CPU_FR550;
      else if (strcmp (arg, "fr500") == 0)
        frv_cpu_type = FRV_CPU_FR500;
      else if (strcmp (arg, "fr450") == 0)
        frv_cpu_type = FRV_CPU_FR450;
      else if (strcmp (arg, "fr405") == 0)
        frv_cpu_type = FRV_CPU_FR405;
      else if (strcmp (arg, "fr400") == 0)
        frv_cpu_type = FRV_CPU_FR400;
      else if (strcmp (arg, "fr300") == 0)
        frv_cpu_type = FRV_CPU_FR300;
      else if (strcmp (arg, "frv") == 0)
        frv_cpu_type = FRV_CPU_GENERIC;
      else
        return false;
      return true;

    default:
      return true;
    }
}

static int
frv_default_flags_for_cpu (void)
{
  switch (frv_cpu_type)
    {
    case FRV_CPU_GENERIC:
      return MASK_DEFAULT_FRV;

    case FRV_CPU_FR550:
      return MASK_DEFAULT_FR550;

    case FRV_CPU_FR500:
    case FRV_CPU_TOMCAT:
      return MASK_DEFAULT_FR500;

    case FRV_CPU_FR450:
      return MASK_DEFAULT_FR450;

    case FRV_CPU_FR405:
    case FRV_CPU_FR400:
      return MASK_DEFAULT_FR400;

    case FRV_CPU_FR300:
    case FRV_CPU_SIMPLE:
      return MASK_DEFAULT_SIMPLE;

    default:
      gcc_unreachable ();
    }
}

/* Sometimes certain combinations of command options do not make
   sense on a particular target machine.  You can define a macro
   `OVERRIDE_OPTIONS' to take account of this.  This macro, if
   defined, is executed once just after all the command options have
   been parsed.

   Don't use this macro to turn on various extra optimizations for
   `-O'.  That is what `OPTIMIZATION_OPTIONS' is for.  */

void
frv_override_options (void)
{
  int regno;
  unsigned int i;

  target_flags |= (frv_default_flags_for_cpu () & ~target_flags_explicit);

  /* -mlibrary-pic sets -fPIC and -G0 and also suppresses warnings from the
     linker about linking pic and non-pic code.  */
  if (TARGET_LIBPIC)
    {
      if (!flag_pic)            /* -fPIC */
        flag_pic = 2;

      if (!g_switch_set)        /* -G0 */
        {
          g_switch_set = 1;
          g_switch_value = 0;
        }
    }

  /* A C expression whose value is a register class containing hard
     register REGNO.  In general there is more than one such class;
     choose a class which is "minimal", meaning that no smaller class
     also contains the register.  */

  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    {
      enum reg_class class;

      if (GPR_P (regno))
        {
          int gpr_reg = regno - GPR_FIRST;

          if (gpr_reg == GR8_REG)
            class = GR8_REGS;

          else if (gpr_reg == GR9_REG)
            class = GR9_REGS;

          else if (gpr_reg == GR14_REG)
            class = FDPIC_FPTR_REGS;

          else if (gpr_reg == FDPIC_REGNO)
            class = FDPIC_REGS;

          else if ((gpr_reg & 3) == 0)
            class = QUAD_REGS;

          else if ((gpr_reg & 1) == 0)
            class = EVEN_REGS;

          else
            class = GPR_REGS;
        }

      else if (FPR_P (regno))
        {
          int fpr_reg = regno - GPR_FIRST;
          if ((fpr_reg & 3) == 0)
            class = QUAD_FPR_REGS;

          else if ((fpr_reg & 1) == 0)
            class = FEVEN_REGS;

          else
            class = FPR_REGS;
        }

      else if (regno == LR_REGNO)
        class = LR_REG;

      else if (regno == LCR_REGNO)
        class = LCR_REG;

      else if (ICC_P (regno))
        class = ICC_REGS;

      else if (FCC_P (regno))
        class = FCC_REGS;

      else if (ICR_P (regno))
        class = ICR_REGS;

      else if (FCR_P (regno))
        class = FCR_REGS;

      else if (ACC_P (regno))
        {
          int r = regno - ACC_FIRST;
          if ((r & 3) == 0)
            class = QUAD_ACC_REGS;
          else if ((r & 1) == 0)
            class = EVEN_ACC_REGS;
          else
            class = ACC_REGS;
        }

      else if (ACCG_P (regno))
        class = ACCG_REGS;

      else
        class = NO_REGS;

      regno_reg_class[regno] = class;
    }

  /* Check for small data option.  */
  if (!g_switch_set)
    g_switch_value = SDATA_DEFAULT_SIZE;

  /* A C expression which defines the machine-dependent operand
     constraint letters for register classes.  If CHAR is such a
     letter, the value should be the register class corresponding to
     it.  Otherwise, the value should be `NO_REGS'.  The register
     letter `r', corresponding to class `GENERAL_REGS', will not be
     passed to this macro; you do not need to handle it.

     The following letters are unavailable, due to being used as
     constraints:
        '0'..'9'
        '<', '>'
        'E', 'F', 'G', 'H'
        'I', 'J', 'K', 'L', 'M', 'N', 'O', 'P'
        'Q', 'R', 'S', 'T', 'U'
        'V', 'X'
        'g', 'i', 'm', 'n', 'o', 'p', 'r', 's'  */

  for (i = 0; i < 256; i++)
    reg_class_from_letter[i] = NO_REGS;

  reg_class_from_letter['a'] = ACC_REGS;
  reg_class_from_letter['b'] = EVEN_ACC_REGS;
  reg_class_from_letter['c'] = CC_REGS;
  reg_class_from_letter['d'] = GPR_REGS;
  reg_class_from_letter['e'] = EVEN_REGS;
  reg_class_from_letter['f'] = FPR_REGS;
  reg_class_from_letter['h'] = FEVEN_REGS;
  reg_class_from_letter['l'] = LR_REG;
  reg_class_from_letter['q'] = QUAD_REGS;
  reg_class_from_letter['t'] = ICC_REGS;
  reg_class_from_letter['u'] = FCC_REGS;
  reg_class_from_letter['v'] = ICR_REGS;
  reg_class_from_letter['w'] = FCR_REGS;
  reg_class_from_letter['x'] = QUAD_FPR_REGS;
  reg_class_from_letter['y'] = LCR_REG;
  reg_class_from_letter['z'] = SPR_REGS;
  reg_class_from_letter['A'] = QUAD_ACC_REGS;
  reg_class_from_letter['B'] = ACCG_REGS;
  reg_class_from_letter['C'] = CR_REGS;
  reg_class_from_letter['W'] = FDPIC_CALL_REGS;  /* gp14+15 */
  reg_class_from_letter['Z'] = FDPIC_REGS;       /* gp15 */

  /* There is no single unaligned SI op for PIC code.  Sometimes we
     need to use ".4byte" and sometimes we need to use ".picptr".
     See frv_assemble_integer for details.  */
  if (flag_pic || TARGET_FDPIC)
    targetm.asm_out.unaligned_op.si = 0;

  if ((target_flags_explicit & MASK_LINKED_FP) == 0)
    target_flags |= MASK_LINKED_FP;

  if ((target_flags_explicit & MASK_OPTIMIZE_MEMBAR) == 0)
    target_flags |= MASK_OPTIMIZE_MEMBAR;

  for (i = 0; i < ARRAY_SIZE (frv_unit_names); i++)
    frv_unit_codes[i] = get_cpu_unit_code (frv_unit_names[i]);

  for (i = 0; i < ARRAY_SIZE (frv_type_to_unit); i++)
    frv_type_to_unit[i] = ARRAY_SIZE (frv_unit_codes);

  init_machine_status = frv_init_machine_status;
}

/* Some machines may desire to change what optimizations are performed for
   various optimization levels.  This macro, if defined, is executed once just
   after the optimization level is determined and before the remainder of the
   command options have been parsed.  Values set in this macro are used as the
   default values for the other command line options.

   LEVEL is the optimization level specified; 2 if `-O2' is specified, 1 if
   `-O' is specified, and 0 if neither is specified.

   SIZE is nonzero if `-Os' is specified, 0 otherwise.

   You should not use this macro to change options that are not
   machine-specific.  These should be uniformly selected by the same
   optimization level on all supported machines.  Use this macro to enable
   machine-specific optimizations.

   *Do not examine `write_symbols' in this macro!*  The debugging options are
   not supposed to alter the generated code.  */

/* On the FRV, possibly disable VLIW packing which is done by the 2nd
   scheduling pass at the current time.  */
void
frv_optimization_options (int level, int size ATTRIBUTE_UNUSED)
{
  if (level >= 2)
    {
#ifdef DISABLE_SCHED2
      flag_schedule_insns_after_reload = 0;
#endif
#ifdef ENABLE_RCSP
      flag_rcsp = 1;
#endif
    }
}

/* Return true if NAME (a STRING_CST node) begins with PREFIX.  */

static int
frv_string_begins_with (tree name, const char *prefix)
{
  int prefix_len = strlen (prefix);

  /* Remember: NAME's length includes the null terminator.  */
  return (TREE_STRING_LENGTH (name) > prefix_len
          && strncmp (TREE_STRING_POINTER (name), prefix, prefix_len) == 0);
}

/* Zero or more C statements that may conditionally modify two variables
   `fixed_regs' and `call_used_regs' (both of type `char []') after they have
   been initialized from the two preceding macros.

   This is necessary in case the fixed or call-clobbered registers depend on
   target flags.

   You need not define this macro if it has no work to do.

   If the usage of an entire class of registers depends on the target flags,
   you may indicate this to GCC by using this macro to modify `fixed_regs' and
   `call_used_regs' to 1 for each of the registers in the classes which should
   not be used by GCC.  Also define the macro `REG_CLASS_FROM_LETTER' to return
   `NO_REGS' if it is called with a letter for a class that shouldn't be used.

   (However, if this class is not included in `GENERAL_REGS' and all of the
   insn patterns whose constraints permit this class are controlled by target
   switches, then GCC will automatically avoid using these registers when the
   target switches are opposed to them.)  */

void
frv_conditional_register_usage (void)
{
  int i;

  for (i = GPR_FIRST + NUM_GPRS; i <= GPR_LAST; i++)
    fixed_regs[i] = call_used_regs[i] = 1;

  for (i = FPR_FIRST + NUM_FPRS; i <= FPR_LAST; i++)
    fixed_regs[i] = call_used_regs[i] = 1;

  /* Reserve the registers used for conditional execution.  At present, we
     need 1 ICC and 1 ICR register.  */
  fixed_regs[ICC_TEMP] = call_used_regs[ICC_TEMP] = 1;
  fixed_regs[ICR_TEMP] = call_used_regs[ICR_TEMP] = 1;

  if (TARGET_FIXED_CC)
    {
      fixed_regs[ICC_FIRST] = call_used_regs[ICC_FIRST] = 1;
      fixed_regs[FCC_FIRST] = call_used_regs[FCC_FIRST] = 1;
      fixed_regs[ICR_FIRST] = call_used_regs[ICR_FIRST] = 1;
      fixed_regs[FCR_FIRST] = call_used_regs[FCR_FIRST] = 1;
    }

  if (TARGET_FDPIC)
    fixed_regs[GPR_FIRST + 16] = fixed_regs[GPR_FIRST + 17] =
      call_used_regs[GPR_FIRST + 16] = call_used_regs[GPR_FIRST + 17] = 0;

#if 0
  /* If -fpic, SDA_BASE_REG is the PIC register.  */
  if (g_switch_value == 0 && !flag_pic)
    fixed_regs[SDA_BASE_REG] = call_used_regs[SDA_BASE_REG] = 0;

  if (!flag_pic)
    fixed_regs[PIC_REGNO] = call_used_regs[PIC_REGNO] = 0;
#endif
}

/*
 * Compute the stack frame layout
 *
 * Register setup:
 * +---------------+-----------------------+-----------------------+
 * |Register       |type                   |caller-save/callee-save|
 * +---------------+-----------------------+-----------------------+
 * |GR0            |Zero register          |          -            |
 * |GR1            |Stack pointer(SP)      |          -            |
 * |GR2            |Frame pointer(FP)      |          -            |
 * |GR3            |Hidden parameter       |     caller save       |
 * |GR4-GR7        |          -            |     caller save       |
 * |GR8-GR13       |Argument register      |     caller save       |
 * |GR14-GR15      |          -            |     caller save       |
 * |GR16-GR31      |          -            |     callee save       |
 * |GR32-GR47      |          -            |     caller save       |
 * |GR48-GR63      |          -            |     callee save       |
 * |FR0-FR15       |          -            |     caller save       |
 * |FR16-FR31      |          -            |     callee save       |
 * |FR32-FR47      |          -            |     caller save       |
 * |FR48-FR63      |          -            |     callee save       |
 * +---------------+-----------------------+-----------------------+
 *
 * Stack frame setup:
 * Low
 *     SP-> |-----------------------------------|
 *          | Argument area                     |
 *          |-----------------------------------|
 *          | Register save area                |
 *          |-----------------------------------|
 *          | Local variable save area          |
 *     FP-> |-----------------------------------|
 *          | Old FP                            |
 *          |-----------------------------------|
 *          | Hidden parameter save area        |
 *          |-----------------------------------|
 *          | Return address(LR) storage area   |
 *          |-----------------------------------|
 *          | Padding for alignment             |
 *          |-----------------------------------|
 *          | Register argument area            |
 * OLD SP-> |-----------------------------------|
 *          | Parameter area                    |
 *          |-----------------------------------|
 * High
 *
 * Argument area/Parameter area:
 *
 * When a function is called, this area is used for argument transfer.  When
 * the argument is set up by the caller function, this area is referred to as
 * the argument area.  When the argument is referenced by the callee function,
 * this area is referred to as the parameter area.  The area is allocated when
 * all arguments cannot be placed on the argument register at the time of
 * argument transfer.
 *
 * Register save area:
 *
 * This is a register save area that must be guaranteed for the caller
 * function.  This area is not secured when the register save operation is
 * not needed.
 *
 * Local variable save area:
 *
 * This is the area for local variables and temporary variables.
 *
 * Old FP:
 *
 * This area stores the FP value of the caller function.
 *
 * Hidden parameter save area:
 *
 * This area stores the start address of the return value storage
 * area for a struct/union return function.
 * When a struct/union is used as the return value, the caller
 * function stores the return value storage area start address in
 * register GR3 and passes it to the callee function.
 * The callee function interprets the address stored in the GR3
 * as the return value storage area start address.
 * When register GR3 needs to be saved into memory, the callee
 * function saves it in the hidden parameter save area.  This
 * area is not secured when the save operation is not needed.
 *
 * Return address(LR) storage area:
 *
 * This area saves the LR.  The LR stores the address of a return to the
 * caller function for the purpose of function calling.
 *
 * Argument register area:
 *
 * This area saves the argument register.  This area is not secured when the
 * save operation is not needed.
 *
 * Argument:
 *
 * Arguments, the count of which equals the count of argument registers (6
 * words), are positioned in registers GR8 to GR13 and delivered to the callee
 * function.  When a struct/union return function is called, the return value
 * area address is stored in register GR3.  Arguments not placed in the
 * argument registers will be stored in the stack argument area for transfer
 * purposes.  When an 8-byte type argument is to be delivered using registers,
 * it is divided into two and placed in two registers for transfer.  When
 * argument registers must be saved to memory, the callee function secures an
 * argument register save area in the stack.  In this case, a continuous
 * argument register save area must be established in the parameter area.  The
 * argument register save area must be allocated as needed to cover the size
 * of the argument register to be saved.  If the function has a variable count
 * of arguments, it saves all argument registers in the argument register save
 * area.
 *
 * Argument Extension Format:
 *
 * When an argument is to be stored in the stack, its type is converted to an
 * extended type in accordance with the individual argument type.  The
 * argument is freed by the caller function after the return from the callee
 * function is made.
 *
 * +-----------------------+---------------+------------------------+
 * |    Argument Type      |Extended Type  |Stack Storage Size(byte)|
 * +-----------------------+---------------+------------------------+
 * |char                   |int            |          4             |
 * |signed char            |int            |          4             |
 * |unsigned char          |int            |          4             |
 * |[signed] short int     |int            |          4             |
 * |unsigned short int     |int            |          4             |
 * |[signed] int           |No extension   |          4             |
 * |unsigned int           |No extension   |          4             |
 * |[signed] long int      |No extension   |          4             |
 * |unsigned long int      |No extension   |          4             |
 * |[signed] long long int |No extension   |          8             |
 * |unsigned long long int |No extension   |          8             |
 * |float                  |double         |          8             |
 * |double                 |No extension   |          8             |
 * |long double            |No extension   |          8             |
 * |pointer                |No extension   |          4             |
 * |struct/union           |-              |          4 (*1)        |
 * +-----------------------+---------------+------------------------+
 *
 * When a struct/union is to be delivered as an argument, the caller copies it
 * to the local variable area and delivers the address of that area.
 *
 * Return Value:
 *
 * +-------------------------------+----------------------+
 * |Return Value Type              |Return Value Interface|
 * +-------------------------------+----------------------+
 * |void                           |None                  |
 * |[signed|unsigned] char         |GR8                   |
 * |[signed|unsigned] short int    |GR8                   |
 * |[signed|unsigned] int          |GR8                   |
 * |[signed|unsigned] long int     |GR8                   |
 * |pointer                        |GR8                   |
 * |[signed|unsigned] long long int|GR8 & GR9             |
 * |float                          |GR8                   |
 * |double                         |GR8 & GR9             |
 * |long double                    |GR8 & GR9             |
 * |struct/union                   |(*1)                  |
 * +-------------------------------+----------------------+
 *
 * When a struct/union is used as the return value, the caller function stores
 * the start address of the return value storage area into GR3 and then passes
 * it to the callee function.  The callee function interprets GR3 as the start
 * address of the return value storage area.  When this address needs to be
 * saved in memory, the callee function secures the hidden parameter save area
 * and saves the address in that area.
 */
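
/* For example (illustrative, following the table above): a `short' argument
   pushed on the stack is widened to `int' and occupies 4 bytes, while a
   `float' is promoted to `double' and occupies 8 bytes.  */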

frv_stack_t *
frv_stack_info (void)
{
  static frv_stack_t info, zero_info;
  frv_stack_t *info_ptr = &info;
  tree fndecl = current_function_decl;
  int varargs_p = 0;
  tree cur_arg;
  tree next_arg;
  int range;
  int alignment;
  int offset;

  /* If we've already calculated the values and reload is complete,
     just return now.  */
  if (frv_stack_cache)
    return frv_stack_cache;

  /* Zero all fields.  */
  info = zero_info;

  /* Set up the register range information.  */
  info_ptr->regs[STACK_REGS_GPR].name = "gpr";
  info_ptr->regs[STACK_REGS_GPR].first = LAST_ARG_REGNUM + 1;
  info_ptr->regs[STACK_REGS_GPR].last = GPR_LAST;
  info_ptr->regs[STACK_REGS_GPR].dword_p = TRUE;

  info_ptr->regs[STACK_REGS_FPR].name = "fpr";
  info_ptr->regs[STACK_REGS_FPR].first = FPR_FIRST;
  info_ptr->regs[STACK_REGS_FPR].last = FPR_LAST;
  info_ptr->regs[STACK_REGS_FPR].dword_p = TRUE;

  info_ptr->regs[STACK_REGS_LR].name = "lr";
  info_ptr->regs[STACK_REGS_LR].first = LR_REGNO;
  info_ptr->regs[STACK_REGS_LR].last = LR_REGNO;
  info_ptr->regs[STACK_REGS_LR].special_p = 1;

  info_ptr->regs[STACK_REGS_CC].name = "cc";
  info_ptr->regs[STACK_REGS_CC].first = CC_FIRST;
  info_ptr->regs[STACK_REGS_CC].last = CC_LAST;
  info_ptr->regs[STACK_REGS_CC].field_p = TRUE;

  info_ptr->regs[STACK_REGS_LCR].name = "lcr";
  info_ptr->regs[STACK_REGS_LCR].first = LCR_REGNO;
  info_ptr->regs[STACK_REGS_LCR].last = LCR_REGNO;

  info_ptr->regs[STACK_REGS_STDARG].name = "stdarg";
  info_ptr->regs[STACK_REGS_STDARG].first = FIRST_ARG_REGNUM;
  info_ptr->regs[STACK_REGS_STDARG].last = LAST_ARG_REGNUM;
  info_ptr->regs[STACK_REGS_STDARG].dword_p = 1;
  info_ptr->regs[STACK_REGS_STDARG].special_p = 1;

  info_ptr->regs[STACK_REGS_STRUCT].name = "struct";
  info_ptr->regs[STACK_REGS_STRUCT].first = FRV_STRUCT_VALUE_REGNUM;
  info_ptr->regs[STACK_REGS_STRUCT].last = FRV_STRUCT_VALUE_REGNUM;
  info_ptr->regs[STACK_REGS_STRUCT].special_p = 1;

  info_ptr->regs[STACK_REGS_FP].name = "fp";
  info_ptr->regs[STACK_REGS_FP].first = FRAME_POINTER_REGNUM;
  info_ptr->regs[STACK_REGS_FP].last = FRAME_POINTER_REGNUM;
  info_ptr->regs[STACK_REGS_FP].special_p = 1;

  /* Determine if this is a stdarg function.  If so, allocate space to store
     the 6 arguments.  */
  if (cfun->stdarg)
    varargs_p = 1;

  else
    {
      /* Find the last argument, and see if it is __builtin_va_alist.  */
      for (cur_arg = DECL_ARGUMENTS (fndecl); cur_arg != (tree)0;
           cur_arg = next_arg)
        {
          next_arg = TREE_CHAIN (cur_arg);
          if (next_arg == (tree)0)
            {
              if (DECL_NAME (cur_arg)
                  && !strcmp (IDENTIFIER_POINTER (DECL_NAME (cur_arg)),
                              "__builtin_va_alist"))
                varargs_p = 1;

              break;
            }
        }
    }

  /* Iterate over all of the register ranges.  */
  for (range = 0; range < STACK_REGS_MAX; range++)
    {
      frv_stack_regs_t *reg_ptr = &(info_ptr->regs[range]);
      int first = reg_ptr->first;
      int last = reg_ptr->last;
      int size_1word = 0;
      int size_2words = 0;
      int regno;

      /* Calculate which registers need to be saved & save area size.  */
      switch (range)
        {
        default:
          for (regno = first; regno <= last; regno++)
            {
              if ((regs_ever_live[regno] && !call_used_regs[regno])
                  || (current_function_calls_eh_return
                      && (regno >= FIRST_EH_REGNUM && regno <= LAST_EH_REGNUM))
                  || (!TARGET_FDPIC && flag_pic
                      && cfun->uses_pic_offset_table && regno == PIC_REGNO))
                {
                  info_ptr->save_p[regno] = REG_SAVE_1WORD;
                  size_1word += UNITS_PER_WORD;
                }
            }
          break;

          /* Calculate whether we need to create a frame after everything
             else has been processed.  */
        case STACK_REGS_FP:
          break;

        case STACK_REGS_LR:
          if (regs_ever_live[LR_REGNO]
              || profile_flag
              /* This is set for __builtin_return_address, etc.  */
              || cfun->machine->frame_needed
              || (TARGET_LINKED_FP && frame_pointer_needed)
              || (!TARGET_FDPIC && flag_pic
                  && cfun->uses_pic_offset_table))
            {
              info_ptr->save_p[LR_REGNO] = REG_SAVE_1WORD;
              size_1word += UNITS_PER_WORD;
            }
          break;

        case STACK_REGS_STDARG:
          if (varargs_p)
            {
              /* If this is a stdarg function with a non-variadic
                 argument split between registers and the stack,
                 adjust the saved registers downward.  */
              last -= (ADDR_ALIGN (cfun->pretend_args_size, UNITS_PER_WORD)
                       / UNITS_PER_WORD);

              for (regno = first; regno <= last; regno++)
                {
                  info_ptr->save_p[regno] = REG_SAVE_1WORD;
                  size_1word += UNITS_PER_WORD;
                }

              info_ptr->stdarg_size = size_1word;
            }
          break;

        case STACK_REGS_STRUCT:
          if (cfun->returns_struct)
            {
              info_ptr->save_p[FRV_STRUCT_VALUE_REGNUM] = REG_SAVE_1WORD;
              size_1word += UNITS_PER_WORD;
            }
          break;
        }

      if (size_1word)
        {
          /* If this is a field, it only takes one word.  */
          if (reg_ptr->field_p)
            size_1word = UNITS_PER_WORD;

          /* Determine which register pairs can be saved together.  */
          else if (reg_ptr->dword_p && TARGET_DWORD)
            {
              for (regno = first; regno < last; regno += 2)
                {
                  if (info_ptr->save_p[regno] && info_ptr->save_p[regno+1])
                    {
                      size_2words += 2 * UNITS_PER_WORD;
                      size_1word -= 2 * UNITS_PER_WORD;
                      info_ptr->save_p[regno] = REG_SAVE_2WORDS;
                      info_ptr->save_p[regno+1] = REG_SAVE_NO_SAVE;
                    }
                }
            }

          reg_ptr->size_1word = size_1word;
          reg_ptr->size_2words = size_2words;

          if (! reg_ptr->special_p)
            {
              info_ptr->regs_size_1word += size_1word;
              info_ptr->regs_size_2words += size_2words;
            }
        }
    }

  /* Set up the sizes of each field in the frame body, making the sizes
     of each be divisible by the size of a dword if dword operations might
     be used, or the size of a word otherwise.  */
  alignment = (TARGET_DWORD ? 2 * UNITS_PER_WORD : UNITS_PER_WORD);

  info_ptr->parameter_size = ADDR_ALIGN (cfun->outgoing_args_size, alignment);
  info_ptr->regs_size = ADDR_ALIGN (info_ptr->regs_size_2words
                                    + info_ptr->regs_size_1word,
                                    alignment);
  info_ptr->vars_size = ADDR_ALIGN (get_frame_size (), alignment);

  info_ptr->pretend_size = cfun->pretend_args_size;

  /* Work out the size of the frame, excluding the header.  Both the frame
     body and register parameter area will be dword-aligned.  */
  info_ptr->total_size
    = (ADDR_ALIGN (info_ptr->parameter_size
                   + info_ptr->regs_size
                   + info_ptr->vars_size,
                   2 * UNITS_PER_WORD)
       + ADDR_ALIGN (info_ptr->pretend_size
                     + info_ptr->stdarg_size,
                     2 * UNITS_PER_WORD));

  /* See if we need to create a frame at all, if so add header area.  */
  if (info_ptr->total_size > 0
      || frame_pointer_needed
      || info_ptr->regs[STACK_REGS_LR].size_1word > 0
      || info_ptr->regs[STACK_REGS_STRUCT].size_1word > 0)
    {
      offset = info_ptr->parameter_size;
      info_ptr->header_size = 4 * UNITS_PER_WORD;
      info_ptr->total_size += 4 * UNITS_PER_WORD;

      /* Calculate the offsets to save normal register pairs.  */
      for (range = 0; range < STACK_REGS_MAX; range++)
        {
          frv_stack_regs_t *reg_ptr = &(info_ptr->regs[range]);
          if (! reg_ptr->special_p)
            {
              int first = reg_ptr->first;
              int last = reg_ptr->last;
              int regno;

              for (regno = first; regno <= last; regno++)
                if (info_ptr->save_p[regno] == REG_SAVE_2WORDS
                    && regno != FRAME_POINTER_REGNUM
                    && (regno < FIRST_ARG_REGNUM
                        || regno > LAST_ARG_REGNUM))
                  {
                    info_ptr->reg_offset[regno] = offset;
                    offset += 2 * UNITS_PER_WORD;
                  }
            }
        }

      /* Calculate the offsets to save normal single registers.  */
      for (range = 0; range < STACK_REGS_MAX; range++)
        {
          frv_stack_regs_t *reg_ptr = &(info_ptr->regs[range]);
          if (! reg_ptr->special_p)
            {
              int first = reg_ptr->first;
              int last = reg_ptr->last;
              int regno;

              for (regno = first; regno <= last; regno++)
                if (info_ptr->save_p[regno] == REG_SAVE_1WORD
                    && regno != FRAME_POINTER_REGNUM
                    && (regno < FIRST_ARG_REGNUM
                        || regno > LAST_ARG_REGNUM))
                  {
                    info_ptr->reg_offset[regno] = offset;
                    offset += UNITS_PER_WORD;
                  }
            }
        }

      /* Calculate the offset to save the local variables at.  */
      offset = ADDR_ALIGN (offset, alignment);
      if (info_ptr->vars_size)
        {
          info_ptr->vars_offset = offset;
          offset += info_ptr->vars_size;
        }

      /* Align header to a dword-boundary.  */
      offset = ADDR_ALIGN (offset, 2 * UNITS_PER_WORD);

      /* Calculate the offsets in the fixed frame.  */
      info_ptr->save_p[FRAME_POINTER_REGNUM] = REG_SAVE_1WORD;
      info_ptr->reg_offset[FRAME_POINTER_REGNUM] = offset;
      info_ptr->regs[STACK_REGS_FP].size_1word = UNITS_PER_WORD;

      info_ptr->save_p[LR_REGNO] = REG_SAVE_1WORD;
      info_ptr->reg_offset[LR_REGNO] = offset + 2 * UNITS_PER_WORD;
      info_ptr->regs[STACK_REGS_LR].size_1word = UNITS_PER_WORD;

      if (cfun->returns_struct)
        {
          info_ptr->save_p[FRV_STRUCT_VALUE_REGNUM] = REG_SAVE_1WORD;
          info_ptr->reg_offset[FRV_STRUCT_VALUE_REGNUM]
            = offset + UNITS_PER_WORD;
          info_ptr->regs[STACK_REGS_STRUCT].size_1word = UNITS_PER_WORD;
        }

      /* Calculate the offsets to store the arguments passed in registers
         for stdarg functions.  The register pairs are first and the single
         register if any is last.  The register save area starts on a
         dword-boundary.  */
      if (info_ptr->stdarg_size)
        {
          int first = info_ptr->regs[STACK_REGS_STDARG].first;
          int last = info_ptr->regs[STACK_REGS_STDARG].last;
          int regno;

          /* Skip the header.  */
          offset += 4 * UNITS_PER_WORD;
          for (regno = first; regno <= last; regno++)
            {
              if (info_ptr->save_p[regno] == REG_SAVE_2WORDS)
                {
                  info_ptr->reg_offset[regno] = offset;
                  offset += 2 * UNITS_PER_WORD;
                }
              else if (info_ptr->save_p[regno] == REG_SAVE_1WORD)
                {
                  info_ptr->reg_offset[regno] = offset;
                  offset += UNITS_PER_WORD;
                }
            }
        }
    }

  if (reload_completed)
    frv_stack_cache = info_ptr;

  return info_ptr;
}

/* Print the information about the frv stack offsets, etc. when debugging. */
|
1403 |
|
|
|
1404 |
|
|
void
|
1405 |
|
|
frv_debug_stack (frv_stack_t *info)
|
1406 |
|
|
{
|
1407 |
|
|
int range;
|
1408 |
|
|
|
1409 |
|
|
if (!info)
|
1410 |
|
|
info = frv_stack_info ();
|
1411 |
|
|
|
1412 |
|
|
fprintf (stderr, "\nStack information for function %s:\n",
|
1413 |
|
|
((current_function_decl && DECL_NAME (current_function_decl))
|
1414 |
|
|
? IDENTIFIER_POINTER (DECL_NAME (current_function_decl))
|
1415 |
|
|
: "<unknown>"));
|
1416 |
|
|
|
1417 |
|
|
fprintf (stderr, "\ttotal_size\t= %6d\n", info->total_size);
|
1418 |
|
|
fprintf (stderr, "\tvars_size\t= %6d\n", info->vars_size);
|
1419 |
|
|
fprintf (stderr, "\tparam_size\t= %6d\n", info->parameter_size);
|
1420 |
|
|
fprintf (stderr, "\tregs_size\t= %6d, 1w = %3d, 2w = %3d\n",
|
1421 |
|
|
info->regs_size, info->regs_size_1word, info->regs_size_2words);
|
1422 |
|
|
|
1423 |
|
|
fprintf (stderr, "\theader_size\t= %6d\n", info->header_size);
|
1424 |
|
|
fprintf (stderr, "\tpretend_size\t= %6d\n", info->pretend_size);
|
1425 |
|
|
fprintf (stderr, "\tvars_offset\t= %6d\n", info->vars_offset);
|
1426 |
|
|
fprintf (stderr, "\tregs_offset\t= %6d\n", info->regs_offset);
|
1427 |
|
|
|
1428 |
|
|
for (range = 0; range < STACK_REGS_MAX; range++)
|
1429 |
|
|
{
|
1430 |
|
|
frv_stack_regs_t *regs = &(info->regs[range]);
|
1431 |
|
|
if ((regs->size_1word + regs->size_2words) > 0)
|
1432 |
|
|
{
|
1433 |
|
|
int first = regs->first;
|
1434 |
|
|
int last = regs->last;
|
1435 |
|
|
int regno;
|
1436 |
|
|
|
1437 |
|
|
fprintf (stderr, "\t%s\tsize\t= %6d, 1w = %3d, 2w = %3d, save =",
|
1438 |
|
|
regs->name, regs->size_1word + regs->size_2words,
|
1439 |
|
|
regs->size_1word, regs->size_2words);
|
1440 |
|
|
|
1441 |
|
|
for (regno = first; regno <= last; regno++)
|
1442 |
|
|
{
|
1443 |
|
|
if (info->save_p[regno] == REG_SAVE_1WORD)
|
1444 |
|
|
fprintf (stderr, " %s (%d)", reg_names[regno],
|
1445 |
|
|
info->reg_offset[regno]);
|
1446 |
|
|
|
1447 |
|
|
else if (info->save_p[regno] == REG_SAVE_2WORDS)
|
1448 |
|
|
fprintf (stderr, " %s-%s (%d)", reg_names[regno],
|
1449 |
|
|
reg_names[regno+1], info->reg_offset[regno]);
|
1450 |
|
|
}
|
1451 |
|
|
|
1452 |
|
|
fputc ('\n', stderr);
|
1453 |
|
|
}
|
1454 |
|
|
}
|
1455 |
|
|
|
1456 |
|
|
fflush (stderr);
|
1457 |
|
|
}
|
1458 |
|
|
|
1459 |
|
|
|
1460 |
|
|
|
1461 |
|
|
|
1462 |
|
|
/* Used during final to control the packing of insns.  The value is
   1 if the current instruction should be packed with the next one,
   0 if it should not be, or -1 if packing is disabled altogether.  */

static int frv_insn_packing_flag;
|
1467 |
|
|
|
1468 |
|
|
/* True if the current function contains a far jump. */
|
1469 |
|
|
|
1470 |
|
|
static int
|
1471 |
|
|
frv_function_contains_far_jump (void)
|
1472 |
|
|
{
|
1473 |
|
|
rtx insn = get_insns ();
|
1474 |
|
|
while (insn != NULL
|
1475 |
|
|
&& !(GET_CODE (insn) == JUMP_INSN
|
1476 |
|
|
/* Ignore tablejump patterns. */
|
1477 |
|
|
&& GET_CODE (PATTERN (insn)) != ADDR_VEC
|
1478 |
|
|
&& GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC
|
1479 |
|
|
&& get_attr_far_jump (insn) == FAR_JUMP_YES))
|
1480 |
|
|
insn = NEXT_INSN (insn);
|
1481 |
|
|
return (insn != NULL);
|
1482 |
|
|
}
|
1483 |
|
|
|
1484 |
|
|
/* For the FRV, this function makes sure that a function with far jumps
|
1485 |
|
|
will return correctly. It also does the VLIW packing. */
|
1486 |
|
|
|
1487 |
|
|
static void
|
1488 |
|
|
frv_function_prologue (FILE *file, HOST_WIDE_INT size ATTRIBUTE_UNUSED)
|
1489 |
|
|
{
|
1490 |
|
|
  /* If no frame was created, check whether the function uses a call
     instruction to implement a far jump.  If so, save the link in gr3 and
     replace all returns to LR with returns to GR3.  GR3 is used because it
     is call-clobbered, because it is not available to the register
     allocator, and because all functions that take a hidden argument
     pointer will have a stack frame.  */
|
1496 |
|
|
if (frv_stack_info ()->total_size == 0 && frv_function_contains_far_jump ())
|
1497 |
|
|
{
|
1498 |
|
|
rtx insn;
|
1499 |
|
|
|
1500 |
|
|
/* Just to check that the above comment is true. */
|
1501 |
|
|
gcc_assert (!regs_ever_live[GPR_FIRST + 3]);
|
1502 |
|
|
|
1503 |
|
|
/* Generate the instruction that saves the link register. */
|
1504 |
|
|
fprintf (file, "\tmovsg lr,gr3\n");
|
1505 |
|
|
|
1506 |
|
|
/* Replace the LR with GR3 in *return_internal patterns. The insn
|
1507 |
|
|
will now return using jmpl @(gr3,0) rather than bralr. We cannot
|
1508 |
|
|
simply emit a different assembly directive because bralr and jmpl
|
1509 |
|
|
execute in different units. */
|
1510 |
|
|
for (insn = get_insns(); insn != NULL; insn = NEXT_INSN (insn))
|
1511 |
|
|
if (GET_CODE (insn) == JUMP_INSN)
|
1512 |
|
|
{
|
1513 |
|
|
rtx pattern = PATTERN (insn);
|
1514 |
|
|
if (GET_CODE (pattern) == PARALLEL
|
1515 |
|
|
&& XVECLEN (pattern, 0) >= 2
|
1516 |
|
|
&& GET_CODE (XVECEXP (pattern, 0, 0)) == RETURN
|
1517 |
|
|
&& GET_CODE (XVECEXP (pattern, 0, 1)) == USE)
|
1518 |
|
|
{
|
1519 |
|
|
rtx address = XEXP (XVECEXP (pattern, 0, 1), 0);
|
1520 |
|
|
if (GET_CODE (address) == REG && REGNO (address) == LR_REGNO)
|
1521 |
|
|
REGNO (address) = GPR_FIRST + 3;
|
1522 |
|
|
}
|
1523 |
|
|
}
|
1524 |
|
|
}
|
1525 |
|
|
|
1526 |
|
|
frv_pack_insns ();
|
1527 |
|
|
|
1528 |
|
|
/* Allow the garbage collector to free the nops created by frv_reorg. */
|
1529 |
|
|
memset (frv_nops, 0, sizeof (frv_nops));
|
1530 |
|
|
}
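
/* A sketch of the far-jump rewrite performed above (illustrative assembly;
   the exact sequence depends on the function being compiled):

	movsg lr,gr3		; emitted textually by this hook
	...
	call  .Lfar_target	; far jump implemented as a call, clobbers LR
	...
	jmpl  @(gr3,0)		; *return_internal now returns through gr3

   instead of the usual `bralr' return through LR.  `.Lfar_target' is only
   a placeholder label for the example, not something this file emits.  */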
|
1531 |
|
|
|
1532 |
|
|
|
1533 |
|
|
/* Return the next available temporary register in a given class. */
|
1534 |
|
|
|
1535 |
|
|
static rtx
|
1536 |
|
|
frv_alloc_temp_reg (
|
1537 |
|
|
frv_tmp_reg_t *info, /* which registers are available */
|
1538 |
|
|
enum reg_class class, /* register class desired */
|
1539 |
|
|
enum machine_mode mode, /* mode to allocate register with */
|
1540 |
|
|
int mark_as_used, /* register not available after allocation */
|
1541 |
|
|
int no_abort) /* return NULL instead of aborting */
|
1542 |
|
|
{
|
1543 |
|
|
int regno = info->next_reg[ (int)class ];
|
1544 |
|
|
int orig_regno = regno;
|
1545 |
|
|
  HARD_REG_SET *reg_in_class = &reg_class_contents[ (int)class ];
|
1546 |
|
|
int i, nr;
|
1547 |
|
|
|
1548 |
|
|
for (;;)
|
1549 |
|
|
{
|
1550 |
|
|
if (TEST_HARD_REG_BIT (*reg_in_class, regno)
|
1551 |
|
|
&& TEST_HARD_REG_BIT (info->regs, regno))
|
1552 |
|
|
break;
|
1553 |
|
|
|
1554 |
|
|
if (++regno >= FIRST_PSEUDO_REGISTER)
|
1555 |
|
|
regno = 0;
|
1556 |
|
|
if (regno == orig_regno)
|
1557 |
|
|
{
|
1558 |
|
|
gcc_assert (no_abort);
|
1559 |
|
|
return NULL_RTX;
|
1560 |
|
|
}
|
1561 |
|
|
}
|
1562 |
|
|
|
1563 |
|
|
nr = HARD_REGNO_NREGS (regno, mode);
|
1564 |
|
|
info->next_reg[ (int)class ] = regno + nr;
|
1565 |
|
|
|
1566 |
|
|
if (mark_as_used)
|
1567 |
|
|
for (i = 0; i < nr; i++)
|
1568 |
|
|
CLEAR_HARD_REG_BIT (info->regs, regno+i);
|
1569 |
|
|
|
1570 |
|
|
return gen_rtx_REG (mode, regno);
|
1571 |
|
|
}
|
1572 |
|
|
|
1573 |
|
|
|
1574 |
|
|
/* Return an rtx with the value OFFSET, which will either be a register or a
|
1575 |
|
|
signed 12-bit integer. It can be used as the second operand in an "add"
|
1576 |
|
|
instruction, or as the index in a load or store.
|
1577 |
|
|
|
1578 |
|
|
The function returns a constant rtx if OFFSET is small enough, otherwise
|
1579 |
|
|
it loads the constant into register OFFSET_REGNO and returns that. */
|
1580 |
|
|
static rtx
|
1581 |
|
|
frv_frame_offset_rtx (int offset)
|
1582 |
|
|
{
|
1583 |
|
|
rtx offset_rtx = GEN_INT (offset);
|
1584 |
|
|
if (IN_RANGE_P (offset, -2048, 2047))
|
1585 |
|
|
return offset_rtx;
|
1586 |
|
|
else
|
1587 |
|
|
{
|
1588 |
|
|
rtx reg_rtx = gen_rtx_REG (SImode, OFFSET_REGNO);
|
1589 |
|
|
if (IN_RANGE_P (offset, -32768, 32767))
|
1590 |
|
|
emit_insn (gen_movsi (reg_rtx, offset_rtx));
|
1591 |
|
|
else
|
1592 |
|
|
{
|
1593 |
|
|
emit_insn (gen_movsi_high (reg_rtx, offset_rtx));
|
1594 |
|
|
emit_insn (gen_movsi_lo_sum (reg_rtx, offset_rtx));
|
1595 |
|
|
}
|
1596 |
|
|
return reg_rtx;
|
1597 |
|
|
}
|
1598 |
|
|
}
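
/* Illustrative behaviour of frv_frame_offset_rtx (a sketch of the three
   cases above, not captured compiler output):

     offset =    100  ->  (const_int 100), used directly as an immediate
     offset =   4096  ->  one movsi into OFFSET_REGNO, register returned
     offset = 100000  ->  sethi/setlo pair into OFFSET_REGNO, register
			  returned

   so callers can always use the result as the second operand of an add or
   as a load/store index.  */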
|
1599 |
|
|
|
1600 |
|
|
/* Generate (mem:MODE (plus:Pmode BASE (frv_frame_offset OFFSET))).  The
   prologue and epilogue use such expressions to access the stack.  */
|
1602 |
|
|
static rtx
|
1603 |
|
|
frv_frame_mem (enum machine_mode mode, rtx base, int offset)
|
1604 |
|
|
{
|
1605 |
|
|
return gen_rtx_MEM (mode, gen_rtx_PLUS (Pmode,
|
1606 |
|
|
base,
|
1607 |
|
|
frv_frame_offset_rtx (offset)));
|
1608 |
|
|
}
|
1609 |
|
|
|
1610 |
|
|
/* Generate a frame-related expression:

	(set (mem (plus (sp) (const_int OFFSET))) REG),

   i.e. the store of REG into its stack slot at offset OFFSET from the
   stack pointer.

   Such expressions are used in REG_FRAME_RELATED_EXPR notes for more complex
   instructions.  Marking the expressions as frame-related is superfluous if
   the note contains just a single set.  But if the note contains a PARALLEL
   or SEQUENCE that has several sets, each set must be individually marked
   as frame-related.  */
|
1619 |
|
|
static rtx
|
1620 |
|
|
frv_dwarf_store (rtx reg, int offset)
|
1621 |
|
|
{
|
1622 |
|
|
rtx set = gen_rtx_SET (VOIDmode,
|
1623 |
|
|
gen_rtx_MEM (GET_MODE (reg),
|
1624 |
|
|
plus_constant (stack_pointer_rtx,
|
1625 |
|
|
offset)),
|
1626 |
|
|
reg);
|
1627 |
|
|
RTX_FRAME_RELATED_P (set) = 1;
|
1628 |
|
|
return set;
|
1629 |
|
|
}
|
1630 |
|
|
|
1631 |
|
|
/* Emit a frame-related instruction whose pattern is PATTERN. The
|
1632 |
|
|
instruction is the last in a sequence that cumulatively performs the
|
1633 |
|
|
operation described by DWARF_PATTERN. The instruction is marked as
|
1634 |
|
|
frame-related and has a REG_FRAME_RELATED_EXPR note containing
|
1635 |
|
|
DWARF_PATTERN. */
|
1636 |
|
|
static void
|
1637 |
|
|
frv_frame_insn (rtx pattern, rtx dwarf_pattern)
|
1638 |
|
|
{
|
1639 |
|
|
rtx insn = emit_insn (pattern);
|
1640 |
|
|
RTX_FRAME_RELATED_P (insn) = 1;
|
1641 |
|
|
REG_NOTES (insn) = alloc_EXPR_LIST (REG_FRAME_RELATED_EXPR,
|
1642 |
|
|
dwarf_pattern,
|
1643 |
|
|
REG_NOTES (insn));
|
1644 |
|
|
}
|
1645 |
|
|
|
1646 |
|
|
/* Emit instructions that transfer REG to or from the memory location (sp +
|
1647 |
|
|
STACK_OFFSET). The register is stored in memory if ACCESSOR->OP is
|
1648 |
|
|
FRV_STORE and loaded if it is FRV_LOAD. Only the prologue uses this
|
1649 |
|
|
function to store registers and only the epilogue uses it to load them.
|
1650 |
|
|
|
1651 |
|
|
The caller sets up ACCESSOR so that BASE is equal to (sp + BASE_OFFSET).
|
1652 |
|
|
The generated instruction will use BASE as its base register. BASE may
|
1653 |
|
|
simply be the stack pointer, but if several accesses are being made to a
|
1654 |
|
|
region far away from the stack pointer, it may be more efficient to set
|
1655 |
|
|
up a temporary instead.
|
1656 |
|
|
|
1657 |
|
|
Store instructions will be frame-related and will be annotated with the
|
1658 |
|
|
overall effect of the store. Load instructions will be followed by a
|
1659 |
|
|
(use) to prevent later optimizations from zapping them.
|
1660 |
|
|
|
1661 |
|
|
The function takes care of the moves to and from SPRs, using TEMP_REGNO
|
1662 |
|
|
as a temporary in such cases. */
|
1663 |
|
|
static void
|
1664 |
|
|
frv_frame_access (frv_frame_accessor_t *accessor, rtx reg, int stack_offset)
|
1665 |
|
|
{
|
1666 |
|
|
enum machine_mode mode = GET_MODE (reg);
|
1667 |
|
|
rtx mem = frv_frame_mem (mode,
|
1668 |
|
|
accessor->base,
|
1669 |
|
|
stack_offset - accessor->base_offset);
|
1670 |
|
|
|
1671 |
|
|
if (accessor->op == FRV_LOAD)
|
1672 |
|
|
{
|
1673 |
|
|
if (SPR_P (REGNO (reg)))
|
1674 |
|
|
{
|
1675 |
|
|
rtx temp = gen_rtx_REG (mode, TEMP_REGNO);
|
1676 |
|
|
emit_insn (gen_rtx_SET (VOIDmode, temp, mem));
|
1677 |
|
|
emit_insn (gen_rtx_SET (VOIDmode, reg, temp));
|
1678 |
|
|
}
|
1679 |
|
|
else
|
1680 |
|
|
emit_insn (gen_rtx_SET (VOIDmode, reg, mem));
|
1681 |
|
|
emit_insn (gen_rtx_USE (VOIDmode, reg));
|
1682 |
|
|
}
|
1683 |
|
|
else
|
1684 |
|
|
{
|
1685 |
|
|
if (SPR_P (REGNO (reg)))
|
1686 |
|
|
{
|
1687 |
|
|
rtx temp = gen_rtx_REG (mode, TEMP_REGNO);
|
1688 |
|
|
emit_insn (gen_rtx_SET (VOIDmode, temp, reg));
|
1689 |
|
|
frv_frame_insn (gen_rtx_SET (Pmode, mem, temp),
|
1690 |
|
|
frv_dwarf_store (reg, stack_offset));
|
1691 |
|
|
}
|
1692 |
|
|
else if (GET_MODE (reg) == DImode)
|
1693 |
|
|
{
|
1694 |
|
|
/* For DImode saves, the dwarf2 version needs to be a SEQUENCE
|
1695 |
|
|
with a separate save for each register. */
|
1696 |
|
|
rtx reg1 = gen_rtx_REG (SImode, REGNO (reg));
|
1697 |
|
|
rtx reg2 = gen_rtx_REG (SImode, REGNO (reg) + 1);
|
1698 |
|
|
rtx set1 = frv_dwarf_store (reg1, stack_offset);
|
1699 |
|
|
rtx set2 = frv_dwarf_store (reg2, stack_offset + 4);
|
1700 |
|
|
frv_frame_insn (gen_rtx_SET (Pmode, mem, reg),
|
1701 |
|
|
gen_rtx_PARALLEL (VOIDmode,
|
1702 |
|
|
gen_rtvec (2, set1, set2)));
|
1703 |
|
|
}
|
1704 |
|
|
else
|
1705 |
|
|
frv_frame_insn (gen_rtx_SET (Pmode, mem, reg),
|
1706 |
|
|
frv_dwarf_store (reg, stack_offset));
|
1707 |
|
|
}
|
1708 |
|
|
}
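
/* For reference, a sketch of the REG_FRAME_RELATED_EXPR note built above
   for a DImode save at stack offset 8 (the register pair gr4/gr5 is chosen
   only for illustration):

	(parallel [(set (mem:SI (plus (sp) (const_int 8)))  (reg:SI gr4))
		   (set (mem:SI (plus (sp) (const_int 12))) (reg:SI gr5))])

   i.e. the dword store is described to the unwinder as two word-sized
   saves.  */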
|
1709 |
|
|
|
1710 |
|
|
/* A function that uses frv_frame_access to transfer a group of registers to
|
1711 |
|
|
or from the stack. ACCESSOR is passed directly to frv_frame_access, INFO
|
1712 |
|
|
is the stack information generated by frv_stack_info, and REG_SET is the
|
1713 |
|
|
number of the register set to transfer. */
|
1714 |
|
|
static void
|
1715 |
|
|
frv_frame_access_multi (frv_frame_accessor_t *accessor,
|
1716 |
|
|
frv_stack_t *info,
|
1717 |
|
|
int reg_set)
|
1718 |
|
|
{
|
1719 |
|
|
frv_stack_regs_t *regs_info;
|
1720 |
|
|
int regno;
|
1721 |
|
|
|
1722 |
|
|
regs_info = &info->regs[reg_set];
|
1723 |
|
|
for (regno = regs_info->first; regno <= regs_info->last; regno++)
|
1724 |
|
|
if (info->save_p[regno])
|
1725 |
|
|
frv_frame_access (accessor,
|
1726 |
|
|
info->save_p[regno] == REG_SAVE_2WORDS
|
1727 |
|
|
? gen_rtx_REG (DImode, regno)
|
1728 |
|
|
: gen_rtx_REG (SImode, regno),
|
1729 |
|
|
info->reg_offset[regno]);
|
1730 |
|
|
}
|
1731 |
|
|
|
1732 |
|
|
/* Save or restore callee-saved registers that are kept outside the frame
|
1733 |
|
|
header. The function saves the registers if OP is FRV_STORE and restores
|
1734 |
|
|
them if OP is FRV_LOAD. INFO is the stack information generated by
|
1735 |
|
|
frv_stack_info. */
|
1736 |
|
|
static void
|
1737 |
|
|
frv_frame_access_standard_regs (enum frv_stack_op op, frv_stack_t *info)
|
1738 |
|
|
{
|
1739 |
|
|
frv_frame_accessor_t accessor;
|
1740 |
|
|
|
1741 |
|
|
accessor.op = op;
|
1742 |
|
|
accessor.base = stack_pointer_rtx;
|
1743 |
|
|
accessor.base_offset = 0;
|
1744 |
|
|
frv_frame_access_multi (&accessor, info, STACK_REGS_GPR);
|
1745 |
|
|
frv_frame_access_multi (&accessor, info, STACK_REGS_FPR);
|
1746 |
|
|
frv_frame_access_multi (&accessor, info, STACK_REGS_LCR);
|
1747 |
|
|
}
|
1748 |
|
|
|
1749 |
|
|
|
1750 |
|
|
/* Called after register allocation to add any instructions needed for the
|
1751 |
|
|
prologue. Using a prologue insn is favored compared to putting all of the
|
1752 |
|
|
instructions in the TARGET_ASM_FUNCTION_PROLOGUE target hook, since
|
1753 |
|
|
it allows the scheduler to intermix instructions with the saves of
|
1754 |
|
|
the caller saved registers. In some cases, it might be necessary
|
1755 |
|
|
to emit a barrier instruction as the last insn to prevent such
|
1756 |
|
|
scheduling.
|
1757 |
|
|
|
1758 |
|
|
Also any insns generated here should have RTX_FRAME_RELATED_P(insn) = 1
|
1759 |
|
|
so that the debug info generation code can handle them properly. */
|
1760 |
|
|
void
|
1761 |
|
|
frv_expand_prologue (void)
|
1762 |
|
|
{
|
1763 |
|
|
frv_stack_t *info = frv_stack_info ();
|
1764 |
|
|
rtx sp = stack_pointer_rtx;
|
1765 |
|
|
rtx fp = frame_pointer_rtx;
|
1766 |
|
|
frv_frame_accessor_t accessor;
|
1767 |
|
|
|
1768 |
|
|
if (TARGET_DEBUG_STACK)
|
1769 |
|
|
frv_debug_stack (info);
|
1770 |
|
|
|
1771 |
|
|
if (info->total_size == 0)
|
1772 |
|
|
return;
|
1773 |
|
|
|
1774 |
|
|
/* We're interested in three areas of the frame here:
|
1775 |
|
|
|
1776 |
|
|
A: the register save area
|
1777 |
|
|
B: the old FP
|
1778 |
|
|
C: the header after B
|
1779 |
|
|
|
1780 |
|
|
If the frame pointer isn't used, we'll have to set up A, B and C
|
1781 |
|
|
using the stack pointer. If the frame pointer is used, we'll access
|
1782 |
|
|
them as follows:
|
1783 |
|
|
|
1784 |
|
|
A: set up using sp
|
1785 |
|
|
B: set up using sp or a temporary (see below)
|
1786 |
|
|
C: set up using fp
|
1787 |
|
|
|
1788 |
|
|
We set up B using the stack pointer if the frame is small enough.
|
1789 |
|
|
Otherwise, it's more efficient to copy the old stack pointer into a
|
1790 |
|
|
temporary and use that.
|
1791 |
|
|
|
1792 |
|
|
Note that it's important to make sure the prologue and epilogue use the
|
1793 |
|
|
same registers to access A and C, since doing otherwise will confuse
|
1794 |
|
|
the aliasing code. */
|
1795 |
|
|
|
1796 |
|
|
/* Set up ACCESSOR for accessing region B above. If the frame pointer
|
1797 |
|
|
isn't used, the same method will serve for C. */
|
1798 |
|
|
accessor.op = FRV_STORE;
|
1799 |
|
|
if (frame_pointer_needed && info->total_size > 2048)
|
1800 |
|
|
{
|
1801 |
|
|
rtx insn;
|
1802 |
|
|
|
1803 |
|
|
accessor.base = gen_rtx_REG (Pmode, OLD_SP_REGNO);
|
1804 |
|
|
accessor.base_offset = info->total_size;
|
1805 |
|
|
insn = emit_insn (gen_movsi (accessor.base, sp));
|
1806 |
|
|
}
|
1807 |
|
|
else
|
1808 |
|
|
{
|
1809 |
|
|
accessor.base = stack_pointer_rtx;
|
1810 |
|
|
accessor.base_offset = 0;
|
1811 |
|
|
}
|
1812 |
|
|
|
1813 |
|
|
/* Allocate the stack space. */
|
1814 |
|
|
{
|
1815 |
|
|
rtx asm_offset = frv_frame_offset_rtx (-info->total_size);
|
1816 |
|
|
rtx dwarf_offset = GEN_INT (-info->total_size);
|
1817 |
|
|
|
1818 |
|
|
frv_frame_insn (gen_stack_adjust (sp, sp, asm_offset),
|
1819 |
|
|
gen_rtx_SET (Pmode,
|
1820 |
|
|
sp,
|
1821 |
|
|
gen_rtx_PLUS (Pmode, sp, dwarf_offset)));
|
1822 |
|
|
}
|
1823 |
|
|
|
1824 |
|
|
/* If the frame pointer is needed, store the old one at (sp + FP_OFFSET)
|
1825 |
|
|
and point the new one to that location. */
|
1826 |
|
|
if (frame_pointer_needed)
|
1827 |
|
|
{
|
1828 |
|
|
int fp_offset = info->reg_offset[FRAME_POINTER_REGNUM];
|
1829 |
|
|
|
1830 |
|
|
/* ASM_SRC and DWARF_SRC both point to the frame header. ASM_SRC is
|
1831 |
|
|
based on ACCESSOR.BASE but DWARF_SRC is always based on the stack
|
1832 |
|
|
pointer. */
|
1833 |
|
|
rtx asm_src = plus_constant (accessor.base,
|
1834 |
|
|
fp_offset - accessor.base_offset);
|
1835 |
|
|
rtx dwarf_src = plus_constant (sp, fp_offset);
|
1836 |
|
|
|
1837 |
|
|
/* Store the old frame pointer at (sp + FP_OFFSET). */
|
1838 |
|
|
frv_frame_access (&accessor, fp, fp_offset);
|
1839 |
|
|
|
1840 |
|
|
/* Set up the new frame pointer. */
|
1841 |
|
|
frv_frame_insn (gen_rtx_SET (VOIDmode, fp, asm_src),
|
1842 |
|
|
gen_rtx_SET (VOIDmode, fp, dwarf_src));
|
1843 |
|
|
|
1844 |
|
|
/* Access region C from the frame pointer. */
|
1845 |
|
|
accessor.base = fp;
|
1846 |
|
|
accessor.base_offset = fp_offset;
|
1847 |
|
|
}
|
1848 |
|
|
|
1849 |
|
|
/* Set up region C. */
|
1850 |
|
|
frv_frame_access_multi (&accessor, info, STACK_REGS_STRUCT);
|
1851 |
|
|
frv_frame_access_multi (&accessor, info, STACK_REGS_LR);
|
1852 |
|
|
frv_frame_access_multi (&accessor, info, STACK_REGS_STDARG);
|
1853 |
|
|
|
1854 |
|
|
/* Set up region A. */
|
1855 |
|
|
frv_frame_access_standard_regs (FRV_STORE, info);
|
1856 |
|
|
|
1857 |
|
|
/* If this is a varargs/stdarg function, issue a blockage to prevent the
|
1858 |
|
|
scheduler from moving loads before the stores saving the registers. */
|
1859 |
|
|
if (info->stdarg_size > 0)
|
1860 |
|
|
emit_insn (gen_blockage ());
|
1861 |
|
|
|
1862 |
|
|
/* Set up pic register/small data register for this function. */
|
1863 |
|
|
if (!TARGET_FDPIC && flag_pic && cfun->uses_pic_offset_table)
|
1864 |
|
|
emit_insn (gen_pic_prologue (gen_rtx_REG (Pmode, PIC_REGNO),
|
1865 |
|
|
gen_rtx_REG (Pmode, LR_REGNO),
|
1866 |
|
|
gen_rtx_REG (SImode, OFFSET_REGNO)));
|
1867 |
|
|
}
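
/* A rough sketch of the prologue generated for a small frame (illustrative
   only; mnemonics, registers and offsets are assumptions rather than
   verified output, and gr4 merely stands in for TEMP_REGNO):

	addi  sp,#-32,sp	; allocate the frame
	sti   fp,@(sp,16)	; save the old frame pointer in the header
	addi  sp,#16,fp		; point the new fp at the header
	movsg lr,gr4		; SPRs are staged through a GPR temporary
	sti   gr4,@(fp,8)	; save the return address
	...			; region A saved via frv_frame_access_multi

   When a frame pointer is needed and the frame exceeds 2048 bytes, the old
   sp is first copied into OLD_SP_REGNO and large offsets are loaded through
   OFFSET_REGNO, as described above.  */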
|
1868 |
|
|
|
1869 |
|
|
|
1870 |
|
|
/* Under frv, all of the work is done via frv_expand_epilogue, but
|
1871 |
|
|
this function provides a convenient place to do cleanup. */
|
1872 |
|
|
|
1873 |
|
|
static void
|
1874 |
|
|
frv_function_epilogue (FILE *file ATTRIBUTE_UNUSED,
|
1875 |
|
|
HOST_WIDE_INT size ATTRIBUTE_UNUSED)
|
1876 |
|
|
{
|
1877 |
|
|
frv_stack_cache = (frv_stack_t *)0;
|
1878 |
|
|
|
1879 |
|
|
/* Zap last used registers for conditional execution. */
|
1880 |
|
|
memset (&frv_ifcvt.tmp_reg, 0, sizeof (frv_ifcvt.tmp_reg));
|
1881 |
|
|
|
1882 |
|
|
/* Release the bitmap of created insns. */
|
1883 |
|
|
BITMAP_FREE (frv_ifcvt.scratch_insns_bitmap);
|
1884 |
|
|
}
|
1885 |
|
|
|
1886 |
|
|
|
1887 |
|
|
/* Called after register allocation to add any instructions needed for the
   epilogue.  Using an epilogue insn is favored compared to putting all of
   the instructions in the TARGET_ASM_FUNCTION_EPILOGUE target hook, since
   it allows the scheduler to intermix instructions with the restores of
   the caller saved registers.  In some cases, it might be necessary
   to emit a barrier instruction as the last insn to prevent such
   scheduling.  */
|
1894 |
|
|
|
1895 |
|
|
void
|
1896 |
|
|
frv_expand_epilogue (bool emit_return)
|
1897 |
|
|
{
|
1898 |
|
|
frv_stack_t *info = frv_stack_info ();
|
1899 |
|
|
rtx fp = frame_pointer_rtx;
|
1900 |
|
|
rtx sp = stack_pointer_rtx;
|
1901 |
|
|
rtx return_addr;
|
1902 |
|
|
int fp_offset;
|
1903 |
|
|
|
1904 |
|
|
fp_offset = info->reg_offset[FRAME_POINTER_REGNUM];
|
1905 |
|
|
|
1906 |
|
|
/* Restore the stack pointer to its original value if alloca or the like
|
1907 |
|
|
is used. */
|
1908 |
|
|
if (! current_function_sp_is_unchanging)
|
1909 |
|
|
emit_insn (gen_addsi3 (sp, fp, frv_frame_offset_rtx (-fp_offset)));
|
1910 |
|
|
|
1911 |
|
|
/* Restore the callee-saved registers that were used in this function. */
|
1912 |
|
|
frv_frame_access_standard_regs (FRV_LOAD, info);
|
1913 |
|
|
|
1914 |
|
|
/* Set RETURN_ADDR to the address we should return to. Set it to NULL if
|
1915 |
|
|
no return instruction should be emitted. */
|
1916 |
|
|
if (info->save_p[LR_REGNO])
|
1917 |
|
|
{
|
1918 |
|
|
int lr_offset;
|
1919 |
|
|
rtx mem;
|
1920 |
|
|
|
1921 |
|
|
/* Use the same method to access the link register's slot as we did in
|
1922 |
|
|
the prologue. In other words, use the frame pointer if available,
|
1923 |
|
|
otherwise use the stack pointer.
|
1924 |
|
|
|
1925 |
|
|
LR_OFFSET is the offset of the link register's slot from the start
|
1926 |
|
|
of the frame and MEM is a memory rtx for it. */
|
1927 |
|
|
lr_offset = info->reg_offset[LR_REGNO];
|
1928 |
|
|
if (frame_pointer_needed)
|
1929 |
|
|
mem = frv_frame_mem (Pmode, fp, lr_offset - fp_offset);
|
1930 |
|
|
else
|
1931 |
|
|
mem = frv_frame_mem (Pmode, sp, lr_offset);
|
1932 |
|
|
|
1933 |
|
|
/* Load the old link register into a GPR. */
|
1934 |
|
|
return_addr = gen_rtx_REG (Pmode, TEMP_REGNO);
|
1935 |
|
|
emit_insn (gen_rtx_SET (VOIDmode, return_addr, mem));
|
1936 |
|
|
}
|
1937 |
|
|
else
|
1938 |
|
|
return_addr = gen_rtx_REG (Pmode, LR_REGNO);
|
1939 |
|
|
|
1940 |
|
|
/* Restore the old frame pointer. Emit a USE afterwards to make sure
|
1941 |
|
|
the load is preserved. */
|
1942 |
|
|
if (frame_pointer_needed)
|
1943 |
|
|
{
|
1944 |
|
|
emit_insn (gen_rtx_SET (VOIDmode, fp, gen_rtx_MEM (Pmode, fp)));
|
1945 |
|
|
emit_insn (gen_rtx_USE (VOIDmode, fp));
|
1946 |
|
|
}
|
1947 |
|
|
|
1948 |
|
|
/* Deallocate the stack frame. */
|
1949 |
|
|
if (info->total_size != 0)
|
1950 |
|
|
{
|
1951 |
|
|
rtx offset = frv_frame_offset_rtx (info->total_size);
|
1952 |
|
|
emit_insn (gen_stack_adjust (sp, sp, offset));
|
1953 |
|
|
}
|
1954 |
|
|
|
1955 |
|
|
/* If this function uses eh_return, add the final stack adjustment now. */
|
1956 |
|
|
if (current_function_calls_eh_return)
|
1957 |
|
|
emit_insn (gen_stack_adjust (sp, sp, EH_RETURN_STACKADJ_RTX));
|
1958 |
|
|
|
1959 |
|
|
if (emit_return)
|
1960 |
|
|
emit_jump_insn (gen_epilogue_return (return_addr));
|
1961 |
|
|
else
|
1962 |
|
|
{
|
1963 |
|
|
rtx lr = return_addr;
|
1964 |
|
|
|
1965 |
|
|
if (REGNO (return_addr) != LR_REGNO)
|
1966 |
|
|
{
|
1967 |
|
|
lr = gen_rtx_REG (Pmode, LR_REGNO);
|
1968 |
|
|
emit_move_insn (lr, return_addr);
|
1969 |
|
|
}
|
1970 |
|
|
|
1971 |
|
|
emit_insn (gen_rtx_USE (VOIDmode, lr));
|
1972 |
|
|
}
|
1973 |
|
|
}
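
/* In outline, the matching epilogue reloads the return address (through a
   GPR temporary when LR itself was saved on the stack), restores the old
   frame pointer from @(fp,0), deallocates the frame with a positive
   stack_adjust, and then either emits an epilogue_return jump or simply
   marks the return register as used when the caller will emit the jump
   itself.  */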
|
1974 |
|
|
|
1975 |
|
|
|
1976 |
|
|
/* Worker function for TARGET_ASM_OUTPUT_MI_THUNK. */
|
1977 |
|
|
|
1978 |
|
|
static void
|
1979 |
|
|
frv_asm_output_mi_thunk (FILE *file,
|
1980 |
|
|
tree thunk_fndecl ATTRIBUTE_UNUSED,
|
1981 |
|
|
HOST_WIDE_INT delta,
|
1982 |
|
|
HOST_WIDE_INT vcall_offset ATTRIBUTE_UNUSED,
|
1983 |
|
|
tree function)
|
1984 |
|
|
{
|
1985 |
|
|
const char *name_func = XSTR (XEXP (DECL_RTL (function), 0), 0);
|
1986 |
|
|
const char *name_arg0 = reg_names[FIRST_ARG_REGNUM];
|
1987 |
|
|
const char *name_jmp = reg_names[JUMP_REGNO];
|
1988 |
|
|
const char *parallel = (frv_issue_rate () > 1 ? ".p" : "");
|
1989 |
|
|
|
1990 |
|
|
/* Do the add using an addi if possible. */
|
1991 |
|
|
if (IN_RANGE_P (delta, -2048, 2047))
|
1992 |
|
|
fprintf (file, "\taddi %s,#%d,%s\n", name_arg0, (int) delta, name_arg0);
|
1993 |
|
|
else
|
1994 |
|
|
{
|
1995 |
|
|
const char *const name_add = reg_names[TEMP_REGNO];
|
1996 |
|
|
fprintf (file, "\tsethi%s #hi(" HOST_WIDE_INT_PRINT_DEC "),%s\n",
|
1997 |
|
|
parallel, delta, name_add);
|
1998 |
|
|
fprintf (file, "\tsetlo #lo(" HOST_WIDE_INT_PRINT_DEC "),%s\n",
|
1999 |
|
|
delta, name_add);
|
2000 |
|
|
fprintf (file, "\tadd %s,%s,%s\n", name_add, name_arg0, name_arg0);
|
2001 |
|
|
}
|
2002 |
|
|
|
2003 |
|
|
if (TARGET_FDPIC)
|
2004 |
|
|
{
|
2005 |
|
|
const char *name_pic = reg_names[FDPIC_REGNO];
|
2006 |
|
|
name_jmp = reg_names[FDPIC_FPTR_REGNO];
|
2007 |
|
|
|
2008 |
|
|
if (flag_pic != 1)
|
2009 |
|
|
{
|
2010 |
|
|
fprintf (file, "\tsethi%s #gotofffuncdeschi(", parallel);
|
2011 |
|
|
assemble_name (file, name_func);
|
2012 |
|
|
fprintf (file, "),%s\n", name_jmp);
|
2013 |
|
|
|
2014 |
|
|
fprintf (file, "\tsetlo #gotofffuncdesclo(");
|
2015 |
|
|
assemble_name (file, name_func);
|
2016 |
|
|
fprintf (file, "),%s\n", name_jmp);
|
2017 |
|
|
|
2018 |
|
|
fprintf (file, "\tldd @(%s,%s), %s\n", name_jmp, name_pic, name_jmp);
|
2019 |
|
|
}
|
2020 |
|
|
else
|
2021 |
|
|
{
|
2022 |
|
|
fprintf (file, "\tlddo @(%s,#gotofffuncdesc12(", name_pic);
|
2023 |
|
|
assemble_name (file, name_func);
|
2024 |
|
|
fprintf (file, "\t)), %s\n", name_jmp);
|
2025 |
|
|
}
|
2026 |
|
|
}
|
2027 |
|
|
else if (!flag_pic)
|
2028 |
|
|
{
|
2029 |
|
|
fprintf (file, "\tsethi%s #hi(", parallel);
|
2030 |
|
|
assemble_name (file, name_func);
|
2031 |
|
|
fprintf (file, "),%s\n", name_jmp);
|
2032 |
|
|
|
2033 |
|
|
fprintf (file, "\tsetlo #lo(");
|
2034 |
|
|
assemble_name (file, name_func);
|
2035 |
|
|
fprintf (file, "),%s\n", name_jmp);
|
2036 |
|
|
}
|
2037 |
|
|
else
|
2038 |
|
|
{
|
2039 |
|
|
/* Use JUMP_REGNO as a temporary PIC register. */
|
2040 |
|
|
const char *name_lr = reg_names[LR_REGNO];
|
2041 |
|
|
const char *name_gppic = name_jmp;
|
2042 |
|
|
const char *name_tmp = reg_names[TEMP_REGNO];
|
2043 |
|
|
|
2044 |
|
|
fprintf (file, "\tmovsg %s,%s\n", name_lr, name_tmp);
|
2045 |
|
|
fprintf (file, "\tcall 1f\n");
|
2046 |
|
|
fprintf (file, "1:\tmovsg %s,%s\n", name_lr, name_gppic);
|
2047 |
|
|
fprintf (file, "\tmovgs %s,%s\n", name_tmp, name_lr);
|
2048 |
|
|
fprintf (file, "\tsethi%s #gprelhi(1b),%s\n", parallel, name_tmp);
|
2049 |
|
|
fprintf (file, "\tsetlo #gprello(1b),%s\n", name_tmp);
|
2050 |
|
|
fprintf (file, "\tsub %s,%s,%s\n", name_gppic, name_tmp, name_gppic);
|
2051 |
|
|
|
2052 |
|
|
fprintf (file, "\tsethi%s #gprelhi(", parallel);
|
2053 |
|
|
assemble_name (file, name_func);
|
2054 |
|
|
fprintf (file, "),%s\n", name_tmp);
|
2055 |
|
|
|
2056 |
|
|
fprintf (file, "\tsetlo #gprello(");
|
2057 |
|
|
assemble_name (file, name_func);
|
2058 |
|
|
fprintf (file, "),%s\n", name_tmp);
|
2059 |
|
|
|
2060 |
|
|
fprintf (file, "\tadd %s,%s,%s\n", name_gppic, name_tmp, name_jmp);
|
2061 |
|
|
}
|
2062 |
|
|
|
2063 |
|
|
/* Jump to the function address. */
|
2064 |
|
|
fprintf (file, "\tjmpl @(%s,%s)\n", name_jmp, reg_names[GPR_FIRST+0]);
|
2065 |
|
|
}
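
/* A sketch of the thunk body produced above for a small DELTA with neither
   -fpic nor FDPIC (illustrative; `target_fn' is a placeholder, and gr8/gr14
   are only assumed values of FIRST_ARG_REGNUM and JUMP_REGNO):

	addi   gr8,#8,gr8		; adjust the incoming `this' pointer
	sethi  #hi(target_fn),gr14
	setlo  #lo(target_fn),gr14
	jmpl   @(gr14,gr0)		; tail-jump to the real function

   The FDPIC and PIC branches above materialise the target address
   differently but end with the same jmpl.  */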
|
2066 |
|
|
|
2067 |
|
|
|
2068 |
|
|
/* A C expression which is nonzero if a function must have and use a frame
|
2069 |
|
|
pointer. This expression is evaluated in the reload pass. If its value is
|
2070 |
|
|
nonzero the function will have a frame pointer.
|
2071 |
|
|
|
2072 |
|
|
The expression can in principle examine the current function and decide
|
2073 |
|
|
according to the facts, but on most machines the constant 0 or the constant
|
2074 |
|
|
1 suffices. Use 0 when the machine allows code to be generated with no
|
2075 |
|
|
frame pointer, and doing so saves some time or space. Use 1 when there is
|
2076 |
|
|
no possible advantage to avoiding a frame pointer.
|
2077 |
|
|
|
2078 |
|
|
In certain cases, the compiler does not know how to produce valid code
|
2079 |
|
|
without a frame pointer. The compiler recognizes those cases and
|
2080 |
|
|
automatically gives the function a frame pointer regardless of what
|
2081 |
|
|
`FRAME_POINTER_REQUIRED' says. You don't need to worry about them.
|
2082 |
|
|
|
2083 |
|
|
In a function that does not require a frame pointer, the frame pointer
|
2084 |
|
|
register can be allocated for ordinary usage, unless you mark it as a fixed
|
2085 |
|
|
register. See `FIXED_REGISTERS' for more information. */
|
2086 |
|
|
|
2087 |
|
|
/* On frv, create a frame whenever we need to create stack. */
|
2088 |
|
|
|
2089 |
|
|
int
|
2090 |
|
|
frv_frame_pointer_required (void)
|
2091 |
|
|
{
|
2092 |
|
|
  /* If we are forgoing the usual linkage requirements, we only need
     a frame pointer if the stack pointer might change.  */
|
2094 |
|
|
if (!TARGET_LINKED_FP)
|
2095 |
|
|
return !current_function_sp_is_unchanging;
|
2096 |
|
|
|
2097 |
|
|
if (! current_function_is_leaf)
|
2098 |
|
|
return TRUE;
|
2099 |
|
|
|
2100 |
|
|
if (get_frame_size () != 0)
|
2101 |
|
|
return TRUE;
|
2102 |
|
|
|
2103 |
|
|
if (cfun->stdarg)
|
2104 |
|
|
return TRUE;
|
2105 |
|
|
|
2106 |
|
|
if (!current_function_sp_is_unchanging)
|
2107 |
|
|
return TRUE;
|
2108 |
|
|
|
2109 |
|
|
if (!TARGET_FDPIC && flag_pic && cfun->uses_pic_offset_table)
|
2110 |
|
|
return TRUE;
|
2111 |
|
|
|
2112 |
|
|
if (profile_flag)
|
2113 |
|
|
return TRUE;
|
2114 |
|
|
|
2115 |
|
|
if (cfun->machine->frame_needed)
|
2116 |
|
|
return TRUE;
|
2117 |
|
|
|
2118 |
|
|
return FALSE;
|
2119 |
|
|
}
|
2120 |
|
|
|
2121 |
|
|
|
2122 |
|
|
/* This macro is similar to `INITIAL_FRAME_POINTER_OFFSET'. It specifies the
|
2123 |
|
|
initial difference between the specified pair of registers. This macro must
|
2124 |
|
|
be defined if `ELIMINABLE_REGS' is defined. */
|
2125 |
|
|
|
2126 |
|
|
/* See frv_stack_info for more details on the frv stack frame. */
|
2127 |
|
|
|
2128 |
|
|
int
|
2129 |
|
|
frv_initial_elimination_offset (int from, int to)
|
2130 |
|
|
{
|
2131 |
|
|
frv_stack_t *info = frv_stack_info ();
|
2132 |
|
|
int ret = 0;
|
2133 |
|
|
|
2134 |
|
|
if (to == STACK_POINTER_REGNUM && from == ARG_POINTER_REGNUM)
|
2135 |
|
|
ret = info->total_size - info->pretend_size;
|
2136 |
|
|
|
2137 |
|
|
else if (to == STACK_POINTER_REGNUM && from == FRAME_POINTER_REGNUM)
|
2138 |
|
|
ret = info->reg_offset[FRAME_POINTER_REGNUM];
|
2139 |
|
|
|
2140 |
|
|
else if (to == FRAME_POINTER_REGNUM && from == ARG_POINTER_REGNUM)
|
2141 |
|
|
ret = (info->total_size
|
2142 |
|
|
- info->reg_offset[FRAME_POINTER_REGNUM]
|
2143 |
|
|
- info->pretend_size);
|
2144 |
|
|
|
2145 |
|
|
else
|
2146 |
|
|
gcc_unreachable ();
|
2147 |
|
|
|
2148 |
|
|
if (TARGET_DEBUG_STACK)
|
2149 |
|
|
fprintf (stderr, "Eliminate %s to %s by adding %d\n",
|
2150 |
|
|
reg_names [from], reg_names[to], ret);
|
2151 |
|
|
|
2152 |
|
|
return ret;
|
2153 |
|
|
}
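
/* A worked example of the three eliminations (sizes assumed for
   illustration): with total_size = 80, pretend_size = 0 and the old frame
   pointer saved at reg_offset 16,

     ARG_POINTER   -> STACK_POINTER   adds 80
     FRAME_POINTER -> STACK_POINTER   adds 16
     ARG_POINTER   -> FRAME_POINTER   adds 80 - 16 - 0 = 64

   which is self-consistent: eliminating AP through FP and then FP through
   SP adds the same 80 bytes as the direct AP -> SP elimination.  */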
|
2154 |
|
|
|
2155 |
|
|
|
2156 |
|
|
/* Worker function for TARGET_SETUP_INCOMING_VARARGS. */
|
2157 |
|
|
|
2158 |
|
|
static void
|
2159 |
|
|
frv_setup_incoming_varargs (CUMULATIVE_ARGS *cum,
|
2160 |
|
|
enum machine_mode mode,
|
2161 |
|
|
tree type ATTRIBUTE_UNUSED,
|
2162 |
|
|
int *pretend_size,
|
2163 |
|
|
int second_time)
|
2164 |
|
|
{
|
2165 |
|
|
if (TARGET_DEBUG_ARG)
|
2166 |
|
|
fprintf (stderr,
|
2167 |
|
|
"setup_vararg: words = %2d, mode = %4s, pretend_size = %d, second_time = %d\n",
|
2168 |
|
|
*cum, GET_MODE_NAME (mode), *pretend_size, second_time);
|
2169 |
|
|
}
|
2170 |
|
|
|
2171 |
|
|
|
2172 |
|
|
/* Worker function for TARGET_EXPAND_BUILTIN_SAVEREGS. */
|
2173 |
|
|
|
2174 |
|
|
static rtx
|
2175 |
|
|
frv_expand_builtin_saveregs (void)
|
2176 |
|
|
{
|
2177 |
|
|
int offset = UNITS_PER_WORD * FRV_NUM_ARG_REGS;
|
2178 |
|
|
|
2179 |
|
|
if (TARGET_DEBUG_ARG)
|
2180 |
|
|
fprintf (stderr, "expand_builtin_saveregs: offset from ap = %d\n",
|
2181 |
|
|
offset);
|
2182 |
|
|
|
2183 |
|
|
return gen_rtx_PLUS (Pmode, virtual_incoming_args_rtx, GEN_INT (- offset));
|
2184 |
|
|
}
|
2185 |
|
|
|
2186 |
|
|
|
2187 |
|
|
/* Expand __builtin_va_start to do the va_start macro. */
|
2188 |
|
|
|
2189 |
|
|
void
|
2190 |
|
|
frv_expand_builtin_va_start (tree valist, rtx nextarg)
|
2191 |
|
|
{
|
2192 |
|
|
tree t;
|
2193 |
|
|
int num = cfun->args_info - FIRST_ARG_REGNUM - FRV_NUM_ARG_REGS;
|
2194 |
|
|
|
2195 |
|
|
nextarg = gen_rtx_PLUS (Pmode, virtual_incoming_args_rtx,
|
2196 |
|
|
GEN_INT (UNITS_PER_WORD * num));
|
2197 |
|
|
|
2198 |
|
|
if (TARGET_DEBUG_ARG)
|
2199 |
|
|
{
|
2200 |
|
|
fprintf (stderr, "va_start: args_info = %d, num = %d\n",
|
2201 |
|
|
cfun->args_info, num);
|
2202 |
|
|
|
2203 |
|
|
debug_rtx (nextarg);
|
2204 |
|
|
}
|
2205 |
|
|
|
2206 |
|
|
t = build (MODIFY_EXPR, TREE_TYPE (valist), valist,
|
2207 |
|
|
make_tree (ptr_type_node, nextarg));
|
2208 |
|
|
TREE_SIDE_EFFECTS (t) = 1;
|
2209 |
|
|
|
2210 |
|
|
expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
|
2211 |
|
|
}
|
2212 |
|
|
|
2213 |
|
|
|
2214 |
|
|
/* Expand a block move operation, and return 1 if successful. Return 0
|
2215 |
|
|
if we should let the compiler generate normal code.
|
2216 |
|
|
|
2217 |
|
|
operands[0] is the destination
|
2218 |
|
|
operands[1] is the source
|
2219 |
|
|
operands[2] is the length
|
2220 |
|
|
operands[3] is the alignment */
|
2221 |
|
|
|
2222 |
|
|
/* Maximum number of loads to do before doing the stores */
|
2223 |
|
|
#ifndef MAX_MOVE_REG
|
2224 |
|
|
#define MAX_MOVE_REG 4
|
2225 |
|
|
#endif
|
2226 |
|
|
|
2227 |
|
|
/* Maximum number of total loads to do. */
|
2228 |
|
|
#ifndef TOTAL_MOVE_REG
|
2229 |
|
|
#define TOTAL_MOVE_REG 8
|
2230 |
|
|
#endif
|
2231 |
|
|
|
2232 |
|
|
int
|
2233 |
|
|
frv_expand_block_move (rtx operands[])
|
2234 |
|
|
{
|
2235 |
|
|
rtx orig_dest = operands[0];
|
2236 |
|
|
rtx orig_src = operands[1];
|
2237 |
|
|
rtx bytes_rtx = operands[2];
|
2238 |
|
|
rtx align_rtx = operands[3];
|
2239 |
|
|
int constp = (GET_CODE (bytes_rtx) == CONST_INT);
|
2240 |
|
|
int align;
|
2241 |
|
|
int bytes;
|
2242 |
|
|
int offset;
|
2243 |
|
|
int num_reg;
|
2244 |
|
|
int i;
|
2245 |
|
|
rtx src_reg;
|
2246 |
|
|
rtx dest_reg;
|
2247 |
|
|
rtx src_addr;
|
2248 |
|
|
rtx dest_addr;
|
2249 |
|
|
rtx src_mem;
|
2250 |
|
|
rtx dest_mem;
|
2251 |
|
|
rtx tmp_reg;
|
2252 |
|
|
rtx stores[MAX_MOVE_REG];
|
2253 |
|
|
int move_bytes;
|
2254 |
|
|
enum machine_mode mode;
|
2255 |
|
|
|
2256 |
|
|
/* If this is not a fixed size move, just call memcpy. */
|
2257 |
|
|
if (! constp)
|
2258 |
|
|
return FALSE;
|
2259 |
|
|
|
2260 |
|
|
/* This should be a fixed size alignment. */
|
2261 |
|
|
gcc_assert (GET_CODE (align_rtx) == CONST_INT);
|
2262 |
|
|
|
2263 |
|
|
align = INTVAL (align_rtx);
|
2264 |
|
|
|
2265 |
|
|
/* Anything to move? */
|
2266 |
|
|
bytes = INTVAL (bytes_rtx);
|
2267 |
|
|
if (bytes <= 0)
|
2268 |
|
|
return TRUE;
|
2269 |
|
|
|
2270 |
|
|
/* Don't support real large moves. */
|
2271 |
|
|
if (bytes > TOTAL_MOVE_REG*align)
|
2272 |
|
|
return FALSE;
|
2273 |
|
|
|
2274 |
|
|
/* Move the address into scratch registers. */
|
2275 |
|
|
dest_reg = copy_addr_to_reg (XEXP (orig_dest, 0));
|
2276 |
|
|
src_reg = copy_addr_to_reg (XEXP (orig_src, 0));
|
2277 |
|
|
|
2278 |
|
|
num_reg = offset = 0;
|
2279 |
|
|
for ( ; bytes > 0; (bytes -= move_bytes), (offset += move_bytes))
|
2280 |
|
|
{
|
2281 |
|
|
/* Calculate the correct offset for src/dest. */
|
2282 |
|
|
if (offset == 0)
|
2283 |
|
|
{
|
2284 |
|
|
src_addr = src_reg;
|
2285 |
|
|
dest_addr = dest_reg;
|
2286 |
|
|
}
|
2287 |
|
|
else
|
2288 |
|
|
{
|
2289 |
|
|
src_addr = plus_constant (src_reg, offset);
|
2290 |
|
|
dest_addr = plus_constant (dest_reg, offset);
|
2291 |
|
|
}
|
2292 |
|
|
|
2293 |
|
|
/* Generate the appropriate load and store, saving the stores
|
2294 |
|
|
for later. */
|
2295 |
|
|
if (bytes >= 4 && align >= 4)
|
2296 |
|
|
mode = SImode;
|
2297 |
|
|
else if (bytes >= 2 && align >= 2)
|
2298 |
|
|
mode = HImode;
|
2299 |
|
|
else
|
2300 |
|
|
mode = QImode;
|
2301 |
|
|
|
2302 |
|
|
move_bytes = GET_MODE_SIZE (mode);
|
2303 |
|
|
tmp_reg = gen_reg_rtx (mode);
|
2304 |
|
|
src_mem = change_address (orig_src, mode, src_addr);
|
2305 |
|
|
dest_mem = change_address (orig_dest, mode, dest_addr);
|
2306 |
|
|
emit_insn (gen_rtx_SET (VOIDmode, tmp_reg, src_mem));
|
2307 |
|
|
stores[num_reg++] = gen_rtx_SET (VOIDmode, dest_mem, tmp_reg);
|
2308 |
|
|
|
2309 |
|
|
if (num_reg >= MAX_MOVE_REG)
|
2310 |
|
|
{
|
2311 |
|
|
for (i = 0; i < num_reg; i++)
|
2312 |
|
|
emit_insn (stores[i]);
|
2313 |
|
|
num_reg = 0;
|
2314 |
|
|
}
|
2315 |
|
|
}
|
2316 |
|
|
|
2317 |
|
|
for (i = 0; i < num_reg; i++)
|
2318 |
|
|
emit_insn (stores[i]);
|
2319 |
|
|
|
2320 |
|
|
return TRUE;
|
2321 |
|
|
}
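
/* An example of the expansion above (illustrative): a 12-byte copy with
   4-byte alignment becomes three SImode transfers.  Each iteration emits
   the load immediately and queues the store in `stores[]', so the
   generated order is

	load  @(src + 0) -> tmp0
	load  @(src + 4) -> tmp1
	load  @(src + 8) -> tmp2
	store tmp0 -> @(dst + 0)
	store tmp1 -> @(dst + 4)
	store tmp2 -> @(dst + 8)

   because num_reg never reaches MAX_MOVE_REG before the final flush.  */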
|
2322 |
|
|
|
2323 |
|
|
|
2324 |
|
|
/* Expand a block clear operation, and return 1 if successful. Return 0
|
2325 |
|
|
if we should let the compiler generate normal code.
|
2326 |
|
|
|
2327 |
|
|
operands[0] is the destination
|
2328 |
|
|
operands[1] is the length
|
2329 |
|
|
operands[3] is the alignment */
|
2330 |
|
|
|
2331 |
|
|
int
|
2332 |
|
|
frv_expand_block_clear (rtx operands[])
|
2333 |
|
|
{
|
2334 |
|
|
rtx orig_dest = operands[0];
|
2335 |
|
|
rtx bytes_rtx = operands[1];
|
2336 |
|
|
rtx align_rtx = operands[3];
|
2337 |
|
|
int constp = (GET_CODE (bytes_rtx) == CONST_INT);
|
2338 |
|
|
int align;
|
2339 |
|
|
int bytes;
|
2340 |
|
|
int offset;
|
2341 |
|
|
int num_reg;
|
2342 |
|
|
rtx dest_reg;
|
2343 |
|
|
rtx dest_addr;
|
2344 |
|
|
rtx dest_mem;
|
2345 |
|
|
int clear_bytes;
|
2346 |
|
|
enum machine_mode mode;
|
2347 |
|
|
|
2348 |
|
|
/* If this is not a fixed size move, just call memcpy. */
|
2349 |
|
|
if (! constp)
|
2350 |
|
|
return FALSE;
|
2351 |
|
|
|
2352 |
|
|
/* This should be a fixed size alignment. */
|
2353 |
|
|
gcc_assert (GET_CODE (align_rtx) == CONST_INT);
|
2354 |
|
|
|
2355 |
|
|
align = INTVAL (align_rtx);
|
2356 |
|
|
|
2357 |
|
|
/* Anything to move? */
|
2358 |
|
|
bytes = INTVAL (bytes_rtx);
|
2359 |
|
|
if (bytes <= 0)
|
2360 |
|
|
return TRUE;
|
2361 |
|
|
|
2362 |
|
|
/* Don't support real large clears. */
|
2363 |
|
|
if (bytes > TOTAL_MOVE_REG*align)
|
2364 |
|
|
return FALSE;
|
2365 |
|
|
|
2366 |
|
|
/* Move the address into a scratch register. */
|
2367 |
|
|
dest_reg = copy_addr_to_reg (XEXP (orig_dest, 0));
|
2368 |
|
|
|
2369 |
|
|
num_reg = offset = 0;
|
2370 |
|
|
for ( ; bytes > 0; (bytes -= clear_bytes), (offset += clear_bytes))
|
2371 |
|
|
{
|
2372 |
|
|
/* Calculate the correct offset for src/dest. */
|
2373 |
|
|
dest_addr = ((offset == 0)
|
2374 |
|
|
? dest_reg
|
2375 |
|
|
: plus_constant (dest_reg, offset));
|
2376 |
|
|
|
2377 |
|
|
/* Generate the appropriate store of gr0. */
|
2378 |
|
|
if (bytes >= 4 && align >= 4)
|
2379 |
|
|
mode = SImode;
|
2380 |
|
|
else if (bytes >= 2 && align >= 2)
|
2381 |
|
|
mode = HImode;
|
2382 |
|
|
else
|
2383 |
|
|
mode = QImode;
|
2384 |
|
|
|
2385 |
|
|
clear_bytes = GET_MODE_SIZE (mode);
|
2386 |
|
|
dest_mem = change_address (orig_dest, mode, dest_addr);
|
2387 |
|
|
emit_insn (gen_rtx_SET (VOIDmode, dest_mem, const0_rtx));
|
2388 |
|
|
}
|
2389 |
|
|
|
2390 |
|
|
return TRUE;
|
2391 |
|
|
}
|
2392 |
|
|
|
2393 |
|
|
|
2394 |
|
|
/* The following variable is used when outputting modifiers of the
   assembler code of the current output insn.  */
|
2396 |
|
|
|
2397 |
|
|
static rtx *frv_insn_operands;
|
2398 |
|
|
|
2399 |
|
|
/* The following function adds the assembler insn code suffix `.p'
   when it is necessary.  */
|
2401 |
|
|
|
2402 |
|
|
const char *
|
2403 |
|
|
frv_asm_output_opcode (FILE *f, const char *ptr)
|
2404 |
|
|
{
|
2405 |
|
|
int c;
|
2406 |
|
|
|
2407 |
|
|
if (frv_insn_packing_flag <= 0)
|
2408 |
|
|
return ptr;
|
2409 |
|
|
|
2410 |
|
|
for (; *ptr && *ptr != ' ' && *ptr != '\t';)
|
2411 |
|
|
{
|
2412 |
|
|
c = *ptr++;
|
2413 |
|
|
if (c == '%' && ((*ptr >= 'a' && *ptr <= 'z')
|
2414 |
|
|
|| (*ptr >= 'A' && *ptr <= 'Z')))
|
2415 |
|
|
{
|
2416 |
|
|
int letter = *ptr++;
|
2417 |
|
|
|
2418 |
|
|
c = atoi (ptr);
|
2419 |
|
|
frv_print_operand (f, frv_insn_operands [c], letter);
|
2420 |
|
|
while ((c = *ptr) >= '0' && c <= '9')
|
2421 |
|
|
ptr++;
|
2422 |
|
|
}
|
2423 |
|
|
else
|
2424 |
|
|
fputc (c, f);
|
2425 |
|
|
}
|
2426 |
|
|
|
2427 |
|
|
fprintf (f, ".p");
|
2428 |
|
|
|
2429 |
|
|
return ptr;
|
2430 |
|
|
}
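
/* A sketch of the effect of frv_asm_output_opcode: when the current insn
   is to be packed with the next one, the mnemonic part of a template such
   as "add %1,%2,%0" is emitted as "add.p" before the operands are printed,
   so the assembler sees something like

	add.p gr5,gr6,gr4
	ldi   @(sp,16),gr7

   as one VLIW packet (register names and offsets are illustrative).  */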
|
2431 |
|
|
|
2432 |
|
|
/* Set up the packing bit for the current output insn. Note that this
|
2433 |
|
|
function is not called for asm insns. */
|
2434 |
|
|
|
2435 |
|
|
void
|
2436 |
|
|
frv_final_prescan_insn (rtx insn, rtx *opvec,
|
2437 |
|
|
int noperands ATTRIBUTE_UNUSED)
|
2438 |
|
|
{
|
2439 |
|
|
if (INSN_P (insn))
|
2440 |
|
|
{
|
2441 |
|
|
if (frv_insn_packing_flag >= 0)
|
2442 |
|
|
{
|
2443 |
|
|
frv_insn_operands = opvec;
|
2444 |
|
|
frv_insn_packing_flag = PACKING_FLAG_P (insn);
|
2445 |
|
|
}
|
2446 |
|
|
else if (recog_memoized (insn) >= 0
|
2447 |
|
|
&& get_attr_acc_group (insn) == ACC_GROUP_ODD)
|
2448 |
|
|
/* Packing optimizations have been disabled, but INSN can only
|
2449 |
|
|
be issued in M1. Insert an mnop in M0. */
|
2450 |
|
|
fprintf (asm_out_file, "\tmnop.p\n");
|
2451 |
|
|
}
|
2452 |
|
|
}
|
2453 |
|
|
|
2454 |
|
|
|
2455 |
|
|
|
2456 |
|
|
/* A C expression whose value is RTL representing the address in a stack frame
|
2457 |
|
|
where the pointer to the caller's frame is stored. Assume that FRAMEADDR is
|
2458 |
|
|
an RTL expression for the address of the stack frame itself.
|
2459 |
|
|
|
2460 |
|
|
If you don't define this macro, the default is to return the value of
|
2461 |
|
|
FRAMEADDR--that is, the stack frame address is also the address of the stack
|
2462 |
|
|
word that points to the previous frame. */
|
2463 |
|
|
|
2464 |
|
|
/* The default is correct, but we need to make sure the frame gets created. */
|
2465 |
|
|
rtx
|
2466 |
|
|
frv_dynamic_chain_address (rtx frame)
|
2467 |
|
|
{
|
2468 |
|
|
cfun->machine->frame_needed = 1;
|
2469 |
|
|
return frame;
|
2470 |
|
|
}
|
2471 |
|
|
|
2472 |
|
|
|
2473 |
|
|
/* A C expression whose value is RTL representing the value of the return
|
2474 |
|
|
address for the frame COUNT steps up from the current frame, after the
|
2475 |
|
|
prologue. FRAMEADDR is the frame pointer of the COUNT frame, or the frame
|
2476 |
|
|
pointer of the COUNT - 1 frame if `RETURN_ADDR_IN_PREVIOUS_FRAME' is
|
2477 |
|
|
defined.
|
2478 |
|
|
|
2479 |
|
|
   The value of the expression must always be the correct address when COUNT is
   zero, but may be `NULL_RTX' if there is no way to determine the return
   address of other frames.  */
|
2482 |
|
|
|
2483 |
|
|
rtx
|
2484 |
|
|
frv_return_addr_rtx (int count, rtx frame)
|
2485 |
|
|
{
|
2486 |
|
|
if (count != 0)
|
2487 |
|
|
return const0_rtx;
|
2488 |
|
|
cfun->machine->frame_needed = 1;
|
2489 |
|
|
return gen_rtx_MEM (Pmode, plus_constant (frame, 8));
|
2490 |
|
|
}
|
2491 |
|
|
|
2492 |
|
|
/* Given a memory reference MEMREF, interpret the referenced memory as
|
2493 |
|
|
an array of MODE values, and return a reference to the element
|
2494 |
|
|
specified by INDEX. Assume that any pre-modification implicit in
|
2495 |
|
|
MEMREF has already happened.
|
2496 |
|
|
|
2497 |
|
|
MEMREF must be a legitimate operand for modes larger than SImode.
|
2498 |
|
|
GO_IF_LEGITIMATE_ADDRESS forbids register+register addresses, which
|
2499 |
|
|
this function cannot handle. */
|
2500 |
|
|
rtx
|
2501 |
|
|
frv_index_memory (rtx memref, enum machine_mode mode, int index)
|
2502 |
|
|
{
|
2503 |
|
|
rtx base = XEXP (memref, 0);
|
2504 |
|
|
if (GET_CODE (base) == PRE_MODIFY)
|
2505 |
|
|
base = XEXP (base, 0);
|
2506 |
|
|
return change_address (memref, mode,
|
2507 |
|
|
plus_constant (base, index * GET_MODE_SIZE (mode)));
|
2508 |
|
|
}
|
2509 |
|
|
|
2510 |
|
|
|
2511 |
|
|
/* Print a memory address as an operand to reference that memory location. */
|
2512 |
|
|
void
|
2513 |
|
|
frv_print_operand_address (FILE * stream, rtx x)
|
2514 |
|
|
{
|
2515 |
|
|
if (GET_CODE (x) == MEM)
|
2516 |
|
|
x = XEXP (x, 0);
|
2517 |
|
|
|
2518 |
|
|
switch (GET_CODE (x))
|
2519 |
|
|
{
|
2520 |
|
|
case REG:
|
2521 |
|
|
fputs (reg_names [ REGNO (x)], stream);
|
2522 |
|
|
return;
|
2523 |
|
|
|
2524 |
|
|
case CONST_INT:
|
2525 |
|
|
fprintf (stream, "%ld", (long) INTVAL (x));
|
2526 |
|
|
return;
|
2527 |
|
|
|
2528 |
|
|
case SYMBOL_REF:
|
2529 |
|
|
assemble_name (stream, XSTR (x, 0));
|
2530 |
|
|
return;
|
2531 |
|
|
|
2532 |
|
|
case LABEL_REF:
|
2533 |
|
|
case CONST:
|
2534 |
|
|
output_addr_const (stream, x);
|
2535 |
|
|
return;
|
2536 |
|
|
|
2537 |
|
|
default:
|
2538 |
|
|
break;
|
2539 |
|
|
}
|
2540 |
|
|
|
2541 |
|
|
fatal_insn ("bad insn to frv_print_operand_address:", x);
|
2542 |
|
|
}
|
2543 |
|
|
|
2544 |
|
|
|
2545 |
|
|
static void
|
2546 |
|
|
frv_print_operand_memory_reference_reg (FILE * stream, rtx x)
|
2547 |
|
|
{
|
2548 |
|
|
int regno = true_regnum (x);
|
2549 |
|
|
if (GPR_P (regno))
|
2550 |
|
|
fputs (reg_names[regno], stream);
|
2551 |
|
|
else
|
2552 |
|
|
fatal_insn ("bad register to frv_print_operand_memory_reference_reg:", x);
|
2553 |
|
|
}
|
2554 |
|
|
|
2555 |
|
|
/* Print a memory reference suitable for the ld/st instructions. */
|
2556 |
|
|
|
2557 |
|
|
static void
|
2558 |
|
|
frv_print_operand_memory_reference (FILE * stream, rtx x, int addr_offset)
|
2559 |
|
|
{
|
2560 |
|
|
struct frv_unspec unspec;
|
2561 |
|
|
rtx x0 = NULL_RTX;
|
2562 |
|
|
rtx x1 = NULL_RTX;
|
2563 |
|
|
|
2564 |
|
|
switch (GET_CODE (x))
|
2565 |
|
|
{
|
2566 |
|
|
case SUBREG:
|
2567 |
|
|
case REG:
|
2568 |
|
|
x0 = x;
|
2569 |
|
|
break;
|
2570 |
|
|
|
2571 |
|
|
case PRE_MODIFY: /* (pre_modify (reg) (plus (reg) (reg))) */
|
2572 |
|
|
x0 = XEXP (x, 0);
|
2573 |
|
|
x1 = XEXP (XEXP (x, 1), 1);
|
2574 |
|
|
break;
|
2575 |
|
|
|
2576 |
|
|
case CONST_INT:
|
2577 |
|
|
x1 = x;
|
2578 |
|
|
break;
|
2579 |
|
|
|
2580 |
|
|
case PLUS:
|
2581 |
|
|
x0 = XEXP (x, 0);
|
2582 |
|
|
x1 = XEXP (x, 1);
|
2583 |
|
|
if (GET_CODE (x0) == CONST_INT)
|
2584 |
|
|
{
|
2585 |
|
|
x0 = XEXP (x, 1);
|
2586 |
|
|
x1 = XEXP (x, 0);
|
2587 |
|
|
}
|
2588 |
|
|
break;
|
2589 |
|
|
|
2590 |
|
|
default:
|
2591 |
|
|
fatal_insn ("bad insn to frv_print_operand_memory_reference:", x);
|
2592 |
|
|
break;
|
2593 |
|
|
|
2594 |
|
|
}
|
2595 |
|
|
|
2596 |
|
|
if (addr_offset)
|
2597 |
|
|
{
|
2598 |
|
|
if (!x1)
|
2599 |
|
|
x1 = const0_rtx;
|
2600 |
|
|
else if (GET_CODE (x1) != CONST_INT)
|
2601 |
|
|
fatal_insn ("bad insn to frv_print_operand_memory_reference:", x);
|
2602 |
|
|
}
|
2603 |
|
|
|
2604 |
|
|
fputs ("@(", stream);
|
2605 |
|
|
if (!x0)
|
2606 |
|
|
fputs (reg_names[GPR_R0], stream);
|
2607 |
|
|
else if (GET_CODE (x0) == REG || GET_CODE (x0) == SUBREG)
|
2608 |
|
|
frv_print_operand_memory_reference_reg (stream, x0);
|
2609 |
|
|
else
|
2610 |
|
|
fatal_insn ("bad insn to frv_print_operand_memory_reference:", x);
|
2611 |
|
|
|
2612 |
|
|
fputs (",", stream);
|
2613 |
|
|
if (!x1)
|
2614 |
|
|
fputs (reg_names [GPR_R0], stream);
|
2615 |
|
|
|
2616 |
|
|
else
|
2617 |
|
|
{
|
2618 |
|
|
switch (GET_CODE (x1))
|
2619 |
|
|
{
|
2620 |
|
|
case SUBREG:
|
2621 |
|
|
case REG:
|
2622 |
|
|
frv_print_operand_memory_reference_reg (stream, x1);
|
2623 |
|
|
break;
|
2624 |
|
|
|
2625 |
|
|
case CONST_INT:
|
2626 |
|
|
fprintf (stream, "%ld", (long) (INTVAL (x1) + addr_offset));
|
2627 |
|
|
break;
|
2628 |
|
|
|
2629 |
|
|
case CONST:
|
2630 |
|
|
if (!frv_const_unspec_p (x1, &unspec))
|
2631 |
|
|
fatal_insn ("bad insn to frv_print_operand_memory_reference:", x1);
|
2632 |
|
|
frv_output_const_unspec (stream, &unspec);
|
2633 |
|
|
break;
|
2634 |
|
|
|
2635 |
|
|
default:
|
2636 |
|
|
fatal_insn ("bad insn to frv_print_operand_memory_reference:", x);
|
2637 |
|
|
}
|
2638 |
|
|
}
|
2639 |
|
|
|
2640 |
|
|
fputs (")", stream);
|
2641 |
|
|
}
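
/* Examples of the addresses printed above (illustrative register numbers):

     (reg:SI gr10)                        ->  @(gr10,gr0)
     (plus (reg:SI gr10) (const_int 8))   ->  @(gr10,8)
     (plus (reg:SI gr10) (reg:SI gr11))   ->  @(gr10,gr11)
     (const_int 20)                       ->  @(gr0,20)

   gr0 stands in for a missing base or index, and ADDR_OFFSET is folded
   into the constant index when one is used.  */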
|
2642 |
|
|
|
2643 |
|
|
|
2644 |
|
|
/* Return 2 for likely branches and 0 for non-likely branches */
|
2645 |
|
|
|
2646 |
|
|
#define FRV_JUMP_LIKELY 2
|
2647 |
|
|
#define FRV_JUMP_NOT_LIKELY 0
|
2648 |
|
|
|
2649 |
|
|
static int
|
2650 |
|
|
frv_print_operand_jump_hint (rtx insn)
|
2651 |
|
|
{
|
2652 |
|
|
rtx note;
|
2653 |
|
|
rtx labelref;
|
2654 |
|
|
int ret;
|
2655 |
|
|
HOST_WIDE_INT prob = -1;
|
2656 |
|
|
enum { UNKNOWN, BACKWARD, FORWARD } jump_type = UNKNOWN;
|
2657 |
|
|
|
2658 |
|
|
gcc_assert (GET_CODE (insn) == JUMP_INSN);
|
2659 |
|
|
|
2660 |
|
|
/* Assume any non-conditional jump is likely. */
|
2661 |
|
|
if (! any_condjump_p (insn))
|
2662 |
|
|
ret = FRV_JUMP_LIKELY;
|
2663 |
|
|
|
2664 |
|
|
else
|
2665 |
|
|
{
|
2666 |
|
|
labelref = condjump_label (insn);
|
2667 |
|
|
if (labelref)
|
2668 |
|
|
{
|
2669 |
|
|
rtx label = XEXP (labelref, 0);
|
2670 |
|
|
jump_type = (insn_current_address > INSN_ADDRESSES (INSN_UID (label))
|
2671 |
|
|
? BACKWARD
|
2672 |
|
|
: FORWARD);
|
2673 |
|
|
}
|
2674 |
|
|
|
2675 |
|
|
note = find_reg_note (insn, REG_BR_PROB, 0);
|
2676 |
|
|
if (!note)
|
2677 |
|
|
ret = ((jump_type == BACKWARD) ? FRV_JUMP_LIKELY : FRV_JUMP_NOT_LIKELY);
|
2678 |
|
|
|
2679 |
|
|
else
|
2680 |
|
|
{
|
2681 |
|
|
prob = INTVAL (XEXP (note, 0));
|
2682 |
|
|
ret = ((prob >= (REG_BR_PROB_BASE / 2))
|
2683 |
|
|
? FRV_JUMP_LIKELY
|
2684 |
|
|
: FRV_JUMP_NOT_LIKELY);
|
2685 |
|
|
}
|
2686 |
|
|
}
|
2687 |
|
|
|
2688 |
|
|
#if 0
|
2689 |
|
|
if (TARGET_DEBUG)
|
2690 |
|
|
{
|
2691 |
|
|
char *direction;
|
2692 |
|
|
|
2693 |
|
|
switch (jump_type)
|
2694 |
|
|
{
|
2695 |
|
|
default:
|
2696 |
|
|
case UNKNOWN: direction = "unknown jump direction"; break;
|
2697 |
|
|
case BACKWARD: direction = "jump backward"; break;
|
2698 |
|
|
case FORWARD: direction = "jump forward"; break;
|
2699 |
|
|
}
|
2700 |
|
|
|
2701 |
|
|
fprintf (stderr,
|
2702 |
|
|
"%s: uid %ld, %s, probability = %ld, max prob. = %ld, hint = %d\n",
|
2703 |
|
|
IDENTIFIER_POINTER (DECL_NAME (current_function_decl)),
|
2704 |
|
|
(long)INSN_UID (insn), direction, (long)prob,
|
2705 |
|
|
(long)REG_BR_PROB_BASE, ret);
|
2706 |
|
|
}
|
2707 |
|
|
#endif
|
2708 |
|
|
|
2709 |
|
|
return ret;
|
2710 |
|
|
}
|
2711 |
|
|
|
2712 |
|
|
|
2713 |
|
|
/* Return the comparison operator to use for CODE given that the ICC
|
2714 |
|
|
register is OP0. */
|
2715 |
|
|
|
2716 |
|
|
static const char *
|
2717 |
|
|
comparison_string (enum rtx_code code, rtx op0)
|
2718 |
|
|
{
|
2719 |
|
|
bool is_nz_p = GET_MODE (op0) == CC_NZmode;
|
2720 |
|
|
switch (code)
|
2721 |
|
|
{
|
2722 |
|
|
default: output_operand_lossage ("bad condition code");
|
2723 |
|
|
case EQ: return "eq";
|
2724 |
|
|
case NE: return "ne";
|
2725 |
|
|
case LT: return is_nz_p ? "n" : "lt";
|
2726 |
|
|
case LE: return "le";
|
2727 |
|
|
case GT: return "gt";
|
2728 |
|
|
case GE: return is_nz_p ? "p" : "ge";
|
2729 |
|
|
case LTU: return is_nz_p ? "no" : "c";
|
2730 |
|
|
case LEU: return is_nz_p ? "eq" : "ls";
|
2731 |
|
|
case GTU: return is_nz_p ? "ne" : "hi";
|
2732 |
|
|
case GEU: return is_nz_p ? "ra" : "nc";
|
2733 |
|
|
}
|
2734 |
|
|
}
|
2735 |
|
|
|
2736 |
|
|
/* Print an operand to an assembler instruction.

   `%' followed by a letter and a digit says to output an operand in an
   alternate fashion.  Four letters have standard, built-in meanings described
   below.  The machine description macro `PRINT_OPERAND' can define additional
   letters with nonstandard meanings.

   `%cDIGIT' can be used to substitute an operand that is a constant value
   without the syntax that normally indicates an immediate operand.

   `%nDIGIT' is like `%cDIGIT' except that the value of the constant is negated
   before printing.

   `%aDIGIT' can be used to substitute an operand as if it were a memory
   reference, with the actual operand treated as the address.  This may be
   useful when outputting a "load address" instruction, because often the
   assembler syntax for such an instruction requires you to write the operand
   as if it were a memory reference.

   `%lDIGIT' is used to substitute a `label_ref' into a jump instruction.

   `%=' outputs a number which is unique to each instruction in the entire
   compilation.  This is useful for making local labels to be referred to more
   than once in a single template that generates multiple assembler
   instructions.

   `%' followed by a punctuation character specifies a substitution that does
   not use an operand.  Only one case is standard: `%%' outputs a `%' into the
   assembler code.  Other nonstandard cases can be defined in the
   `PRINT_OPERAND' macro.  You must also define which punctuation characters
   are valid with the `PRINT_OPERAND_PUNCT_VALID_P' macro.  */

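/* As a concrete sketch of the FRV-specific modifiers handled below: with
   the output template "ld%I1%U1 %M1, %0" (see output_move_single), a memory
   operand 1 whose address is gr4 plus the constant 8 and a destination
   operand 0 of gr5 come out roughly as "ldi @(gr4,8), gr5": '%I1' appends
   the "i" immediate suffix because the address adds a constant, '%U1'
   appends nothing because the address is not a PRE_MODIFY update, and
   '%M1' prints the memory reference itself.  (Illustrative example; the
   exact memory syntax comes from frv_print_operand_memory_reference.)  */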
void
|
2769 |
|
|
frv_print_operand (FILE * file, rtx x, int code)
|
2770 |
|
|
{
|
2771 |
|
|
struct frv_unspec unspec;
|
2772 |
|
|
HOST_WIDE_INT value;
|
2773 |
|
|
int offset;
|
2774 |
|
|
|
2775 |
|
|
if (code != 0 && !isalpha (code))
|
2776 |
|
|
value = 0;
|
2777 |
|
|
|
2778 |
|
|
else if (GET_CODE (x) == CONST_INT)
|
2779 |
|
|
value = INTVAL (x);
|
2780 |
|
|
|
2781 |
|
|
else if (GET_CODE (x) == CONST_DOUBLE)
|
2782 |
|
|
{
|
2783 |
|
|
if (GET_MODE (x) == SFmode)
|
2784 |
|
|
{
|
2785 |
|
|
REAL_VALUE_TYPE rv;
|
2786 |
|
|
long l;
|
2787 |
|
|
|
2788 |
|
|
REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
|
2789 |
|
|
REAL_VALUE_TO_TARGET_SINGLE (rv, l);
|
2790 |
|
|
value = l;
|
2791 |
|
|
}
|
2792 |
|
|
|
2793 |
|
|
else if (GET_MODE (x) == VOIDmode)
|
2794 |
|
|
value = CONST_DOUBLE_LOW (x);
|
2795 |
|
|
|
2796 |
|
|
else
|
2797 |
|
|
fatal_insn ("bad insn in frv_print_operand, bad const_double", x);
|
2798 |
|
|
}
|
2799 |
|
|
|
2800 |
|
|
else
|
2801 |
|
|
value = 0;
|
2802 |
|
|
|
2803 |
|
|
switch (code)
|
2804 |
|
|
{
|
2805 |
|
|
|
2806 |
|
|
case '.':
|
2807 |
|
|
/* Output r0. */
|
2808 |
|
|
fputs (reg_names[GPR_R0], file);
|
2809 |
|
|
break;
|
2810 |
|
|
|
2811 |
|
|
case '#':
|
2812 |
|
|
fprintf (file, "%d", frv_print_operand_jump_hint (current_output_insn));
|
2813 |
|
|
break;
|
2814 |
|
|
|
2815 |
|
|
case '@':
|
2816 |
|
|
/* Output small data area base register (gr16). */
|
2817 |
|
|
fputs (reg_names[SDA_BASE_REG], file);
|
2818 |
|
|
break;
|
2819 |
|
|
|
2820 |
|
|
case '~':
|
2821 |
|
|
/* Output pic register (gr17). */
|
2822 |
|
|
fputs (reg_names[PIC_REGNO], file);
|
2823 |
|
|
break;
|
2824 |
|
|
|
2825 |
|
|
case '*':
|
2826 |
|
|
/* Output the temporary integer CCR register. */
|
2827 |
|
|
fputs (reg_names[ICR_TEMP], file);
|
2828 |
|
|
break;
|
2829 |
|
|
|
2830 |
|
|
case '&':
|
2831 |
|
|
/* Output the temporary integer CC register. */
|
2832 |
|
|
fputs (reg_names[ICC_TEMP], file);
|
2833 |
|
|
break;
|
2834 |
|
|
|
2835 |
|
|
/* case 'a': print an address. */
|
2836 |
|
|
|
2837 |
|
|
case 'C':
|
2838 |
|
|
/* Print appropriate test for integer branch false operation. */
|
2839 |
|
|
fputs (comparison_string (reverse_condition (GET_CODE (x)),
|
2840 |
|
|
XEXP (x, 0)), file);
|
2841 |
|
|
break;
|
2842 |
|
|
|
2843 |
|
|
case 'c':
|
2844 |
|
|
/* Print appropriate test for integer branch true operation. */
|
2845 |
|
|
fputs (comparison_string (GET_CODE (x), XEXP (x, 0)), file);
|
2846 |
|
|
break;
|
2847 |
|
|
|
2848 |
|
|
case 'e':
|
2849 |
|
|
/* Print 1 for a NE and 0 for an EQ to give the final argument
|
2850 |
|
|
for a conditional instruction. */
|
2851 |
|
|
if (GET_CODE (x) == NE)
|
2852 |
|
|
fputs ("1", file);
|
2853 |
|
|
|
2854 |
|
|
else if (GET_CODE (x) == EQ)
|
2855 |
|
|
fputs ("0", file);
|
2856 |
|
|
|
2857 |
|
|
else
|
2858 |
|
|
fatal_insn ("bad insn to frv_print_operand, 'e' modifier:", x);
|
2859 |
|
|
break;
|
2860 |
|
|
|
2861 |
|
|
case 'F':
|
2862 |
|
|
/* Print appropriate test for floating point branch false operation. */
|
2863 |
|
|
switch (GET_CODE (x))
|
2864 |
|
|
{
|
2865 |
|
|
default:
|
2866 |
|
|
fatal_insn ("bad insn to frv_print_operand, 'F' modifier:", x);
|
2867 |
|
|
|
2868 |
|
|
case EQ: fputs ("ne", file); break;
|
2869 |
|
|
case NE: fputs ("eq", file); break;
|
2870 |
|
|
case LT: fputs ("uge", file); break;
|
2871 |
|
|
case LE: fputs ("ug", file); break;
|
2872 |
|
|
case GT: fputs ("ule", file); break;
|
2873 |
|
|
case GE: fputs ("ul", file); break;
|
2874 |
|
|
}
|
2875 |
|
|
break;
|
2876 |
|
|
|
2877 |
|
|
case 'f':
|
2878 |
|
|
/* Print appropriate test for floating point branch true operation. */
|
2879 |
|
|
switch (GET_CODE (x))
|
2880 |
|
|
{
|
2881 |
|
|
default:
|
2882 |
|
|
fatal_insn ("bad insn to frv_print_operand, 'f' modifier:", x);
|
2883 |
|
|
|
2884 |
|
|
case EQ: fputs ("eq", file); break;
|
2885 |
|
|
case NE: fputs ("ne", file); break;
|
2886 |
|
|
case LT: fputs ("lt", file); break;
|
2887 |
|
|
case LE: fputs ("le", file); break;
|
2888 |
|
|
case GT: fputs ("gt", file); break;
|
2889 |
|
|
case GE: fputs ("ge", file); break;
|
2890 |
|
|
}
|
2891 |
|
|
break;
|
2892 |
|
|
|
2893 |
|
|
case 'g':
|
2894 |
|
|
/* Print appropriate GOT function. */
|
2895 |
|
|
if (GET_CODE (x) != CONST_INT)
|
2896 |
|
|
fatal_insn ("bad insn to frv_print_operand, 'g' modifier:", x);
|
2897 |
|
|
fputs (unspec_got_name (INTVAL (x)), file);
|
2898 |
|
|
break;
|
2899 |
|
|
|
2900 |
|
|
case 'I':
|
2901 |
|
|
/* Print 'i' if the operand is a constant, or is a memory reference that
|
2902 |
|
|
adds a constant. */
|
2903 |
|
|
if (GET_CODE (x) == MEM)
|
2904 |
|
|
x = ((GET_CODE (XEXP (x, 0)) == PLUS)
|
2905 |
|
|
? XEXP (XEXP (x, 0), 1)
|
2906 |
|
|
: XEXP (x, 0));
|
2907 |
|
|
else if (GET_CODE (x) == PLUS)
|
2908 |
|
|
x = XEXP (x, 1);
|
2909 |
|
|
|
2910 |
|
|
switch (GET_CODE (x))
|
2911 |
|
|
{
|
2912 |
|
|
default:
|
2913 |
|
|
break;
|
2914 |
|
|
|
2915 |
|
|
case CONST_INT:
|
2916 |
|
|
case SYMBOL_REF:
|
2917 |
|
|
case CONST:
|
2918 |
|
|
fputs ("i", file);
|
2919 |
|
|
break;
|
2920 |
|
|
}
|
2921 |
|
|
break;
|
2922 |
|
|
|
2923 |
|
|
case 'i':
|
2924 |
|
|
/* For jump instructions, print 'i' if the operand is a constant or
|
2925 |
|
|
is an expression that adds a constant. */
|
2926 |
|
|
if (GET_CODE (x) == CONST_INT)
|
2927 |
|
|
fputs ("i", file);
|
2928 |
|
|
|
2929 |
|
|
else
|
2930 |
|
|
{
|
2931 |
|
|
if (GET_CODE (x) == CONST_INT
|
2932 |
|
|
|| (GET_CODE (x) == PLUS
|
2933 |
|
|
&& (GET_CODE (XEXP (x, 1)) == CONST_INT
|
2934 |
|
|
|| GET_CODE (XEXP (x, 0)) == CONST_INT)))
|
2935 |
|
|
fputs ("i", file);
|
2936 |
|
|
}
|
2937 |
|
|
break;
|
2938 |
|
|
|
2939 |
|
|
case 'L':
|
2940 |
|
|
/* Print the lower register of a double word register pair */
|
2941 |
|
|
if (GET_CODE (x) == REG)
|
2942 |
|
|
fputs (reg_names[ REGNO (x)+1 ], file);
|
2943 |
|
|
else
|
2944 |
|
|
fatal_insn ("bad insn to frv_print_operand, 'L' modifier:", x);
|
2945 |
|
|
break;
|
2946 |
|
|
|
2947 |
|
|
/* case 'l': print a LABEL_REF. */
|
2948 |
|
|
|
2949 |
|
|
case 'M':
|
2950 |
|
|
case 'N':
|
2951 |
|
|
/* Print a memory reference for ld/st/jmp, %N prints a memory reference
|
2952 |
|
|
for the second word of double memory operations. */
|
2953 |
|
|
offset = (code == 'M') ? 0 : UNITS_PER_WORD;
|
2954 |
|
|
switch (GET_CODE (x))
|
2955 |
|
|
{
|
2956 |
|
|
default:
|
2957 |
|
|
fatal_insn ("bad insn to frv_print_operand, 'M/N' modifier:", x);
|
2958 |
|
|
|
2959 |
|
|
case MEM:
|
2960 |
|
|
frv_print_operand_memory_reference (file, XEXP (x, 0), offset);
|
2961 |
|
|
break;
|
2962 |
|
|
|
2963 |
|
|
case REG:
|
2964 |
|
|
case SUBREG:
|
2965 |
|
|
case CONST_INT:
|
2966 |
|
|
case PLUS:
|
2967 |
|
|
case SYMBOL_REF:
|
2968 |
|
|
frv_print_operand_memory_reference (file, x, offset);
|
2969 |
|
|
break;
|
2970 |
|
|
}
|
2971 |
|
|
break;
|
2972 |
|
|
|
2973 |
|
|
case 'O':
|
2974 |
|
|
/* Print the assembler mnemonic for the arithmetic or logical operation coded by X.  */
|
2975 |
|
|
switch (GET_CODE (x))
|
2976 |
|
|
{
|
2977 |
|
|
default:
|
2978 |
|
|
fatal_insn ("bad insn to frv_print_operand, 'O' modifier:", x);
|
2979 |
|
|
|
2980 |
|
|
case PLUS: fputs ("add", file); break;
|
2981 |
|
|
case MINUS: fputs ("sub", file); break;
|
2982 |
|
|
case AND: fputs ("and", file); break;
|
2983 |
|
|
case IOR: fputs ("or", file); break;
|
2984 |
|
|
case XOR: fputs ("xor", file); break;
|
2985 |
|
|
case ASHIFT: fputs ("sll", file); break;
|
2986 |
|
|
case ASHIFTRT: fputs ("sra", file); break;
|
2987 |
|
|
case LSHIFTRT: fputs ("srl", file); break;
|
2988 |
|
|
}
|
2989 |
|
|
break;
|
2990 |
|
|
|
2991 |
|
|
/* case 'n': negate and print a constant int. */
|
2992 |
|
|
|
2993 |
|
|
case 'P':
|
2994 |
|
|
/* Print PIC label using operand as the number. */
|
2995 |
|
|
if (GET_CODE (x) != CONST_INT)
|
2996 |
|
|
fatal_insn ("bad insn to frv_print_operand, P modifier:", x);
|
2997 |
|
|
|
2998 |
|
|
fprintf (file, ".LCF%ld", (long)INTVAL (x));
|
2999 |
|
|
break;
|
3000 |
|
|
|
3001 |
|
|
case 'U':
|
3002 |
|
|
/* Print 'u' if the operand is an update (pre-modify) load/store.  */
|
3003 |
|
|
if (GET_CODE (x) == MEM && GET_CODE (XEXP (x, 0)) == PRE_MODIFY)
|
3004 |
|
|
fputs ("u", file);
|
3005 |
|
|
break;
|
3006 |
|
|
|
3007 |
|
|
case 'z':
|
3008 |
|
|
/* If value is 0, print gr0, otherwise it must be a register. */
|
3009 |
|
|
if (GET_CODE (x) == CONST_INT && INTVAL (x) == 0)
|
3010 |
|
|
fputs (reg_names[GPR_R0], file);
|
3011 |
|
|
|
3012 |
|
|
else if (GET_CODE (x) == REG)
|
3013 |
|
|
fputs (reg_names [REGNO (x)], file);
|
3014 |
|
|
|
3015 |
|
|
else
|
3016 |
|
|
fatal_insn ("bad insn in frv_print_operand, z case", x);
|
3017 |
|
|
break;
|
3018 |
|
|
|
3019 |
|
|
case 'x':
|
3020 |
|
|
/* Print constant in hex. */
|
3021 |
|
|
if (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE)
|
3022 |
|
|
{
|
3023 |
|
|
fprintf (file, "%s0x%.4lx", IMMEDIATE_PREFIX, (long) value);
|
3024 |
|
|
break;
|
3025 |
|
|
}
|
3026 |
|
|
|
3027 |
|
|
/* Fall through. */
|
3028 |
|
|
|
3029 |
|
|
case '\0':
|
3030 |
|
|
if (GET_CODE (x) == REG)
|
3031 |
|
|
fputs (reg_names [REGNO (x)], file);
|
3032 |
|
|
|
3033 |
|
|
else if (GET_CODE (x) == CONST_INT
|
3034 |
|
|
|| GET_CODE (x) == CONST_DOUBLE)
|
3035 |
|
|
fprintf (file, "%s%ld", IMMEDIATE_PREFIX, (long) value);
|
3036 |
|
|
|
3037 |
|
|
else if (frv_const_unspec_p (x, &unspec))
|
3038 |
|
|
frv_output_const_unspec (file, &unspec);
|
3039 |
|
|
|
3040 |
|
|
else if (GET_CODE (x) == MEM)
|
3041 |
|
|
frv_print_operand_address (file, XEXP (x, 0));
|
3042 |
|
|
|
3043 |
|
|
else if (CONSTANT_ADDRESS_P (x))
|
3044 |
|
|
frv_print_operand_address (file, x);
|
3045 |
|
|
|
3046 |
|
|
else
|
3047 |
|
|
fatal_insn ("bad insn in frv_print_operand, 0 case", x);
|
3048 |
|
|
|
3049 |
|
|
break;
|
3050 |
|
|
|
3051 |
|
|
default:
|
3052 |
|
|
fatal_insn ("frv_print_operand: unknown code", x);
|
3053 |
|
|
break;
|
3054 |
|
|
}
|
3055 |
|
|
|
3056 |
|
|
return;
|
3057 |
|
|
}
|
3058 |
|
|
|
3059 |
|
|
|
3060 |
|
|
/* A C statement (sans semicolon) for initializing the variable CUM for the
   state at the beginning of the argument list.  The variable has type
   `CUMULATIVE_ARGS'.  The value of FNTYPE is the tree node for the data type
   of the function which will receive the args, or 0 if the args are to a
   compiler support library function.  The value of INDIRECT is nonzero when
   processing an indirect call, for example a call through a function pointer.
   The value of INDIRECT is zero for a call to an explicitly named function, a
   library function call, or when `INIT_CUMULATIVE_ARGS' is used to find
   arguments for the function being compiled.

   When processing a call to a compiler support library function, LIBNAME
   identifies which one.  It is a `symbol_ref' rtx which contains the name of
   the function, as a string.  LIBNAME is 0 when an ordinary C function call is
   being processed.  Thus, each time this macro is called, either LIBNAME or
   FNTYPE is nonzero, but never both of them at once.  */

void
|
3077 |
|
|
frv_init_cumulative_args (CUMULATIVE_ARGS *cum,
|
3078 |
|
|
tree fntype,
|
3079 |
|
|
rtx libname,
|
3080 |
|
|
tree fndecl,
|
3081 |
|
|
int incoming)
|
3082 |
|
|
{
|
3083 |
|
|
*cum = FIRST_ARG_REGNUM;
|
3084 |
|
|
|
3085 |
|
|
if (TARGET_DEBUG_ARG)
|
3086 |
|
|
{
|
3087 |
|
|
fprintf (stderr, "\ninit_cumulative_args:");
|
3088 |
|
|
if (!fndecl && fntype)
|
3089 |
|
|
fputs (" indirect", stderr);
|
3090 |
|
|
|
3091 |
|
|
if (incoming)
|
3092 |
|
|
fputs (" incoming", stderr);
|
3093 |
|
|
|
3094 |
|
|
if (fntype)
|
3095 |
|
|
{
|
3096 |
|
|
tree ret_type = TREE_TYPE (fntype);
|
3097 |
|
|
fprintf (stderr, " return=%s,",
|
3098 |
|
|
tree_code_name[ (int)TREE_CODE (ret_type) ]);
|
3099 |
|
|
}
|
3100 |
|
|
|
3101 |
|
|
if (libname && GET_CODE (libname) == SYMBOL_REF)
|
3102 |
|
|
fprintf (stderr, " libname=%s", XSTR (libname, 0));
|
3103 |
|
|
|
3104 |
|
|
if (cfun->returns_struct)
|
3105 |
|
|
fprintf (stderr, " return-struct");
|
3106 |
|
|
|
3107 |
|
|
putc ('\n', stderr);
|
3108 |
|
|
}
|
3109 |
|
|
}
|
3110 |
|
|
|
3111 |
|
|
|
3112 |
|
|
/* Return true if we should pass an argument on the stack rather than
   in registers.  */

static bool
frv_must_pass_in_stack (enum machine_mode mode, tree type)
{
  if (mode == BLKmode)
    return true;
  if (type == NULL)
    return false;
  return AGGREGATE_TYPE_P (type);
}

/* If defined, a C expression that gives the alignment boundary, in bits, of an
   argument with the specified mode and type.  If it is not defined,
   `PARM_BOUNDARY' is used for all arguments.  */

int
frv_function_arg_boundary (enum machine_mode mode ATTRIBUTE_UNUSED,
			   tree type ATTRIBUTE_UNUSED)
{
  return BITS_PER_WORD;
}

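/* Return where the next argument goes: a hard register while argument
   words remain available, NULL_RTX (meaning memory) once they are
   exhausted, and const0_rtx as the end-of-arguments marker used in the
   call instruction when the mode is VOIDmode.  */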
rtx
frv_function_arg (CUMULATIVE_ARGS *cum,
		  enum machine_mode mode,
		  tree type ATTRIBUTE_UNUSED,
		  int named,
		  int incoming ATTRIBUTE_UNUSED)
{
  enum machine_mode xmode = (mode == BLKmode) ? SImode : mode;
  int arg_num = *cum;
  rtx ret;
  const char *debstr;

  /* Return a marker for use in the call instruction.  */
  if (xmode == VOIDmode)
    {
      ret = const0_rtx;
      debstr = "<0>";
    }

  else if (arg_num <= LAST_ARG_REGNUM)
    {
      ret = gen_rtx_REG (xmode, arg_num);
      debstr = reg_names[arg_num];
    }

  else
    {
      ret = NULL_RTX;
      debstr = "memory";
    }

  if (TARGET_DEBUG_ARG)
    fprintf (stderr,
	     "function_arg: words = %2d, mode = %4s, named = %d, size = %3d, arg = %s\n",
	     arg_num, GET_MODE_NAME (mode), named, GET_MODE_SIZE (mode), debstr);

  return ret;
}

/* A C statement (sans semicolon) to update the summarizer variable CUM to
|
3177 |
|
|
advance past an argument in the argument list. The values MODE, TYPE and
|
3178 |
|
|
NAMED describe that argument. Once this is done, the variable CUM is
|
3179 |
|
|
suitable for analyzing the *following* argument with `FUNCTION_ARG', etc.
|
3180 |
|
|
|
3181 |
|
|
This macro need not do anything if the argument in question was passed on
|
3182 |
|
|
the stack. The compiler knows how to track the amount of stack space used
|
3183 |
|
|
for arguments without any special help. */
|
3184 |
|
|
|
3185 |
|
|
void
|
3186 |
|
|
frv_function_arg_advance (CUMULATIVE_ARGS *cum,
|
3187 |
|
|
enum machine_mode mode,
|
3188 |
|
|
tree type ATTRIBUTE_UNUSED,
|
3189 |
|
|
int named)
|
3190 |
|
|
{
|
3191 |
|
|
enum machine_mode xmode = (mode == BLKmode) ? SImode : mode;
|
3192 |
|
|
int bytes = GET_MODE_SIZE (xmode);
|
3193 |
|
|
int words = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
|
3194 |
|
|
int arg_num = *cum;
|
3195 |
|
|
|
3196 |
|
|
*cum = arg_num + words;
|
3197 |
|
|
|
3198 |
|
|
if (TARGET_DEBUG_ARG)
|
3199 |
|
|
fprintf (stderr,
|
3200 |
|
|
"function_adv: words = %2d, mode = %4s, named = %d, size = %3d\n",
|
3201 |
|
|
arg_num, GET_MODE_NAME (mode), named, words * UNITS_PER_WORD);
|
3202 |
|
|
}
|
3203 |
|
|
|
3204 |
|
|
|
3205 |
|
|
/* A C expression for the number of words, at the beginning of an argument,
   that must be put in registers.  The value must be zero for arguments that
   are passed entirely in registers or that are entirely pushed on the stack.

   On some machines, certain arguments must be passed partially in registers
   and partially in memory.  On these machines, typically the first N words of
   arguments are passed in registers, and the rest on the stack.  If a
   multi-word argument (a `double' or a structure) crosses that boundary, its
   first few words must be passed in registers and the rest must be pushed.
   This macro tells the compiler when this occurs, and how many of the words
   should go in registers.

   `FUNCTION_ARG' for these arguments should return the first register to be
   used by the caller for this argument; likewise `FUNCTION_INCOMING_ARG', for
   the called function.  */

static int
frv_arg_partial_bytes (CUMULATIVE_ARGS *cum, enum machine_mode mode,
		       tree type ATTRIBUTE_UNUSED, bool named ATTRIBUTE_UNUSED)
{
  enum machine_mode xmode = (mode == BLKmode) ? SImode : mode;
  int bytes = GET_MODE_SIZE (xmode);
  int words = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
  int arg_num = *cum;
  int ret;

  ret = ((arg_num <= LAST_ARG_REGNUM && arg_num + words > LAST_ARG_REGNUM+1)
	 ? LAST_ARG_REGNUM - arg_num + 1
	 : 0);
  ret *= UNITS_PER_WORD;

  if (TARGET_DEBUG_ARG && ret)
    fprintf (stderr, "frv_arg_partial_bytes: %d\n", ret);

  return ret;
}

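/* As an illustration of frv_arg_partial_bytes above: if *CUM already names
   the last argument register and the argument needs two words (e.g. a
   DImode value), the first word goes in that register and the second
   overflows onto the stack, so the function returns UNITS_PER_WORD.
   (Illustrative note; the register numbering itself is given by
   FIRST_ARG_REGNUM and LAST_ARG_REGNUM in the port's headers.)  */
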
/* Return true if a register is ok to use as a base or index register.  */

static FRV_INLINE int
frv_regno_ok_for_base_p (int regno, int strict_p)
{
  if (GPR_P (regno))
    return TRUE;

  if (strict_p)
    return (reg_renumber[regno] >= 0 && GPR_P (reg_renumber[regno]));

  if (regno == ARG_POINTER_REGNUM)
    return TRUE;

  return (regno >= FIRST_PSEUDO_REGISTER);
}

/* A C compound statement with a conditional `goto LABEL;' executed if X (an
|
3262 |
|
|
RTX) is a legitimate memory address on the target machine for a memory
|
3263 |
|
|
operand of mode MODE.
|
3264 |
|
|
|
3265 |
|
|
It usually pays to define several simpler macros to serve as subroutines for
|
3266 |
|
|
this one. Otherwise it may be too complicated to understand.
|
3267 |
|
|
|
3268 |
|
|
This macro must exist in two variants: a strict variant and a non-strict
|
3269 |
|
|
one. The strict variant is used in the reload pass. It must be defined so
|
3270 |
|
|
that any pseudo-register that has not been allocated a hard register is
|
3271 |
|
|
considered a memory reference. In contexts where some kind of register is
|
3272 |
|
|
required, a pseudo-register with no hard register must be rejected.
|
3273 |
|
|
|
3274 |
|
|
The non-strict variant is used in other passes. It must be defined to
|
3275 |
|
|
accept all pseudo-registers in every context where some kind of register is
|
3276 |
|
|
required.
|
3277 |
|
|
|
3278 |
|
|
Compiler source files that want to use the strict variant of this macro
|
3279 |
|
|
define the macro `REG_OK_STRICT'. You should use an `#ifdef REG_OK_STRICT'
|
3280 |
|
|
conditional to define the strict variant in that case and the non-strict
|
3281 |
|
|
variant otherwise.
|
3282 |
|
|
|
3283 |
|
|
Subroutines to check for acceptable registers for various purposes (one for
|
3284 |
|
|
base registers, one for index registers, and so on) are typically among the
|
3285 |
|
|
subroutines used to define `GO_IF_LEGITIMATE_ADDRESS'. Then only these
|
3286 |
|
|
subroutine macros need have two variants; the higher levels of macros may be
|
3287 |
|
|
the same whether strict or not.
|
3288 |
|
|
|
3289 |
|
|
Normally, constant addresses which are the sum of a `symbol_ref' and an
|
3290 |
|
|
integer are stored inside a `const' RTX to mark them as constant.
|
3291 |
|
|
Therefore, there is no need to recognize such sums specifically as
|
3292 |
|
|
legitimate addresses. Normally you would simply recognize any `const' as
|
3293 |
|
|
legitimate.
|
3294 |
|
|
|
3295 |
|
|
Usually `PRINT_OPERAND_ADDRESS' is not prepared to handle constant sums that
|
3296 |
|
|
are not marked with `const'. It assumes that a naked `plus' indicates
|
3297 |
|
|
indexing. If so, then you *must* reject such naked constant sums as
|
3298 |
|
|
illegitimate addresses, so that none of them will be given to
|
3299 |
|
|
`PRINT_OPERAND_ADDRESS'.
|
3300 |
|
|
|
3301 |
|
|
On some machines, whether a symbolic address is legitimate depends on the
|
3302 |
|
|
section that the address refers to. On these machines, define the macro
|
3303 |
|
|
`ENCODE_SECTION_INFO' to store the information into the `symbol_ref', and
|
3304 |
|
|
then check for it here. When you see a `const', you will have to look
|
3305 |
|
|
inside it to find the `symbol_ref' in order to determine the section.
|
3306 |
|
|
|
3307 |
|
|
The best way to modify the name string is by adding text to the beginning,
|
3308 |
|
|
with suitable punctuation to prevent any ambiguity. Allocate the new name
|
3309 |
|
|
in `saveable_obstack'. You will have to modify `ASM_OUTPUT_LABELREF' to
|
3310 |
|
|
remove and decode the added text and output the name accordingly, and define
|
3311 |
|
|
`(* targetm.strip_name_encoding)' to access the original name string.
|
3312 |
|
|
|
3313 |
|
|
You can check the information stored here into the `symbol_ref' in the
|
3314 |
|
|
definitions of the macros `GO_IF_LEGITIMATE_ADDRESS' and
|
3315 |
|
|
`PRINT_OPERAND_ADDRESS'. */
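/* For the FRV port, the forms accepted by frv_legitimate_address_p below
   are, roughly: a general register (possibly wrapped in a SUBREG); a
   register plus another register; when not conditionally executed, a bare
   signed 12-bit constant, a register plus a signed 12-bit constant, or a
   register plus a #got12/#gprel12-style constant; and a PRE_MODIFY of the
   form (pre_modify reg (plus reg reg)).  Multi-word accesses additionally
   require the offset of the last word to stay in range, and reject
   reg+reg addressing unless double-word moves are available.  */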
|
3316 |
|
|
|
3317 |
|
|
int
|
3318 |
|
|
frv_legitimate_address_p (enum machine_mode mode,
|
3319 |
|
|
rtx x,
|
3320 |
|
|
int strict_p,
|
3321 |
|
|
int condexec_p,
|
3322 |
|
|
int allow_double_reg_p)
|
3323 |
|
|
{
|
3324 |
|
|
rtx x0, x1;
|
3325 |
|
|
int ret = 0;
|
3326 |
|
|
HOST_WIDE_INT value;
|
3327 |
|
|
unsigned regno0;
|
3328 |
|
|
|
3329 |
|
|
if (FRV_SYMBOL_REF_TLS_P (x))
|
3330 |
|
|
return 0;
|
3331 |
|
|
|
3332 |
|
|
switch (GET_CODE (x))
|
3333 |
|
|
{
|
3334 |
|
|
default:
|
3335 |
|
|
break;
|
3336 |
|
|
|
3337 |
|
|
case SUBREG:
|
3338 |
|
|
x = SUBREG_REG (x);
|
3339 |
|
|
if (GET_CODE (x) != REG)
|
3340 |
|
|
break;
|
3341 |
|
|
|
3342 |
|
|
/* Fall through. */
|
3343 |
|
|
|
3344 |
|
|
case REG:
|
3345 |
|
|
ret = frv_regno_ok_for_base_p (REGNO (x), strict_p);
|
3346 |
|
|
break;
|
3347 |
|
|
|
3348 |
|
|
case PRE_MODIFY:
|
3349 |
|
|
x0 = XEXP (x, 0);
|
3350 |
|
|
x1 = XEXP (x, 1);
|
3351 |
|
|
if (GET_CODE (x0) != REG
|
3352 |
|
|
|| ! frv_regno_ok_for_base_p (REGNO (x0), strict_p)
|
3353 |
|
|
|| GET_CODE (x1) != PLUS
|
3354 |
|
|
|| ! rtx_equal_p (x0, XEXP (x1, 0))
|
3355 |
|
|
|| GET_CODE (XEXP (x1, 1)) != REG
|
3356 |
|
|
|| ! frv_regno_ok_for_base_p (REGNO (XEXP (x1, 1)), strict_p))
|
3357 |
|
|
break;
|
3358 |
|
|
|
3359 |
|
|
ret = 1;
|
3360 |
|
|
break;
|
3361 |
|
|
|
3362 |
|
|
case CONST_INT:
|
3363 |
|
|
/* 12 bit immediate */
|
3364 |
|
|
if (condexec_p)
|
3365 |
|
|
ret = FALSE;
|
3366 |
|
|
else
|
3367 |
|
|
{
|
3368 |
|
|
ret = IN_RANGE_P (INTVAL (x), -2048, 2047);
|
3369 |
|
|
|
3370 |
|
|
/* If we can't use load/store double operations, make sure we can
|
3371 |
|
|
address the second word. */
|
3372 |
|
|
if (ret && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
|
3373 |
|
|
ret = IN_RANGE_P (INTVAL (x) + GET_MODE_SIZE (mode) - 1,
|
3374 |
|
|
-2048, 2047);
|
3375 |
|
|
}
|
3376 |
|
|
break;
|
3377 |
|
|
|
3378 |
|
|
case PLUS:
|
3379 |
|
|
x0 = XEXP (x, 0);
|
3380 |
|
|
x1 = XEXP (x, 1);
|
3381 |
|
|
|
3382 |
|
|
if (GET_CODE (x0) == SUBREG)
|
3383 |
|
|
x0 = SUBREG_REG (x0);
|
3384 |
|
|
|
3385 |
|
|
if (GET_CODE (x0) != REG)
|
3386 |
|
|
break;
|
3387 |
|
|
|
3388 |
|
|
regno0 = REGNO (x0);
|
3389 |
|
|
if (!frv_regno_ok_for_base_p (regno0, strict_p))
|
3390 |
|
|
break;
|
3391 |
|
|
|
3392 |
|
|
switch (GET_CODE (x1))
|
3393 |
|
|
{
|
3394 |
|
|
default:
|
3395 |
|
|
break;
|
3396 |
|
|
|
3397 |
|
|
case SUBREG:
|
3398 |
|
|
x1 = SUBREG_REG (x1);
|
3399 |
|
|
if (GET_CODE (x1) != REG)
|
3400 |
|
|
break;
|
3401 |
|
|
|
3402 |
|
|
/* Fall through. */
|
3403 |
|
|
|
3404 |
|
|
case REG:
|
3405 |
|
|
/* Do not allow reg+reg addressing for modes > 1 word if we
|
3406 |
|
|
can't depend on having move double instructions. */
|
3407 |
|
|
if (!allow_double_reg_p && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
|
3408 |
|
|
ret = FALSE;
|
3409 |
|
|
else
|
3410 |
|
|
ret = frv_regno_ok_for_base_p (REGNO (x1), strict_p);
|
3411 |
|
|
break;
|
3412 |
|
|
|
3413 |
|
|
case CONST_INT:
|
3414 |
|
|
/* 12 bit immediate */
|
3415 |
|
|
if (condexec_p)
|
3416 |
|
|
ret = FALSE;
|
3417 |
|
|
else
|
3418 |
|
|
{
|
3419 |
|
|
value = INTVAL (x1);
|
3420 |
|
|
ret = IN_RANGE_P (value, -2048, 2047);
|
3421 |
|
|
|
3422 |
|
|
/* If we can't use load/store double operations, make sure we can
|
3423 |
|
|
address the second word. */
|
3424 |
|
|
if (ret && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
|
3425 |
|
|
ret = IN_RANGE_P (value + GET_MODE_SIZE (mode) - 1, -2048, 2047);
|
3426 |
|
|
}
|
3427 |
|
|
break;
|
3428 |
|
|
|
3429 |
|
|
case CONST:
|
3430 |
|
|
if (!condexec_p && got12_operand (x1, VOIDmode))
|
3431 |
|
|
ret = TRUE;
|
3432 |
|
|
break;
|
3433 |
|
|
|
3434 |
|
|
}
|
3435 |
|
|
break;
|
3436 |
|
|
}
|
3437 |
|
|
|
3438 |
|
|
if (TARGET_DEBUG_ADDR)
|
3439 |
|
|
{
|
3440 |
|
|
fprintf (stderr, "\n========== GO_IF_LEGITIMATE_ADDRESS, mode = %s, result = %d, addresses are %sstrict%s\n",
|
3441 |
|
|
GET_MODE_NAME (mode), ret, (strict_p) ? "" : "not ",
|
3442 |
|
|
(condexec_p) ? ", inside conditional code" : "");
|
3443 |
|
|
debug_rtx (x);
|
3444 |
|
|
}
|
3445 |
|
|
|
3446 |
|
|
return ret;
|
3447 |
|
|
}
|
3448 |
|
|
|
3449 |
|
|
/* Given an ADDR, generate code to inline the PLT. */
|
3450 |
|
|
static rtx
|
3451 |
|
|
gen_inlined_tls_plt (rtx addr)
|
3452 |
|
|
{
|
3453 |
|
|
rtx retval, dest;
|
3454 |
|
|
rtx picreg = get_hard_reg_initial_val (Pmode, FDPIC_REG);
|
3455 |
|
|
|
3456 |
|
|
|
3457 |
|
|
dest = gen_reg_rtx (DImode);
|
3458 |
|
|
|
3459 |
|
|
if (flag_pic == 1)
|
3460 |
|
|
{
|
3461 |
|
|
/*
|
3462 |
|
|
-fpic version:
|
3463 |
|
|
|
3464 |
|
|
lddi.p @(gr15, #gottlsdesc12(ADDR)), gr8
|
3465 |
|
|
calll #gettlsoff(ADDR)@(gr8, gr0)
|
3466 |
|
|
*/
|
3467 |
|
|
emit_insn (gen_tls_lddi (dest, addr, picreg));
|
3468 |
|
|
}
|
3469 |
|
|
else
|
3470 |
|
|
{
|
3471 |
|
|
/*
|
3472 |
|
|
-fPIC version:
|
3473 |
|
|
|
3474 |
|
|
sethi.p #gottlsdeschi(ADDR), gr8
|
3475 |
|
|
setlo #gottlsdesclo(ADDR), gr8
|
3476 |
|
|
ldd #tlsdesc(ADDR)@(gr15, gr8), gr8
|
3477 |
|
|
calll #gettlsoff(ADDR)@(gr8, gr0)
|
3478 |
|
|
*/
|
3479 |
|
|
rtx reguse = gen_reg_rtx (Pmode);
|
3480 |
|
|
emit_insn (gen_tlsoff_hilo (reguse, addr, GEN_INT (R_FRV_GOTTLSDESCHI)));
|
3481 |
|
|
emit_insn (gen_tls_tlsdesc_ldd (dest, picreg, reguse, addr));
|
3482 |
|
|
}
|
3483 |
|
|
|
3484 |
|
|
retval = gen_reg_rtx (Pmode);
|
3485 |
|
|
emit_insn (gen_tls_indirect_call (retval, addr, dest, picreg));
|
3486 |
|
|
return retval;
|
3487 |
|
|
}
|
3488 |
|
|
|
3489 |
|
|
/* Emit a TLSMOFF or TLSMOFF12 offset, depending on -mTLS. Returns
|
3490 |
|
|
the destination address. */
|
3491 |
|
|
static rtx
|
3492 |
|
|
gen_tlsmoff (rtx addr, rtx reg)
|
3493 |
|
|
{
|
3494 |
|
|
rtx dest = gen_reg_rtx (Pmode);
|
3495 |
|
|
|
3496 |
|
|
if (TARGET_BIG_TLS)
|
3497 |
|
|
{
|
3498 |
|
|
/* sethi.p #tlsmoffhi(x), grA
|
3499 |
|
|
setlo #tlsmofflo(x), grA
|
3500 |
|
|
*/
|
3501 |
|
|
dest = gen_reg_rtx (Pmode);
|
3502 |
|
|
emit_insn (gen_tlsoff_hilo (dest, addr,
|
3503 |
|
|
GEN_INT (R_FRV_TLSMOFFHI)));
|
3504 |
|
|
dest = gen_rtx_PLUS (Pmode, dest, reg);
|
3505 |
|
|
}
|
3506 |
|
|
else
|
3507 |
|
|
{
|
3508 |
|
|
/* addi grB, #tlsmoff12(x), grC
|
3509 |
|
|
-or-
|
3510 |
|
|
ld/st @(grB, #tlsmoff12(x)), grC
|
3511 |
|
|
*/
|
3512 |
|
|
dest = gen_reg_rtx (Pmode);
|
3513 |
|
|
emit_insn (gen_symGOTOFF2reg_i (dest, addr, reg,
|
3514 |
|
|
GEN_INT (R_FRV_TLSMOFF12)));
|
3515 |
|
|
}
|
3516 |
|
|
return dest;
|
3517 |
|
|
}
|
3518 |
|
|
|
3519 |
|
|
/* Generate code for a TLS address. */
|
3520 |
|
|
static rtx
|
3521 |
|
|
frv_legitimize_tls_address (rtx addr, enum tls_model model)
|
3522 |
|
|
{
|
3523 |
|
|
rtx dest, tp = gen_rtx_REG (Pmode, 29);
|
3524 |
|
|
rtx picreg = get_hard_reg_initial_val (Pmode, 15);
|
3525 |
|
|
|
3526 |
|
|
switch (model)
|
3527 |
|
|
{
|
3528 |
|
|
case TLS_MODEL_INITIAL_EXEC:
|
3529 |
|
|
if (flag_pic == 1)
|
3530 |
|
|
{
|
3531 |
|
|
/* -fpic version.
|
3532 |
|
|
ldi @(gr15, #gottlsoff12(x)), gr5
|
3533 |
|
|
*/
|
3534 |
|
|
dest = gen_reg_rtx (Pmode);
|
3535 |
|
|
emit_insn (gen_tls_load_gottlsoff12 (dest, addr, picreg));
|
3536 |
|
|
dest = gen_rtx_PLUS (Pmode, tp, dest);
|
3537 |
|
|
}
|
3538 |
|
|
else
|
3539 |
|
|
{
|
3540 |
|
|
/* -fPIC or anything else.
|
3541 |
|
|
|
3542 |
|
|
sethi.p #gottlsoffhi(x), gr14
|
3543 |
|
|
setlo #gottlsofflo(x), gr14
|
3544 |
|
|
ld #tlsoff(x)@(gr15, gr14), gr9
|
3545 |
|
|
*/
|
3546 |
|
|
rtx tmp = gen_reg_rtx (Pmode);
|
3547 |
|
|
dest = gen_reg_rtx (Pmode);
|
3548 |
|
|
emit_insn (gen_tlsoff_hilo (tmp, addr,
|
3549 |
|
|
GEN_INT (R_FRV_GOTTLSOFF_HI)));
|
3550 |
|
|
|
3551 |
|
|
emit_insn (gen_tls_tlsoff_ld (dest, picreg, tmp, addr));
|
3552 |
|
|
dest = gen_rtx_PLUS (Pmode, tp, dest);
|
3553 |
|
|
}
|
3554 |
|
|
break;
|
3555 |
|
|
case TLS_MODEL_LOCAL_DYNAMIC:
|
3556 |
|
|
{
|
3557 |
|
|
rtx reg, retval;
|
3558 |
|
|
|
3559 |
|
|
if (TARGET_INLINE_PLT)
|
3560 |
|
|
retval = gen_inlined_tls_plt (GEN_INT (0));
|
3561 |
|
|
else
|
3562 |
|
|
{
|
3563 |
|
|
/* call #gettlsoff(0) */
|
3564 |
|
|
retval = gen_reg_rtx (Pmode);
|
3565 |
|
|
emit_insn (gen_call_gettlsoff (retval, GEN_INT (0), picreg));
|
3566 |
|
|
}
|
3567 |
|
|
|
3568 |
|
|
reg = gen_reg_rtx (Pmode);
|
3569 |
|
|
emit_insn (gen_rtx_SET (VOIDmode, reg,
|
3570 |
|
|
gen_rtx_PLUS (Pmode,
|
3571 |
|
|
retval, tp)));
|
3572 |
|
|
|
3573 |
|
|
dest = gen_tlsmoff (addr, reg);
|
3574 |
|
|
|
3575 |
|
|
/*
|
3576 |
|
|
dest = gen_reg_rtx (Pmode);
|
3577 |
|
|
emit_insn (gen_tlsoff_hilo (dest, addr,
|
3578 |
|
|
GEN_INT (R_FRV_TLSMOFFHI)));
|
3579 |
|
|
dest = gen_rtx_PLUS (Pmode, dest, reg);
|
3580 |
|
|
*/
|
3581 |
|
|
break;
|
3582 |
|
|
}
|
3583 |
|
|
case TLS_MODEL_LOCAL_EXEC:
|
3584 |
|
|
dest = gen_tlsmoff (addr, gen_rtx_REG (Pmode, 29));
|
3585 |
|
|
break;
|
3586 |
|
|
case TLS_MODEL_GLOBAL_DYNAMIC:
|
3587 |
|
|
{
|
3588 |
|
|
rtx retval;
|
3589 |
|
|
|
3590 |
|
|
if (TARGET_INLINE_PLT)
|
3591 |
|
|
retval = gen_inlined_tls_plt (addr);
|
3592 |
|
|
else
|
3593 |
|
|
{
|
3594 |
|
|
/* call #gettlsoff(x) */
|
3595 |
|
|
retval = gen_reg_rtx (Pmode);
|
3596 |
|
|
emit_insn (gen_call_gettlsoff (retval, addr, picreg));
|
3597 |
|
|
}
|
3598 |
|
|
dest = gen_rtx_PLUS (Pmode, retval, tp);
|
3599 |
|
|
break;
|
3600 |
|
|
}
|
3601 |
|
|
default:
|
3602 |
|
|
gcc_unreachable ();
|
3603 |
|
|
}
|
3604 |
|
|
|
3605 |
|
|
return dest;
|
3606 |
|
|
}
|
3607 |
|
|
|
3608 |
|
|
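/* Rewrite a TLS SYMBOL_REF address using frv_legitimize_tls_address;
   return NULL_RTX for any other address to indicate that no change is
   needed.  */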
rtx
|
3609 |
|
|
frv_legitimize_address (rtx x,
|
3610 |
|
|
rtx oldx ATTRIBUTE_UNUSED,
|
3611 |
|
|
enum machine_mode mode ATTRIBUTE_UNUSED)
|
3612 |
|
|
{
|
3613 |
|
|
if (GET_CODE (x) == SYMBOL_REF)
|
3614 |
|
|
{
|
3615 |
|
|
enum tls_model model = SYMBOL_REF_TLS_MODEL (x);
|
3616 |
|
|
if (model != 0)
|
3617 |
|
|
return frv_legitimize_tls_address (x, model);
|
3618 |
|
|
}
|
3619 |
|
|
|
3620 |
|
|
return NULL_RTX;
|
3621 |
|
|
}
|
3622 |
|
|
|
3623 |
|
|
/* Test whether a local function descriptor is canonical, i.e.,
|
3624 |
|
|
whether we can use FUNCDESC_GOTOFF to compute the address of the
|
3625 |
|
|
function. */
|
3626 |
|
|
|
3627 |
|
|
static bool
|
3628 |
|
|
frv_local_funcdesc_p (rtx fnx)
|
3629 |
|
|
{
|
3630 |
|
|
tree fn;
|
3631 |
|
|
enum symbol_visibility vis;
|
3632 |
|
|
bool ret;
|
3633 |
|
|
|
3634 |
|
|
if (! SYMBOL_REF_LOCAL_P (fnx))
|
3635 |
|
|
return FALSE;
|
3636 |
|
|
|
3637 |
|
|
fn = SYMBOL_REF_DECL (fnx);
|
3638 |
|
|
|
3639 |
|
|
if (! fn)
|
3640 |
|
|
return FALSE;
|
3641 |
|
|
|
3642 |
|
|
vis = DECL_VISIBILITY (fn);
|
3643 |
|
|
|
3644 |
|
|
if (vis == VISIBILITY_PROTECTED)
|
3645 |
|
|
/* Private function descriptors for protected functions are not
|
3646 |
|
|
canonical. Temporarily change the visibility to global. */
|
3647 |
|
|
vis = VISIBILITY_DEFAULT;
|
3648 |
|
|
else if (flag_shlib)
|
3649 |
|
|
/* If we're already compiling for a shared library (that, unlike
|
3650 |
|
|
executables, can't assume that the existence of a definition
|
3651 |
|
|
implies local binding), we can skip the re-testing. */
|
3652 |
|
|
return TRUE;
|
3653 |
|
|
|
3654 |
|
|
ret = default_binds_local_p_1 (fn, flag_pic);
|
3655 |
|
|
|
3656 |
|
|
DECL_VISIBILITY (fn) = vis;
|
3657 |
|
|
|
3658 |
|
|
return ret;
|
3659 |
|
|
}
|
3660 |
|
|
|
3661 |
|
|
/* Load the _gp symbol into DEST.  SRC is supposed to be the FDPIC
   register.  */

rtx
frv_gen_GPsym2reg (rtx dest, rtx src)
{
  tree gp = get_identifier ("_gp");
  rtx gp_sym = gen_rtx_SYMBOL_REF (Pmode, IDENTIFIER_POINTER (gp));

  return gen_symGOT2reg (dest, gp_sym, src, GEN_INT (R_FRV_GOT12));
}

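/* Map an R_FRV_* relocation number onto the operator name that the
   assembler expects inside "#name(operand)" relocation syntax.  */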
static const char *
unspec_got_name (int i)
{
  switch (i)
    {
    case R_FRV_GOT12: return "got12";
    case R_FRV_GOTHI: return "gothi";
    case R_FRV_GOTLO: return "gotlo";
    case R_FRV_FUNCDESC: return "funcdesc";
    case R_FRV_FUNCDESC_GOT12: return "gotfuncdesc12";
    case R_FRV_FUNCDESC_GOTHI: return "gotfuncdeschi";
    case R_FRV_FUNCDESC_GOTLO: return "gotfuncdesclo";
    case R_FRV_FUNCDESC_VALUE: return "funcdescvalue";
    case R_FRV_FUNCDESC_GOTOFF12: return "gotofffuncdesc12";
    case R_FRV_FUNCDESC_GOTOFFHI: return "gotofffuncdeschi";
    case R_FRV_FUNCDESC_GOTOFFLO: return "gotofffuncdesclo";
    case R_FRV_GOTOFF12: return "gotoff12";
    case R_FRV_GOTOFFHI: return "gotoffhi";
    case R_FRV_GOTOFFLO: return "gotofflo";
    case R_FRV_GPREL12: return "gprel12";
    case R_FRV_GPRELHI: return "gprelhi";
    case R_FRV_GPRELLO: return "gprello";
    case R_FRV_GOTTLSOFF_HI: return "gottlsoffhi";
    case R_FRV_GOTTLSOFF_LO: return "gottlsofflo";
    case R_FRV_TLSMOFFHI: return "tlsmoffhi";
    case R_FRV_TLSMOFFLO: return "tlsmofflo";
    case R_FRV_TLSMOFF12: return "tlsmoff12";
    case R_FRV_TLSDESCHI: return "tlsdeschi";
    case R_FRV_TLSDESCLO: return "tlsdesclo";
    case R_FRV_GOTTLSDESCHI: return "gottlsdeschi";
    case R_FRV_GOTTLSDESCLO: return "gottlsdesclo";
    default: gcc_unreachable ();
    }
}

/* Write the assembler syntax for UNSPEC to STREAM.  Note that any offset
   is added inside the relocation operator.  */

static void
frv_output_const_unspec (FILE *stream, const struct frv_unspec *unspec)
{
  fprintf (stream, "#%s(", unspec_got_name (unspec->reloc));
  output_addr_const (stream, plus_constant (unspec->symbol, unspec->offset));
  fputs (")", stream);
}

/* Implement FIND_BASE_TERM.  See whether ORIG_X represents #gprel12(foo)
   or #gotoff12(foo) for some small data symbol foo.  If so, return foo,
   otherwise return ORIG_X.  */

rtx
frv_find_base_term (rtx x)
{
  struct frv_unspec unspec;

  if (frv_const_unspec_p (x, &unspec)
      && frv_small_data_reloc_p (unspec.symbol, unspec.reloc))
    return plus_constant (unspec.symbol, unspec.offset);

  return x;
}

/* Return 1 if operand is a valid FRV address.  CONDEXEC_P is true if
   the operand is used by a predicated instruction.  */

int
frv_legitimate_memory_operand (rtx op, enum machine_mode mode, int condexec_p)
{
  return ((GET_MODE (op) == mode || mode == VOIDmode)
	  && GET_CODE (op) == MEM
	  && frv_legitimate_address_p (mode, XEXP (op, 0),
				       reload_completed, condexec_p, FALSE));
}

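/* Expand a call through an FDPIC function descriptor.  OPERANDS describes
   the call (when RET_VALUE is set, OPERANDS[0] receives the result and the
   remaining operands follow), and SIBCALL selects the sibling-call
   patterns.  Calls to symbols are emitted directly or through an inlined
   PLT sequence; any other address has its descriptor loaded with a
   double-word load and is called through that.  */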
void
|
3748 |
|
|
frv_expand_fdpic_call (rtx *operands, bool ret_value, bool sibcall)
|
3749 |
|
|
{
|
3750 |
|
|
rtx lr = gen_rtx_REG (Pmode, LR_REGNO);
|
3751 |
|
|
rtx picreg = get_hard_reg_initial_val (SImode, FDPIC_REG);
|
3752 |
|
|
rtx c, rvrtx=0;
|
3753 |
|
|
rtx addr;
|
3754 |
|
|
|
3755 |
|
|
if (ret_value)
|
3756 |
|
|
{
|
3757 |
|
|
rvrtx = operands[0];
|
3758 |
|
|
operands ++;
|
3759 |
|
|
}
|
3760 |
|
|
|
3761 |
|
|
addr = XEXP (operands[0], 0);
|
3762 |
|
|
|
3763 |
|
|
/* Inline PLTs if we're optimizing for speed. We'd like to inline
|
3764 |
|
|
any calls that would involve a PLT, but can't tell, since we
|
3765 |
|
|
don't know whether an extern function is going to be provided by
|
3766 |
|
|
a separate translation unit or imported from a separate module.
|
3767 |
|
|
When compiling for shared libraries, if the function has default
|
3768 |
|
|
visibility, we assume it's overridable, so we inline the PLT, but
|
3769 |
|
|
for executables, we don't really have a way to make a good
|
3770 |
|
|
decision: a function is as likely to be imported from a shared
|
3771 |
|
|
library as it is to be defined in the executable itself. We
|
3772 |
|
|
assume executables will get global functions defined locally,
|
3773 |
|
|
whereas shared libraries will have them potentially overridden,
|
3774 |
|
|
so we only inline PLTs when compiling for shared libraries.
|
3775 |
|
|
|
3776 |
|
|
In order to mark a function as local to a shared library, any
|
3777 |
|
|
non-default visibility attribute suffices. Unfortunately,
|
3778 |
|
|
there's no simple way to tag a function declaration as ``in a
|
3779 |
|
|
different module'', which we could then use to trigger PLT
|
3780 |
|
|
inlining on executables. There's -minline-plt, but it affects
|
3781 |
|
|
all external functions, so one would have to also mark function
|
3782 |
|
|
declarations available in the same module with non-default
|
3783 |
|
|
visibility, which is advantageous in itself. */
|
3784 |
|
|
if (GET_CODE (addr) == SYMBOL_REF
|
3785 |
|
|
&& ((!SYMBOL_REF_LOCAL_P (addr) && TARGET_INLINE_PLT)
|
3786 |
|
|
|| sibcall))
|
3787 |
|
|
{
|
3788 |
|
|
rtx x, dest;
|
3789 |
|
|
dest = gen_reg_rtx (SImode);
|
3790 |
|
|
if (flag_pic != 1)
|
3791 |
|
|
x = gen_symGOTOFF2reg_hilo (dest, addr, OUR_FDPIC_REG,
|
3792 |
|
|
GEN_INT (R_FRV_FUNCDESC_GOTOFF12));
|
3793 |
|
|
else
|
3794 |
|
|
x = gen_symGOTOFF2reg (dest, addr, OUR_FDPIC_REG,
|
3795 |
|
|
GEN_INT (R_FRV_FUNCDESC_GOTOFF12));
|
3796 |
|
|
emit_insn (x);
|
3797 |
|
|
cfun->uses_pic_offset_table = TRUE;
|
3798 |
|
|
addr = dest;
|
3799 |
|
|
}
|
3800 |
|
|
else if (GET_CODE (addr) == SYMBOL_REF)
|
3801 |
|
|
{
|
3802 |
|
|
/* These are always either local, or handled through a local
|
3803 |
|
|
PLT. */
|
3804 |
|
|
if (ret_value)
|
3805 |
|
|
c = gen_call_value_fdpicsi (rvrtx, addr, operands[1],
|
3806 |
|
|
operands[2], picreg, lr);
|
3807 |
|
|
else
|
3808 |
|
|
c = gen_call_fdpicsi (addr, operands[1], operands[2], picreg, lr);
|
3809 |
|
|
emit_call_insn (c);
|
3810 |
|
|
return;
|
3811 |
|
|
}
|
3812 |
|
|
else if (! ldd_address_operand (addr, Pmode))
|
3813 |
|
|
addr = force_reg (Pmode, addr);
|
3814 |
|
|
|
3815 |
|
|
picreg = gen_reg_rtx (DImode);
|
3816 |
|
|
emit_insn (gen_movdi_ldd (picreg, addr));
|
3817 |
|
|
|
3818 |
|
|
if (sibcall && ret_value)
|
3819 |
|
|
c = gen_sibcall_value_fdpicdi (rvrtx, picreg, const0_rtx);
|
3820 |
|
|
else if (sibcall)
|
3821 |
|
|
c = gen_sibcall_fdpicdi (picreg, const0_rtx);
|
3822 |
|
|
else if (ret_value)
|
3823 |
|
|
c = gen_call_value_fdpicdi (rvrtx, picreg, const0_rtx, lr);
|
3824 |
|
|
else
|
3825 |
|
|
c = gen_call_fdpicdi (picreg, const0_rtx, lr);
|
3826 |
|
|
emit_call_insn (c);
|
3827 |
|
|
}
|
3828 |
|
|
|
3829 |
|
|
/* Look for a SYMBOL_REF of a function in an rtx. We always want to
|
3830 |
|
|
process these separately from any offsets, such that we add any
|
3831 |
|
|
offsets to the function descriptor (the actual pointer), not to the
|
3832 |
|
|
function address. */
|
3833 |
|
|
|
3834 |
|
|
static bool
|
3835 |
|
|
frv_function_symbol_referenced_p (rtx x)
|
3836 |
|
|
{
|
3837 |
|
|
const char *format;
|
3838 |
|
|
int length;
|
3839 |
|
|
int j;
|
3840 |
|
|
|
3841 |
|
|
if (GET_CODE (x) == SYMBOL_REF)
|
3842 |
|
|
return SYMBOL_REF_FUNCTION_P (x);
|
3843 |
|
|
|
3844 |
|
|
length = GET_RTX_LENGTH (GET_CODE (x));
|
3845 |
|
|
format = GET_RTX_FORMAT (GET_CODE (x));
|
3846 |
|
|
|
3847 |
|
|
for (j = 0; j < length; ++j)
|
3848 |
|
|
{
|
3849 |
|
|
switch (format[j])
|
3850 |
|
|
{
|
3851 |
|
|
case 'e':
|
3852 |
|
|
if (frv_function_symbol_referenced_p (XEXP (x, j)))
|
3853 |
|
|
return TRUE;
|
3854 |
|
|
break;
|
3855 |
|
|
|
3856 |
|
|
case 'V':
|
3857 |
|
|
case 'E':
|
3858 |
|
|
if (XVEC (x, j) != 0)
|
3859 |
|
|
{
|
3860 |
|
|
int k;
|
3861 |
|
|
for (k = 0; k < XVECLEN (x, j); ++k)
|
3862 |
|
|
if (frv_function_symbol_referenced_p (XVECEXP (x, j, k)))
|
3863 |
|
|
return TRUE;
|
3864 |
|
|
}
|
3865 |
|
|
break;
|
3866 |
|
|
|
3867 |
|
|
default:
|
3868 |
|
|
/* Nothing to do. */
|
3869 |
|
|
break;
|
3870 |
|
|
}
|
3871 |
|
|
}
|
3872 |
|
|
|
3873 |
|
|
return FALSE;
|
3874 |
|
|
}
|
3875 |
|
|
|
3876 |
|
|
/* Return true if the memory operand is one that can be conditionally
|
3877 |
|
|
executed. */
|
3878 |
|
|
|
3879 |
|
|
int
|
3880 |
|
|
condexec_memory_operand (rtx op, enum machine_mode mode)
|
3881 |
|
|
{
|
3882 |
|
|
enum machine_mode op_mode = GET_MODE (op);
|
3883 |
|
|
rtx addr;
|
3884 |
|
|
|
3885 |
|
|
if (mode != VOIDmode && op_mode != mode)
|
3886 |
|
|
return FALSE;
|
3887 |
|
|
|
3888 |
|
|
switch (op_mode)
|
3889 |
|
|
{
|
3890 |
|
|
default:
|
3891 |
|
|
return FALSE;
|
3892 |
|
|
|
3893 |
|
|
case QImode:
|
3894 |
|
|
case HImode:
|
3895 |
|
|
case SImode:
|
3896 |
|
|
case SFmode:
|
3897 |
|
|
break;
|
3898 |
|
|
}
|
3899 |
|
|
|
3900 |
|
|
if (GET_CODE (op) != MEM)
|
3901 |
|
|
return FALSE;
|
3902 |
|
|
|
3903 |
|
|
addr = XEXP (op, 0);
|
3904 |
|
|
return frv_legitimate_address_p (mode, addr, reload_completed, TRUE, FALSE);
|
3905 |
|
|
}
|
3906 |
|
|
|
3907 |
|
|
/* Return true if the bare return instruction can be used outside of the
   epilogue code.  For frv, we only do it if there was no stack allocation.  */

int
direct_return_p (void)
{
  frv_stack_t *info;

  if (!reload_completed)
    return FALSE;

  info = frv_stack_info ();
  return (info->total_size == 0);
}

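/* Emit a move of MODE from SRC to DEST.  TLS symbols are legitimized
   first; SImode moves go through frv_emit_movsi so that PIC and small-data
   addresses pick up the proper base register; for the other modes an
   awkward source is copied into a register before the plain SET is
   emitted.  */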
void
|
3924 |
|
|
frv_emit_move (enum machine_mode mode, rtx dest, rtx src)
|
3925 |
|
|
{
|
3926 |
|
|
if (GET_CODE (src) == SYMBOL_REF)
|
3927 |
|
|
{
|
3928 |
|
|
enum tls_model model = SYMBOL_REF_TLS_MODEL (src);
|
3929 |
|
|
if (model != 0)
|
3930 |
|
|
src = frv_legitimize_tls_address (src, model);
|
3931 |
|
|
}
|
3932 |
|
|
|
3933 |
|
|
switch (mode)
|
3934 |
|
|
{
|
3935 |
|
|
case SImode:
|
3936 |
|
|
if (frv_emit_movsi (dest, src))
|
3937 |
|
|
return;
|
3938 |
|
|
break;
|
3939 |
|
|
|
3940 |
|
|
case QImode:
|
3941 |
|
|
case HImode:
|
3942 |
|
|
case DImode:
|
3943 |
|
|
case SFmode:
|
3944 |
|
|
case DFmode:
|
3945 |
|
|
if (!reload_in_progress
|
3946 |
|
|
&& !reload_completed
|
3947 |
|
|
&& !register_operand (dest, mode)
|
3948 |
|
|
&& !reg_or_0_operand (src, mode))
|
3949 |
|
|
src = copy_to_mode_reg (mode, src);
|
3950 |
|
|
break;
|
3951 |
|
|
|
3952 |
|
|
default:
|
3953 |
|
|
gcc_unreachable ();
|
3954 |
|
|
}
|
3955 |
|
|
|
3956 |
|
|
emit_insn (gen_rtx_SET (VOIDmode, dest, src));
|
3957 |
|
|
}
|
3958 |
|
|
|
3959 |
|
|
/* Emit code to handle a MOVSI, adding in the small data register or pic
|
3960 |
|
|
register if needed to load up addresses. Return TRUE if the appropriate
|
3961 |
|
|
instructions are emitted. */
|
3962 |
|
|
|
3963 |
|
|
int
|
3964 |
|
|
frv_emit_movsi (rtx dest, rtx src)
|
3965 |
|
|
{
|
3966 |
|
|
int base_regno = -1;
|
3967 |
|
|
int unspec = 0;
|
3968 |
|
|
rtx sym = src;
|
3969 |
|
|
struct frv_unspec old_unspec;
|
3970 |
|
|
|
3971 |
|
|
if (!reload_in_progress
|
3972 |
|
|
&& !reload_completed
|
3973 |
|
|
&& !register_operand (dest, SImode)
|
3974 |
|
|
&& (!reg_or_0_operand (src, SImode)
|
3975 |
|
|
/* Virtual registers will almost always be replaced by an
|
3976 |
|
|
add instruction, so expose this to CSE by copying to
|
3977 |
|
|
an intermediate register. */
|
3978 |
|
|
|| (GET_CODE (src) == REG
|
3979 |
|
|
&& IN_RANGE_P (REGNO (src),
|
3980 |
|
|
FIRST_VIRTUAL_REGISTER,
|
3981 |
|
|
LAST_VIRTUAL_REGISTER))))
|
3982 |
|
|
{
|
3983 |
|
|
emit_insn (gen_rtx_SET (VOIDmode, dest, copy_to_mode_reg (SImode, src)));
|
3984 |
|
|
return TRUE;
|
3985 |
|
|
}
|
3986 |
|
|
|
3987 |
|
|
/* Explicitly add in the PIC or small data register if needed. */
|
3988 |
|
|
switch (GET_CODE (src))
|
3989 |
|
|
{
|
3990 |
|
|
default:
|
3991 |
|
|
break;
|
3992 |
|
|
|
3993 |
|
|
case LABEL_REF:
|
3994 |
|
|
handle_label:
|
3995 |
|
|
if (TARGET_FDPIC)
|
3996 |
|
|
{
|
3997 |
|
|
/* Using GPREL12, we use a single GOT entry for all symbols
|
3998 |
|
|
in read-only sections, but trade sequences such as:
|
3999 |
|
|
|
4000 |
|
|
sethi #gothi(label), gr#
|
4001 |
|
|
setlo #gotlo(label), gr#
|
4002 |
|
|
ld @(gr15,gr#), gr#
|
4003 |
|
|
|
4004 |
|
|
for
|
4005 |
|
|
|
4006 |
|
|
ld @(gr15,#got12(_gp)), gr#
|
4007 |
|
|
sethi #gprelhi(label), gr##
|
4008 |
|
|
setlo #gprello(label), gr##
|
4009 |
|
|
add gr#, gr##, gr##
|
4010 |
|
|
|
4011 |
|
|
We may often be able to share gr# for multiple
|
4012 |
|
|
computations of GPREL addresses, and we may often fold
|
4013 |
|
|
the final add into the pair of registers of a load or
|
4014 |
|
|
store instruction, so it's often profitable. Even when
|
4015 |
|
|
optimizing for size, we're trading a GOT entry for an
|
4016 |
|
|
additional instruction, which trades GOT space
|
4017 |
|
|
(read-write) for code size (read-only, shareable), as
|
4018 |
|
|
long as the symbol is not used in more than two different
|
4019 |
|
|
locations.
|
4020 |
|
|
|
4021 |
|
|
With -fpie/-fpic, we'd be trading a single load for a
|
4022 |
|
|
sequence of 4 instructions, because the offset of the
|
4023 |
|
|
label can't be assumed to be addressable with 12 bits, so
|
4024 |
|
|
we don't do this. */
|
4025 |
|
|
if (TARGET_GPREL_RO)
|
4026 |
|
|
unspec = R_FRV_GPREL12;
|
4027 |
|
|
else
|
4028 |
|
|
unspec = R_FRV_GOT12;
|
4029 |
|
|
}
|
4030 |
|
|
else if (flag_pic)
|
4031 |
|
|
base_regno = PIC_REGNO;
|
4032 |
|
|
|
4033 |
|
|
break;
|
4034 |
|
|
|
4035 |
|
|
case CONST:
|
4036 |
|
|
if (frv_const_unspec_p (src, &old_unspec))
|
4037 |
|
|
break;
|
4038 |
|
|
|
4039 |
|
|
if (TARGET_FDPIC && frv_function_symbol_referenced_p (XEXP (src, 0)))
|
4040 |
|
|
{
|
4041 |
|
|
handle_whatever:
|
4042 |
|
|
src = force_reg (GET_MODE (XEXP (src, 0)), XEXP (src, 0));
|
4043 |
|
|
emit_move_insn (dest, src);
|
4044 |
|
|
return TRUE;
|
4045 |
|
|
}
|
4046 |
|
|
else
|
4047 |
|
|
{
|
4048 |
|
|
sym = XEXP (sym, 0);
|
4049 |
|
|
if (GET_CODE (sym) == PLUS
|
4050 |
|
|
&& GET_CODE (XEXP (sym, 0)) == SYMBOL_REF
|
4051 |
|
|
&& GET_CODE (XEXP (sym, 1)) == CONST_INT)
|
4052 |
|
|
sym = XEXP (sym, 0);
|
4053 |
|
|
if (GET_CODE (sym) == SYMBOL_REF)
|
4054 |
|
|
goto handle_sym;
|
4055 |
|
|
else if (GET_CODE (sym) == LABEL_REF)
|
4056 |
|
|
goto handle_label;
|
4057 |
|
|
else
|
4058 |
|
|
goto handle_whatever;
|
4059 |
|
|
}
|
4060 |
|
|
break;
|
4061 |
|
|
|
4062 |
|
|
case SYMBOL_REF:
|
4063 |
|
|
handle_sym:
|
4064 |
|
|
if (TARGET_FDPIC)
|
4065 |
|
|
{
|
4066 |
|
|
enum tls_model model = SYMBOL_REF_TLS_MODEL (sym);
|
4067 |
|
|
|
4068 |
|
|
if (model != 0)
|
4069 |
|
|
{
|
4070 |
|
|
src = frv_legitimize_tls_address (src, model);
|
4071 |
|
|
emit_move_insn (dest, src);
|
4072 |
|
|
return TRUE;
|
4073 |
|
|
}
|
4074 |
|
|
|
4075 |
|
|
if (SYMBOL_REF_FUNCTION_P (sym))
|
4076 |
|
|
{
|
4077 |
|
|
if (frv_local_funcdesc_p (sym))
|
4078 |
|
|
unspec = R_FRV_FUNCDESC_GOTOFF12;
|
4079 |
|
|
else
|
4080 |
|
|
unspec = R_FRV_FUNCDESC_GOT12;
|
4081 |
|
|
}
|
4082 |
|
|
else
|
4083 |
|
|
{
|
4084 |
|
|
if (CONSTANT_POOL_ADDRESS_P (sym))
|
4085 |
|
|
switch (GET_CODE (get_pool_constant (sym)))
|
4086 |
|
|
{
|
4087 |
|
|
case CONST:
|
4088 |
|
|
case SYMBOL_REF:
|
4089 |
|
|
case LABEL_REF:
|
4090 |
|
|
if (flag_pic)
|
4091 |
|
|
{
|
4092 |
|
|
unspec = R_FRV_GOTOFF12;
|
4093 |
|
|
break;
|
4094 |
|
|
}
|
4095 |
|
|
/* Fall through. */
|
4096 |
|
|
default:
|
4097 |
|
|
if (TARGET_GPREL_RO)
|
4098 |
|
|
unspec = R_FRV_GPREL12;
|
4099 |
|
|
else
|
4100 |
|
|
unspec = R_FRV_GOT12;
|
4101 |
|
|
break;
|
4102 |
|
|
}
|
4103 |
|
|
else if (SYMBOL_REF_LOCAL_P (sym)
|
4104 |
|
|
&& !SYMBOL_REF_EXTERNAL_P (sym)
|
4105 |
|
|
&& SYMBOL_REF_DECL (sym)
|
4106 |
|
|
&& (!DECL_P (SYMBOL_REF_DECL (sym))
|
4107 |
|
|
|| !DECL_COMMON (SYMBOL_REF_DECL (sym))))
|
4108 |
|
|
{
|
4109 |
|
|
tree decl = SYMBOL_REF_DECL (sym);
|
4110 |
|
|
tree init = TREE_CODE (decl) == VAR_DECL
|
4111 |
|
|
? DECL_INITIAL (decl)
|
4112 |
|
|
: TREE_CODE (decl) == CONSTRUCTOR
|
4113 |
|
|
? decl : 0;
|
4114 |
|
|
int reloc = 0;
|
4115 |
|
|
bool named_section, readonly;
|
4116 |
|
|
|
4117 |
|
|
if (init && init != error_mark_node)
|
4118 |
|
|
reloc = compute_reloc_for_constant (init);
|
4119 |
|
|
|
4120 |
|
|
named_section = TREE_CODE (decl) == VAR_DECL
|
4121 |
|
|
&& lookup_attribute ("section", DECL_ATTRIBUTES (decl));
|
4122 |
|
|
readonly = decl_readonly_section (decl, reloc);
|
4123 |
|
|
|
4124 |
|
|
if (named_section)
|
4125 |
|
|
unspec = R_FRV_GOT12;
|
4126 |
|
|
else if (!readonly)
|
4127 |
|
|
unspec = R_FRV_GOTOFF12;
|
4128 |
|
|
else if (readonly && TARGET_GPREL_RO)
|
4129 |
|
|
unspec = R_FRV_GPREL12;
|
4130 |
|
|
else
|
4131 |
|
|
unspec = R_FRV_GOT12;
|
4132 |
|
|
}
|
4133 |
|
|
else
|
4134 |
|
|
unspec = R_FRV_GOT12;
|
4135 |
|
|
}
|
4136 |
|
|
}
|
4137 |
|
|
|
4138 |
|
|
else if (SYMBOL_REF_SMALL_P (sym))
|
4139 |
|
|
base_regno = SDA_BASE_REG;
|
4140 |
|
|
|
4141 |
|
|
else if (flag_pic)
|
4142 |
|
|
base_regno = PIC_REGNO;
|
4143 |
|
|
|
4144 |
|
|
break;
|
4145 |
|
|
}
|
4146 |
|
|
|
4147 |
|
|
if (base_regno >= 0)
|
4148 |
|
|
{
|
4149 |
|
|
if (GET_CODE (sym) == SYMBOL_REF && SYMBOL_REF_SMALL_P (sym))
|
4150 |
|
|
emit_insn (gen_symGOTOFF2reg (dest, src,
|
4151 |
|
|
gen_rtx_REG (Pmode, base_regno),
|
4152 |
|
|
GEN_INT (R_FRV_GPREL12)));
|
4153 |
|
|
else
|
4154 |
|
|
emit_insn (gen_symGOTOFF2reg_hilo (dest, src,
|
4155 |
|
|
gen_rtx_REG (Pmode, base_regno),
|
4156 |
|
|
GEN_INT (R_FRV_GPREL12)));
|
4157 |
|
|
if (base_regno == PIC_REGNO)
|
4158 |
|
|
cfun->uses_pic_offset_table = TRUE;
|
4159 |
|
|
return TRUE;
|
4160 |
|
|
}
|
4161 |
|
|
|
4162 |
|
|
if (unspec)
|
4163 |
|
|
{
|
4164 |
|
|
rtx x;
|
4165 |
|
|
|
4166 |
|
|
/* Since OUR_FDPIC_REG is a pseudo register, we can't safely introduce
|
4167 |
|
|
new uses of it once reload has begun. */
|
4168 |
|
|
gcc_assert (!reload_in_progress && !reload_completed);
|
4169 |
|
|
|
4170 |
|
|
switch (unspec)
|
4171 |
|
|
{
|
4172 |
|
|
case R_FRV_GOTOFF12:
|
4173 |
|
|
if (!frv_small_data_reloc_p (sym, unspec))
|
4174 |
|
|
x = gen_symGOTOFF2reg_hilo (dest, src, OUR_FDPIC_REG,
|
4175 |
|
|
GEN_INT (unspec));
|
4176 |
|
|
else
|
4177 |
|
|
x = gen_symGOTOFF2reg (dest, src, OUR_FDPIC_REG, GEN_INT (unspec));
|
4178 |
|
|
break;
|
4179 |
|
|
case R_FRV_GPREL12:
|
4180 |
|
|
if (!frv_small_data_reloc_p (sym, unspec))
|
4181 |
|
|
x = gen_symGPREL2reg_hilo (dest, src, OUR_FDPIC_REG,
|
4182 |
|
|
GEN_INT (unspec));
|
4183 |
|
|
else
|
4184 |
|
|
x = gen_symGPREL2reg (dest, src, OUR_FDPIC_REG, GEN_INT (unspec));
|
4185 |
|
|
break;
|
4186 |
|
|
case R_FRV_FUNCDESC_GOTOFF12:
|
4187 |
|
|
if (flag_pic != 1)
|
4188 |
|
|
x = gen_symGOTOFF2reg_hilo (dest, src, OUR_FDPIC_REG,
|
4189 |
|
|
GEN_INT (unspec));
|
4190 |
|
|
else
|
4191 |
|
|
x = gen_symGOTOFF2reg (dest, src, OUR_FDPIC_REG, GEN_INT (unspec));
|
4192 |
|
|
break;
|
4193 |
|
|
default:
|
4194 |
|
|
if (flag_pic != 1)
|
4195 |
|
|
x = gen_symGOT2reg_hilo (dest, src, OUR_FDPIC_REG,
|
4196 |
|
|
GEN_INT (unspec));
|
4197 |
|
|
else
|
4198 |
|
|
x = gen_symGOT2reg (dest, src, OUR_FDPIC_REG, GEN_INT (unspec));
|
4199 |
|
|
break;
|
4200 |
|
|
}
|
4201 |
|
|
emit_insn (x);
|
4202 |
|
|
cfun->uses_pic_offset_table = TRUE;
|
4203 |
|
|
return TRUE;
|
4204 |
|
|
}
|
4205 |
|
|
|
4206 |
|
|
|
4207 |
|
|
return FALSE;
|
4208 |
|
|
}
|
4209 |
|
|
|
4210 |
|
|
|
4211 |
|
|
/* Return a string to output a single word move. */
|
4212 |
|
|
|
4213 |
|
|
const char *
|
4214 |
|
|
output_move_single (rtx operands[], rtx insn)
|
4215 |
|
|
{
|
4216 |
|
|
rtx dest = operands[0];
|
4217 |
|
|
rtx src = operands[1];
|
4218 |
|
|
|
4219 |
|
|
if (GET_CODE (dest) == REG)
|
4220 |
|
|
{
|
4221 |
|
|
int dest_regno = REGNO (dest);
|
4222 |
|
|
enum machine_mode mode = GET_MODE (dest);
|
4223 |
|
|
|
4224 |
|
|
if (GPR_P (dest_regno))
|
4225 |
|
|
{
|
4226 |
|
|
if (GET_CODE (src) == REG)
|
4227 |
|
|
{
|
4228 |
|
|
/* gpr <- some sort of register */
|
4229 |
|
|
int src_regno = REGNO (src);
|
4230 |
|
|
|
4231 |
|
|
if (GPR_P (src_regno))
|
4232 |
|
|
return "mov %1, %0";
|
4233 |
|
|
|
4234 |
|
|
else if (FPR_P (src_regno))
|
4235 |
|
|
return "movfg %1, %0";
|
4236 |
|
|
|
4237 |
|
|
else if (SPR_P (src_regno))
|
4238 |
|
|
return "movsg %1, %0";
|
4239 |
|
|
}
|
4240 |
|
|
|
4241 |
|
|
else if (GET_CODE (src) == MEM)
|
4242 |
|
|
{
|
4243 |
|
|
/* gpr <- memory */
|
4244 |
|
|
switch (mode)
|
4245 |
|
|
{
|
4246 |
|
|
default:
|
4247 |
|
|
break;
|
4248 |
|
|
|
4249 |
|
|
case QImode:
|
4250 |
|
|
return "ldsb%I1%U1 %M1,%0";
|
4251 |
|
|
|
4252 |
|
|
case HImode:
|
4253 |
|
|
return "ldsh%I1%U1 %M1,%0";
|
4254 |
|
|
|
4255 |
|
|
case SImode:
|
4256 |
|
|
case SFmode:
|
4257 |
|
|
return "ld%I1%U1 %M1, %0";
|
4258 |
|
|
}
|
4259 |
|
|
}
|
4260 |
|
|
|
4261 |
|
|
else if (GET_CODE (src) == CONST_INT
|
4262 |
|
|
|| GET_CODE (src) == CONST_DOUBLE)
|
4263 |
|
|
{
|
4264 |
|
|
/* gpr <- integer/floating constant */
|
4265 |
|
|
HOST_WIDE_INT value;
|
4266 |
|
|
|
4267 |
|
|
if (GET_CODE (src) == CONST_INT)
|
4268 |
|
|
value = INTVAL (src);
|
4269 |
|
|
|
4270 |
|
|
else if (mode == SFmode)
|
4271 |
|
|
{
|
4272 |
|
|
REAL_VALUE_TYPE rv;
|
4273 |
|
|
long l;
|
4274 |
|
|
|
4275 |
|
|
REAL_VALUE_FROM_CONST_DOUBLE (rv, src);
|
4276 |
|
|
REAL_VALUE_TO_TARGET_SINGLE (rv, l);
|
4277 |
|
|
value = l;
|
4278 |
|
|
}
|
4279 |
|
|
|
4280 |
|
|
else
|
4281 |
|
|
value = CONST_DOUBLE_LOW (src);
|
4282 |
|
|
|
4283 |
|
|
if (IN_RANGE_P (value, -32768, 32767))
|
4284 |
|
|
return "setlos %1, %0";
|
4285 |
|
|
|
4286 |
|
|
return "#";
|
4287 |
|
|
}
|
4288 |
|
|
|
4289 |
|
|
else if (GET_CODE (src) == SYMBOL_REF
|
4290 |
|
|
|| GET_CODE (src) == LABEL_REF
|
4291 |
|
|
|| GET_CODE (src) == CONST)
|
4292 |
|
|
{
|
4293 |
|
|
return "#";
|
4294 |
|
|
}
|
4295 |
|
|
}
|
4296 |
|
|
|
4297 |
|
|
else if (FPR_P (dest_regno))
|
4298 |
|
|
{
|
4299 |
|
|
if (GET_CODE (src) == REG)
|
4300 |
|
|
{
|
4301 |
|
|
/* fpr <- some sort of register */
|
4302 |
|
|
int src_regno = REGNO (src);
|
4303 |
|
|
|
4304 |
|
|
if (GPR_P (src_regno))
|
4305 |
|
|
return "movgf %1, %0";
|
4306 |
|
|
|
4307 |
|
|
else if (FPR_P (src_regno))
|
4308 |
|
|
{
|
4309 |
|
|
if (TARGET_HARD_FLOAT)
|
4310 |
|
|
return "fmovs %1, %0";
|
4311 |
|
|
else
|
4312 |
|
|
return "mor %1, %1, %0";
|
4313 |
|
|
}
|
4314 |
|
|
}
|
4315 |
|
|
|
4316 |
|
|
else if (GET_CODE (src) == MEM)
|
4317 |
|
|
{
|
4318 |
|
|
/* fpr <- memory */
|
4319 |
|
|
switch (mode)
|
4320 |
|
|
{
|
4321 |
|
|
default:
|
4322 |
|
|
break;
|
4323 |
|
|
|
4324 |
|
|
case QImode:
|
4325 |
|
|
return "ldbf%I1%U1 %M1,%0";
|
4326 |
|
|
|
4327 |
|
|
case HImode:
|
4328 |
|
|
return "ldhf%I1%U1 %M1,%0";
|
4329 |
|
|
|
4330 |
|
|
case SImode:
|
4331 |
|
|
case SFmode:
|
4332 |
|
|
return "ldf%I1%U1 %M1, %0";
|
4333 |
|
|
}
|
4334 |
|
|
}
|
4335 |
|
|
|
4336 |
|
|
else if (ZERO_P (src))
|
4337 |
|
|
return "movgf %., %0";
|
4338 |
|
|
}
|
4339 |
|
|
|
4340 |
|
|
else if (SPR_P (dest_regno))
|
4341 |
|
|
{
|
4342 |
|
|
if (GET_CODE (src) == REG)
|
4343 |
|
|
{
|
4344 |
|
|
/* spr <- some sort of register */
|
4345 |
|
|
int src_regno = REGNO (src);
|
4346 |
|
|
|
4347 |
|
|
if (GPR_P (src_regno))
|
4348 |
|
|
return "movgs %1, %0";
|
4349 |
|
|
}
|
4350 |
|
|
else if (ZERO_P (src))
|
4351 |
|
|
return "movgs %., %0";
|
4352 |
|
|
}
|
4353 |
|
|
}
|
4354 |
|
|
|
4355 |
|
|
else if (GET_CODE (dest) == MEM)
|
4356 |
|
|
{
|
4357 |
|
|
if (GET_CODE (src) == REG)
|
4358 |
|
|
{
|
4359 |
|
|
int src_regno = REGNO (src);
|
4360 |
|
|
enum machine_mode mode = GET_MODE (dest);
|
4361 |
|
|
|
4362 |
|
|
if (GPR_P (src_regno))
|
4363 |
|
|
{
|
4364 |
|
|
switch (mode)
|
4365 |
|
|
{
|
4366 |
|
|
default:
|
4367 |
|
|
break;
|
4368 |
|
|
|
4369 |
|
|
case QImode:
|
4370 |
|
|
return "stb%I0%U0 %1, %M0";
|
4371 |
|
|
|
4372 |
|
|
case HImode:
|
4373 |
|
|
return "sth%I0%U0 %1, %M0";
|
4374 |
|
|
|
4375 |
|
|
case SImode:
|
4376 |
|
|
case SFmode:
|
4377 |
|
|
return "st%I0%U0 %1, %M0";
|
4378 |
|
|
}
|
4379 |
|
|
}
|
4380 |
|
|
|
4381 |
|
|
else if (FPR_P (src_regno))
|
4382 |
|
|
{
|
4383 |
|
|
switch (mode)
|
4384 |
|
|
{
|
4385 |
|
|
default:
|
4386 |
|
|
break;
|
4387 |
|
|
|
4388 |
|
|
case QImode:
|
4389 |
|
|
return "stbf%I0%U0 %1, %M0";
|
4390 |
|
|
|
4391 |
|
|
case HImode:
|
4392 |
|
|
return "sthf%I0%U0 %1, %M0";
|
4393 |
|
|
|
4394 |
|
|
case SImode:
|
4395 |
|
|
case SFmode:
|
4396 |
|
|
return "stf%I0%U0 %1, %M0";
|
4397 |
|
|
}
|
4398 |
|
|
}
|
4399 |
|
|
}
|
4400 |
|
|
|
4401 |
|
|
else if (ZERO_P (src))
|
4402 |
|
|
{
|
4403 |
|
|
switch (GET_MODE (dest))
|
4404 |
|
|
{
|
4405 |
|
|
default:
|
4406 |
|
|
break;
|
4407 |
|
|
|
4408 |
|
|
case QImode:
|
4409 |
|
|
return "stb%I0%U0 %., %M0";
|
4410 |
|
|
|
4411 |
|
|
case HImode:
|
4412 |
|
|
return "sth%I0%U0 %., %M0";
|
4413 |
|
|
|
4414 |
|
|
case SImode:
|
4415 |
|
|
case SFmode:
|
4416 |
|
|
return "st%I0%U0 %., %M0";
|
4417 |
|
|
}
|
4418 |
|
|
}
|
4419 |
|
|
}
|
4420 |
|
|
|
4421 |
|
|
fatal_insn ("bad output_move_single operand", insn);
|
4422 |
|
|
return "";
|
4423 |
|
|
}
|
4424 |
|
|
|
4425 |
|
|
|
4426 |
|
|
/* Return a string to output a double word move. */
|
4427 |
|
|
|
4428 |
|
|
const char *
|
4429 |
|
|
output_move_double (rtx operands[], rtx insn)
|
4430 |
|
|
{
|
4431 |
|
|
rtx dest = operands[0];
|
4432 |
|
|
rtx src = operands[1];
|
4433 |
|
|
enum machine_mode mode = GET_MODE (dest);
|
4434 |
|
|
|
4435 |
|
|
if (GET_CODE (dest) == REG)
|
4436 |
|
|
{
|
4437 |
|
|
int dest_regno = REGNO (dest);
|
4438 |
|
|
|
4439 |
|
|
if (GPR_P (dest_regno))
|
4440 |
|
|
{
|
4441 |
|
|
if (GET_CODE (src) == REG)
|
4442 |
|
|
{
|
4443 |
|
|
/* gpr <- some sort of register */
|
4444 |
|
|
int src_regno = REGNO (src);
|
4445 |
|
|
|
4446 |
|
|
if (GPR_P (src_regno))
|
4447 |
|
|
return "#";
|
4448 |
|
|
|
4449 |
|
|
else if (FPR_P (src_regno))
|
4450 |
|
|
{
|
4451 |
|
|
if (((dest_regno - GPR_FIRST) & 1) == 0
|
4452 |
|
|
&& ((src_regno - FPR_FIRST) & 1) == 0)
|
4453 |
|
|
return "movfgd %1, %0";
|
4454 |
|
|
|
4455 |
|
|
return "#";
|
4456 |
|
|
}
|
4457 |
|
|
}
|
4458 |
|
|
|
4459 |
|
|
else if (GET_CODE (src) == MEM)
|
4460 |
|
|
{
|
4461 |
|
|
/* gpr <- memory */
|
4462 |
|
|
if (dbl_memory_one_insn_operand (src, mode))
|
4463 |
|
|
return "ldd%I1%U1 %M1, %0";
|
4464 |
|
|
|
4465 |
|
|
return "#";
|
4466 |
|
|
}
|
4467 |
|
|
|
4468 |
|
|
else if (GET_CODE (src) == CONST_INT
|
4469 |
|
|
|| GET_CODE (src) == CONST_DOUBLE)
|
4470 |
|
|
return "#";
|
4471 |
|
|
}
|
4472 |
|
|
|
4473 |
|
|
else if (FPR_P (dest_regno))
|
4474 |
|
|
{
|
4475 |
|
|
if (GET_CODE (src) == REG)
|
4476 |
|
|
{
|
4477 |
|
|
/* fpr <- some sort of register */
|
4478 |
|
|
int src_regno = REGNO (src);
|
4479 |
|
|
|
4480 |
|
|
if (GPR_P (src_regno))
|
4481 |
|
|
{
|
4482 |
|
|
if (((dest_regno - FPR_FIRST) & 1) == 0
|
4483 |
|
|
&& ((src_regno - GPR_FIRST) & 1) == 0)
|
4484 |
|
|
return "movgfd %1, %0";
|
4485 |
|
|
|
4486 |
|
|
return "#";
|
4487 |
|
|
}
|
4488 |
|
|
|
4489 |
|
|
else if (FPR_P (src_regno))
|
4490 |
|
|
{
|
4491 |
|
|
if (TARGET_DOUBLE
|
4492 |
|
|
&& ((dest_regno - FPR_FIRST) & 1) == 0
|
4493 |
|
|
&& ((src_regno - FPR_FIRST) & 1) == 0)
|
4494 |
|
|
return "fmovd %1, %0";
|
4495 |
|
|
|
4496 |
|
|
return "#";
|
4497 |
|
|
}
|
4498 |
|
|
}
|
4499 |
|
|
|
4500 |
|
|
else if (GET_CODE (src) == MEM)
|
4501 |
|
|
{
|
4502 |
|
|
/* fpr <- memory */
|
4503 |
|
|
if (dbl_memory_one_insn_operand (src, mode))
|
4504 |
|
|
return "lddf%I1%U1 %M1, %0";
|
4505 |
|
|
|
4506 |
|
|
return "#";
|
4507 |
|
|
}
|
4508 |
|
|
|
4509 |
|
|
else if (ZERO_P (src))
|
4510 |
|
|
return "#";
|
4511 |
|
|
}
|
4512 |
|
|
}
|
4513 |
|
|
|
4514 |
|
|
else if (GET_CODE (dest) == MEM)
|
4515 |
|
|
{
|
4516 |
|
|
if (GET_CODE (src) == REG)
|
4517 |
|
|
{
|
4518 |
|
|
int src_regno = REGNO (src);
|
4519 |
|
|
|
4520 |
|
|
if (GPR_P (src_regno))
|
4521 |
|
|
{
|
4522 |
|
|
if (((src_regno - GPR_FIRST) & 1) == 0
|
4523 |
|
|
&& dbl_memory_one_insn_operand (dest, mode))
|
4524 |
|
|
return "std%I0%U0 %1, %M0";
|
4525 |
|
|
|
4526 |
|
|
return "#";
|
4527 |
|
|
}
|
4528 |
|
|
|
4529 |
|
|
if (FPR_P (src_regno))
|
4530 |
|
|
{
|
4531 |
|
|
if (((src_regno - FPR_FIRST) & 1) == 0
|
4532 |
|
|
&& dbl_memory_one_insn_operand (dest, mode))
|
4533 |
|
|
return "stdf%I0%U0 %1, %M0";
|
4534 |
|
|
|
4535 |
|
|
return "#";
|
4536 |
|
|
}
|
4537 |
|
|
}
|
4538 |
|
|
|
4539 |
|
|
else if (ZERO_P (src))
|
4540 |
|
|
{
|
4541 |
|
|
if (dbl_memory_one_insn_operand (dest, mode))
|
4542 |
|
|
return "std%I0%U0 %., %M0";
|
4543 |
|
|
|
4544 |
|
|
return "#";
|
4545 |
|
|
}
|
4546 |
|
|
}
|
4547 |
|
|
|
4548 |
|
|
fatal_insn ("bad output_move_double operand", insn);
|
4549 |
|
|
return "";
|
4550 |
|
|
}
|
4551 |
|
|
|
4552 |
|
|
|
4553 |
|
|
/* Return a string to output a single word conditional move.
|
4554 |
|
|
Operand0 -- EQ/NE of ccr register and 0
|
4555 |
|
|
Operand1 -- CCR register
|
4556 |
|
|
Operand2 -- destination
|
4557 |
|
|
Operand3 -- source */
|
4558 |
|
|
|
4559 |
|
|
const char *
|
4560 |
|
|
output_condmove_single (rtx operands[], rtx insn)
|
4561 |
|
|
{
|
4562 |
|
|
rtx dest = operands[2];
|
4563 |
|
|
rtx src = operands[3];
|
4564 |
|
|
|
4565 |
|
|
if (GET_CODE (dest) == REG)
|
4566 |
|
|
{
|
4567 |
|
|
int dest_regno = REGNO (dest);
|
4568 |
|
|
enum machine_mode mode = GET_MODE (dest);
|
4569 |
|
|
|
4570 |
|
|
if (GPR_P (dest_regno))
|
4571 |
|
|
{
|
4572 |
|
|
if (GET_CODE (src) == REG)
|
4573 |
|
|
{
|
4574 |
|
|
/* gpr <- some sort of register */
|
4575 |
|
|
int src_regno = REGNO (src);
|
4576 |
|
|
|
4577 |
|
|
if (GPR_P (src_regno))
|
4578 |
|
|
return "cmov %z3, %2, %1, %e0";
|
4579 |
|
|
|
4580 |
|
|
else if (FPR_P (src_regno))
|
4581 |
|
|
return "cmovfg %3, %2, %1, %e0";
|
4582 |
|
|
}
|
4583 |
|
|
|
4584 |
|
|
else if (GET_CODE (src) == MEM)
|
4585 |
|
|
{
|
4586 |
|
|
/* gpr <- memory */
|
4587 |
|
|
switch (mode)
|
4588 |
|
|
{
|
4589 |
|
|
default:
|
4590 |
|
|
break;
|
4591 |
|
|
|
4592 |
|
|
case QImode:
|
4593 |
|
|
return "cldsb%I3%U3 %M3, %2, %1, %e0";
|
4594 |
|
|
|
4595 |
|
|
case HImode:
|
4596 |
|
|
return "cldsh%I3%U3 %M3, %2, %1, %e0";
|
4597 |
|
|
|
4598 |
|
|
case SImode:
|
4599 |
|
|
case SFmode:
|
4600 |
|
|
return "cld%I3%U3 %M3, %2, %1, %e0";
|
4601 |
|
|
}
|
4602 |
|
|
}
|
4603 |
|
|
|
4604 |
|
|
else if (ZERO_P (src))
|
4605 |
|
|
return "cmov %., %2, %1, %e0";
|
4606 |
|
|
}
|
4607 |
|
|
|
4608 |
|
|
else if (FPR_P (dest_regno))
|
4609 |
|
|
{
|
4610 |
|
|
if (GET_CODE (src) == REG)
|
4611 |
|
|
{
|
4612 |
|
|
/* fpr <- some sort of register */
|
4613 |
|
|
int src_regno = REGNO (src);
|
4614 |
|
|
|
4615 |
|
|
if (GPR_P (src_regno))
|
4616 |
|
|
return "cmovgf %3, %2, %1, %e0";
|
4617 |
|
|
|
4618 |
|
|
else if (FPR_P (src_regno))
|
4619 |
|
|
{
|
4620 |
|
|
if (TARGET_HARD_FLOAT)
|
4621 |
|
|
return "cfmovs %3,%2,%1,%e0";
|
4622 |
|
|
else
|
4623 |
|
|
return "cmor %3, %3, %2, %1, %e0";
|
4624 |
|
|
}
|
4625 |
|
|
}
|
4626 |
|
|
|
4627 |
|
|
else if (GET_CODE (src) == MEM)
|
4628 |
|
|
{
|
4629 |
|
|
/* fpr <- memory */
|
4630 |
|
|
if (mode == SImode || mode == SFmode)
|
4631 |
|
|
return "cldf%I3%U3 %M3, %2, %1, %e0";
|
4632 |
|
|
}
|
4633 |
|
|
|
4634 |
|
|
else if (ZERO_P (src))
|
4635 |
|
|
return "cmovgf %., %2, %1, %e0";
|
4636 |
|
|
}
|
4637 |
|
|
}
|
4638 |
|
|
|
4639 |
|
|
else if (GET_CODE (dest) == MEM)
|
4640 |
|
|
{
|
4641 |
|
|
if (GET_CODE (src) == REG)
|
4642 |
|
|
{
|
4643 |
|
|
int src_regno = REGNO (src);
|
4644 |
|
|
enum machine_mode mode = GET_MODE (dest);
|
4645 |
|
|
|
4646 |
|
|
if (GPR_P (src_regno))
|
4647 |
|
|
{
|
4648 |
|
|
switch (mode)
|
4649 |
|
|
{
|
4650 |
|
|
default:
|
4651 |
|
|
break;
|
4652 |
|
|
|
4653 |
|
|
case QImode:
|
4654 |
|
|
return "cstb%I2%U2 %3, %M2, %1, %e0";
|
4655 |
|
|
|
4656 |
|
|
case HImode:
|
4657 |
|
|
return "csth%I2%U2 %3, %M2, %1, %e0";
|
4658 |
|
|
|
4659 |
|
|
case SImode:
|
4660 |
|
|
case SFmode:
|
4661 |
|
|
return "cst%I2%U2 %3, %M2, %1, %e0";
|
4662 |
|
|
}
|
4663 |
|
|
}
|
4664 |
|
|
|
4665 |
|
|
else if (FPR_P (src_regno) && (mode == SImode || mode == SFmode))
|
4666 |
|
|
return "cstf%I2%U2 %3, %M2, %1, %e0";
|
4667 |
|
|
}
|
4668 |
|
|
|
4669 |
|
|
else if (ZERO_P (src))
|
4670 |
|
|
{
|
4671 |
|
|
enum machine_mode mode = GET_MODE (dest);
|
4672 |
|
|
switch (mode)
|
4673 |
|
|
{
|
4674 |
|
|
default:
|
4675 |
|
|
break;
|
4676 |
|
|
|
4677 |
|
|
case QImode:
|
4678 |
|
|
return "cstb%I2%U2 %., %M2, %1, %e0";
|
4679 |
|
|
|
4680 |
|
|
case HImode:
|
4681 |
|
|
return "csth%I2%U2 %., %M2, %1, %e0";
|
4682 |
|
|
|
4683 |
|
|
case SImode:
|
4684 |
|
|
case SFmode:
|
4685 |
|
|
return "cst%I2%U2 %., %M2, %1, %e0";
|
4686 |
|
|
}
|
4687 |
|
|
}
|
4688 |
|
|
}
|
4689 |
|
|
|
4690 |
|
|
fatal_insn ("bad output_condmove_single operand", insn);
|
4691 |
|
|
return "";
|
4692 |
|
|
}
|
4693 |
|
|
|
4694 |
|
|
|
4695 |
|
|
/* Emit the appropriate code to do a comparison, returning the register the
|
4696 |
|
|
comparison was done it. */
|
4697 |
|
|
|
4698 |
|
|
static rtx
|
4699 |
|
|
frv_emit_comparison (enum rtx_code test, rtx op0, rtx op1)
|
4700 |
|
|
{
|
4701 |
|
|
enum machine_mode cc_mode;
|
4702 |
|
|
rtx cc_reg;
|
4703 |
|
|
|
4704 |
|
|
/* Floating point doesn't have comparison against a constant. */
|
4705 |
|
|
if (GET_MODE (op0) == CC_FPmode && GET_CODE (op1) != REG)
|
4706 |
|
|
op1 = force_reg (GET_MODE (op0), op1);
|
4707 |
|
|
|
4708 |
|
|
/* Possibly disable using anything but a fixed register in order to work
|
4709 |
|
|
around cse moving comparisons past function calls. */
|
4710 |
|
|
cc_mode = SELECT_CC_MODE (test, op0, op1);
|
4711 |
|
|
cc_reg = ((TARGET_ALLOC_CC)
|
4712 |
|
|
? gen_reg_rtx (cc_mode)
|
4713 |
|
|
: gen_rtx_REG (cc_mode,
|
4714 |
|
|
(cc_mode == CC_FPmode) ? FCC_FIRST : ICC_FIRST));
|
4715 |
|
|
|
4716 |
|
|
emit_insn (gen_rtx_SET (VOIDmode, cc_reg,
|
4717 |
|
|
gen_rtx_COMPARE (cc_mode, op0, op1)));
|
4718 |
|
|
|
4719 |
|
|
return cc_reg;
|
4720 |
|
|
}
|
4721 |
|
|
|
4722 |
|
|
|
4723 |
|
|
/* Emit code for a conditional branch. The comparison operands were previously
|
4724 |
|
|
stored in frv_compare_op0 and frv_compare_op1.
|
4725 |
|
|
|
4726 |
|
|
XXX: I originally wanted to add a clobber of a CCR register to use in
|
4727 |
|
|
conditional execution, but that confuses the rest of the compiler. */
|
4728 |
|
|
|
4729 |
|
|
int
|
4730 |
|
|
frv_emit_cond_branch (enum rtx_code test, rtx label)
|
4731 |
|
|
{
|
4732 |
|
|
rtx test_rtx;
|
4733 |
|
|
rtx label_ref;
|
4734 |
|
|
rtx if_else;
|
4735 |
|
|
rtx cc_reg = frv_emit_comparison (test, frv_compare_op0, frv_compare_op1);
|
4736 |
|
|
enum machine_mode cc_mode = GET_MODE (cc_reg);
|
4737 |
|
|
|
4738 |
|
|
/* Branches generate:
|
4739 |
|
|
(set (pc)
|
4740 |
|
|
(if_then_else (<test>, <cc_reg>, (const_int 0))
|
4741 |
|
|
(label_ref <branch_label>)
|
4742 |
|
|
(pc))) */
|
4743 |
|
|
label_ref = gen_rtx_LABEL_REF (VOIDmode, label);
|
4744 |
|
|
test_rtx = gen_rtx_fmt_ee (test, cc_mode, cc_reg, const0_rtx);
|
4745 |
|
|
if_else = gen_rtx_IF_THEN_ELSE (cc_mode, test_rtx, label_ref, pc_rtx);
|
4746 |
|
|
emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx, if_else));
|
4747 |
|
|
return TRUE;
|
4748 |
|
|
}
|
4749 |
|
|
|
4750 |
|
|
|
4751 |
|
|
/* Emit code to set a gpr to 1/0 based on a comparison. The comparison
|
4752 |
|
|
operands were previously stored in frv_compare_op0 and frv_compare_op1. */
|
4753 |
|
|
|
4754 |
|
|
int
|
4755 |
|
|
frv_emit_scc (enum rtx_code test, rtx target)
|
4756 |
|
|
{
|
4757 |
|
|
rtx set;
|
4758 |
|
|
rtx test_rtx;
|
4759 |
|
|
rtx clobber;
|
4760 |
|
|
rtx cr_reg;
|
4761 |
|
|
rtx cc_reg = frv_emit_comparison (test, frv_compare_op0, frv_compare_op1);
|
4762 |
|
|
|
4763 |
|
|
/* SCC instructions generate:
|
4764 |
|
|
(parallel [(set <target> (<test>, <cc_reg>, (const_int 0))
|
4765 |
|
|
(clobber (<ccr_reg>))]) */
|
4766 |
|
|
test_rtx = gen_rtx_fmt_ee (test, SImode, cc_reg, const0_rtx);
|
4767 |
|
|
set = gen_rtx_SET (VOIDmode, target, test_rtx);
|
4768 |
|
|
|
4769 |
|
|
cr_reg = ((TARGET_ALLOC_CC)
|
4770 |
|
|
? gen_reg_rtx (CC_CCRmode)
|
4771 |
|
|
: gen_rtx_REG (CC_CCRmode,
|
4772 |
|
|
((GET_MODE (cc_reg) == CC_FPmode)
|
4773 |
|
|
? FCR_FIRST
|
4774 |
|
|
: ICR_FIRST)));
|
4775 |
|
|
|
4776 |
|
|
clobber = gen_rtx_CLOBBER (VOIDmode, cr_reg);
|
4777 |
|
|
emit_insn (gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, set, clobber)));
|
4778 |
|
|
return TRUE;
|
4779 |
|
|
}
|
4780 |
|
|
|
4781 |
|
|
|
4782 |
|
|
/* Split a SCC instruction into component parts, returning a SEQUENCE to hold
|
4783 |
|
|
the separate insns. */
|
4784 |
|
|
|
4785 |
|
|
rtx
|
4786 |
|
|
frv_split_scc (rtx dest, rtx test, rtx cc_reg, rtx cr_reg, HOST_WIDE_INT value)
|
4787 |
|
|
{
|
4788 |
|
|
rtx ret;
|
4789 |
|
|
|
4790 |
|
|
start_sequence ();
|
4791 |
|
|
|
4792 |
|
|
/* Set the appropriate CCR bit. */
|
4793 |
|
|
emit_insn (gen_rtx_SET (VOIDmode,
|
4794 |
|
|
cr_reg,
|
4795 |
|
|
gen_rtx_fmt_ee (GET_CODE (test),
|
4796 |
|
|
GET_MODE (cr_reg),
|
4797 |
|
|
cc_reg,
|
4798 |
|
|
const0_rtx)));
|
4799 |
|
|
|
4800 |
|
|
/* Move the value into the destination. */
|
4801 |
|
|
emit_move_insn (dest, GEN_INT (value));
|
4802 |
|
|
|
4803 |
|
|
/* Move 0 into the destination if the test failed */
|
4804 |
|
|
emit_insn (gen_rtx_COND_EXEC (VOIDmode,
|
4805 |
|
|
gen_rtx_EQ (GET_MODE (cr_reg),
|
4806 |
|
|
cr_reg,
|
4807 |
|
|
const0_rtx),
|
4808 |
|
|
gen_rtx_SET (VOIDmode, dest, const0_rtx)));
|
4809 |
|
|
|
4810 |
|
|
/* Finish up, return sequence. */
|
4811 |
|
|
ret = get_insns ();
|
4812 |
|
|
end_sequence ();
|
4813 |
|
|
return ret;
|
4814 |
|
|
}
|
4815 |
|
|
|
4816 |
|
|
|
4817 |
|
|
/* Emit the code for a conditional move, return TRUE if we could do the
|
4818 |
|
|
move. */
|
4819 |
|
|
|
4820 |
|
|
int
|
4821 |
|
|
frv_emit_cond_move (rtx dest, rtx test_rtx, rtx src1, rtx src2)
|
4822 |
|
|
{
|
4823 |
|
|
rtx set;
|
4824 |
|
|
rtx clobber_cc;
|
4825 |
|
|
rtx test2;
|
4826 |
|
|
rtx cr_reg;
|
4827 |
|
|
rtx if_rtx;
|
4828 |
|
|
enum rtx_code test = GET_CODE (test_rtx);
|
4829 |
|
|
rtx cc_reg = frv_emit_comparison (test, frv_compare_op0, frv_compare_op1);
|
4830 |
|
|
enum machine_mode cc_mode = GET_MODE (cc_reg);
|
4831 |
|
|
|
4832 |
|
|
/* Conditional move instructions generate:
|
4833 |
|
|
(parallel [(set <target>
|
4834 |
|
|
(if_then_else (<test> <cc_reg> (const_int 0))
|
4835 |
|
|
<src1>
|
4836 |
|
|
<src2>))
|
4837 |
|
|
(clobber (<ccr_reg>))]) */
|
4838 |
|
|
|
4839 |
|
|
/* Handle various cases of conditional move involving two constants. */
|
4840 |
|
|
if (GET_CODE (src1) == CONST_INT && GET_CODE (src2) == CONST_INT)
|
4841 |
|
|
{
|
4842 |
|
|
HOST_WIDE_INT value1 = INTVAL (src1);
|
4843 |
|
|
HOST_WIDE_INT value2 = INTVAL (src2);
|
4844 |
|
|
|
4845 |
|
|
/* Having 0 as one of the constants can be done by loading the other
|
4846 |
|
|
constant, and optionally moving in gr0. */
|
4847 |
|
|
if (value1 == 0 || value2 == 0)
|
4848 |
|
|
;
|
4849 |
|
|
|
4850 |
|
|
/* If the first value is within an addi range and also the difference
|
4851 |
|
|
between the two fits in an addi's range, load up the difference, then
|
4852 |
|
|
conditionally move in 0, and then unconditionally add the first
|
4853 |
|
|
value. */
|
4854 |
|
|
else if (IN_RANGE_P (value1, -2048, 2047)
|
4855 |
|
|
&& IN_RANGE_P (value2 - value1, -2048, 2047))
|
4856 |
|
|
;
|
4857 |
|
|
|
4858 |
|
|
/* If neither condition holds, just force the constant into a
|
4859 |
|
|
register. */
|
4860 |
|
|
else
|
4861 |
|
|
{
|
4862 |
|
|
src1 = force_reg (GET_MODE (dest), src1);
|
4863 |
|
|
src2 = force_reg (GET_MODE (dest), src2);
|
4864 |
|
|
}
|
4865 |
|
|
}
|
4866 |
|
|
|
4867 |
|
|
/* If one value is a register, insure the other value is either 0 or a
|
4868 |
|
|
register. */
|
4869 |
|
|
else
|
4870 |
|
|
{
|
4871 |
|
|
if (GET_CODE (src1) == CONST_INT && INTVAL (src1) != 0)
|
4872 |
|
|
src1 = force_reg (GET_MODE (dest), src1);
|
4873 |
|
|
|
4874 |
|
|
if (GET_CODE (src2) == CONST_INT && INTVAL (src2) != 0)
|
4875 |
|
|
src2 = force_reg (GET_MODE (dest), src2);
|
4876 |
|
|
}
|
4877 |
|
|
|
4878 |
|
|
test2 = gen_rtx_fmt_ee (test, cc_mode, cc_reg, const0_rtx);
|
4879 |
|
|
if_rtx = gen_rtx_IF_THEN_ELSE (GET_MODE (dest), test2, src1, src2);
|
4880 |
|
|
|
4881 |
|
|
set = gen_rtx_SET (VOIDmode, dest, if_rtx);
|
4882 |
|
|
|
4883 |
|
|
cr_reg = ((TARGET_ALLOC_CC)
|
4884 |
|
|
? gen_reg_rtx (CC_CCRmode)
|
4885 |
|
|
: gen_rtx_REG (CC_CCRmode,
|
4886 |
|
|
(cc_mode == CC_FPmode) ? FCR_FIRST : ICR_FIRST));
|
4887 |
|
|
|
4888 |
|
|
clobber_cc = gen_rtx_CLOBBER (VOIDmode, cr_reg);
|
4889 |
|
|
emit_insn (gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, set, clobber_cc)));
|
4890 |
|
|
return TRUE;
|
4891 |
|
|
}
|
4892 |
|
|
|
4893 |
|
|
|
4894 |
|
|
/* Split a conditional move into constituent parts, returning a SEQUENCE
|
4895 |
|
|
containing all of the insns. */
|
4896 |
|
|
|
4897 |
|
|
rtx
|
4898 |
|
|
frv_split_cond_move (rtx operands[])
|
4899 |
|
|
{
|
4900 |
|
|
rtx dest = operands[0];
|
4901 |
|
|
rtx test = operands[1];
|
4902 |
|
|
rtx cc_reg = operands[2];
|
4903 |
|
|
rtx src1 = operands[3];
|
4904 |
|
|
rtx src2 = operands[4];
|
4905 |
|
|
rtx cr_reg = operands[5];
|
4906 |
|
|
rtx ret;
|
4907 |
|
|
enum machine_mode cr_mode = GET_MODE (cr_reg);
|
4908 |
|
|
|
4909 |
|
|
start_sequence ();
|
4910 |
|
|
|
4911 |
|
|
/* Set the appropriate CCR bit. */
|
4912 |
|
|
emit_insn (gen_rtx_SET (VOIDmode,
|
4913 |
|
|
cr_reg,
|
4914 |
|
|
gen_rtx_fmt_ee (GET_CODE (test),
|
4915 |
|
|
GET_MODE (cr_reg),
|
4916 |
|
|
cc_reg,
|
4917 |
|
|
const0_rtx)));
|
4918 |
|
|
|
4919 |
|
|
/* Handle various cases of conditional move involving two constants. */
|
4920 |
|
|
if (GET_CODE (src1) == CONST_INT && GET_CODE (src2) == CONST_INT)
|
4921 |
|
|
{
|
4922 |
|
|
HOST_WIDE_INT value1 = INTVAL (src1);
|
4923 |
|
|
HOST_WIDE_INT value2 = INTVAL (src2);
|
4924 |
|
|
|
4925 |
|
|
/* Having 0 as one of the constants can be done by loading the other
|
4926 |
|
|
constant, and optionally moving in gr0. */
|
4927 |
|
|
if (value1 == 0)
|
4928 |
|
|
{
|
4929 |
|
|
emit_move_insn (dest, src2);
|
4930 |
|
|
emit_insn (gen_rtx_COND_EXEC (VOIDmode,
|
4931 |
|
|
gen_rtx_NE (cr_mode, cr_reg,
|
4932 |
|
|
const0_rtx),
|
4933 |
|
|
gen_rtx_SET (VOIDmode, dest, src1)));
|
4934 |
|
|
}
|
4935 |
|
|
|
4936 |
|
|
else if (value2 == 0)
|
4937 |
|
|
{
|
4938 |
|
|
emit_move_insn (dest, src1);
|
4939 |
|
|
emit_insn (gen_rtx_COND_EXEC (VOIDmode,
|
4940 |
|
|
gen_rtx_EQ (cr_mode, cr_reg,
|
4941 |
|
|
const0_rtx),
|
4942 |
|
|
gen_rtx_SET (VOIDmode, dest, src2)));
|
4943 |
|
|
}
|
4944 |
|
|
|
4945 |
|
|
/* If the first value is within an addi range and also the difference
|
4946 |
|
|
between the two fits in an addi's range, load up the difference, then
|
4947 |
|
|
conditionally move in 0, and then unconditionally add the first
|
4948 |
|
|
value. */
|
4949 |
|
|
else if (IN_RANGE_P (value1, -2048, 2047)
|
4950 |
|
|
&& IN_RANGE_P (value2 - value1, -2048, 2047))
|
4951 |
|
|
{
|
4952 |
|
|
rtx dest_si = ((GET_MODE (dest) == SImode)
|
4953 |
|
|
? dest
|
4954 |
|
|
: gen_rtx_SUBREG (SImode, dest, 0));
|
4955 |
|
|
|
4956 |
|
|
emit_move_insn (dest_si, GEN_INT (value2 - value1));
|
4957 |
|
|
emit_insn (gen_rtx_COND_EXEC (VOIDmode,
|
4958 |
|
|
gen_rtx_NE (cr_mode, cr_reg,
|
4959 |
|
|
const0_rtx),
|
4960 |
|
|
gen_rtx_SET (VOIDmode, dest_si,
|
4961 |
|
|
const0_rtx)));
|
4962 |
|
|
emit_insn (gen_addsi3 (dest_si, dest_si, src1));
|
4963 |
|
|
}
|
4964 |
|
|
|
4965 |
|
|
else
|
4966 |
|
|
gcc_unreachable ();
|
4967 |
|
|
}
|
4968 |
|
|
else
|
4969 |
|
|
{
|
4970 |
|
|
/* Emit the conditional move for the test being true if needed. */
|
4971 |
|
|
if (! rtx_equal_p (dest, src1))
|
4972 |
|
|
emit_insn (gen_rtx_COND_EXEC (VOIDmode,
|
4973 |
|
|
gen_rtx_NE (cr_mode, cr_reg, const0_rtx),
|
4974 |
|
|
gen_rtx_SET (VOIDmode, dest, src1)));
|
4975 |
|
|
|
4976 |
|
|
/* Emit the conditional move for the test being false if needed. */
|
4977 |
|
|
if (! rtx_equal_p (dest, src2))
|
4978 |
|
|
emit_insn (gen_rtx_COND_EXEC (VOIDmode,
|
4979 |
|
|
gen_rtx_EQ (cr_mode, cr_reg, const0_rtx),
|
4980 |
|
|
gen_rtx_SET (VOIDmode, dest, src2)));
|
4981 |
|
|
}
|
4982 |
|
|
|
4983 |
|
|
/* Finish up, return sequence. */
|
4984 |
|
|
ret = get_insns ();
|
4985 |
|
|
end_sequence ();
|
4986 |
|
|
return ret;
|
4987 |
|
|
}
|
4988 |
|
|
|
4989 |
|
|
|
4990 |
|
|
/* Split (set DEST SOURCE), where DEST is a double register and SOURCE is a
|
4991 |
|
|
memory location that is not known to be dword-aligned. */
|
4992 |
|
|
void
|
4993 |
|
|
frv_split_double_load (rtx dest, rtx source)
|
4994 |
|
|
{
|
4995 |
|
|
int regno = REGNO (dest);
|
4996 |
|
|
rtx dest1 = gen_highpart (SImode, dest);
|
4997 |
|
|
rtx dest2 = gen_lowpart (SImode, dest);
|
4998 |
|
|
rtx address = XEXP (source, 0);
|
4999 |
|
|
|
5000 |
|
|
/* If the address is pre-modified, load the lower-numbered register
|
5001 |
|
|
first, then load the other register using an integer offset from
|
5002 |
|
|
the modified base register. This order should always be safe,
|
5003 |
|
|
since the pre-modification cannot affect the same registers as the
|
5004 |
|
|
load does.
|
5005 |
|
|
|
5006 |
|
|
The situation for other loads is more complicated. Loading one
|
5007 |
|
|
of the registers could affect the value of ADDRESS, so we must
|
5008 |
|
|
be careful which order we do them in. */
|
5009 |
|
|
if (GET_CODE (address) == PRE_MODIFY
|
5010 |
|
|
|| ! refers_to_regno_p (regno, regno + 1, address, NULL))
|
5011 |
|
|
{
|
5012 |
|
|
/* It is safe to load the lower-numbered register first. */
|
5013 |
|
|
emit_move_insn (dest1, change_address (source, SImode, NULL));
|
5014 |
|
|
emit_move_insn (dest2, frv_index_memory (source, SImode, 1));
|
5015 |
|
|
}
|
5016 |
|
|
else
|
5017 |
|
|
{
|
5018 |
|
|
/* ADDRESS is not pre-modified and the address depends on the
|
5019 |
|
|
lower-numbered register. Load the higher-numbered register
|
5020 |
|
|
first. */
|
5021 |
|
|
emit_move_insn (dest2, frv_index_memory (source, SImode, 1));
|
5022 |
|
|
emit_move_insn (dest1, change_address (source, SImode, NULL));
|
5023 |
|
|
}
|
5024 |
|
|
}
|
5025 |
|
|
|
5026 |
|
|
/* Split (set DEST SOURCE), where DEST refers to a dword memory location
|
5027 |
|
|
and SOURCE is either a double register or the constant zero. */
|
5028 |
|
|
void
|
5029 |
|
|
frv_split_double_store (rtx dest, rtx source)
|
5030 |
|
|
{
|
5031 |
|
|
rtx dest1 = change_address (dest, SImode, NULL);
|
5032 |
|
|
rtx dest2 = frv_index_memory (dest, SImode, 1);
|
5033 |
|
|
if (ZERO_P (source))
|
5034 |
|
|
{
|
5035 |
|
|
emit_move_insn (dest1, CONST0_RTX (SImode));
|
5036 |
|
|
emit_move_insn (dest2, CONST0_RTX (SImode));
|
5037 |
|
|
}
|
5038 |
|
|
else
|
5039 |
|
|
{
|
5040 |
|
|
emit_move_insn (dest1, gen_highpart (SImode, source));
|
5041 |
|
|
emit_move_insn (dest2, gen_lowpart (SImode, source));
|
5042 |
|
|
}
|
5043 |
|
|
}
|
5044 |
|
|
|
5045 |
|
|
|
5046 |
|
|
/* Split a min/max operation returning a SEQUENCE containing all of the
|
5047 |
|
|
insns. */
|
5048 |
|
|
|
5049 |
|
|
rtx
|
5050 |
|
|
frv_split_minmax (rtx operands[])
|
5051 |
|
|
{
|
5052 |
|
|
rtx dest = operands[0];
|
5053 |
|
|
rtx minmax = operands[1];
|
5054 |
|
|
rtx src1 = operands[2];
|
5055 |
|
|
rtx src2 = operands[3];
|
5056 |
|
|
rtx cc_reg = operands[4];
|
5057 |
|
|
rtx cr_reg = operands[5];
|
5058 |
|
|
rtx ret;
|
5059 |
|
|
enum rtx_code test_code;
|
5060 |
|
|
enum machine_mode cr_mode = GET_MODE (cr_reg);
|
5061 |
|
|
|
5062 |
|
|
start_sequence ();
|
5063 |
|
|
|
5064 |
|
|
/* Figure out which test to use. */
|
5065 |
|
|
switch (GET_CODE (minmax))
|
5066 |
|
|
{
|
5067 |
|
|
default:
|
5068 |
|
|
gcc_unreachable ();
|
5069 |
|
|
|
5070 |
|
|
case SMIN: test_code = LT; break;
|
5071 |
|
|
case SMAX: test_code = GT; break;
|
5072 |
|
|
case UMIN: test_code = LTU; break;
|
5073 |
|
|
case UMAX: test_code = GTU; break;
|
5074 |
|
|
}
|
5075 |
|
|
|
5076 |
|
|
/* Issue the compare instruction. */
|
5077 |
|
|
emit_insn (gen_rtx_SET (VOIDmode,
|
5078 |
|
|
cc_reg,
|
5079 |
|
|
gen_rtx_COMPARE (GET_MODE (cc_reg),
|
5080 |
|
|
src1, src2)));
|
5081 |
|
|
|
5082 |
|
|
/* Set the appropriate CCR bit. */
|
5083 |
|
|
emit_insn (gen_rtx_SET (VOIDmode,
|
5084 |
|
|
cr_reg,
|
5085 |
|
|
gen_rtx_fmt_ee (test_code,
|
5086 |
|
|
GET_MODE (cr_reg),
|
5087 |
|
|
cc_reg,
|
5088 |
|
|
const0_rtx)));
|
5089 |
|
|
|
5090 |
|
|
/* If are taking the min/max of a nonzero constant, load that first, and
|
5091 |
|
|
then do a conditional move of the other value. */
|
5092 |
|
|
if (GET_CODE (src2) == CONST_INT && INTVAL (src2) != 0)
|
5093 |
|
|
{
|
5094 |
|
|
gcc_assert (!rtx_equal_p (dest, src1));
|
5095 |
|
|
|
5096 |
|
|
emit_move_insn (dest, src2);
|
5097 |
|
|
emit_insn (gen_rtx_COND_EXEC (VOIDmode,
|
5098 |
|
|
gen_rtx_NE (cr_mode, cr_reg, const0_rtx),
|
5099 |
|
|
gen_rtx_SET (VOIDmode, dest, src1)));
|
5100 |
|
|
}
|
5101 |
|
|
|
5102 |
|
|
/* Otherwise, do each half of the move. */
|
5103 |
|
|
else
|
5104 |
|
|
{
|
5105 |
|
|
/* Emit the conditional move for the test being true if needed. */
|
5106 |
|
|
if (! rtx_equal_p (dest, src1))
|
5107 |
|
|
emit_insn (gen_rtx_COND_EXEC (VOIDmode,
|
5108 |
|
|
gen_rtx_NE (cr_mode, cr_reg, const0_rtx),
|
5109 |
|
|
gen_rtx_SET (VOIDmode, dest, src1)));
|
5110 |
|
|
|
5111 |
|
|
/* Emit the conditional move for the test being false if needed. */
|
5112 |
|
|
if (! rtx_equal_p (dest, src2))
|
5113 |
|
|
emit_insn (gen_rtx_COND_EXEC (VOIDmode,
|
5114 |
|
|
gen_rtx_EQ (cr_mode, cr_reg, const0_rtx),
|
5115 |
|
|
gen_rtx_SET (VOIDmode, dest, src2)));
|
5116 |
|
|
}
|
5117 |
|
|
|
5118 |
|
|
/* Finish up, return sequence. */
|
5119 |
|
|
ret = get_insns ();
|
5120 |
|
|
end_sequence ();
|
5121 |
|
|
return ret;
|
5122 |
|
|
}
|
5123 |
|
|
|
5124 |
|
|
|
5125 |
|
|
/* Split an integer abs operation returning a SEQUENCE containing all of the
|
5126 |
|
|
insns. */
|
5127 |
|
|
|
5128 |
|
|
rtx
|
5129 |
|
|
frv_split_abs (rtx operands[])
|
5130 |
|
|
{
|
5131 |
|
|
rtx dest = operands[0];
|
5132 |
|
|
rtx src = operands[1];
|
5133 |
|
|
rtx cc_reg = operands[2];
|
5134 |
|
|
rtx cr_reg = operands[3];
|
5135 |
|
|
rtx ret;
|
5136 |
|
|
|
5137 |
|
|
start_sequence ();
|
5138 |
|
|
|
5139 |
|
|
/* Issue the compare < 0 instruction. */
|
5140 |
|
|
emit_insn (gen_rtx_SET (VOIDmode,
|
5141 |
|
|
cc_reg,
|
5142 |
|
|
gen_rtx_COMPARE (CCmode, src, const0_rtx)));
|
5143 |
|
|
|
5144 |
|
|
/* Set the appropriate CCR bit. */
|
5145 |
|
|
emit_insn (gen_rtx_SET (VOIDmode,
|
5146 |
|
|
cr_reg,
|
5147 |
|
|
gen_rtx_fmt_ee (LT, CC_CCRmode, cc_reg, const0_rtx)));
|
5148 |
|
|
|
5149 |
|
|
/* Emit the conditional negate if the value is negative. */
|
5150 |
|
|
emit_insn (gen_rtx_COND_EXEC (VOIDmode,
|
5151 |
|
|
gen_rtx_NE (CC_CCRmode, cr_reg, const0_rtx),
|
5152 |
|
|
gen_negsi2 (dest, src)));
|
5153 |
|
|
|
5154 |
|
|
/* Emit the conditional move for the test being false if needed. */
|
5155 |
|
|
if (! rtx_equal_p (dest, src))
|
5156 |
|
|
emit_insn (gen_rtx_COND_EXEC (VOIDmode,
|
5157 |
|
|
gen_rtx_EQ (CC_CCRmode, cr_reg, const0_rtx),
|
5158 |
|
|
gen_rtx_SET (VOIDmode, dest, src)));
|
5159 |
|
|
|
5160 |
|
|
/* Finish up, return sequence. */
|
5161 |
|
|
ret = get_insns ();
|
5162 |
|
|
end_sequence ();
|
5163 |
|
|
return ret;
|
5164 |
|
|
}
|
5165 |
|
|
|
5166 |
|
|
|
5167 |
|
|
/* An internal function called by for_each_rtx to clear in a hard_reg set each
|
5168 |
|
|
register used in an insn. */
|
5169 |
|
|
|
5170 |
|
|
static int
|
5171 |
|
|
frv_clear_registers_used (rtx *ptr, void *data)
|
5172 |
|
|
{
|
5173 |
|
|
if (GET_CODE (*ptr) == REG)
|
5174 |
|
|
{
|
5175 |
|
|
int regno = REGNO (*ptr);
|
5176 |
|
|
HARD_REG_SET *p_regs = (HARD_REG_SET *)data;
|
5177 |
|
|
|
5178 |
|
|
if (regno < FIRST_PSEUDO_REGISTER)
|
5179 |
|
|
{
|
5180 |
|
|
int reg_max = regno + HARD_REGNO_NREGS (regno, GET_MODE (*ptr));
|
5181 |
|
|
|
5182 |
|
|
while (regno < reg_max)
|
5183 |
|
|
{
|
5184 |
|
|
CLEAR_HARD_REG_BIT (*p_regs, regno);
|
5185 |
|
|
regno++;
|
5186 |
|
|
}
|
5187 |
|
|
}
|
5188 |
|
|
}
|
5189 |
|
|
|
5190 |
|
|
return 0;
|
5191 |
|
|
}
|
5192 |
|
|
|
5193 |
|
|
|
5194 |
|
|
/* Initialize the extra fields provided by IFCVT_EXTRA_FIELDS. */
|
5195 |
|
|
|
5196 |
|
|
/* On the FR-V, we don't have any extra fields per se, but it is useful hook to
|
5197 |
|
|
initialize the static storage. */
|
5198 |
|
|
void
|
5199 |
|
|
frv_ifcvt_init_extra_fields (ce_if_block_t *ce_info ATTRIBUTE_UNUSED)
|
5200 |
|
|
{
|
5201 |
|
|
frv_ifcvt.added_insns_list = NULL_RTX;
|
5202 |
|
|
frv_ifcvt.cur_scratch_regs = 0;
|
5203 |
|
|
frv_ifcvt.num_nested_cond_exec = 0;
|
5204 |
|
|
frv_ifcvt.cr_reg = NULL_RTX;
|
5205 |
|
|
frv_ifcvt.nested_cc_reg = NULL_RTX;
|
5206 |
|
|
frv_ifcvt.extra_int_cr = NULL_RTX;
|
5207 |
|
|
frv_ifcvt.extra_fp_cr = NULL_RTX;
|
5208 |
|
|
frv_ifcvt.last_nested_if_cr = NULL_RTX;
|
5209 |
|
|
}
|
5210 |
|
|
|
5211 |
|
|
|
5212 |
|
|
/* Internal function to add a potential insn to the list of insns to be inserted
|
5213 |
|
|
if the conditional execution conversion is successful. */
|
5214 |
|
|
|
5215 |
|
|
static void
|
5216 |
|
|
frv_ifcvt_add_insn (rtx pattern, rtx insn, int before_p)
|
5217 |
|
|
{
|
5218 |
|
|
rtx link = alloc_EXPR_LIST (VOIDmode, pattern, insn);
|
5219 |
|
|
|
5220 |
|
|
link->jump = before_p; /* Mark to add this before or after insn. */
|
5221 |
|
|
frv_ifcvt.added_insns_list = alloc_EXPR_LIST (VOIDmode, link,
|
5222 |
|
|
frv_ifcvt.added_insns_list);
|
5223 |
|
|
|
5224 |
|
|
if (TARGET_DEBUG_COND_EXEC)
|
5225 |
|
|
{
|
5226 |
|
|
fprintf (stderr,
|
5227 |
|
|
"\n:::::::::: frv_ifcvt_add_insn: add the following %s insn %d:\n",
|
5228 |
|
|
(before_p) ? "before" : "after",
|
5229 |
|
|
(int)INSN_UID (insn));
|
5230 |
|
|
|
5231 |
|
|
debug_rtx (pattern);
|
5232 |
|
|
}
|
5233 |
|
|
}
|
5234 |
|
|
|
5235 |
|
|
|
5236 |
|
|
/* A C expression to modify the code described by the conditional if
|
5237 |
|
|
information CE_INFO, possibly updating the tests in TRUE_EXPR, and
|
5238 |
|
|
FALSE_EXPR for converting if-then and if-then-else code to conditional
|
5239 |
|
|
instructions. Set either TRUE_EXPR or FALSE_EXPR to a null pointer if the
|
5240 |
|
|
tests cannot be converted. */
|
5241 |
|
|
|
5242 |
|
|
void
|
5243 |
|
|
frv_ifcvt_modify_tests (ce_if_block_t *ce_info, rtx *p_true, rtx *p_false)
|
5244 |
|
|
{
|
5245 |
|
|
basic_block test_bb = ce_info->test_bb; /* test basic block */
|
5246 |
|
|
basic_block then_bb = ce_info->then_bb; /* THEN */
|
5247 |
|
|
basic_block else_bb = ce_info->else_bb; /* ELSE or NULL */
|
5248 |
|
|
basic_block join_bb = ce_info->join_bb; /* join block or NULL */
|
5249 |
|
|
rtx true_expr = *p_true;
|
5250 |
|
|
rtx cr;
|
5251 |
|
|
rtx cc;
|
5252 |
|
|
rtx nested_cc;
|
5253 |
|
|
enum machine_mode mode = GET_MODE (true_expr);
|
5254 |
|
|
int j;
|
5255 |
|
|
basic_block *bb;
|
5256 |
|
|
int num_bb;
|
5257 |
|
|
frv_tmp_reg_t *tmp_reg = &frv_ifcvt.tmp_reg;
|
5258 |
|
|
rtx check_insn;
|
5259 |
|
|
rtx sub_cond_exec_reg;
|
5260 |
|
|
enum rtx_code code;
|
5261 |
|
|
enum rtx_code code_true;
|
5262 |
|
|
enum rtx_code code_false;
|
5263 |
|
|
enum reg_class cc_class;
|
5264 |
|
|
enum reg_class cr_class;
|
5265 |
|
|
int cc_first;
|
5266 |
|
|
int cc_last;
|
5267 |
|
|
reg_set_iterator rsi;
|
5268 |
|
|
|
5269 |
|
|
/* Make sure we are only dealing with hard registers. Also honor the
|
5270 |
|
|
-mno-cond-exec switch, and -mno-nested-cond-exec switches if
|
5271 |
|
|
applicable. */
|
5272 |
|
|
if (!reload_completed || !TARGET_COND_EXEC
|
5273 |
|
|
|| (!TARGET_NESTED_CE && ce_info->pass > 1))
|
5274 |
|
|
goto fail;
|
5275 |
|
|
|
5276 |
|
|
/* Figure out which registers we can allocate for our own purposes. Only
|
5277 |
|
|
consider registers that are not preserved across function calls and are
|
5278 |
|
|
not fixed. However, allow the ICC/ICR temporary registers to be allocated
|
5279 |
|
|
if we did not need to use them in reloading other registers. */
|
5280 |
|
|
memset (&tmp_reg->regs, 0, sizeof (tmp_reg->regs));
|
5281 |
|
|
COPY_HARD_REG_SET (tmp_reg->regs, call_used_reg_set);
|
5282 |
|
|
AND_COMPL_HARD_REG_SET (tmp_reg->regs, fixed_reg_set);
|
5283 |
|
|
SET_HARD_REG_BIT (tmp_reg->regs, ICC_TEMP);
|
5284 |
|
|
SET_HARD_REG_BIT (tmp_reg->regs, ICR_TEMP);
|
5285 |
|
|
|
5286 |
|
|
/* If this is a nested IF, we need to discover whether the CC registers that
|
5287 |
|
|
are set/used inside of the block are used anywhere else. If not, we can
|
5288 |
|
|
change them to be the CC register that is paired with the CR register that
|
5289 |
|
|
controls the outermost IF block. */
|
5290 |
|
|
if (ce_info->pass > 1)
|
5291 |
|
|
{
|
5292 |
|
|
CLEAR_HARD_REG_SET (frv_ifcvt.nested_cc_ok_rewrite);
|
5293 |
|
|
for (j = CC_FIRST; j <= CC_LAST; j++)
|
5294 |
|
|
if (TEST_HARD_REG_BIT (tmp_reg->regs, j))
|
5295 |
|
|
{
|
5296 |
|
|
if (REGNO_REG_SET_P (then_bb->il.rtl->global_live_at_start, j))
|
5297 |
|
|
continue;
|
5298 |
|
|
|
5299 |
|
|
if (else_bb
|
5300 |
|
|
&& REGNO_REG_SET_P (else_bb->il.rtl->global_live_at_start, j))
|
5301 |
|
|
continue;
|
5302 |
|
|
|
5303 |
|
|
if (join_bb
|
5304 |
|
|
&& REGNO_REG_SET_P (join_bb->il.rtl->global_live_at_start, j))
|
5305 |
|
|
continue;
|
5306 |
|
|
|
5307 |
|
|
SET_HARD_REG_BIT (frv_ifcvt.nested_cc_ok_rewrite, j);
|
5308 |
|
|
}
|
5309 |
|
|
}
|
5310 |
|
|
|
5311 |
|
|
for (j = 0; j < frv_ifcvt.cur_scratch_regs; j++)
|
5312 |
|
|
frv_ifcvt.scratch_regs[j] = NULL_RTX;
|
5313 |
|
|
|
5314 |
|
|
frv_ifcvt.added_insns_list = NULL_RTX;
|
5315 |
|
|
frv_ifcvt.cur_scratch_regs = 0;
|
5316 |
|
|
|
5317 |
|
|
bb = (basic_block *) alloca ((2 + ce_info->num_multiple_test_blocks)
|
5318 |
|
|
* sizeof (basic_block));
|
5319 |
|
|
|
5320 |
|
|
if (join_bb)
|
5321 |
|
|
{
|
5322 |
|
|
unsigned int regno;
|
5323 |
|
|
|
5324 |
|
|
/* Remove anything live at the beginning of the join block from being
|
5325 |
|
|
available for allocation. */
|
5326 |
|
|
EXECUTE_IF_SET_IN_REG_SET (join_bb->il.rtl->global_live_at_start, 0, regno, rsi)
|
5327 |
|
|
{
|
5328 |
|
|
if (regno < FIRST_PSEUDO_REGISTER)
|
5329 |
|
|
CLEAR_HARD_REG_BIT (tmp_reg->regs, regno);
|
5330 |
|
|
}
|
5331 |
|
|
}
|
5332 |
|
|
|
5333 |
|
|
/* Add in all of the blocks in multiple &&/|| blocks to be scanned. */
|
5334 |
|
|
num_bb = 0;
|
5335 |
|
|
if (ce_info->num_multiple_test_blocks)
|
5336 |
|
|
{
|
5337 |
|
|
basic_block multiple_test_bb = ce_info->last_test_bb;
|
5338 |
|
|
|
5339 |
|
|
while (multiple_test_bb != test_bb)
|
5340 |
|
|
{
|
5341 |
|
|
bb[num_bb++] = multiple_test_bb;
|
5342 |
|
|
multiple_test_bb = EDGE_PRED (multiple_test_bb, 0)->src;
|
5343 |
|
|
}
|
5344 |
|
|
}
|
5345 |
|
|
|
5346 |
|
|
/* Add in the THEN and ELSE blocks to be scanned. */
|
5347 |
|
|
bb[num_bb++] = then_bb;
|
5348 |
|
|
if (else_bb)
|
5349 |
|
|
bb[num_bb++] = else_bb;
|
5350 |
|
|
|
5351 |
|
|
sub_cond_exec_reg = NULL_RTX;
|
5352 |
|
|
frv_ifcvt.num_nested_cond_exec = 0;
|
5353 |
|
|
|
5354 |
|
|
/* Scan all of the blocks for registers that must not be allocated. */
|
5355 |
|
|
for (j = 0; j < num_bb; j++)
|
5356 |
|
|
{
|
5357 |
|
|
rtx last_insn = BB_END (bb[j]);
|
5358 |
|
|
rtx insn = BB_HEAD (bb[j]);
|
5359 |
|
|
unsigned int regno;
|
5360 |
|
|
|
5361 |
|
|
if (dump_file)
|
5362 |
|
|
fprintf (dump_file, "Scanning %s block %d, start %d, end %d\n",
|
5363 |
|
|
(bb[j] == else_bb) ? "else" : ((bb[j] == then_bb) ? "then" : "test"),
|
5364 |
|
|
(int) bb[j]->index,
|
5365 |
|
|
(int) INSN_UID (BB_HEAD (bb[j])),
|
5366 |
|
|
(int) INSN_UID (BB_END (bb[j])));
|
5367 |
|
|
|
5368 |
|
|
/* Anything live at the beginning of the block is obviously unavailable
|
5369 |
|
|
for allocation. */
|
5370 |
|
|
EXECUTE_IF_SET_IN_REG_SET (bb[j]->il.rtl->global_live_at_start, 0, regno, rsi)
|
5371 |
|
|
{
|
5372 |
|
|
if (regno < FIRST_PSEUDO_REGISTER)
|
5373 |
|
|
CLEAR_HARD_REG_BIT (tmp_reg->regs, regno);
|
5374 |
|
|
}
|
5375 |
|
|
|
5376 |
|
|
/* Loop through the insns in the block. */
|
5377 |
|
|
for (;;)
|
5378 |
|
|
{
|
5379 |
|
|
/* Mark any new registers that are created as being unavailable for
|
5380 |
|
|
allocation. Also see if the CC register used in nested IFs can be
|
5381 |
|
|
reallocated. */
|
5382 |
|
|
if (INSN_P (insn))
|
5383 |
|
|
{
|
5384 |
|
|
rtx pattern;
|
5385 |
|
|
rtx set;
|
5386 |
|
|
int skip_nested_if = FALSE;
|
5387 |
|
|
|
5388 |
|
|
for_each_rtx (&PATTERN (insn), frv_clear_registers_used,
|
5389 |
|
|
(void *)&tmp_reg->regs);
|
5390 |
|
|
|
5391 |
|
|
pattern = PATTERN (insn);
|
5392 |
|
|
if (GET_CODE (pattern) == COND_EXEC)
|
5393 |
|
|
{
|
5394 |
|
|
rtx reg = XEXP (COND_EXEC_TEST (pattern), 0);
|
5395 |
|
|
|
5396 |
|
|
if (reg != sub_cond_exec_reg)
|
5397 |
|
|
{
|
5398 |
|
|
sub_cond_exec_reg = reg;
|
5399 |
|
|
frv_ifcvt.num_nested_cond_exec++;
|
5400 |
|
|
}
|
5401 |
|
|
}
|
5402 |
|
|
|
5403 |
|
|
set = single_set_pattern (pattern);
|
5404 |
|
|
if (set)
|
5405 |
|
|
{
|
5406 |
|
|
rtx dest = SET_DEST (set);
|
5407 |
|
|
rtx src = SET_SRC (set);
|
5408 |
|
|
|
5409 |
|
|
if (GET_CODE (dest) == REG)
|
5410 |
|
|
{
|
5411 |
|
|
int regno = REGNO (dest);
|
5412 |
|
|
enum rtx_code src_code = GET_CODE (src);
|
5413 |
|
|
|
5414 |
|
|
if (CC_P (regno) && src_code == COMPARE)
|
5415 |
|
|
skip_nested_if = TRUE;
|
5416 |
|
|
|
5417 |
|
|
else if (CR_P (regno)
|
5418 |
|
|
&& (src_code == IF_THEN_ELSE
|
5419 |
|
|
|| COMPARISON_P (src)))
|
5420 |
|
|
skip_nested_if = TRUE;
|
5421 |
|
|
}
|
5422 |
|
|
}
|
5423 |
|
|
|
5424 |
|
|
if (! skip_nested_if)
|
5425 |
|
|
for_each_rtx (&PATTERN (insn), frv_clear_registers_used,
|
5426 |
|
|
(void *)&frv_ifcvt.nested_cc_ok_rewrite);
|
5427 |
|
|
}
|
5428 |
|
|
|
5429 |
|
|
if (insn == last_insn)
|
5430 |
|
|
break;
|
5431 |
|
|
|
5432 |
|
|
insn = NEXT_INSN (insn);
|
5433 |
|
|
}
|
5434 |
|
|
}
|
5435 |
|
|
|
5436 |
|
|
/* If this is a nested if, rewrite the CC registers that are available to
|
5437 |
|
|
include the ones that can be rewritten, to increase the chance of being
|
5438 |
|
|
able to allocate a paired CC/CR register combination. */
|
5439 |
|
|
if (ce_info->pass > 1)
|
5440 |
|
|
{
|
5441 |
|
|
for (j = CC_FIRST; j <= CC_LAST; j++)
|
5442 |
|
|
if (TEST_HARD_REG_BIT (frv_ifcvt.nested_cc_ok_rewrite, j))
|
5443 |
|
|
SET_HARD_REG_BIT (tmp_reg->regs, j);
|
5444 |
|
|
else
|
5445 |
|
|
CLEAR_HARD_REG_BIT (tmp_reg->regs, j);
|
5446 |
|
|
}
|
5447 |
|
|
|
5448 |
|
|
if (dump_file)
|
5449 |
|
|
{
|
5450 |
|
|
int num_gprs = 0;
|
5451 |
|
|
fprintf (dump_file, "Available GPRs: ");
|
5452 |
|
|
|
5453 |
|
|
for (j = GPR_FIRST; j <= GPR_LAST; j++)
|
5454 |
|
|
if (TEST_HARD_REG_BIT (tmp_reg->regs, j))
|
5455 |
|
|
{
|
5456 |
|
|
fprintf (dump_file, " %d [%s]", j, reg_names[j]);
|
5457 |
|
|
if (++num_gprs > GPR_TEMP_NUM+2)
|
5458 |
|
|
break;
|
5459 |
|
|
}
|
5460 |
|
|
|
5461 |
|
|
fprintf (dump_file, "%s\nAvailable CRs: ",
|
5462 |
|
|
(num_gprs > GPR_TEMP_NUM+2) ? " ..." : "");
|
5463 |
|
|
|
5464 |
|
|
for (j = CR_FIRST; j <= CR_LAST; j++)
|
5465 |
|
|
if (TEST_HARD_REG_BIT (tmp_reg->regs, j))
|
5466 |
|
|
fprintf (dump_file, " %d [%s]", j, reg_names[j]);
|
5467 |
|
|
|
5468 |
|
|
fputs ("\n", dump_file);
|
5469 |
|
|
|
5470 |
|
|
if (ce_info->pass > 1)
|
5471 |
|
|
{
|
5472 |
|
|
fprintf (dump_file, "Modifiable CCs: ");
|
5473 |
|
|
for (j = CC_FIRST; j <= CC_LAST; j++)
|
5474 |
|
|
if (TEST_HARD_REG_BIT (tmp_reg->regs, j))
|
5475 |
|
|
fprintf (dump_file, " %d [%s]", j, reg_names[j]);
|
5476 |
|
|
|
5477 |
|
|
fprintf (dump_file, "\n%d nested COND_EXEC statements\n",
|
5478 |
|
|
frv_ifcvt.num_nested_cond_exec);
|
5479 |
|
|
}
|
5480 |
|
|
}
|
5481 |
|
|
|
5482 |
|
|
/* Allocate the appropriate temporary condition code register. Try to
|
5483 |
|
|
allocate the ICR/FCR register that corresponds to the ICC/FCC register so
|
5484 |
|
|
that conditional cmp's can be done. */
|
5485 |
|
|
if (mode == CCmode || mode == CC_UNSmode || mode == CC_NZmode)
|
5486 |
|
|
{
|
5487 |
|
|
cr_class = ICR_REGS;
|
5488 |
|
|
cc_class = ICC_REGS;
|
5489 |
|
|
cc_first = ICC_FIRST;
|
5490 |
|
|
cc_last = ICC_LAST;
|
5491 |
|
|
}
|
5492 |
|
|
else if (mode == CC_FPmode)
|
5493 |
|
|
{
|
5494 |
|
|
cr_class = FCR_REGS;
|
5495 |
|
|
cc_class = FCC_REGS;
|
5496 |
|
|
cc_first = FCC_FIRST;
|
5497 |
|
|
cc_last = FCC_LAST;
|
5498 |
|
|
}
|
5499 |
|
|
else
|
5500 |
|
|
{
|
5501 |
|
|
cc_first = cc_last = 0;
|
5502 |
|
|
cr_class = cc_class = NO_REGS;
|
5503 |
|
|
}
|
5504 |
|
|
|
5505 |
|
|
cc = XEXP (true_expr, 0);
|
5506 |
|
|
nested_cc = cr = NULL_RTX;
|
5507 |
|
|
if (cc_class != NO_REGS)
|
5508 |
|
|
{
|
5509 |
|
|
/* For nested IFs and &&/||, see if we can find a CC and CR register pair
|
5510 |
|
|
so we can execute a csubcc/caddcc/cfcmps instruction. */
|
5511 |
|
|
int cc_regno;
|
5512 |
|
|
|
5513 |
|
|
for (cc_regno = cc_first; cc_regno <= cc_last; cc_regno++)
|
5514 |
|
|
{
|
5515 |
|
|
int cr_regno = cc_regno - CC_FIRST + CR_FIRST;
|
5516 |
|
|
|
5517 |
|
|
if (TEST_HARD_REG_BIT (frv_ifcvt.tmp_reg.regs, cc_regno)
|
5518 |
|
|
&& TEST_HARD_REG_BIT (frv_ifcvt.tmp_reg.regs, cr_regno))
|
5519 |
|
|
{
|
5520 |
|
|
frv_ifcvt.tmp_reg.next_reg[ (int)cr_class ] = cr_regno;
|
5521 |
|
|
cr = frv_alloc_temp_reg (tmp_reg, cr_class, CC_CCRmode, TRUE,
|
5522 |
|
|
TRUE);
|
5523 |
|
|
|
5524 |
|
|
frv_ifcvt.tmp_reg.next_reg[ (int)cc_class ] = cc_regno;
|
5525 |
|
|
nested_cc = frv_alloc_temp_reg (tmp_reg, cc_class, CCmode,
|
5526 |
|
|
TRUE, TRUE);
|
5527 |
|
|
break;
|
5528 |
|
|
}
|
5529 |
|
|
}
|
5530 |
|
|
}
|
5531 |
|
|
|
5532 |
|
|
if (! cr)
|
5533 |
|
|
{
|
5534 |
|
|
if (dump_file)
|
5535 |
|
|
fprintf (dump_file, "Could not allocate a CR temporary register\n");
|
5536 |
|
|
|
5537 |
|
|
goto fail;
|
5538 |
|
|
}
|
5539 |
|
|
|
5540 |
|
|
if (dump_file)
|
5541 |
|
|
fprintf (dump_file,
|
5542 |
|
|
"Will use %s for conditional execution, %s for nested comparisons\n",
|
5543 |
|
|
reg_names[ REGNO (cr)],
|
5544 |
|
|
(nested_cc) ? reg_names[ REGNO (nested_cc) ] : "<none>");
|
5545 |
|
|
|
5546 |
|
|
/* Set the CCR bit. Note for integer tests, we reverse the condition so that
|
5547 |
|
|
in an IF-THEN-ELSE sequence, we are testing the TRUE case against the CCR
|
5548 |
|
|
bit being true. We don't do this for floating point, because of NaNs. */
|
5549 |
|
|
code = GET_CODE (true_expr);
|
5550 |
|
|
if (GET_MODE (cc) != CC_FPmode)
|
5551 |
|
|
{
|
5552 |
|
|
code = reverse_condition (code);
|
5553 |
|
|
code_true = EQ;
|
5554 |
|
|
code_false = NE;
|
5555 |
|
|
}
|
5556 |
|
|
else
|
5557 |
|
|
{
|
5558 |
|
|
code_true = NE;
|
5559 |
|
|
code_false = EQ;
|
5560 |
|
|
}
|
5561 |
|
|
|
5562 |
|
|
check_insn = gen_rtx_SET (VOIDmode, cr,
|
5563 |
|
|
gen_rtx_fmt_ee (code, CC_CCRmode, cc, const0_rtx));
|
5564 |
|
|
|
5565 |
|
|
/* Record the check insn to be inserted later. */
|
5566 |
|
|
frv_ifcvt_add_insn (check_insn, BB_END (test_bb), TRUE);
|
5567 |
|
|
|
5568 |
|
|
/* Update the tests. */
|
5569 |
|
|
frv_ifcvt.cr_reg = cr;
|
5570 |
|
|
frv_ifcvt.nested_cc_reg = nested_cc;
|
5571 |
|
|
*p_true = gen_rtx_fmt_ee (code_true, CC_CCRmode, cr, const0_rtx);
|
5572 |
|
|
*p_false = gen_rtx_fmt_ee (code_false, CC_CCRmode, cr, const0_rtx);
|
5573 |
|
|
return;
|
5574 |
|
|
|
5575 |
|
|
/* Fail, don't do this conditional execution. */
|
5576 |
|
|
fail:
|
5577 |
|
|
*p_true = NULL_RTX;
|
5578 |
|
|
*p_false = NULL_RTX;
|
5579 |
|
|
if (dump_file)
|
5580 |
|
|
fprintf (dump_file, "Disabling this conditional execution.\n");
|
5581 |
|
|
|
5582 |
|
|
return;
|
5583 |
|
|
}
|
5584 |
|
|
|
5585 |
|
|
|
5586 |
|
|
/* A C expression to modify the code described by the conditional if
|
5587 |
|
|
information CE_INFO, for the basic block BB, possibly updating the tests in
|
5588 |
|
|
TRUE_EXPR, and FALSE_EXPR for converting the && and || parts of if-then or
|
5589 |
|
|
if-then-else code to conditional instructions. Set either TRUE_EXPR or
|
5590 |
|
|
FALSE_EXPR to a null pointer if the tests cannot be converted. */
|
5591 |
|
|
|
5592 |
|
|
/* p_true and p_false are given expressions of the form:
|
5593 |
|
|
|
5594 |
|
|
(and (eq:CC_CCR (reg:CC_CCR)
|
5595 |
|
|
(const_int 0))
|
5596 |
|
|
(eq:CC (reg:CC)
|
5597 |
|
|
(const_int 0))) */
|
5598 |
|
|
|
5599 |
|
|
void
|
5600 |
|
|
frv_ifcvt_modify_multiple_tests (ce_if_block_t *ce_info,
|
5601 |
|
|
basic_block bb,
|
5602 |
|
|
rtx *p_true,
|
5603 |
|
|
rtx *p_false)
|
5604 |
|
|
{
|
5605 |
|
|
rtx old_true = XEXP (*p_true, 0);
|
5606 |
|
|
rtx old_false = XEXP (*p_false, 0);
|
5607 |
|
|
rtx true_expr = XEXP (*p_true, 1);
|
5608 |
|
|
rtx false_expr = XEXP (*p_false, 1);
|
5609 |
|
|
rtx test_expr;
|
5610 |
|
|
rtx old_test;
|
5611 |
|
|
rtx cr = XEXP (old_true, 0);
|
5612 |
|
|
rtx check_insn;
|
5613 |
|
|
rtx new_cr = NULL_RTX;
|
5614 |
|
|
rtx *p_new_cr = (rtx *)0;
|
5615 |
|
|
rtx if_else;
|
5616 |
|
|
rtx compare;
|
5617 |
|
|
rtx cc;
|
5618 |
|
|
enum reg_class cr_class;
|
5619 |
|
|
enum machine_mode mode = GET_MODE (true_expr);
|
5620 |
|
|
rtx (*logical_func)(rtx, rtx, rtx);
|
5621 |
|
|
|
5622 |
|
|
if (TARGET_DEBUG_COND_EXEC)
|
5623 |
|
|
{
|
5624 |
|
|
fprintf (stderr,
|
5625 |
|
|
"\n:::::::::: frv_ifcvt_modify_multiple_tests, before modification for %s\ntrue insn:\n",
|
5626 |
|
|
ce_info->and_and_p ? "&&" : "||");
|
5627 |
|
|
|
5628 |
|
|
debug_rtx (*p_true);
|
5629 |
|
|
|
5630 |
|
|
fputs ("\nfalse insn:\n", stderr);
|
5631 |
|
|
debug_rtx (*p_false);
|
5632 |
|
|
}
|
5633 |
|
|
|
5634 |
|
|
if (!TARGET_MULTI_CE)
|
5635 |
|
|
goto fail;
|
5636 |
|
|
|
5637 |
|
|
if (GET_CODE (cr) != REG)
|
5638 |
|
|
goto fail;
|
5639 |
|
|
|
5640 |
|
|
if (mode == CCmode || mode == CC_UNSmode || mode == CC_NZmode)
|
5641 |
|
|
{
|
5642 |
|
|
cr_class = ICR_REGS;
|
5643 |
|
|
p_new_cr = &frv_ifcvt.extra_int_cr;
|
5644 |
|
|
}
|
5645 |
|
|
else if (mode == CC_FPmode)
|
5646 |
|
|
{
|
5647 |
|
|
cr_class = FCR_REGS;
|
5648 |
|
|
p_new_cr = &frv_ifcvt.extra_fp_cr;
|
5649 |
|
|
}
|
5650 |
|
|
else
|
5651 |
|
|
goto fail;
|
5652 |
|
|
|
5653 |
|
|
/* Allocate a temp CR, reusing a previously allocated temp CR if we have 3 or
|
5654 |
|
|
more &&/|| tests. */
|
5655 |
|
|
new_cr = *p_new_cr;
|
5656 |
|
|
if (! new_cr)
|
5657 |
|
|
{
|
5658 |
|
|
new_cr = *p_new_cr = frv_alloc_temp_reg (&frv_ifcvt.tmp_reg, cr_class,
|
5659 |
|
|
CC_CCRmode, TRUE, TRUE);
|
5660 |
|
|
if (! new_cr)
|
5661 |
|
|
goto fail;
|
5662 |
|
|
}
|
5663 |
|
|
|
5664 |
|
|
if (ce_info->and_and_p)
|
5665 |
|
|
{
|
5666 |
|
|
old_test = old_false;
|
5667 |
|
|
test_expr = true_expr;
|
5668 |
|
|
logical_func = (GET_CODE (old_true) == EQ) ? gen_andcr : gen_andncr;
|
5669 |
|
|
*p_true = gen_rtx_NE (CC_CCRmode, cr, const0_rtx);
|
5670 |
|
|
*p_false = gen_rtx_EQ (CC_CCRmode, cr, const0_rtx);
|
5671 |
|
|
}
|
5672 |
|
|
else
|
5673 |
|
|
{
|
5674 |
|
|
old_test = old_false;
|
5675 |
|
|
test_expr = false_expr;
|
5676 |
|
|
logical_func = (GET_CODE (old_false) == EQ) ? gen_orcr : gen_orncr;
|
5677 |
|
|
*p_true = gen_rtx_EQ (CC_CCRmode, cr, const0_rtx);
|
5678 |
|
|
*p_false = gen_rtx_NE (CC_CCRmode, cr, const0_rtx);
|
5679 |
|
|
}
|
5680 |
|
|
|
5681 |
|
|
/* First add the andcr/andncr/orcr/orncr, which will be added after the
|
5682 |
|
|
conditional check instruction, due to frv_ifcvt_add_insn being a LIFO
|
5683 |
|
|
stack. */
|
5684 |
|
|
frv_ifcvt_add_insn ((*logical_func) (cr, cr, new_cr), BB_END (bb), TRUE);
|
5685 |
|
|
|
5686 |
|
|
/* Now add the conditional check insn. */
|
5687 |
|
|
cc = XEXP (test_expr, 0);
|
5688 |
|
|
compare = gen_rtx_fmt_ee (GET_CODE (test_expr), CC_CCRmode, cc, const0_rtx);
|
5689 |
|
|
if_else = gen_rtx_IF_THEN_ELSE (CC_CCRmode, old_test, compare, const0_rtx);
|
5690 |
|
|
|
5691 |
|
|
check_insn = gen_rtx_SET (VOIDmode, new_cr, if_else);
|
5692 |
|
|
|
5693 |
|
|
/* Add the new check insn to the list of check insns that need to be
|
5694 |
|
|
inserted. */
|
5695 |
|
|
frv_ifcvt_add_insn (check_insn, BB_END (bb), TRUE);
|
5696 |
|
|
|
5697 |
|
|
if (TARGET_DEBUG_COND_EXEC)
|
5698 |
|
|
{
|
5699 |
|
|
fputs ("\n:::::::::: frv_ifcvt_modify_multiple_tests, after modification\ntrue insn:\n",
|
5700 |
|
|
stderr);
|
5701 |
|
|
|
5702 |
|
|
debug_rtx (*p_true);
|
5703 |
|
|
|
5704 |
|
|
fputs ("\nfalse insn:\n", stderr);
|
5705 |
|
|
debug_rtx (*p_false);
|
5706 |
|
|
}
|
5707 |
|
|
|
5708 |
|
|
return;
|
5709 |
|
|
|
5710 |
|
|
fail:
|
5711 |
|
|
*p_true = *p_false = NULL_RTX;
|
5712 |
|
|
|
5713 |
|
|
/* If we allocated a CR register, release it. */
|
5714 |
|
|
if (new_cr)
|
5715 |
|
|
{
|
5716 |
|
|
CLEAR_HARD_REG_BIT (frv_ifcvt.tmp_reg.regs, REGNO (new_cr));
|
5717 |
|
|
*p_new_cr = NULL_RTX;
|
5718 |
|
|
}
|
5719 |
|
|
|
5720 |
|
|
if (TARGET_DEBUG_COND_EXEC)
|
5721 |
|
|
fputs ("\n:::::::::: frv_ifcvt_modify_multiple_tests, failed.\n", stderr);
|
5722 |
|
|
|
5723 |
|
|
return;
|
5724 |
|
|
}
|
5725 |
|
|
|
5726 |
|
|
|
5727 |
|
|
/* Return a register which will be loaded with a value if an IF block is
|
5728 |
|
|
converted to conditional execution. This is used to rewrite instructions
|
5729 |
|
|
that use constants to ones that just use registers. */
|
5730 |
|
|
|
5731 |
|
|
static rtx
|
5732 |
|
|
frv_ifcvt_load_value (rtx value, rtx insn ATTRIBUTE_UNUSED)
|
5733 |
|
|
{
|
5734 |
|
|
int num_alloc = frv_ifcvt.cur_scratch_regs;
|
5735 |
|
|
int i;
|
5736 |
|
|
rtx reg;
|
5737 |
|
|
|
5738 |
|
|
/* We know gr0 == 0, so replace any errant uses. */
|
5739 |
|
|
if (value == const0_rtx)
|
5740 |
|
|
return gen_rtx_REG (SImode, GPR_FIRST);
|
5741 |
|
|
|
5742 |
|
|
/* First search all registers currently loaded to see if we have an
|
5743 |
|
|
applicable constant. */
|
5744 |
|
|
if (CONSTANT_P (value)
|
5745 |
|
|
|| (GET_CODE (value) == REG && REGNO (value) == LR_REGNO))
|
5746 |
|
|
{
|
5747 |
|
|
for (i = 0; i < num_alloc; i++)
|
5748 |
|
|
{
|
5749 |
|
|
if (rtx_equal_p (SET_SRC (frv_ifcvt.scratch_regs[i]), value))
|
5750 |
|
|
return SET_DEST (frv_ifcvt.scratch_regs[i]);
|
5751 |
|
|
}
|
5752 |
|
|
}
|
5753 |
|
|
|
5754 |
|
|
/* Have we exhausted the number of registers available? */
|
5755 |
|
|
if (num_alloc >= GPR_TEMP_NUM)
|
5756 |
|
|
{
|
5757 |
|
|
if (dump_file)
|
5758 |
|
|
fprintf (dump_file, "Too many temporary registers allocated\n");
|
5759 |
|
|
|
5760 |
|
|
return NULL_RTX;
|
5761 |
|
|
}
|
5762 |
|
|
|
5763 |
|
|
/* Allocate the new register. */
|
5764 |
|
|
reg = frv_alloc_temp_reg (&frv_ifcvt.tmp_reg, GPR_REGS, SImode, TRUE, TRUE);
|
5765 |
|
|
if (! reg)
|
5766 |
|
|
{
|
5767 |
|
|
if (dump_file)
|
5768 |
|
|
fputs ("Could not find a scratch register\n", dump_file);
|
5769 |
|
|
|
5770 |
|
|
return NULL_RTX;
|
5771 |
|
|
}
|
5772 |
|
|
|
5773 |
|
|
frv_ifcvt.cur_scratch_regs++;
|
5774 |
|
|
frv_ifcvt.scratch_regs[num_alloc] = gen_rtx_SET (VOIDmode, reg, value);
|
5775 |
|
|
|
5776 |
|
|
if (dump_file)
|
5777 |
|
|
{
|
5778 |
|
|
if (GET_CODE (value) == CONST_INT)
|
5779 |
|
|
fprintf (dump_file, "Register %s will hold %ld\n",
|
5780 |
|
|
reg_names[ REGNO (reg)], (long)INTVAL (value));
|
5781 |
|
|
|
5782 |
|
|
else if (GET_CODE (value) == REG && REGNO (value) == LR_REGNO)
|
5783 |
|
|
fprintf (dump_file, "Register %s will hold LR\n",
|
5784 |
|
|
reg_names[ REGNO (reg)]);
|
5785 |
|
|
|
5786 |
|
|
else
|
5787 |
|
|
fprintf (dump_file, "Register %s will hold a saved value\n",
|
5788 |
|
|
reg_names[ REGNO (reg)]);
|
5789 |
|
|
}
|
5790 |
|
|
|
5791 |
|
|
return reg;
|
5792 |
|
|
}
|
5793 |
|
|
|
5794 |
|
|
|
5795 |
|
|
/* Update a MEM used in conditional code that might contain an offset to put
|
5796 |
|
|
the offset into a scratch register, so that the conditional load/store
|
5797 |
|
|
operations can be used. This function returns the original pointer if the
|
5798 |
|
|
MEM is valid to use in conditional code, NULL if we can't load up the offset
|
5799 |
|
|
into a temporary register, or the new MEM if we were successful. */
|
5800 |
|
|
|
5801 |
|
|
static rtx
|
5802 |
|
|
frv_ifcvt_rewrite_mem (rtx mem, enum machine_mode mode, rtx insn)
|
5803 |
|
|
{
|
5804 |
|
|
rtx addr = XEXP (mem, 0);
|
5805 |
|
|
|
5806 |
|
|
if (!frv_legitimate_address_p (mode, addr, reload_completed, TRUE, FALSE))
|
5807 |
|
|
{
|
5808 |
|
|
if (GET_CODE (addr) == PLUS)
|
5809 |
|
|
{
|
5810 |
|
|
rtx addr_op0 = XEXP (addr, 0);
|
5811 |
|
|
rtx addr_op1 = XEXP (addr, 1);
|
5812 |
|
|
|
5813 |
|
|
if (GET_CODE (addr_op0) == REG && CONSTANT_P (addr_op1))
|
5814 |
|
|
{
|
5815 |
|
|
rtx reg = frv_ifcvt_load_value (addr_op1, insn);
|
5816 |
|
|
if (!reg)
|
5817 |
|
|
return NULL_RTX;
|
5818 |
|
|
|
5819 |
|
|
addr = gen_rtx_PLUS (Pmode, addr_op0, reg);
|
5820 |
|
|
}
|
5821 |
|
|
|
5822 |
|
|
else
|
5823 |
|
|
return NULL_RTX;
|
5824 |
|
|
}
|
5825 |
|
|
|
5826 |
|
|
else if (CONSTANT_P (addr))
|
5827 |
|
|
addr = frv_ifcvt_load_value (addr, insn);
|
5828 |
|
|
|
5829 |
|
|
else
|
5830 |
|
|
return NULL_RTX;
|
5831 |
|
|
|
5832 |
|
|
if (addr == NULL_RTX)
|
5833 |
|
|
return NULL_RTX;
|
5834 |
|
|
|
5835 |
|
|
else if (XEXP (mem, 0) != addr)
|
5836 |
|
|
return change_address (mem, mode, addr);
|
5837 |
|
|
}
|
5838 |
|
|
|
5839 |
|
|
return mem;
|
5840 |
|
|
}
|
5841 |
|
|
|
5842 |
|
|
|
5843 |
|
|
/* Given a PATTERN, return a SET expression if this PATTERN has only a single
|
5844 |
|
|
SET, possibly conditionally executed. It may also have CLOBBERs, USEs. */
|
5845 |
|
|
|
5846 |
|
|
static rtx
|
5847 |
|
|
single_set_pattern (rtx pattern)
|
5848 |
|
|
{
|
5849 |
|
|
rtx set;
|
5850 |
|
|
int i;
|
5851 |
|
|
|
5852 |
|
|
if (GET_CODE (pattern) == COND_EXEC)
|
5853 |
|
|
pattern = COND_EXEC_CODE (pattern);
|
5854 |
|
|
|
5855 |
|
|
if (GET_CODE (pattern) == SET)
|
5856 |
|
|
return pattern;
|
5857 |
|
|
|
5858 |
|
|
else if (GET_CODE (pattern) == PARALLEL)
|
5859 |
|
|
{
|
5860 |
|
|
for (i = 0, set = 0; i < XVECLEN (pattern, 0); i++)
|
5861 |
|
|
{
|
5862 |
|
|
rtx sub = XVECEXP (pattern, 0, i);
|
5863 |
|
|
|
5864 |
|
|
switch (GET_CODE (sub))
|
5865 |
|
|
{
|
5866 |
|
|
case USE:
|
5867 |
|
|
case CLOBBER:
|
5868 |
|
|
break;
|
5869 |
|
|
|
5870 |
|
|
case SET:
|
5871 |
|
|
if (set)
|
5872 |
|
|
return 0;
|
5873 |
|
|
else
|
5874 |
|
|
set = sub;
|
5875 |
|
|
break;
|
5876 |
|
|
|
5877 |
|
|
default:
|
5878 |
|
|
return 0;
|
5879 |
|
|
}
|
5880 |
|
|
}
|
5881 |
|
|
return set;
|
5882 |
|
|
}
|
5883 |
|
|
|
5884 |
|
|
return 0;
|
5885 |
|
|
}
|
5886 |
|
|
|
5887 |
|
|
|
5888 |
|
|
/* A C expression to modify the code described by the conditional if
|
5889 |
|
|
information CE_INFO with the new PATTERN in INSN. If PATTERN is a null
|
5890 |
|
|
pointer after the IFCVT_MODIFY_INSN macro executes, it is assumed that that
|
5891 |
|
|
insn cannot be converted to be executed conditionally. */
|
5892 |
|
|
|
5893 |
|
|
rtx
|
5894 |
|
|
frv_ifcvt_modify_insn (ce_if_block_t *ce_info,
|
5895 |
|
|
rtx pattern,
|
5896 |
|
|
rtx insn)
|
5897 |
|
|
{
|
5898 |
|
|
rtx orig_ce_pattern = pattern;
|
5899 |
|
|
rtx set;
|
5900 |
|
|
rtx op0;
|
5901 |
|
|
rtx op1;
|
5902 |
|
|
rtx test;
|
5903 |
|
|
|
5904 |
|
|
gcc_assert (GET_CODE (pattern) == COND_EXEC);
|
5905 |
|
|
|
5906 |
|
|
test = COND_EXEC_TEST (pattern);
|
5907 |
|
|
if (GET_CODE (test) == AND)
|
5908 |
|
|
{
|
5909 |
|
|
rtx cr = frv_ifcvt.cr_reg;
|
5910 |
|
|
rtx test_reg;
|
5911 |
|
|
|
5912 |
|
|
op0 = XEXP (test, 0);
|
5913 |
|
|
if (! rtx_equal_p (cr, XEXP (op0, 0)))
|
5914 |
|
|
goto fail;
|
5915 |
|
|
|
5916 |
|
|
op1 = XEXP (test, 1);
|
5917 |
|
|
test_reg = XEXP (op1, 0);
|
5918 |
|
|
if (GET_CODE (test_reg) != REG)
|
5919 |
|
|
goto fail;
|
5920 |
|
|
|
5921 |
|
|
/* Is this the first nested if block in this sequence? If so, generate
|
5922 |
|
|
an andcr or andncr. */
|
5923 |
|
|
if (! frv_ifcvt.last_nested_if_cr)
|
5924 |
|
|
{
|
5925 |
|
|
rtx and_op;
|
5926 |
|
|
|
5927 |
|
|
frv_ifcvt.last_nested_if_cr = test_reg;
|
5928 |
|
|
if (GET_CODE (op0) == NE)
|
5929 |
|
|
and_op = gen_andcr (test_reg, cr, test_reg);
|
5930 |
|
|
else
|
5931 |
|
|
and_op = gen_andncr (test_reg, cr, test_reg);
|
5932 |
|
|
|
5933 |
|
|
frv_ifcvt_add_insn (and_op, insn, TRUE);
|
5934 |
|
|
}
|
5935 |
|
|
|
5936 |
|
|
/* If this isn't the first statement in the nested if sequence, see if we
|
5937 |
|
|
are dealing with the same register. */
|
5938 |
|
|
else if (! rtx_equal_p (test_reg, frv_ifcvt.last_nested_if_cr))
|
5939 |
|
|
goto fail;
|
5940 |
|
|
|
5941 |
|
|
COND_EXEC_TEST (pattern) = test = op1;
|
5942 |
|
|
}
|
5943 |
|
|
|
5944 |
|
|
/* If this isn't a nested if, reset state variables. */
|
5945 |
|
|
else
|
5946 |
|
|
{
|
5947 |
|
|
frv_ifcvt.last_nested_if_cr = NULL_RTX;
|
5948 |
|
|
}
|
5949 |
|
|
|
5950 |
|
|
set = single_set_pattern (pattern);
|
5951 |
|
|
if (set)
|
5952 |
|
|
{
|
5953 |
|
|
rtx dest = SET_DEST (set);
|
5954 |
|
|
rtx src = SET_SRC (set);
|
5955 |
|
|
enum machine_mode mode = GET_MODE (dest);
|
5956 |
|
|
|
5957 |
|
|
/* Check for normal binary operators. */
|
5958 |
|
|
if (mode == SImode && ARITHMETIC_P (src))
|
5959 |
|
|
{
|
5960 |
|
|
op0 = XEXP (src, 0);
|
5961 |
|
|
op1 = XEXP (src, 1);
|
5962 |
|
|
|
5963 |
|
|
if (integer_register_operand (op0, SImode) && CONSTANT_P (op1))
|
5964 |
|
|
{
|
5965 |
|
|
op1 = frv_ifcvt_load_value (op1, insn);
|
5966 |
|
|
if (op1)
|
5967 |
|
|
COND_EXEC_CODE (pattern)
|
5968 |
|
|
= gen_rtx_SET (VOIDmode, dest, gen_rtx_fmt_ee (GET_CODE (src),
|
5969 |
|
|
GET_MODE (src),
|
5970 |
|
|
op0, op1));
|
5971 |
|
|
else
|
5972 |
|
|
goto fail;
|
5973 |
|
|
}
|
5974 |
|
|
}
|
5975 |
|
|
|
5976 |
|
|
/* For multiply by a constant, we need to handle the sign extending
|
5977 |
|
|
correctly. Add a USE of the value after the multiply to prevent flow
|
5978 |
|
|
from cratering because only one register out of the two were used. */
|
5979 |
|
|
else if (mode == DImode && GET_CODE (src) == MULT)
|
5980 |
|
|
{
|
5981 |
|
|
op0 = XEXP (src, 0);
|
5982 |
|
|
op1 = XEXP (src, 1);
|
5983 |
|
|
if (GET_CODE (op0) == SIGN_EXTEND && GET_CODE (op1) == CONST_INT)
|
5984 |
|
|
{
|
5985 |
|
|
op1 = frv_ifcvt_load_value (op1, insn);
|
5986 |
|
|
if (op1)
|
5987 |
|
|
{
|
5988 |
|
|
op1 = gen_rtx_SIGN_EXTEND (DImode, op1);
|
5989 |
|
|
COND_EXEC_CODE (pattern)
|
5990 |
|
|
= gen_rtx_SET (VOIDmode, dest,
|
5991 |
|
|
gen_rtx_MULT (DImode, op0, op1));
|
5992 |
|
|
}
|
5993 |
|
|
else
|
5994 |
|
|
goto fail;
|
5995 |
|
|
}
|
5996 |
|
|
|
5997 |
|
|
frv_ifcvt_add_insn (gen_rtx_USE (VOIDmode, dest), insn, FALSE);
|
5998 |
|
|
}
|
5999 |
|
|
|
6000 |
|
|
/* If we are just loading a constant created for a nested conditional
|
6001 |
|
|
execution statement, just load the constant without any conditional
|
6002 |
|
|
execution, since we know that the constant will not interfere with any
|
6003 |
|
|
other registers. */
|
6004 |
|
|
else if (frv_ifcvt.scratch_insns_bitmap
|
6005 |
|
|
&& bitmap_bit_p (frv_ifcvt.scratch_insns_bitmap,
|
6006 |
|
|
INSN_UID (insn))
|
6007 |
|
|
&& REG_P (SET_DEST (set))
|
6008 |
|
|
/* We must not unconditionally set a scratch reg chosen
|
6009 |
|
|
for a nested if-converted block if its incoming
|
6010 |
|
|
value from the TEST block (or the result of the THEN
|
6011 |
|
|
branch) could/should propagate to the JOIN block.
|
6012 |
|
|
It suffices to test whether the register is live at
|
6013 |
|
|
the JOIN point: if it's live there, we can infer
|
6014 |
|
|
that we set it in the former JOIN block of the
|
6015 |
|
|
nested if-converted block (otherwise it wouldn't
|
6016 |
|
|
have been available as a scratch register), and it
|
6017 |
|
|
is either propagated through or set in the other
|
6018 |
|
|
conditional block. It's probably not worth trying
|
6019 |
|
|
to catch the latter case, and it could actually
|
6020 |
|
|
limit scheduling of the combined block quite
|
6021 |
|
|
severely. */
|
6022 |
|
|
&& ce_info->join_bb
|
6023 |
|
|
&& ! (REGNO_REG_SET_P
|
6024 |
|
|
(ce_info->join_bb->il.rtl->global_live_at_start,
|
6025 |
|
|
REGNO (SET_DEST (set))))
|
6026 |
|
|
/* Similarly, we must not unconditionally set a reg
|
6027 |
|
|
used as scratch in the THEN branch if the same reg
|
6028 |
|
|
is live in the ELSE branch. */
|
6029 |
|
|
&& (! ce_info->else_bb
|
6030 |
|
|
|| BLOCK_FOR_INSN (insn) == ce_info->else_bb
|
6031 |
|
|
|| ! (REGNO_REG_SET_P
|
6032 |
|
|
(ce_info->else_bb->il.rtl->global_live_at_start,
|
6033 |
|
|
REGNO (SET_DEST (set))))))
|
6034 |
|
|
pattern = set;
|
6035 |
|
|
|
6036 |
|
|
else if (mode == QImode || mode == HImode || mode == SImode
|
6037 |
|
|
|| mode == SFmode)
|
6038 |
|
|
{
|
6039 |
|
|
int changed_p = FALSE;
|
6040 |
|
|
|
6041 |
|
|
/* Check for just loading up a constant */
|
6042 |
|
|
if (CONSTANT_P (src) && integer_register_operand (dest, mode))
|
6043 |
|
|
{
|
6044 |
|
|
src = frv_ifcvt_load_value (src, insn);
|
6045 |
|
|
if (!src)
|
6046 |
|
|
goto fail;
|
6047 |
|
|
|
6048 |
|
|
changed_p = TRUE;
|
6049 |
|
|
}
|
6050 |
|
|
|
6051 |
|
|
/* See if we need to fix up stores */
|
6052 |
|
|
if (GET_CODE (dest) == MEM)
|
6053 |
|
|
{
|
6054 |
|
|
rtx new_mem = frv_ifcvt_rewrite_mem (dest, mode, insn);
|
6055 |
|
|
|
6056 |
|
|
if (!new_mem)
|
6057 |
|
|
goto fail;
|
6058 |
|
|
|
6059 |
|
|
else if (new_mem != dest)
|
6060 |
|
|
{
|
6061 |
|
|
changed_p = TRUE;
|
6062 |
|
|
dest = new_mem;
|
6063 |
|
|
}
|
6064 |
|
|
}
|
6065 |
|
|
|
6066 |
|
|
/* See if we need to fix up loads */
|
6067 |
|
|
if (GET_CODE (src) == MEM)
|
6068 |
|
|
{
|
6069 |
|
|
rtx new_mem = frv_ifcvt_rewrite_mem (src, mode, insn);
|
6070 |
|
|
|
6071 |
|
|
if (!new_mem)
|
6072 |
|
|
goto fail;
|
6073 |
|
|
|
6074 |
|
|
else if (new_mem != src)
|
6075 |
|
|
{
|
6076 |
|
|
changed_p = TRUE;
|
6077 |
|
|
src = new_mem;
|
6078 |
|
|
}
|
6079 |
|
|
}
|
6080 |
|
|
|
6081 |
|
|
/* If either src or destination changed, redo SET. */
|
6082 |
|
|
if (changed_p)
|
6083 |
|
|
COND_EXEC_CODE (pattern) = gen_rtx_SET (VOIDmode, dest, src);
|
6084 |
|
|
}
|
6085 |
|
|
|
6086 |
|
|
/* Rewrite a nested set cccr in terms of IF_THEN_ELSE. Also deal with
|
6087 |
|
|
rewriting the CC register to be the same as the paired CC/CR register
|
6088 |
|
|
for nested ifs. */
|
6089 |
|
|
else if (mode == CC_CCRmode && COMPARISON_P (src))
|
6090 |
|
|
{
|
6091 |
|
|
int regno = REGNO (XEXP (src, 0));
|
6092 |
|
|
rtx if_else;
|
6093 |
|
|
|
6094 |
|
|
if (ce_info->pass > 1
|
6095 |
|
|
&& regno != (int)REGNO (frv_ifcvt.nested_cc_reg)
|
6096 |
|
|
&& TEST_HARD_REG_BIT (frv_ifcvt.nested_cc_ok_rewrite, regno))
|
6097 |
|
|
{
|
6098 |
|
|
src = gen_rtx_fmt_ee (GET_CODE (src),
|
6099 |
|
|
CC_CCRmode,
|
6100 |
|
|
frv_ifcvt.nested_cc_reg,
|
6101 |
|
|
XEXP (src, 1));
|
6102 |
|
|
}
|
6103 |
|
|
|
6104 |
|
|
if_else = gen_rtx_IF_THEN_ELSE (CC_CCRmode, test, src, const0_rtx);
|
6105 |
|
|
pattern = gen_rtx_SET (VOIDmode, dest, if_else);
|
6106 |
|
|
}
|
6107 |
|
|
|
6108 |
|
|
/* Remap a nested compare instruction to use the paired CC/CR reg. */
|
6109 |
|
|
else if (ce_info->pass > 1
|
6110 |
|
|
&& GET_CODE (dest) == REG
|
6111 |
|
|
&& CC_P (REGNO (dest))
|
6112 |
|
|
&& REGNO (dest) != REGNO (frv_ifcvt.nested_cc_reg)
|
6113 |
|
|
&& TEST_HARD_REG_BIT (frv_ifcvt.nested_cc_ok_rewrite,
|
6114 |
|
|
REGNO (dest))
|
6115 |
|
|
&& GET_CODE (src) == COMPARE)
|
6116 |
|
|
{
|
6117 |
|
|
PUT_MODE (frv_ifcvt.nested_cc_reg, GET_MODE (dest));
|
6118 |
|
|
COND_EXEC_CODE (pattern)
|
6119 |
|
|
= gen_rtx_SET (VOIDmode, frv_ifcvt.nested_cc_reg, copy_rtx (src));
|
6120 |
|
|
}
|
6121 |
|
|
}
|
6122 |
|
|
|
6123 |
|
|
if (TARGET_DEBUG_COND_EXEC)
|
6124 |
|
|
{
|
6125 |
|
|
rtx orig_pattern = PATTERN (insn);
|
6126 |
|
|
|
6127 |
|
|
PATTERN (insn) = pattern;
|
6128 |
|
|
fprintf (stderr,
|
6129 |
|
|
"\n:::::::::: frv_ifcvt_modify_insn: pass = %d, insn after modification:\n",
|
6130 |
|
|
ce_info->pass);
|
6131 |
|
|
|
6132 |
|
|
debug_rtx (insn);
|
6133 |
|
|
PATTERN (insn) = orig_pattern;
|
6134 |
|
|
}
|
6135 |
|
|
|
6136 |
|
|
return pattern;
|
6137 |
|
|
|
6138 |
|
|
fail:
|
6139 |
|
|
if (TARGET_DEBUG_COND_EXEC)
|
6140 |
|
|
{
|
6141 |
|
|
rtx orig_pattern = PATTERN (insn);
|
6142 |
|
|
|
6143 |
|
|
PATTERN (insn) = orig_ce_pattern;
|
6144 |
|
|
fprintf (stderr,
|
6145 |
|
|
"\n:::::::::: frv_ifcvt_modify_insn: pass = %d, insn could not be modified:\n",
|
6146 |
|
|
ce_info->pass);
|
6147 |
|
|
|
6148 |
|
|
debug_rtx (insn);
|
6149 |
|
|
PATTERN (insn) = orig_pattern;
|
6150 |
|
|
}
|
6151 |
|
|
|
6152 |
|
|
return NULL_RTX;
|
6153 |
|
|
}
|
6154 |
|
|
|
6155 |
|
|
|
6156 |
|
|
/* A C expression to perform any final machine dependent modifications in
|
6157 |
|
|
converting code to conditional execution in the code described by the
|
6158 |
|
|
conditional if information CE_INFO. */
|
6159 |
|
|
|
6160 |
|
|
void
|
6161 |
|
|
frv_ifcvt_modify_final (ce_if_block_t *ce_info ATTRIBUTE_UNUSED)
|
6162 |
|
|
{
|
6163 |
|
|
rtx existing_insn;
|
6164 |
|
|
rtx check_insn;
|
6165 |
|
|
rtx p = frv_ifcvt.added_insns_list;
|
6166 |
|
|
int i;
|
6167 |
|
|
|
6168 |
|
|
/* Loop inserting the check insns. The last check insn is the first test,
|
6169 |
|
|
and is the appropriate place to insert constants. */
|
6170 |
|
|
gcc_assert (p);
|
6171 |
|
|
|
6172 |
|
|
do
|
6173 |
|
|
{
|
6174 |
|
|
rtx check_and_insert_insns = XEXP (p, 0);
|
6175 |
|
|
rtx old_p = p;
|
6176 |
|
|
|
6177 |
|
|
check_insn = XEXP (check_and_insert_insns, 0);
|
6178 |
|
|
existing_insn = XEXP (check_and_insert_insns, 1);
|
6179 |
|
|
p = XEXP (p, 1);
|
6180 |
|
|
|
6181 |
|
|
/* The jump bit is used to say that the new insn is to be inserted BEFORE
|
6182 |
|
|
the existing insn, otherwise it is to be inserted AFTER. */
|
6183 |
|
|
if (check_and_insert_insns->jump)
|
6184 |
|
|
{
|
6185 |
|
|
emit_insn_before (check_insn, existing_insn);
|
6186 |
|
|
check_and_insert_insns->jump = 0;
|
6187 |
|
|
}
|
6188 |
|
|
else
|
6189 |
|
|
emit_insn_after (check_insn, existing_insn);
|
6190 |
|
|
|
6191 |
|
|
free_EXPR_LIST_node (check_and_insert_insns);
|
6192 |
|
|
free_EXPR_LIST_node (old_p);
|
6193 |
|
|
}
|
6194 |
|
|
while (p != NULL_RTX);
|
6195 |
|
|
|
6196 |
|
|
/* Load up any constants needed into temp gprs */
|
6197 |
|
|
for (i = 0; i < frv_ifcvt.cur_scratch_regs; i++)
|
6198 |
|
|
{
|
6199 |
|
|
rtx insn = emit_insn_before (frv_ifcvt.scratch_regs[i], existing_insn);
|
6200 |
|
|
if (! frv_ifcvt.scratch_insns_bitmap)
|
6201 |
|
|
frv_ifcvt.scratch_insns_bitmap = BITMAP_ALLOC (NULL);
|
6202 |
|
|
bitmap_set_bit (frv_ifcvt.scratch_insns_bitmap, INSN_UID (insn));
|
6203 |
|
|
frv_ifcvt.scratch_regs[i] = NULL_RTX;
|
6204 |
|
|
}
|
6205 |
|
|
|
6206 |
|
|
frv_ifcvt.added_insns_list = NULL_RTX;
|
6207 |
|
|
frv_ifcvt.cur_scratch_regs = 0;
|
6208 |
|
|
}
|
6209 |
|
|
|
6210 |
|
|
|
6211 |
|
|
/* A C expression to cancel any machine dependent modifications in converting
|
6212 |
|
|
code to conditional execution in the code described by the conditional if
|
6213 |
|
|
information CE_INFO. */
|
6214 |
|
|
|
6215 |
|
|
void
|
6216 |
|
|
frv_ifcvt_modify_cancel (ce_if_block_t *ce_info ATTRIBUTE_UNUSED)
|
6217 |
|
|
{
|
6218 |
|
|
int i;
|
6219 |
|
|
rtx p = frv_ifcvt.added_insns_list;
|
6220 |
|
|
|
6221 |
|
|
/* Loop freeing up the EXPR_LIST's allocated. */
|
6222 |
|
|
while (p != NULL_RTX)
|
6223 |
|
|
{
|
6224 |
|
|
rtx check_and_jump = XEXP (p, 0);
|
6225 |
|
|
rtx old_p = p;
|
6226 |
|
|
|
6227 |
|
|
p = XEXP (p, 1);
|
6228 |
|
|
free_EXPR_LIST_node (check_and_jump);
|
6229 |
|
|
free_EXPR_LIST_node (old_p);
|
6230 |
|
|
}
|
6231 |
|
|
|
6232 |
|
|
/* Release any temporary gprs allocated. */
|
6233 |
|
|
for (i = 0; i < frv_ifcvt.cur_scratch_regs; i++)
|
6234 |
|
|
frv_ifcvt.scratch_regs[i] = NULL_RTX;
|
6235 |
|
|
|
6236 |
|
|
frv_ifcvt.added_insns_list = NULL_RTX;
|
6237 |
|
|
frv_ifcvt.cur_scratch_regs = 0;
|
6238 |
|
|
return;
|
6239 |
|
|
}
|
6240 |
|
|
|
6241 |
|
|
/* A C expression for the size in bytes of the trampoline, as an integer.
|
6242 |
|
|
The template is:
|
6243 |
|
|
|
6244 |
|
|
setlo #0, <jmp_reg>
|
6245 |
|
|
setlo #0, <static_chain>
|
6246 |
|
|
sethi #0, <jmp_reg>
|
6247 |
|
|
sethi #0, <static_chain>
|
6248 |
|
|
jmpl @(gr0,<jmp_reg>) */
|
6249 |
|
|
|
6250 |
|
|
int
|
6251 |
|
|
frv_trampoline_size (void)
|
6252 |
|
|
{
|
6253 |
|
|
if (TARGET_FDPIC)
|
6254 |
|
|
/* Allocate room for the function descriptor and the lddi
|
6255 |
|
|
instruction. */
|
6256 |
|
|
return 8 + 6 * 4;
|
6257 |
|
|
return 5 /* instructions */ * 4 /* instruction size. */;
|
6258 |
|
|
}
|
6259 |
|
|
|
6260 |
|
|
|
6261 |
|
|
/* A C statement to initialize the variable parts of a trampoline. ADDR is an
|
6262 |
|
|
RTX for the address of the trampoline; FNADDR is an RTX for the address of
|
6263 |
|
|
the nested function; STATIC_CHAIN is an RTX for the static chain value that
|
6264 |
|
|
should be passed to the function when it is called.
|
6265 |
|
|
|
6266 |
|
|
The template is:
|
6267 |
|
|
|
6268 |
|
|
setlo #0, <jmp_reg>
|
6269 |
|
|
setlo #0, <static_chain>
|
6270 |
|
|
sethi #0, <jmp_reg>
|
6271 |
|
|
sethi #0, <static_chain>
|
6272 |
|
|
jmpl @(gr0,<jmp_reg>) */
|
6273 |
|
|
|
6274 |
|
|
void
|
6275 |
|
|
frv_initialize_trampoline (rtx addr, rtx fnaddr, rtx static_chain)
|
6276 |
|
|
{
|
6277 |
|
|
rtx sc_reg = force_reg (Pmode, static_chain);
|
6278 |
|
|
|
6279 |
|
|
emit_library_call (gen_rtx_SYMBOL_REF (SImode, "__trampoline_setup"),
|
6280 |
|
|
FALSE, VOIDmode, 4,
|
6281 |
|
|
addr, Pmode,
|
6282 |
|
|
GEN_INT (frv_trampoline_size ()), SImode,
|
6283 |
|
|
fnaddr, Pmode,
|
6284 |
|
|
sc_reg, Pmode);
|
6285 |
|
|
}
|
6286 |
|
|
|
6287 |
|
|
|
6288 |
|
|
/* Many machines have some registers that cannot be copied directly to or from
|
6289 |
|
|
memory or even from other types of registers. An example is the `MQ'
|
6290 |
|
|
register, which on most machines, can only be copied to or from general
|
6291 |
|
|
registers, but not memory. Some machines allow copying all registers to and
|
6292 |
|
|
from memory, but require a scratch register for stores to some memory
|
6293 |
|
|
locations (e.g., those with symbolic address on the RT, and those with
|
6294 |
|
|
certain symbolic address on the SPARC when compiling PIC). In some cases,
|
6295 |
|
|
both an intermediate and a scratch register are required.
|
6296 |
|
|
|
6297 |
|
|
You should define these macros to indicate to the reload phase that it may
|
6298 |
|
|
need to allocate at least one register for a reload in addition to the
|
6299 |
|
|
register to contain the data. Specifically, if copying X to a register
|
6300 |
|
|
CLASS in MODE requires an intermediate register, you should define
|
6301 |
|
|
`SECONDARY_INPUT_RELOAD_CLASS' to return the largest register class all of
|
6302 |
|
|
whose registers can be used as intermediate registers or scratch registers.
|
6303 |
|
|
|
6304 |
|
|
If copying a register CLASS in MODE to X requires an intermediate or scratch
|
6305 |
|
|
register, `SECONDARY_OUTPUT_RELOAD_CLASS' should be defined to return the
|
6306 |
|
|
largest register class required. If the requirements for input and output
|
6307 |
|
|
reloads are the same, the macro `SECONDARY_RELOAD_CLASS' should be used
|
6308 |
|
|
instead of defining both macros identically.
|
6309 |
|
|
|
6310 |
|
|
The values returned by these macros are often `GENERAL_REGS'. Return
|
6311 |
|
|
`NO_REGS' if no spare register is needed; i.e., if X can be directly copied
|
6312 |
|
|
to or from a register of CLASS in MODE without requiring a scratch register.
|
6313 |
|
|
Do not define this macro if it would always return `NO_REGS'.
|
6314 |
|
|
|
6315 |
|
|
If a scratch register is required (either with or without an intermediate
|
6316 |
|
|
register), you should define patterns for `reload_inM' or `reload_outM', as
|
6317 |
|
|
required.. These patterns, which will normally be implemented with a
|
6318 |
|
|
`define_expand', should be similar to the `movM' patterns, except that
|
6319 |
|
|
operand 2 is the scratch register.
|
6320 |
|
|
|
6321 |
|
|
Define constraints for the reload register and scratch register that contain
|
6322 |
|
|
a single register class. If the original reload register (whose class is
|
6323 |
|
|
CLASS) can meet the constraint given in the pattern, the value returned by
|
6324 |
|
|
these macros is used for the class of the scratch register. Otherwise, two
|
6325 |
|
|
additional reload registers are required. Their classes are obtained from
|
6326 |
|
|
the constraints in the insn pattern.
|
6327 |
|
|
|
6328 |
|
|
X might be a pseudo-register or a `subreg' of a pseudo-register, which could
|
6329 |
|
|
either be in a hard register or in memory. Use `true_regnum' to find out;
|
6330 |
|
|
it will return -1 if the pseudo is in memory and the hard register number if
|
6331 |
|
|
it is in a register.
|
6332 |
|
|
|
6333 |
|
|
These macros should not be used in the case where a particular class of
|
6334 |
|
|
registers can only be copied to memory and not to another class of
|
6335 |
|
|
registers. In that case, secondary reload registers are not needed and
|
6336 |
|
|
would not be helpful. Instead, a stack location must be used to perform the
|
6337 |
|
|
copy and the `movM' pattern should use memory as an intermediate storage.
|
6338 |
|
|
This case often occurs between floating-point and general registers. */
|
6339 |
|
|
|
6340 |
|
|
enum reg_class
|
6341 |
|
|
frv_secondary_reload_class (enum reg_class class,
|
6342 |
|
|
enum machine_mode mode ATTRIBUTE_UNUSED,
|
6343 |
|
|
rtx x,
|
6344 |
|
|
int in_p ATTRIBUTE_UNUSED)
|
6345 |
|
|
{
|
6346 |
|
|
enum reg_class ret;
|
6347 |
|
|
|
6348 |
|
|
switch (class)
|
6349 |
|
|
{
|
6350 |
|
|
default:
|
6351 |
|
|
ret = NO_REGS;
|
6352 |
|
|
break;
|
6353 |
|
|
|
6354 |
|
|
/* Accumulators/Accumulator guard registers need to go through floating
|
6355 |
|
|
point registers. */
|
6356 |
|
|
case QUAD_REGS:
|
6357 |
|
|
case EVEN_REGS:
|
6358 |
|
|
case GPR_REGS:
|
6359 |
|
|
ret = NO_REGS;
|
6360 |
|
|
if (x && GET_CODE (x) == REG)
|
6361 |
|
|
{
|
6362 |
|
|
int regno = REGNO (x);
|
6363 |
|
|
|
6364 |
|
|
if (ACC_P (regno) || ACCG_P (regno))
|
6365 |
|
|
ret = FPR_REGS;
|
6366 |
|
|
}
|
6367 |
|
|
break;
|
6368 |
|
|
|
6369 |
|
|
/* Nonzero constants should be loaded into an FPR through a GPR. */
|
6370 |
|
|
case QUAD_FPR_REGS:
|
6371 |
|
|
case FEVEN_REGS:
|
6372 |
|
|
case FPR_REGS:
|
6373 |
|
|
if (x && CONSTANT_P (x) && !ZERO_P (x))
|
6374 |
|
|
ret = GPR_REGS;
|
6375 |
|
|
else
|
6376 |
|
|
ret = NO_REGS;
|
6377 |
|
|
break;
|
6378 |
|
|
|
6379 |
|
|
/* All of these types need gpr registers. */
|
6380 |
|
|
case ICC_REGS:
|
6381 |
|
|
case FCC_REGS:
|
6382 |
|
|
case CC_REGS:
|
6383 |
|
|
case ICR_REGS:
|
6384 |
|
|
case FCR_REGS:
|
6385 |
|
|
case CR_REGS:
|
6386 |
|
|
case LCR_REG:
|
6387 |
|
|
case LR_REG:
|
6388 |
|
|
ret = GPR_REGS;
|
6389 |
|
|
break;
|
6390 |
|
|
|
6391 |
|
|
/* The accumulators need fpr registers */
|
6392 |
|
|
case ACC_REGS:
|
6393 |
|
|
case EVEN_ACC_REGS:
|
6394 |
|
|
case QUAD_ACC_REGS:
|
6395 |
|
|
case ACCG_REGS:
|
6396 |
|
|
ret = FPR_REGS;
|
6397 |
|
|
break;
|
6398 |
|
|
}
|
6399 |
|
|
|
6400 |
|
|
return ret;
|
6401 |
|
|
}
|
6402 |
|
|
|
6403 |
|
|
|
6404 |
|
|
/* A C expression whose value is nonzero if pseudos that have been assigned to
|
6405 |
|
|
registers of class CLASS would likely be spilled because registers of CLASS
|
6406 |
|
|
are needed for spill registers.
|
6407 |
|
|
|
6408 |
|
|
The default value of this macro returns 1 if CLASS has exactly one register
|
6409 |
|
|
and zero otherwise. On most machines, this default should be used. Only
|
6410 |
|
|
define this macro to some other expression if pseudo allocated by
|
6411 |
|
|
`local-alloc.c' end up in memory because their hard registers were needed
|
6412 |
|
|
for spill registers. If this macro returns nonzero for those classes, those
|
6413 |
|
|
pseudos will only be allocated by `global.c', which knows how to reallocate
|
6414 |
|
|
the pseudo to another register. If there would not be another register
|
6415 |
|
|
available for reallocation, you should not change the definition of this
|
6416 |
|
|
macro since the only effect of such a definition would be to slow down
|
6417 |
|
|
register allocation. */
|
6418 |
|
|
|
6419 |
|
|
int
|
6420 |
|
|
frv_class_likely_spilled_p (enum reg_class class)
|
6421 |
|
|
{
|
6422 |
|
|
switch (class)
|
6423 |
|
|
{
|
6424 |
|
|
default:
|
6425 |
|
|
break;
|
6426 |
|
|
|
6427 |
|
|
case GR8_REGS:
|
6428 |
|
|
case GR9_REGS:
|
6429 |
|
|
case GR89_REGS:
|
6430 |
|
|
case FDPIC_FPTR_REGS:
|
6431 |
|
|
case FDPIC_REGS:
|
6432 |
|
|
case ICC_REGS:
|
6433 |
|
|
case FCC_REGS:
|
6434 |
|
|
case CC_REGS:
|
6435 |
|
|
case ICR_REGS:
|
6436 |
|
|
case FCR_REGS:
|
6437 |
|
|
case CR_REGS:
|
6438 |
|
|
case LCR_REG:
|
6439 |
|
|
case LR_REG:
|
6440 |
|
|
case SPR_REGS:
|
6441 |
|
|
case QUAD_ACC_REGS:
|
6442 |
|
|
case EVEN_ACC_REGS:
|
6443 |
|
|
case ACC_REGS:
|
6444 |
|
|
case ACCG_REGS:
|
6445 |
|
|
return TRUE;
|
6446 |
|
|
}
|
6447 |
|
|
|
6448 |
|
|
return FALSE;
|
6449 |
|
|
}
|
6450 |
|
|
|
6451 |
|
|
|
6452 |
|
|
/* An expression for the alignment of a structure field FIELD if the
|
6453 |
|
|
alignment computed in the usual way is COMPUTED. GCC uses this
|
6454 |
|
|
value instead of the value in `BIGGEST_ALIGNMENT' or
|
6455 |
|
|
`BIGGEST_FIELD_ALIGNMENT', if defined, for structure fields only. */
|
6456 |
|
|
|
6457 |
|
|
/* The definition type of the bit field data is either char, short, long or
|
6458 |
|
|
long long. The maximum bit size is the number of bits of its own type.
|
6459 |
|
|
|
6460 |
|
|
The bit field data is assigned to a storage unit that has an adequate size
|
6461 |
|
|
for bit field data retention and is located at the smallest address.
|
6462 |
|
|
|
6463 |
|
|
Consecutive bit field data are packed at consecutive bits having the same
|
6464 |
|
|
storage unit, with regard to the type, beginning with the MSB and continuing
|
6465 |
|
|
toward the LSB.
|
6466 |
|
|
|
6467 |
|
|
If a field to be assigned lies over a bit field type boundary, its
|
6468 |
|
|
assignment is completed by aligning it with a boundary suitable for the
|
6469 |
|
|
type.
|
6470 |
|
|
|
6471 |
|
|
When a bit field having a bit length of 0 is declared, it is forcibly
|
6472 |
|
|
assigned to the next storage unit.
|
6473 |
|
|
|
6474 |
|
|
e.g)
|
6475 |
|
|
struct {
|
6476 |
|
|
int a:2;
|
6477 |
|
|
int b:6;
|
6478 |
|
|
char c:4;
|
6479 |
|
|
int d:10;
|
6480 |
|
|
int :0;
|
6481 |
|
|
int f:2;
|
6482 |
|
|
} x;
|
6483 |
|
|
|
6484 |
|
|
+0 +1 +2 +3
|
6485 |
|
|
&x 00000000 00000000 00000000 00000000
|
6486 |
|
|
MLM----L
|
6487 |
|
|
a b
|
6488 |
|
|
&x+4 00000000 00000000 00000000 00000000
|
6489 |
|
|
M--L
|
6490 |
|
|
c
|
6491 |
|
|
&x+8 00000000 00000000 00000000 00000000
|
6492 |
|
|
M----------L
|
6493 |
|
|
d
|
6494 |
|
|
&x+12 00000000 00000000 00000000 00000000
|
6495 |
|
|
ML
|
6496 |
|
|
f
|
6497 |
|
|
*/
|
6498 |
|
|
|
6499 |
|
|
int
|
6500 |
|
|
frv_adjust_field_align (tree field, int computed)
|
6501 |
|
|
{
|
6502 |
|
|
/* Make sure that the bitfield is not wider than the type. */
|
6503 |
|
|
if (DECL_BIT_FIELD (field)
|
6504 |
|
|
&& !DECL_ARTIFICIAL (field))
|
6505 |
|
|
{
|
6506 |
|
|
tree parent = DECL_CONTEXT (field);
|
6507 |
|
|
tree prev = NULL_TREE;
|
6508 |
|
|
tree cur;
|
6509 |
|
|
|
6510 |
|
|
for (cur = TYPE_FIELDS (parent); cur && cur != field; cur = TREE_CHAIN (cur))
|
6511 |
|
|
{
|
6512 |
|
|
if (TREE_CODE (cur) != FIELD_DECL)
|
6513 |
|
|
continue;
|
6514 |
|
|
|
6515 |
|
|
prev = cur;
|
6516 |
|
|
}
|
6517 |
|
|
|
6518 |
|
|
gcc_assert (cur);
|
6519 |
|
|
|
6520 |
|
|
/* If this isn't a :0 field and if the previous element is a bitfield
|
6521 |
|
|
also, see if the type is different, if so, we will need to align the
|
6522 |
|
|
bit-field to the next boundary. */
|
6523 |
|
|
if (prev
|
6524 |
|
|
&& ! DECL_PACKED (field)
|
6525 |
|
|
&& ! integer_zerop (DECL_SIZE (field))
|
6526 |
|
|
&& DECL_BIT_FIELD_TYPE (field) != DECL_BIT_FIELD_TYPE (prev))
|
6527 |
|
|
{
|
6528 |
|
|
int prev_align = TYPE_ALIGN (TREE_TYPE (prev));
|
6529 |
|
|
int cur_align = TYPE_ALIGN (TREE_TYPE (field));
|
6530 |
|
|
computed = (prev_align > cur_align) ? prev_align : cur_align;
|
6531 |
|
|
}
|
6532 |
|
|
}
|
6533 |
|
|
|
6534 |
|
|
return computed;
|
6535 |
|
|
}
|
6536 |
|
|
|
6537 |
|
|
|
6538 |
|
|
/* A C expression that is nonzero if it is permissible to store a value of mode
|
6539 |
|
|
MODE in hard register number REGNO (or in several registers starting with
|
6540 |
|
|
that one). For a machine where all registers are equivalent, a suitable
|
6541 |
|
|
definition is
|
6542 |
|
|
|
6543 |
|
|
#define HARD_REGNO_MODE_OK(REGNO, MODE) 1
|
6544 |
|
|
|
6545 |
|
|
It is not necessary for this macro to check for the numbers of fixed
|
6546 |
|
|
registers, because the allocation mechanism considers them to be always
|
6547 |
|
|
occupied.
|
6548 |
|
|
|
6549 |
|
|
On some machines, double-precision values must be kept in even/odd register
|
6550 |
|
|
pairs. The way to implement that is to define this macro to reject odd
|
6551 |
|
|
register numbers for such modes.
|
6552 |
|
|
|
6553 |
|
|
The minimum requirement for a mode to be OK in a register is that the
|
6554 |
|
|
`movMODE' instruction pattern support moves between the register and any
|
6555 |
|
|
other hard register for which the mode is OK; and that moving a value into
|
6556 |
|
|
the register and back out not alter it.
|
6557 |
|
|
|
6558 |
|
|
Since the same instruction used to move `SImode' will work for all narrower
|
6559 |
|
|
integer modes, it is not necessary on any machine for `HARD_REGNO_MODE_OK'
|
6560 |
|
|
to distinguish between these modes, provided you define patterns `movhi',
|
6561 |
|
|
etc., to take advantage of this. This is useful because of the interaction
|
6562 |
|
|
between `HARD_REGNO_MODE_OK' and `MODES_TIEABLE_P'; it is very desirable for
|
6563 |
|
|
all integer modes to be tieable.
|
6564 |
|
|
|
6565 |
|
|
Many machines have special registers for floating point arithmetic. Often
|
6566 |
|
|
people assume that floating point machine modes are allowed only in floating
|
6567 |
|
|
point registers. This is not true. Any registers that can hold integers
|
6568 |
|
|
can safely *hold* a floating point machine mode, whether or not floating
|
6569 |
|
|
arithmetic can be done on it in those registers. Integer move instructions
|
6570 |
|
|
can be used to move the values.
|
6571 |
|
|
|
6572 |
|
|
On some machines, though, the converse is true: fixed-point machine modes
|
6573 |
|
|
may not go in floating registers. This is true if the floating registers
|
6574 |
|
|
normalize any value stored in them, because storing a non-floating value
|
6575 |
|
|
there would garble it. In this case, `HARD_REGNO_MODE_OK' should reject
|
6576 |
|
|
fixed-point machine modes in floating registers. But if the floating
|
6577 |
|
|
registers do not automatically normalize, if you can store any bit pattern
|
6578 |
|
|
in one and retrieve it unchanged without a trap, then any machine mode may
|
6579 |
|
|
go in a floating register, so you can define this macro to say so.
|
6580 |
|
|
|
6581 |
|
|
The primary significance of special floating registers is rather that they
|
6582 |
|
|
are the registers acceptable in floating point arithmetic instructions.
|
6583 |
|
|
However, this is of no concern to `HARD_REGNO_MODE_OK'. You handle it by
|
6584 |
|
|
writing the proper constraints for those instructions.
|
6585 |
|
|
|
6586 |
|
|
On some machines, the floating registers are especially slow to access, so
|
6587 |
|
|
that it is better to store a value in a stack frame than in such a register
|
6588 |
|
|
if floating point arithmetic is not being done. As long as the floating
|
6589 |
|
|
registers are not in class `GENERAL_REGS', they will not be used unless some
|
6590 |
|
|
pattern's constraint asks for one. */
|
6591 |
|
|
|
6592 |
|
|
int
|
6593 |
|
|
frv_hard_regno_mode_ok (int regno, enum machine_mode mode)
|
6594 |
|
|
{
|
6595 |
|
|
int base;
|
6596 |
|
|
int mask;
|
6597 |
|
|
|
6598 |
|
|
switch (mode)
|
6599 |
|
|
{
|
6600 |
|
|
case CCmode:
|
6601 |
|
|
case CC_UNSmode:
|
6602 |
|
|
case CC_NZmode:
|
6603 |
|
|
return ICC_P (regno) || GPR_P (regno);
|
6604 |
|
|
|
6605 |
|
|
case CC_CCRmode:
|
6606 |
|
|
return CR_P (regno) || GPR_P (regno);
|
6607 |
|
|
|
6608 |
|
|
case CC_FPmode:
|
6609 |
|
|
return FCC_P (regno) || GPR_P (regno);
|
6610 |
|
|
|
6611 |
|
|
default:
|
6612 |
|
|
break;
|
6613 |
|
|
}
|
6614 |
|
|
|
6615 |
|
|
/* Set BASE to the first register in REGNO's class. Set MASK to the
|
6616 |
|
|
bits that must be clear in (REGNO - BASE) for the register to be
|
6617 |
|
|
well-aligned. */
|
6618 |
|
|
if (INTEGRAL_MODE_P (mode) || FLOAT_MODE_P (mode) || VECTOR_MODE_P (mode))
|
6619 |
|
|
{
|
6620 |
|
|
if (ACCG_P (regno))
|
6621 |
|
|
{
|
6622 |
|
|
/* ACCGs store one byte. Two-byte quantities must start in
|
6623 |
|
|
even-numbered registers, four-byte ones in registers whose
|
6624 |
|
|
numbers are divisible by four, and so on. */
|
6625 |
|
|
base = ACCG_FIRST;
|
6626 |
|
|
mask = GET_MODE_SIZE (mode) - 1;
|
6627 |
|
|
}
|
6628 |
|
|
else
|
6629 |
|
|
{
|
6630 |
|
|
/* The other registers store one word. */
|
6631 |
|
|
if (GPR_P (regno) || regno == AP_FIRST)
|
6632 |
|
|
base = GPR_FIRST;
|
6633 |
|
|
|
6634 |
|
|
else if (FPR_P (regno))
|
6635 |
|
|
base = FPR_FIRST;
|
6636 |
|
|
|
6637 |
|
|
else if (ACC_P (regno))
|
6638 |
|
|
base = ACC_FIRST;
|
6639 |
|
|
|
6640 |
|
|
else if (SPR_P (regno))
|
6641 |
|
|
return mode == SImode;
|
6642 |
|
|
|
6643 |
|
|
/* Fill in the table. */
|
6644 |
|
|
else
|
6645 |
|
|
return 0;
|
6646 |
|
|
|
6647 |
|
|
/* Anything smaller than an SI is OK in any word-sized register. */
|
6648 |
|
|
if (GET_MODE_SIZE (mode) < 4)
|
6649 |
|
|
return 1;
|
6650 |
|
|
|
6651 |
|
|
mask = (GET_MODE_SIZE (mode) / 4) - 1;
|
6652 |
|
|
}
|
6653 |
|
|
return (((regno - base) & mask) == 0);
|
6654 |
|
|
}
|
6655 |
|
|
|
6656 |
|
|
return 0;
|
6657 |
|
|
}
|
6658 |
|
|
|
6659 |
|
|
|
6660 |
|
|
/* A C expression for the number of consecutive hard registers, starting at
|
6661 |
|
|
register number REGNO, required to hold a value of mode MODE.
|
6662 |
|
|
|
6663 |
|
|
On a machine where all registers are exactly one word, a suitable definition
|
6664 |
|
|
of this macro is
|
6665 |
|
|
|
6666 |
|
|
#define HARD_REGNO_NREGS(REGNO, MODE) \
|
6667 |
|
|
((GET_MODE_SIZE (MODE) + UNITS_PER_WORD - 1) \
|
6668 |
|
|
/ UNITS_PER_WORD)) */
|
6669 |
|
|
|
6670 |
|
|
/* On the FRV, make the CC_FP mode take 3 words in the integer registers, so
|
6671 |
|
|
that we can build the appropriate instructions to properly reload the
|
6672 |
|
|
values. Also, make the byte-sized accumulator guards use one guard
|
6673 |
|
|
for each byte. */
|
6674 |
|
|
|
6675 |
|
|
int
|
6676 |
|
|
frv_hard_regno_nregs (int regno, enum machine_mode mode)
|
6677 |
|
|
{
|
6678 |
|
|
if (ACCG_P (regno))
|
6679 |
|
|
return GET_MODE_SIZE (mode);
|
6680 |
|
|
else
|
6681 |
|
|
return (GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
|
6682 |
|
|
}
|
6683 |
|
|
|
6684 |
|
|
|
6685 |
|
|
/* A C expression for the maximum number of consecutive registers of
|
6686 |
|
|
class CLASS needed to hold a value of mode MODE.
|
6687 |
|
|
|
6688 |
|
|
This is closely related to the macro `HARD_REGNO_NREGS'. In fact, the value
|
6689 |
|
|
of the macro `CLASS_MAX_NREGS (CLASS, MODE)' should be the maximum value of
|
6690 |
|
|
`HARD_REGNO_NREGS (REGNO, MODE)' for all REGNO values in the class CLASS.
|
6691 |
|
|
|
6692 |
|
|
This macro helps control the handling of multiple-word values in
|
6693 |
|
|
the reload pass.
|
6694 |
|
|
|
6695 |
|
|
This declaration is required. */
|
6696 |
|
|
|
6697 |
|
|
int
|
6698 |
|
|
frv_class_max_nregs (enum reg_class class, enum machine_mode mode)
|
6699 |
|
|
{
|
6700 |
|
|
if (class == ACCG_REGS)
|
6701 |
|
|
/* An N-byte value requires N accumulator guards. */
|
6702 |
|
|
return GET_MODE_SIZE (mode);
|
6703 |
|
|
else
|
6704 |
|
|
return (GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
|
6705 |
|
|
}
|
6706 |
|
|
|
6707 |
|
|
|
6708 |
|
|
/* A C expression that is nonzero if X is a legitimate constant for an
|
6709 |
|
|
immediate operand on the target machine. You can assume that X satisfies
|
6710 |
|
|
`CONSTANT_P', so you need not check this. In fact, `1' is a suitable
|
6711 |
|
|
definition for this macro on machines where anything `CONSTANT_P' is valid. */
|
6712 |
|
|
|
6713 |
|
|
int
|
6714 |
|
|
frv_legitimate_constant_p (rtx x)
|
6715 |
|
|
{
|
6716 |
|
|
enum machine_mode mode = GET_MODE (x);
|
6717 |
|
|
|
6718 |
|
|
/* frv_cannot_force_const_mem always returns true for FDPIC. This
|
6719 |
|
|
means that the move expanders will be expected to deal with most
|
6720 |
|
|
kinds of constant, regardless of what we return here.
|
6721 |
|
|
|
6722 |
|
|
However, among its other duties, LEGITIMATE_CONSTANT_P decides whether
|
6723 |
|
|
a constant can be entered into reg_equiv_constant[]. If we return true,
|
6724 |
|
|
reload can create new instances of the constant whenever it likes.
|
6725 |
|
|
|
6726 |
|
|
The idea is therefore to accept as many constants as possible (to give
|
6727 |
|
|
reload more freedom) while rejecting constants that can only be created
|
6728 |
|
|
at certain times. In particular, anything with a symbolic component will
|
6729 |
|
|
require use of the pseudo FDPIC register, which is only available before
|
6730 |
|
|
reload. */
|
6731 |
|
|
if (TARGET_FDPIC)
|
6732 |
|
|
return LEGITIMATE_PIC_OPERAND_P (x);
|
6733 |
|
|
|
6734 |
|
|
/* All of the integer constants are ok. */
|
6735 |
|
|
if (GET_CODE (x) != CONST_DOUBLE)
|
6736 |
|
|
return TRUE;
|
6737 |
|
|
|
6738 |
|
|
/* double integer constants are ok. */
|
6739 |
|
|
if (mode == VOIDmode || mode == DImode)
|
6740 |
|
|
return TRUE;
|
6741 |
|
|
|
6742 |
|
|
/* 0 is always ok. */
|
6743 |
|
|
if (x == CONST0_RTX (mode))
|
6744 |
|
|
return TRUE;
|
6745 |
|
|
|
6746 |
|
|
/* If floating point is just emulated, allow any constant, since it will be
|
6747 |
|
|
constructed in the GPRs. */
|
6748 |
|
|
if (!TARGET_HAS_FPRS)
|
6749 |
|
|
return TRUE;
|
6750 |
|
|
|
6751 |
|
|
if (mode == DFmode && !TARGET_DOUBLE)
|
6752 |
|
|
return TRUE;
|
6753 |
|
|
|
6754 |
|
|
/* Otherwise store the constant away and do a load. */
|
6755 |
|
|
return FALSE;
|
6756 |
|
|
}
|
6757 |
|
|
|
6758 |
|
|
/* Implement SELECT_CC_MODE. Choose CC_FP for floating-point comparisons,
|
6759 |
|
|
CC_NZ for comparisons against zero in which a single Z or N flag test
|
6760 |
|
|
is enough, CC_UNS for other unsigned comparisons, and CC for other
|
6761 |
|
|
signed comparisons. */
|
6762 |
|
|
|
6763 |
|
|
enum machine_mode
|
6764 |
|
|
frv_select_cc_mode (enum rtx_code code, rtx x, rtx y)
|
6765 |
|
|
{
|
6766 |
|
|
if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
|
6767 |
|
|
return CC_FPmode;
|
6768 |
|
|
|
6769 |
|
|
switch (code)
|
6770 |
|
|
{
|
6771 |
|
|
case EQ:
|
6772 |
|
|
case NE:
|
6773 |
|
|
case LT:
|
6774 |
|
|
case GE:
|
6775 |
|
|
return y == const0_rtx ? CC_NZmode : CCmode;
|
6776 |
|
|
|
6777 |
|
|
case GTU:
|
6778 |
|
|
case GEU:
|
6779 |
|
|
case LTU:
|
6780 |
|
|
case LEU:
|
6781 |
|
|
return y == const0_rtx ? CC_NZmode : CC_UNSmode;
|
6782 |
|
|
|
6783 |
|
|
default:
|
6784 |
|
|
return CCmode;
|
6785 |
|
|
}
|
6786 |
|
|
}
|
6787 |
|
|
|
6788 |
|
|
/* A C expression for the cost of moving data from a register in class FROM to
|
6789 |
|
|
one in class TO. The classes are expressed using the enumeration values
|
6790 |
|
|
such as `GENERAL_REGS'. A value of 4 is the default; other values are
|
6791 |
|
|
interpreted relative to that.
|
6792 |
|
|
|
6793 |
|
|
It is not required that the cost always equal 2 when FROM is the same as TO;
|
6794 |
|
|
on some machines it is expensive to move between registers if they are not
|
6795 |
|
|
general registers.
|
6796 |
|
|
|
6797 |
|
|
If reload sees an insn consisting of a single `set' between two hard
|
6798 |
|
|
registers, and if `REGISTER_MOVE_COST' applied to their classes returns a
|
6799 |
|
|
value of 2, reload does not check to ensure that the constraints of the insn
|
6800 |
|
|
are met. Setting a cost of other than 2 will allow reload to verify that
|
6801 |
|
|
the constraints are met. You should do this if the `movM' pattern's
|
6802 |
|
|
constraints do not allow such copying. */
|
6803 |
|
|
|
6804 |
|
|
#define HIGH_COST 40
|
6805 |
|
|
#define MEDIUM_COST 3
|
6806 |
|
|
#define LOW_COST 1
|
6807 |
|
|
|
6808 |
|
|
int
|
6809 |
|
|
frv_register_move_cost (enum reg_class from, enum reg_class to)
|
6810 |
|
|
{
|
6811 |
|
|
switch (from)
|
6812 |
|
|
{
|
6813 |
|
|
default:
|
6814 |
|
|
break;
|
6815 |
|
|
|
6816 |
|
|
case QUAD_REGS:
|
6817 |
|
|
case EVEN_REGS:
|
6818 |
|
|
case GPR_REGS:
|
6819 |
|
|
switch (to)
|
6820 |
|
|
{
|
6821 |
|
|
default:
|
6822 |
|
|
break;
|
6823 |
|
|
|
6824 |
|
|
case QUAD_REGS:
|
6825 |
|
|
case EVEN_REGS:
|
6826 |
|
|
case GPR_REGS:
|
6827 |
|
|
return LOW_COST;
|
6828 |
|
|
|
6829 |
|
|
case FEVEN_REGS:
|
6830 |
|
|
case FPR_REGS:
|
6831 |
|
|
return LOW_COST;
|
6832 |
|
|
|
6833 |
|
|
case LCR_REG:
|
6834 |
|
|
case LR_REG:
|
6835 |
|
|
case SPR_REGS:
|
6836 |
|
|
return LOW_COST;
|
6837 |
|
|
}
|
6838 |
|
|
|
6839 |
|
|
case FEVEN_REGS:
|
6840 |
|
|
case FPR_REGS:
|
6841 |
|
|
switch (to)
|
6842 |
|
|
{
|
6843 |
|
|
default:
|
6844 |
|
|
break;
|
6845 |
|
|
|
6846 |
|
|
case QUAD_REGS:
|
6847 |
|
|
case EVEN_REGS:
|
6848 |
|
|
case GPR_REGS:
|
6849 |
|
|
case ACC_REGS:
|
6850 |
|
|
case EVEN_ACC_REGS:
|
6851 |
|
|
case QUAD_ACC_REGS:
|
6852 |
|
|
case ACCG_REGS:
|
6853 |
|
|
return MEDIUM_COST;
|
6854 |
|
|
|
6855 |
|
|
case FEVEN_REGS:
|
6856 |
|
|
case FPR_REGS:
|
6857 |
|
|
return LOW_COST;
|
6858 |
|
|
}
|
6859 |
|
|
|
6860 |
|
|
case LCR_REG:
|
6861 |
|
|
case LR_REG:
|
6862 |
|
|
case SPR_REGS:
|
6863 |
|
|
switch (to)
|
6864 |
|
|
{
|
6865 |
|
|
default:
|
6866 |
|
|
break;
|
6867 |
|
|
|
6868 |
|
|
case QUAD_REGS:
|
6869 |
|
|
case EVEN_REGS:
|
6870 |
|
|
case GPR_REGS:
|
6871 |
|
|
return MEDIUM_COST;
|
6872 |
|
|
}
|
6873 |
|
|
|
6874 |
|
|
case ACC_REGS:
|
6875 |
|
|
case EVEN_ACC_REGS:
|
6876 |
|
|
case QUAD_ACC_REGS:
|
6877 |
|
|
case ACCG_REGS:
|
6878 |
|
|
switch (to)
|
6879 |
|
|
{
|
6880 |
|
|
default:
|
6881 |
|
|
break;
|
6882 |
|
|
|
6883 |
|
|
case FEVEN_REGS:
|
6884 |
|
|
case FPR_REGS:
|
6885 |
|
|
return MEDIUM_COST;
|
6886 |
|
|
|
6887 |
|
|
}
|
6888 |
|
|
}
|
6889 |
|
|
|
6890 |
|
|
return HIGH_COST;
|
6891 |
|
|
}
|
6892 |
|
|
|
6893 |
|
|
/* Implementation of TARGET_ASM_INTEGER. In the FRV case we need to
|
6894 |
|
|
use ".picptr" to generate safe relocations for PIC code. We also
|
6895 |
|
|
need a fixup entry for aligned (non-debugging) code. */
|
6896 |
|
|
|
6897 |
|
|
static bool
|
6898 |
|
|
frv_assemble_integer (rtx value, unsigned int size, int aligned_p)
|
6899 |
|
|
{
|
6900 |
|
|
if ((flag_pic || TARGET_FDPIC) && size == UNITS_PER_WORD)
|
6901 |
|
|
{
|
6902 |
|
|
if (GET_CODE (value) == CONST
|
6903 |
|
|
|| GET_CODE (value) == SYMBOL_REF
|
6904 |
|
|
|| GET_CODE (value) == LABEL_REF)
|
6905 |
|
|
{
|
6906 |
|
|
if (TARGET_FDPIC && GET_CODE (value) == SYMBOL_REF
|
6907 |
|
|
&& SYMBOL_REF_FUNCTION_P (value))
|
6908 |
|
|
{
|
6909 |
|
|
fputs ("\t.picptr\tfuncdesc(", asm_out_file);
|
6910 |
|
|
output_addr_const (asm_out_file, value);
|
6911 |
|
|
fputs (")\n", asm_out_file);
|
6912 |
|
|
return true;
|
6913 |
|
|
}
|
6914 |
|
|
else if (TARGET_FDPIC && GET_CODE (value) == CONST
|
6915 |
|
|
&& frv_function_symbol_referenced_p (value))
|
6916 |
|
|
return false;
|
6917 |
|
|
if (aligned_p && !TARGET_FDPIC)
|
6918 |
|
|
{
|
6919 |
|
|
static int label_num = 0;
|
6920 |
|
|
char buf[256];
|
6921 |
|
|
const char *p;
|
6922 |
|
|
|
6923 |
|
|
ASM_GENERATE_INTERNAL_LABEL (buf, "LCP", label_num++);
|
6924 |
|
|
p = (* targetm.strip_name_encoding) (buf);
|
6925 |
|
|
|
6926 |
|
|
fprintf (asm_out_file, "%s:\n", p);
|
6927 |
|
|
fprintf (asm_out_file, "%s\n", FIXUP_SECTION_ASM_OP);
|
6928 |
|
|
fprintf (asm_out_file, "\t.picptr\t%s\n", p);
|
6929 |
|
|
fprintf (asm_out_file, "\t.previous\n");
|
6930 |
|
|
}
|
6931 |
|
|
assemble_integer_with_op ("\t.picptr\t", value);
|
6932 |
|
|
return true;
|
6933 |
|
|
}
|
6934 |
|
|
if (!aligned_p)
|
6935 |
|
|
{
|
6936 |
|
|
/* We've set the unaligned SI op to NULL, so we always have to
|
6937 |
|
|
handle the unaligned case here. */
|
6938 |
|
|
assemble_integer_with_op ("\t.4byte\t", value);
|
6939 |
|
|
return true;
|
6940 |
|
|
}
|
6941 |
|
|
}
|
6942 |
|
|
return default_assemble_integer (value, size, aligned_p);
|
6943 |
|
|
}
|
6944 |
|
|
|
6945 |
|
|
/* Function to set up the backend function structure. */
|
6946 |
|
|
|
6947 |
|
|
static struct machine_function *
|
6948 |
|
|
frv_init_machine_status (void)
|
6949 |
|
|
{
|
6950 |
|
|
return ggc_alloc_cleared (sizeof (struct machine_function));
|
6951 |
|
|
}
|
6952 |
|
|
|
6953 |
|
|
/* Implement TARGET_SCHED_ISSUE_RATE. */
|
6954 |
|
|
|
6955 |
|
|
int
|
6956 |
|
|
frv_issue_rate (void)
|
6957 |
|
|
{
|
6958 |
|
|
if (!TARGET_PACK)
|
6959 |
|
|
return 1;
|
6960 |
|
|
|
6961 |
|
|
switch (frv_cpu_type)
|
6962 |
|
|
{
|
6963 |
|
|
default:
|
6964 |
|
|
case FRV_CPU_FR300:
|
6965 |
|
|
case FRV_CPU_SIMPLE:
|
6966 |
|
|
return 1;
|
6967 |
|
|
|
6968 |
|
|
case FRV_CPU_FR400:
|
6969 |
|
|
case FRV_CPU_FR405:
|
6970 |
|
|
case FRV_CPU_FR450:
|
6971 |
|
|
return 2;
|
6972 |
|
|
|
6973 |
|
|
case FRV_CPU_GENERIC:
|
6974 |
|
|
case FRV_CPU_FR500:
|
6975 |
|
|
case FRV_CPU_TOMCAT:
|
6976 |
|
|
return 4;
|
6977 |
|
|
|
6978 |
|
|
case FRV_CPU_FR550:
|
6979 |
|
|
return 8;
|
6980 |
|
|
}
|
6981 |
|
|
}
|
6982 |
|
|
|
6983 |
|
|
/* A for_each_rtx callback. If X refers to an accumulator, return
|
6984 |
|
|
ACC_GROUP_ODD if the bit 2 of the register number is set and
|
6985 |
|
|
ACC_GROUP_EVEN if it is clear. Return 0 (ACC_GROUP_NONE)
|
6986 |
|
|
otherwise. */
|
6987 |
|
|
|
6988 |
|
|
static int
|
6989 |
|
|
frv_acc_group_1 (rtx *x, void *data ATTRIBUTE_UNUSED)
|
6990 |
|
|
{
|
6991 |
|
|
if (REG_P (*x))
|
6992 |
|
|
{
|
6993 |
|
|
if (ACC_P (REGNO (*x)))
|
6994 |
|
|
return (REGNO (*x) - ACC_FIRST) & 4 ? ACC_GROUP_ODD : ACC_GROUP_EVEN;
|
6995 |
|
|
if (ACCG_P (REGNO (*x)))
|
6996 |
|
|
return (REGNO (*x) - ACCG_FIRST) & 4 ? ACC_GROUP_ODD : ACC_GROUP_EVEN;
|
6997 |
|
|
}
|
6998 |
|
|
return 0;
|
6999 |
|
|
}
|
7000 |
|
|
|
7001 |
|
|
/* Return the value of INSN's acc_group attribute. */
|
7002 |
|
|
|
7003 |
|
|
int
|
7004 |
|
|
frv_acc_group (rtx insn)
|
7005 |
|
|
{
|
7006 |
|
|
/* This distinction only applies to the FR550 packing constraints. */
|
7007 |
|
|
if (frv_cpu_type != FRV_CPU_FR550)
|
7008 |
|
|
return ACC_GROUP_NONE;
|
7009 |
|
|
return for_each_rtx (&PATTERN (insn), frv_acc_group_1, 0);
|
7010 |
|
|
}
|
7011 |
|
|
|
7012 |
|
|
/* Return the index of the DFA unit in FRV_UNIT_NAMES[] that instruction
|
7013 |
|
|
INSN will try to claim first. Since this value depends only on the
|
7014 |
|
|
type attribute, we can cache the results in FRV_TYPE_TO_UNIT[]. */
|
7015 |
|
|
|
7016 |
|
|
static unsigned int
|
7017 |
|
|
frv_insn_unit (rtx insn)
|
7018 |
|
|
{
|
7019 |
|
|
enum attr_type type;
|
7020 |
|
|
|
7021 |
|
|
type = get_attr_type (insn);
|
7022 |
|
|
if (frv_type_to_unit[type] == ARRAY_SIZE (frv_unit_codes))
|
7023 |
|
|
{
|
7024 |
|
|
/* We haven't seen this type of instruction before. */
|
7025 |
|
|
state_t state;
|
7026 |
|
|
unsigned int unit;
|
7027 |
|
|
|
7028 |
|
|
/* Issue the instruction on its own to see which unit it prefers. */
|
7029 |
|
|
state = alloca (state_size ());
|
7030 |
|
|
state_reset (state);
|
7031 |
|
|
state_transition (state, insn);
|
7032 |
|
|
|
7033 |
|
|
/* Find out which unit was taken. */
|
7034 |
|
|
for (unit = 0; unit < ARRAY_SIZE (frv_unit_codes); unit++)
|
7035 |
|
|
if (cpu_unit_reservation_p (state, frv_unit_codes[unit]))
|
7036 |
|
|
break;
|
7037 |
|
|
|
7038 |
|
|
gcc_assert (unit != ARRAY_SIZE (frv_unit_codes));
|
7039 |
|
|
|
7040 |
|
|
frv_type_to_unit[type] = unit;
|
7041 |
|
|
}
|
7042 |
|
|
return frv_type_to_unit[type];
|
7043 |
|
|
}
|
7044 |
|
|
|
7045 |
|
|
/* Return true if INSN issues to a branch unit. */
|
7046 |
|
|
|
7047 |
|
|
static bool
|
7048 |
|
|
frv_issues_to_branch_unit_p (rtx insn)
|
7049 |
|
|
{
|
7050 |
|
|
return frv_unit_groups[frv_insn_unit (insn)] == GROUP_B;
|
7051 |
|
|
}
|
7052 |
|
|
|
7053 |
|
|
/* The current state of the packing pass, implemented by frv_pack_insns. */
|
7054 |
|
|
static struct {
|
7055 |
|
|
/* The state of the pipeline DFA. */
|
7056 |
|
|
state_t dfa_state;
|
7057 |
|
|
|
7058 |
|
|
/* Which hardware registers are set within the current packet,
|
7059 |
|
|
and the conditions under which they are set. */
|
7060 |
|
|
regstate_t regstate[FIRST_PSEUDO_REGISTER];
|
7061 |
|
|
|
7062 |
|
|
/* The memory locations that have been modified so far in this
|
7063 |
|
|
packet. MEM is the memref and COND is the regstate_t condition
|
7064 |
|
|
under which it is set. */
|
7065 |
|
|
struct {
|
7066 |
|
|
rtx mem;
|
7067 |
|
|
regstate_t cond;
|
7068 |
|
|
} mems[2];
|
7069 |
|
|
|
7070 |
|
|
/* The number of valid entries in MEMS. The value is larger than
|
7071 |
|
|
ARRAY_SIZE (mems) if there were too many mems to record. */
|
7072 |
|
|
unsigned int num_mems;
|
7073 |
|
|
|
7074 |
|
|
/* The maximum number of instructions that can be packed together. */
|
7075 |
|
|
unsigned int issue_rate;
|
7076 |
|
|
|
7077 |
|
|
/* The instructions in the packet, partitioned into groups. */
|
7078 |
|
|
struct frv_packet_group {
|
7079 |
|
|
/* How many instructions in the packet belong to this group. */
|
7080 |
|
|
unsigned int num_insns;
|
7081 |
|
|
|
7082 |
|
|
/* A list of the instructions that belong to this group, in the order
|
7083 |
|
|
they appear in the rtl stream. */
|
7084 |
|
|
rtx insns[ARRAY_SIZE (frv_unit_codes)];
|
7085 |
|
|
|
7086 |
|
|
/* The contents of INSNS after they have been sorted into the correct
|
7087 |
|
|
assembly-language order. Element X issues to unit X. The list may
|
7088 |
|
|
contain extra nops. */
|
7089 |
|
|
rtx sorted[ARRAY_SIZE (frv_unit_codes)];
|
7090 |
|
|
|
7091 |
|
|
/* The member of frv_nops[] to use in sorted[]. */
|
7092 |
|
|
rtx nop;
|
7093 |
|
|
} groups[NUM_GROUPS];
|
7094 |
|
|
|
7095 |
|
|
/* The instructions that make up the current packet. */
|
7096 |
|
|
rtx insns[ARRAY_SIZE (frv_unit_codes)];
|
7097 |
|
|
unsigned int num_insns;
|
7098 |
|
|
} frv_packet;
|
7099 |
|
|
|
7100 |
|
|
/* Return the regstate_t flags for the given COND_EXEC condition.
|
7101 |
|
|
Abort if the condition isn't in the right form. */
|
7102 |
|
|
|
7103 |
|
|
static int
|
7104 |
|
|
frv_cond_flags (rtx cond)
|
7105 |
|
|
{
|
7106 |
|
|
gcc_assert ((GET_CODE (cond) == EQ || GET_CODE (cond) == NE)
|
7107 |
|
|
&& GET_CODE (XEXP (cond, 0)) == REG
|
7108 |
|
|
&& CR_P (REGNO (XEXP (cond, 0)))
|
7109 |
|
|
&& XEXP (cond, 1) == const0_rtx);
|
7110 |
|
|
return ((REGNO (XEXP (cond, 0)) - CR_FIRST)
|
7111 |
|
|
| (GET_CODE (cond) == NE
|
7112 |
|
|
? REGSTATE_IF_TRUE
|
7113 |
|
|
: REGSTATE_IF_FALSE));
|
7114 |
|
|
}
|
7115 |
|
|
|
7116 |
|
|
|
7117 |
|
|
/* Return true if something accessed under condition COND2 can
|
7118 |
|
|
conflict with something written under condition COND1. */
|
7119 |
|
|
|
7120 |
|
|
static bool
|
7121 |
|
|
frv_regstate_conflict_p (regstate_t cond1, regstate_t cond2)
|
7122 |
|
|
{
|
7123 |
|
|
/* If either reference was unconditional, we have a conflict. */
|
7124 |
|
|
if ((cond1 & REGSTATE_IF_EITHER) == 0
|
7125 |
|
|
|| (cond2 & REGSTATE_IF_EITHER) == 0)
|
7126 |
|
|
return true;
|
7127 |
|
|
|
7128 |
|
|
/* The references might conflict if they were controlled by
|
7129 |
|
|
different CRs. */
|
7130 |
|
|
if ((cond1 & REGSTATE_CC_MASK) != (cond2 & REGSTATE_CC_MASK))
|
7131 |
|
|
return true;
|
7132 |
|
|
|
7133 |
|
|
/* They definitely conflict if they are controlled by the
|
7134 |
|
|
same condition. */
|
7135 |
|
|
if ((cond1 & cond2 & REGSTATE_IF_EITHER) != 0)
|
7136 |
|
|
return true;
|
7137 |
|
|
|
7138 |
|
|
return false;
|
7139 |
|
|
}
|
7140 |
|
|
|
7141 |
|
|
|
7142 |
|
|
/* A for_each_rtx callback. Return 1 if *X depends on an instruction in
|
7143 |
|
|
the current packet. DATA points to a regstate_t that describes the
|
7144 |
|
|
condition under which *X might be set or used. */
|
7145 |
|
|
|
7146 |
|
|
static int
|
7147 |
|
|
frv_registers_conflict_p_1 (rtx *x, void *data)
|
7148 |
|
|
{
|
7149 |
|
|
unsigned int regno, i;
|
7150 |
|
|
regstate_t cond;
|
7151 |
|
|
|
7152 |
|
|
cond = *(regstate_t *) data;
|
7153 |
|
|
|
7154 |
|
|
if (GET_CODE (*x) == REG)
|
7155 |
|
|
FOR_EACH_REGNO (regno, *x)
|
7156 |
|
|
if ((frv_packet.regstate[regno] & REGSTATE_MODIFIED) != 0)
|
7157 |
|
|
if (frv_regstate_conflict_p (frv_packet.regstate[regno], cond))
|
7158 |
|
|
return 1;
|
7159 |
|
|
|
7160 |
|
|
if (GET_CODE (*x) == MEM)
|
7161 |
|
|
{
|
7162 |
|
|
/* If we ran out of memory slots, assume a conflict. */
|
7163 |
|
|
if (frv_packet.num_mems > ARRAY_SIZE (frv_packet.mems))
|
7164 |
|
|
return 1;
|
7165 |
|
|
|
7166 |
|
|
/* Check for output or true dependencies with earlier MEMs. */
|
7167 |
|
|
for (i = 0; i < frv_packet.num_mems; i++)
|
7168 |
|
|
if (frv_regstate_conflict_p (frv_packet.mems[i].cond, cond))
|
7169 |
|
|
{
|
7170 |
|
|
if (true_dependence (frv_packet.mems[i].mem, VOIDmode,
|
7171 |
|
|
*x, rtx_varies_p))
|
7172 |
|
|
return 1;
|
7173 |
|
|
|
7174 |
|
|
if (output_dependence (frv_packet.mems[i].mem, *x))
|
7175 |
|
|
return 1;
|
7176 |
|
|
}
|
7177 |
|
|
}
|
7178 |
|
|
|
7179 |
|
|
/* The return values of calls aren't significant: they describe
|
7180 |
|
|
the effect of the call as a whole, not of the insn itself. */
|
7181 |
|
|
if (GET_CODE (*x) == SET && GET_CODE (SET_SRC (*x)) == CALL)
|
7182 |
|
|
{
|
7183 |
|
|
if (for_each_rtx (&SET_SRC (*x), frv_registers_conflict_p_1, data))
|
7184 |
|
|
return 1;
|
7185 |
|
|
return -1;
|
7186 |
|
|
}
|
7187 |
|
|
|
7188 |
|
|
/* Check subexpressions. */
|
7189 |
|
|
return 0;
|
7190 |
|
|
}
|
7191 |
|
|
|
7192 |
|
|
|
7193 |
|
|
/* Return true if something in X might depend on an instruction
|
7194 |
|
|
in the current packet. */
|
7195 |
|
|
|
7196 |
|
|
static bool
|
7197 |
|
|
frv_registers_conflict_p (rtx x)
|
7198 |
|
|
{
|
7199 |
|
|
regstate_t flags;
|
7200 |
|
|
|
7201 |
|
|
flags = 0;
|
7202 |
|
|
if (GET_CODE (x) == COND_EXEC)
|
7203 |
|
|
{
|
7204 |
|
|
if (for_each_rtx (&XEXP (x, 0), frv_registers_conflict_p_1, &flags))
|
7205 |
|
|
return true;
|
7206 |
|
|
|
7207 |
|
|
flags |= frv_cond_flags (XEXP (x, 0));
|
7208 |
|
|
x = XEXP (x, 1);
|
7209 |
|
|
}
|
7210 |
|
|
return for_each_rtx (&x, frv_registers_conflict_p_1, &flags);
|
7211 |
|
|
}
|
7212 |
|
|
|
7213 |
|
|
|
7214 |
|
|
/* A note_stores callback. DATA points to the regstate_t condition
|
7215 |
|
|
under which X is modified. Update FRV_PACKET accordingly. */
|
7216 |
|
|
|
7217 |
|
|
static void
|
7218 |
|
|
frv_registers_update_1 (rtx x, rtx pat ATTRIBUTE_UNUSED, void *data)
|
7219 |
|
|
{
|
7220 |
|
|
unsigned int regno;
|
7221 |
|
|
|
7222 |
|
|
if (GET_CODE (x) == REG)
|
7223 |
|
|
FOR_EACH_REGNO (regno, x)
|
7224 |
|
|
frv_packet.regstate[regno] |= *(regstate_t *) data;
|
7225 |
|
|
|
7226 |
|
|
if (GET_CODE (x) == MEM)
|
7227 |
|
|
{
|
7228 |
|
|
if (frv_packet.num_mems < ARRAY_SIZE (frv_packet.mems))
|
7229 |
|
|
{
|
7230 |
|
|
frv_packet.mems[frv_packet.num_mems].mem = x;
|
7231 |
|
|
frv_packet.mems[frv_packet.num_mems].cond = *(regstate_t *) data;
|
7232 |
|
|
}
|
7233 |
|
|
frv_packet.num_mems++;
|
7234 |
|
|
}
|
7235 |
|
|
}
|
7236 |
|
|
|
7237 |
|
|
|
7238 |
|
|
/* Update the register state information for an instruction whose
|
7239 |
|
|
body is X. */
|
7240 |
|
|
|
7241 |
|
|
static void
|
7242 |
|
|
frv_registers_update (rtx x)
|
7243 |
|
|
{
|
7244 |
|
|
regstate_t flags;
|
7245 |
|
|
|
7246 |
|
|
flags = REGSTATE_MODIFIED;
|
7247 |
|
|
if (GET_CODE (x) == COND_EXEC)
|
7248 |
|
|
{
|
7249 |
|
|
flags |= frv_cond_flags (XEXP (x, 0));
|
7250 |
|
|
x = XEXP (x, 1);
|
7251 |
|
|
}
|
7252 |
|
|
note_stores (x, frv_registers_update_1, &flags);
|
7253 |
|
|
}
|
7254 |
|
|
|
7255 |
|
|
|
7256 |
|
|
/* Initialize frv_packet for the start of a new packet. */
|
7257 |
|
|
|
7258 |
|
|
static void
|
7259 |
|
|
frv_start_packet (void)
|
7260 |
|
|
{
|
7261 |
|
|
enum frv_insn_group group;
|
7262 |
|
|
|
7263 |
|
|
memset (frv_packet.regstate, 0, sizeof (frv_packet.regstate));
|
7264 |
|
|
frv_packet.num_mems = 0;
|
7265 |
|
|
frv_packet.num_insns = 0;
|
7266 |
|
|
for (group = 0; group < NUM_GROUPS; group++)
|
7267 |
|
|
frv_packet.groups[group].num_insns = 0;
|
7268 |
|
|
}
|
7269 |
|
|
|
7270 |
|
|
|
7271 |
|
|
/* Likewise for the start of a new basic block. */
|
7272 |
|
|
|
7273 |
|
|
static void
|
7274 |
|
|
frv_start_packet_block (void)
|
7275 |
|
|
{
|
7276 |
|
|
state_reset (frv_packet.dfa_state);
|
7277 |
|
|
frv_start_packet ();
|
7278 |
|
|
}
|
7279 |
|
|
|
7280 |
|
|
|
7281 |
|
|
/* Finish the current packet, if any, and start a new one. Call
|
7282 |
|
|
HANDLE_PACKET with FRV_PACKET describing the completed packet. */
|
7283 |
|
|
|
7284 |
|
|
static void
|
7285 |
|
|
frv_finish_packet (void (*handle_packet) (void))
|
7286 |
|
|
{
|
7287 |
|
|
if (frv_packet.num_insns > 0)
|
7288 |
|
|
{
|
7289 |
|
|
handle_packet ();
|
7290 |
|
|
state_transition (frv_packet.dfa_state, 0);
|
7291 |
|
|
frv_start_packet ();
|
7292 |
|
|
}
|
7293 |
|
|
}
|
7294 |
|
|
|
7295 |
|
|
|
7296 |
|
|
/* Return true if INSN can be added to the current packet. Update
|
7297 |
|
|
the DFA state on success. */
|
7298 |
|
|
|
7299 |
|
|
static bool
|
7300 |
|
|
frv_pack_insn_p (rtx insn)
|
7301 |
|
|
{
|
7302 |
|
|
/* See if the packet is already as long as it can be. */
|
7303 |
|
|
if (frv_packet.num_insns == frv_packet.issue_rate)
|
7304 |
|
|
return false;
|
7305 |
|
|
|
7306 |
|
|
/* If the scheduler thought that an instruction should start a packet,
|
7307 |
|
|
it's usually a good idea to believe it. It knows much more about
|
7308 |
|
|
the latencies than we do.
|
7309 |
|
|
|
7310 |
|
|
There are some exceptions though:
|
7311 |
|
|
|
7312 |
|
|
- Conditional instructions are scheduled on the assumption that
|
7313 |
|
|
they will be executed. This is usually a good thing, since it
|
7314 |
|
|
tends to avoid unnecessary stalls in the conditional code.
|
7315 |
|
|
But we want to pack conditional instructions as tightly as
|
7316 |
|
|
possible, in order to optimize the case where they aren't
|
7317 |
|
|
executed.
|
7318 |
|
|
|
7319 |
|
|
- The scheduler will always put branches on their own, even
|
7320 |
|
|
if there's no real dependency.
|
7321 |
|
|
|
7322 |
|
|
- There's no point putting a call in its own packet unless
|
7323 |
|
|
we have to. */
|
7324 |
|
|
if (frv_packet.num_insns > 0
|
7325 |
|
|
&& GET_CODE (insn) == INSN
|
7326 |
|
|
&& GET_MODE (insn) == TImode
|
7327 |
|
|
&& GET_CODE (PATTERN (insn)) != COND_EXEC)
|
7328 |
|
|
return false;
|
7329 |
|
|
|
7330 |
|
|
/* Check for register conflicts. Don't do this for setlo since any
|
7331 |
|
|
conflict will be with the partnering sethi, with which it can
|
7332 |
|
|
be packed. */
|
7333 |
|
|
if (get_attr_type (insn) != TYPE_SETLO)
|
7334 |
|
|
if (frv_registers_conflict_p (PATTERN (insn)))
|
7335 |
|
|
return false;
|
7336 |
|
|
|
7337 |
|
|
return state_transition (frv_packet.dfa_state, insn) < 0;
|
7338 |
|
|
}
|
7339 |
|
|
|
7340 |
|
|
|
/* Add instruction INSN to the current packet.  */

static void
frv_add_insn_to_packet (rtx insn)
{
  struct frv_packet_group *packet_group;

  packet_group = &frv_packet.groups[frv_unit_groups[frv_insn_unit (insn)]];
  packet_group->insns[packet_group->num_insns++] = insn;
  frv_packet.insns[frv_packet.num_insns++] = insn;

  frv_registers_update (PATTERN (insn));
}


/* Insert INSN (a member of frv_nops[]) into the current packet.  If the
   packet ends in a branch or call, insert the nop before it, otherwise
   add to the end.  */

static void
frv_insert_nop_in_packet (rtx insn)
{
  struct frv_packet_group *packet_group;
  rtx last;

  packet_group = &frv_packet.groups[frv_unit_groups[frv_insn_unit (insn)]];
  last = frv_packet.insns[frv_packet.num_insns - 1];
  if (GET_CODE (last) != INSN)
    {
      insn = emit_insn_before (PATTERN (insn), last);
      frv_packet.insns[frv_packet.num_insns - 1] = insn;
      frv_packet.insns[frv_packet.num_insns++] = last;
    }
  else
    {
      insn = emit_insn_after (PATTERN (insn), last);
      frv_packet.insns[frv_packet.num_insns++] = insn;
    }
  packet_group->insns[packet_group->num_insns++] = insn;
}

/* If packing is enabled, divide the instructions into packets and
   return true.  Call HANDLE_PACKET for each complete packet.  */

static bool
frv_for_each_packet (void (*handle_packet) (void))
{
  rtx insn, next_insn;

  frv_packet.issue_rate = frv_issue_rate ();

  /* Early exit if we don't want to pack insns.  */
  if (!optimize
      || !flag_schedule_insns_after_reload
      || !TARGET_VLIW_BRANCH
      || frv_packet.issue_rate == 1)
    return false;

  /* Set up the initial packing state.  */
  dfa_start ();
  frv_packet.dfa_state = alloca (state_size ());

  frv_start_packet_block ();
  for (insn = get_insns (); insn != 0; insn = next_insn)
    {
      enum rtx_code code;
      bool eh_insn_p;

      code = GET_CODE (insn);
      next_insn = NEXT_INSN (insn);

      if (code == CODE_LABEL)
        {
          frv_finish_packet (handle_packet);
          frv_start_packet_block ();
        }

      if (INSN_P (insn))
        switch (GET_CODE (PATTERN (insn)))
          {
          case USE:
          case CLOBBER:
          case ADDR_VEC:
          case ADDR_DIFF_VEC:
            break;

          default:
            /* Calls mustn't be packed on a TOMCAT.  */
            if (GET_CODE (insn) == CALL_INSN && frv_cpu_type == FRV_CPU_TOMCAT)
              frv_finish_packet (handle_packet);

            /* Since the last instruction in a packet determines the EH
               region, any exception-throwing instruction must come at
               the end of the reordered packet.  Insns that issue to a
               branch unit are bound to come last; for others it's
               too hard to predict.  */
            eh_insn_p = (find_reg_note (insn, REG_EH_REGION, NULL) != NULL);
            if (eh_insn_p && !frv_issues_to_branch_unit_p (insn))
              frv_finish_packet (handle_packet);

            /* Finish the current packet if we can't add INSN to it.
               Simulate cycles until INSN is ready to issue.  */
            if (!frv_pack_insn_p (insn))
              {
                frv_finish_packet (handle_packet);
                while (!frv_pack_insn_p (insn))
                  state_transition (frv_packet.dfa_state, 0);
              }

            /* Add the instruction to the packet.  */
            frv_add_insn_to_packet (insn);

            /* Calls and jumps end a packet, as do insns that throw
               an exception.  */
            if (code == CALL_INSN || code == JUMP_INSN || eh_insn_p)
              frv_finish_packet (handle_packet);
            break;
          }
    }
  frv_finish_packet (handle_packet);
  dfa_finish ();
  return true;
}

/* Subroutine of frv_sort_insn_group.  We are trying to sort
   frv_packet.groups[GROUP].sorted[0...NUM_INSNS-1] into assembly
   language order.  We have already picked a new position for
   frv_packet.groups[GROUP].sorted[X] if bit X of ISSUED is set.
   These instructions will occupy elements [0, LOWER_SLOT) and
   [UPPER_SLOT, NUM_INSNS) of the final (sorted) array.  STATE is
   the DFA state after issuing these instructions.

   Try filling elements [LOWER_SLOT, UPPER_SLOT) with every permutation
   of the unused instructions.  Return true if one such permutation gives
   a valid ordering, leaving the successful permutation in sorted[].
   Do not modify sorted[] until a valid permutation is found.  */

static bool
frv_sort_insn_group_1 (enum frv_insn_group group,
                       unsigned int lower_slot, unsigned int upper_slot,
                       unsigned int issued, unsigned int num_insns,
                       state_t state)
{
  struct frv_packet_group *packet_group;
  unsigned int i;
  state_t test_state;
  size_t dfa_size;
  rtx insn;

  /* Early success if we've filled all the slots.  */
  if (lower_slot == upper_slot)
    return true;

  packet_group = &frv_packet.groups[group];
  dfa_size = state_size ();
  test_state = alloca (dfa_size);

  /* Try issuing each unused instruction.  */
  for (i = num_insns - 1; i + 1 != 0; i--)
    if (~issued & (1 << i))
      {
        insn = packet_group->sorted[i];
        memcpy (test_state, state, dfa_size);
        if (state_transition (test_state, insn) < 0
            && cpu_unit_reservation_p (test_state,
                                       NTH_UNIT (group, upper_slot - 1))
            && frv_sort_insn_group_1 (group, lower_slot, upper_slot - 1,
                                      issued | (1 << i), num_insns,
                                      test_state))
          {
            packet_group->sorted[upper_slot - 1] = insn;
            return true;
          }
      }

  return false;
}

/* Compare two instructions by their frv_insn_unit.  */

static int
frv_compare_insns (const void *first, const void *second)
{
  const rtx *insn1 = first, *insn2 = second;
  return frv_insn_unit (*insn1) - frv_insn_unit (*insn2);
}

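/* The block below is an illustration added for exposition; it is kept out
   of the build with #if 0 and is not part of the original port.  It models
   the backtracking slot assignment of frv_sort_insn_group_1 without the
   DFA: the hypothetical "fits" callback stands in for the
   cpu_unit_reservation_p test, ISSUED is the same kind of bitmask of
   already-placed items, and slots are filled from UPPER_SLOT downwards,
   writing into sorted[] only once a full assignment has been found.  */
#if 0
static int
fill_slots (int *sorted, unsigned int lower_slot, unsigned int upper_slot,
            unsigned int issued, unsigned int num_items,
            int (*fits) (int item, unsigned int slot))
{
  unsigned int i;
  int item;

  /* Every slot in [LOWER_SLOT, UPPER_SLOT) has been assigned.  */
  if (lower_slot == upper_slot)
    return 1;

  for (i = 0; i < num_items; i++)
    if (!(issued & (1u << i)))
      {
        /* Save the candidate before recursing, since deeper levels may
           overwrite lower-numbered slots of sorted[].  */
        item = sorted[i];
        if (fits (item, upper_slot - 1)
            && fill_slots (sorted, lower_slot, upper_slot - 1,
                           issued | (1u << i), num_items, fits))
          {
            sorted[upper_slot - 1] = item;
            return 1;
          }
      }
  return 0;
}
#endif
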
/* Copy frv_packet.groups[GROUP].insns[] to frv_packet.groups[GROUP].sorted[]
   and sort it into assembly language order.  See frv.md for a description of
   the algorithm.  */

static void
frv_sort_insn_group (enum frv_insn_group group)
{
  struct frv_packet_group *packet_group;
  unsigned int first, i, nop, max_unit, num_slots;
  state_t state, test_state;
  size_t dfa_size;

  packet_group = &frv_packet.groups[group];

  /* Assume no nop is needed.  */
  packet_group->nop = 0;

  if (packet_group->num_insns == 0)
    return;

  /* Copy insns[] to sorted[].  */
  memcpy (packet_group->sorted, packet_group->insns,
          sizeof (rtx) * packet_group->num_insns);

  /* Sort sorted[] by the unit that each insn tries to take first.  */
  if (packet_group->num_insns > 1)
    qsort (packet_group->sorted, packet_group->num_insns,
           sizeof (rtx), frv_compare_insns);

  /* That's always enough for branch and control insns.  */
  if (group == GROUP_B || group == GROUP_C)
    return;

  dfa_size = state_size ();
  state = alloca (dfa_size);
  test_state = alloca (dfa_size);

  /* Find the highest FIRST such that sorted[0...FIRST-1] can issue
     consecutively and such that the DFA takes unit X when sorted[X]
     is added.  Set STATE to the new DFA state.  */
  state_reset (test_state);
  for (first = 0; first < packet_group->num_insns; first++)
    {
      memcpy (state, test_state, dfa_size);
      if (state_transition (test_state, packet_group->sorted[first]) >= 0
          || !cpu_unit_reservation_p (test_state, NTH_UNIT (group, first)))
        break;
    }

  /* If all the instructions issued in ascending order, we're done.  */
  if (first == packet_group->num_insns)
    return;

  /* Add nops to the end of sorted[] and try each permutation until
     we find one that works.  */
  for (nop = 0; nop < frv_num_nops; nop++)
    {
      max_unit = frv_insn_unit (frv_nops[nop]);
      if (frv_unit_groups[max_unit] == group)
        {
          packet_group->nop = frv_nops[nop];
          num_slots = UNIT_NUMBER (max_unit) + 1;
          for (i = packet_group->num_insns; i < num_slots; i++)
            packet_group->sorted[i] = frv_nops[nop];
          if (frv_sort_insn_group_1 (group, first, num_slots,
                                     (1 << first) - 1, num_slots, state))
            return;
        }
    }
  gcc_unreachable ();
}

/* Sort the current packet into assembly-language order.  Set packing
   flags as appropriate.  */

static void
frv_reorder_packet (void)
{
  unsigned int cursor[NUM_GROUPS];
  rtx insns[ARRAY_SIZE (frv_unit_groups)];
  unsigned int unit, to, from;
  enum frv_insn_group group;
  struct frv_packet_group *packet_group;

  /* First sort each group individually.  */
  for (group = 0; group < NUM_GROUPS; group++)
    {
      cursor[group] = 0;
      frv_sort_insn_group (group);
    }

  /* Go through the unit template and try to add an instruction from
     that unit's group.  */
  to = 0;
  for (unit = 0; unit < ARRAY_SIZE (frv_unit_groups); unit++)
    {
      group = frv_unit_groups[unit];
      packet_group = &frv_packet.groups[group];
      if (cursor[group] < packet_group->num_insns)
        {
          /* frv_reorg should have added nops for us.  */
          gcc_assert (packet_group->sorted[cursor[group]]
                      != packet_group->nop);
          insns[to++] = packet_group->sorted[cursor[group]++];
        }
    }

  gcc_assert (to == frv_packet.num_insns);

  /* Clear the last instruction's packing flag, thus marking the end of
     a packet.  Reorder the other instructions relative to it.  */
  CLEAR_PACKING_FLAG (insns[to - 1]);
  for (from = 0; from < to - 1; from++)
    {
      remove_insn (insns[from]);
      add_insn_before (insns[from], insns[to - 1]);
      SET_PACKING_FLAG (insns[from]);
    }
}

/* Divide instructions into packets.  Reorder the contents of each
   packet so that they are in the correct assembly-language order.

   Since this pass can change the raw meaning of the rtl stream, it must
   only be called at the last minute, just before the instructions are
   written out.  */

static void
frv_pack_insns (void)
{
  if (frv_for_each_packet (frv_reorder_packet))
    frv_insn_packing_flag = 0;
  else
    frv_insn_packing_flag = -1;
}

/* See whether we need to add nops to group GROUP in order to
   make a valid packet.  */

static void
frv_fill_unused_units (enum frv_insn_group group)
{
  unsigned int non_nops, nops, i;
  struct frv_packet_group *packet_group;

  packet_group = &frv_packet.groups[group];

  /* Sort the instructions into assembly-language order.
     Use nops to fill slots that are otherwise unused.  */
  frv_sort_insn_group (group);

  /* See how many nops are needed before the final useful instruction.  */
  i = nops = 0;
  for (non_nops = 0; non_nops < packet_group->num_insns; non_nops++)
    while (packet_group->sorted[i++] == packet_group->nop)
      nops++;

  /* Insert that many nops into the instruction stream.  */
  while (nops-- > 0)
    frv_insert_nop_in_packet (packet_group->nop);
}

/* Return true if accesses IO1 and IO2 refer to the same doubleword.  */

static bool
frv_same_doubleword_p (const struct frv_io *io1, const struct frv_io *io2)
{
  if (io1->const_address != 0 && io2->const_address != 0)
    return io1->const_address == io2->const_address;

  if (io1->var_address != 0 && io2->var_address != 0)
    return rtx_equal_p (io1->var_address, io2->var_address);

  return false;
}

/* Return true if operations IO1 and IO2 are guaranteed to complete
   in order.  */

static bool
frv_io_fixed_order_p (const struct frv_io *io1, const struct frv_io *io2)
{
  /* The order of writes is always preserved.  */
  if (io1->type == FRV_IO_WRITE && io2->type == FRV_IO_WRITE)
    return true;

  /* The order of reads isn't preserved.  */
  if (io1->type != FRV_IO_WRITE && io2->type != FRV_IO_WRITE)
    return false;

  /* One operation is a write and the other is (or could be) a read.
     The order is only guaranteed if the accesses are to the same
     doubleword.  */
  return frv_same_doubleword_p (io1, io2);
}

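/* Added summary (commentary only): the guarantees checked above amount to
   a small table.  Two writes are always ordered; two reads (or anything
   involving FRV_IO_UNKNOWN, which is treated as a possible read) never
   are; a write paired with a read is ordered only when both accesses hit
   the same doubleword, which is the case frv_same_doubleword_p tests.  */
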
/* Generalize I/O operation X so that it covers both X and Y.  */

static void
frv_io_union (struct frv_io *x, const struct frv_io *y)
{
  if (x->type != y->type)
    x->type = FRV_IO_UNKNOWN;
  if (!frv_same_doubleword_p (x, y))
    {
      x->const_address = 0;
      x->var_address = 0;
    }
}

/* Fill IO with information about the load or store associated with
   membar instruction INSN.  */

static void
frv_extract_membar (struct frv_io *io, rtx insn)
{
  extract_insn (insn);
  io->type = INTVAL (recog_data.operand[2]);
  io->const_address = INTVAL (recog_data.operand[1]);
  io->var_address = XEXP (recog_data.operand[0], 0);
}

/* A note_stores callback for which DATA points to an rtx.  Nullify *DATA
   if X is a register and *DATA depends on X.  */

static void
frv_io_check_address (rtx x, rtx pat ATTRIBUTE_UNUSED, void *data)
{
  rtx *other = data;

  if (REG_P (x) && *other != 0 && reg_overlap_mentioned_p (x, *other))
    *other = 0;
}

/* A note_stores callback for which DATA points to a HARD_REG_SET.
   Remove every modified register from the set.  */

static void
frv_io_handle_set (rtx x, rtx pat ATTRIBUTE_UNUSED, void *data)
{
  HARD_REG_SET *set = data;
  unsigned int regno;

  if (REG_P (x))
    FOR_EACH_REGNO (regno, x)
      CLEAR_HARD_REG_BIT (*set, regno);
}

/* A for_each_rtx callback for which DATA points to a HARD_REG_SET.
   Add every register in *X to the set.  */

static int
frv_io_handle_use_1 (rtx *x, void *data)
{
  HARD_REG_SET *set = data;
  unsigned int regno;

  if (REG_P (*x))
    FOR_EACH_REGNO (regno, *x)
      SET_HARD_REG_BIT (*set, regno);

  return 0;
}

/* A note_uses callback that applies frv_io_handle_use_1 to an
   entire rhs value.  */

static void
frv_io_handle_use (rtx *x, void *data)
{
  for_each_rtx (x, frv_io_handle_use_1, data);
}

/* Go through block BB looking for membars to remove.  There are two
   cases where intra-block analysis is enough:

   - a membar is redundant if it occurs between two consecutive I/O
     operations and if those operations are guaranteed to complete
     in order.

   - a membar for a __builtin_read is redundant if the result is
     used before the next I/O operation is issued.

   If the last membar in the block could not be removed, and there
   are guaranteed to be no I/O operations between that membar and
   the end of the block, store the membar in *LAST_MEMBAR, otherwise
   store null.

   Describe the block's first I/O operation in *NEXT_IO.  Describe
   an unknown operation if the block doesn't do any I/O.  */

static void
frv_optimize_membar_local (basic_block bb, struct frv_io *next_io,
                           rtx *last_membar)
{
  HARD_REG_SET used_regs;
  rtx next_membar, set, insn;
  bool next_is_end_p;

  /* NEXT_IO is the next I/O operation to be performed after the current
     instruction.  It starts off as being an unknown operation.  */
  memset (next_io, 0, sizeof (*next_io));

  /* NEXT_IS_END_P is true if NEXT_IO describes the end of the block.  */
  next_is_end_p = true;

  /* If the current instruction is a __builtin_read or __builtin_write,
     NEXT_MEMBAR is the membar instruction associated with it.  NEXT_MEMBAR
     is null if the membar has already been deleted.

     Note that the initialization here should only be needed to
     suppress warnings.  */
  next_membar = 0;

  /* USED_REGS is the set of registers that are used before the
     next I/O instruction.  */
  CLEAR_HARD_REG_SET (used_regs);

  for (insn = BB_END (bb); insn != BB_HEAD (bb); insn = PREV_INSN (insn))
    if (GET_CODE (insn) == CALL_INSN)
      {
        /* We can't predict what a call will do to volatile memory.  */
        memset (next_io, 0, sizeof (struct frv_io));
        next_is_end_p = false;
        CLEAR_HARD_REG_SET (used_regs);
      }
    else if (INSN_P (insn))
      switch (recog_memoized (insn))
        {
        case CODE_FOR_optional_membar_qi:
        case CODE_FOR_optional_membar_hi:
        case CODE_FOR_optional_membar_si:
        case CODE_FOR_optional_membar_di:
          next_membar = insn;
          if (next_is_end_p)
            {
              /* Local information isn't enough to decide whether this
                 membar is needed.  Stash it away for later.  */
              *last_membar = insn;
              frv_extract_membar (next_io, insn);
              next_is_end_p = false;
            }
          else
            {
              /* Check whether the I/O operation before INSN could be
                 reordered with one described by NEXT_IO.  If it can't,
                 INSN will not be needed.  */
              struct frv_io prev_io;

              frv_extract_membar (&prev_io, insn);
              if (frv_io_fixed_order_p (&prev_io, next_io))
                {
                  if (dump_file)
                    fprintf (dump_file,
                             ";; [Local] Removing membar %d since order"
                             " of accesses is guaranteed\n",
                             INSN_UID (next_membar));

                  insn = NEXT_INSN (insn);
                  delete_insn (next_membar);
                  next_membar = 0;
                }
              *next_io = prev_io;
            }
          break;

        default:
          /* Invalidate NEXT_IO's address if it depends on something that
             is clobbered by INSN.  */
          if (next_io->var_address)
            note_stores (PATTERN (insn), frv_io_check_address,
                         &next_io->var_address);

          /* If the next membar is associated with a __builtin_read,
             see if INSN reads from that address.  If it does, and if
             the destination register is used before the next I/O access,
             there is no need for the membar.  */
          set = PATTERN (insn);
          if (next_io->type == FRV_IO_READ
              && next_io->var_address != 0
              && next_membar != 0
              && GET_CODE (set) == SET
              && GET_CODE (SET_DEST (set)) == REG
              && TEST_HARD_REG_BIT (used_regs, REGNO (SET_DEST (set))))
            {
              rtx src;

              src = SET_SRC (set);
              if (GET_CODE (src) == ZERO_EXTEND)
                src = XEXP (src, 0);

              if (GET_CODE (src) == MEM
                  && rtx_equal_p (XEXP (src, 0), next_io->var_address))
                {
                  if (dump_file)
                    fprintf (dump_file,
                             ";; [Local] Removing membar %d since the target"
                             " of %d is used before the I/O operation\n",
                             INSN_UID (next_membar), INSN_UID (insn));

                  if (next_membar == *last_membar)
                    *last_membar = 0;

                  delete_insn (next_membar);
                  next_membar = 0;
                }
            }

          /* If INSN has volatile references, forget about any registers
             that are used after it.  Otherwise forget about uses that
             are (or might be) defined by INSN.  */
          if (volatile_refs_p (PATTERN (insn)))
            CLEAR_HARD_REG_SET (used_regs);
          else
            note_stores (PATTERN (insn), frv_io_handle_set, &used_regs);

          note_uses (&PATTERN (insn), frv_io_handle_use, &used_regs);
          break;
        }
}

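/* Added example (commentary only; the names below are made up for
   illustration): the second local case above covers sequences such as

       status = __builtin_read32 (&dev->status);
       if (status & 1)
         data = __builtin_read32 (&dev->data);

   Because STATUS is consumed before the next I/O operation is issued, the
   data dependency already orders the second read after the first, so the
   membar emitted for the first __builtin_read32 can be deleted.  */
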
/* See if MEMBAR, the last membar instruction in BB, can be removed.
   FIRST_IO[X] describes the first operation performed by basic block X.  */

static void
frv_optimize_membar_global (basic_block bb, struct frv_io *first_io,
                            rtx membar)
{
  struct frv_io this_io, next_io;
  edge succ;
  edge_iterator ei;

  /* We need to keep the membar if there is an edge to the exit block.  */
  FOR_EACH_EDGE (succ, ei, bb->succs)
  /* for (succ = bb->succ; succ != 0; succ = succ->succ_next) */
    if (succ->dest == EXIT_BLOCK_PTR)
      return;

  /* Work out the union of all successor blocks.  */
  ei = ei_start (bb->succs);
  ei_cond (ei, &succ);
  /* next_io = first_io[bb->succ->dest->index]; */
  next_io = first_io[succ->dest->index];
  ei = ei_start (bb->succs);
  if (ei_cond (ei, &succ))
    {
      for (ei_next (&ei); ei_cond (ei, &succ); ei_next (&ei))
        /*for (succ = bb->succ->succ_next; succ != 0; succ = succ->succ_next)*/
        frv_io_union (&next_io, &first_io[succ->dest->index]);
    }
  else
    gcc_unreachable ();

  frv_extract_membar (&this_io, membar);
  if (frv_io_fixed_order_p (&this_io, &next_io))
    {
      if (dump_file)
        fprintf (dump_file,
                 ";; [Global] Removing membar %d since order of accesses"
                 " is guaranteed\n", INSN_UID (membar));

      delete_insn (membar);
    }
}

/* Remove redundant membars from the current function.  */

static void
frv_optimize_membar (void)
{
  basic_block bb;
  struct frv_io *first_io;
  rtx *last_membar;

  compute_bb_for_insn ();
  first_io = xcalloc (last_basic_block, sizeof (struct frv_io));
  last_membar = xcalloc (last_basic_block, sizeof (rtx));

  FOR_EACH_BB (bb)
    frv_optimize_membar_local (bb, &first_io[bb->index],
                               &last_membar[bb->index]);

  FOR_EACH_BB (bb)
    if (last_membar[bb->index] != 0)
      frv_optimize_membar_global (bb, first_io, last_membar[bb->index]);

  free (first_io);
  free (last_membar);
}

/* Used by frv_reorg to keep track of the current packet's address.  */
static unsigned int frv_packet_address;

/* If the current packet falls through to a label, try to pad the packet
   with nops in order to fit the label's alignment requirements.  */

static void
frv_align_label (void)
{
  unsigned int alignment, target, nop;
  rtx x, last, barrier, label;

  /* Walk forward to the start of the next packet.  Set ALIGNMENT to the
     maximum alignment of that packet, LABEL to the last label between
     the packets, and BARRIER to the last barrier.  */
  last = frv_packet.insns[frv_packet.num_insns - 1];
  label = barrier = 0;
  alignment = 4;
  for (x = NEXT_INSN (last); x != 0 && !INSN_P (x); x = NEXT_INSN (x))
    {
      if (LABEL_P (x))
        {
          unsigned int subalign = 1 << label_to_alignment (x);
          alignment = MAX (alignment, subalign);
          label = x;
        }
      if (BARRIER_P (x))
        barrier = x;
    }

  /* If -malign-labels, and the packet falls through to an unaligned
     label, try introducing a nop to align that label to 8 bytes.  */
  if (TARGET_ALIGN_LABELS
      && label != 0
      && barrier == 0
      && frv_packet.num_insns < frv_packet.issue_rate)
    alignment = MAX (alignment, 8);

  /* Advance the address to the end of the current packet.  */
  frv_packet_address += frv_packet.num_insns * 4;

  /* Work out the target address, after alignment.  */
  target = (frv_packet_address + alignment - 1) & -alignment;

  /* If the packet falls through to the label, try to find an efficient
     padding sequence.  */
  if (barrier == 0)
    {
      /* First try adding nops to the current packet.  */
      for (nop = 0; nop < frv_num_nops; nop++)
        while (frv_packet_address < target && frv_pack_insn_p (frv_nops[nop]))
          {
            frv_insert_nop_in_packet (frv_nops[nop]);
            frv_packet_address += 4;
          }

      /* If we still haven't reached the target, add some new packets that
         contain only nops.  If there are two types of nop, insert an
         alternating sequence of frv_nops[0] and frv_nops[1], which will
         lead to packets like:

            nop.p
            mnop.p/fnop.p
            nop.p
            mnop/fnop

         etc.  Just emit frv_nops[0] if that's the only nop we have.  */
      last = frv_packet.insns[frv_packet.num_insns - 1];
      nop = 0;
      while (frv_packet_address < target)
        {
          last = emit_insn_after (PATTERN (frv_nops[nop]), last);
          frv_packet_address += 4;
          if (frv_num_nops > 1)
            nop ^= 1;
        }
    }

  frv_packet_address = target;
}

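/* Added worked example (commentary only): with frv_packet_address at 20
   after the current packet and an 8-byte-aligned label ahead, the target
   address is (20 + 8 - 1) & -8 == 24, so one 4-byte nop is needed.  If the
   current packet still has a free slot the nop is packed into it; otherwise
   new nop-only packets are emitted, alternating nop types when more than
   one kind of nop is available.  */
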
/* Subroutine of frv_reorg, called after each packet has been constructed
   in frv_packet.  */

static void
frv_reorg_packet (void)
{
  frv_fill_unused_units (GROUP_I);
  frv_fill_unused_units (GROUP_FM);
  frv_align_label ();
}

/* Add an instruction with pattern NOP to frv_nops[].  */

static void
frv_register_nop (rtx nop)
{
  nop = make_insn_raw (nop);
  NEXT_INSN (nop) = 0;
  PREV_INSN (nop) = 0;
  frv_nops[frv_num_nops++] = nop;
}

/* Implement TARGET_MACHINE_DEPENDENT_REORG.  Divide the instructions
   into packets and check whether we need to insert nops in order to
   fulfill the processor's issue requirements.  Also, if the user has
   requested a certain alignment for a label, try to meet that alignment
   by inserting nops in the previous packet.  */

static void
frv_reorg (void)
{
  if (optimize > 0 && TARGET_OPTIMIZE_MEMBAR && cfun->machine->has_membar_p)
    frv_optimize_membar ();

  frv_num_nops = 0;
  frv_register_nop (gen_nop ());
  if (TARGET_MEDIA)
    frv_register_nop (gen_mnop ());
  if (TARGET_HARD_FLOAT)
    frv_register_nop (gen_fnop ());

  /* Estimate the length of each branch.  Although this may change after
     we've inserted nops, it will only do so in big functions.  */
  shorten_branches (get_insns ());

  frv_packet_address = 0;
  frv_for_each_packet (frv_reorg_packet);
}

#define def_builtin(name, type, code) \
  lang_hooks.builtin_function ((name), (type), (code), BUILT_IN_MD, NULL, NULL)

struct builtin_description
{
  enum insn_code icode;
  const char *name;
  enum frv_builtins code;
  enum rtx_code comparison;
  unsigned int flag;
};

/* Media intrinsics that take a single, constant argument.  */

static struct builtin_description bdesc_set[] =
{
  { CODE_FOR_mhdsets, "__MHDSETS", FRV_BUILTIN_MHDSETS, 0, 0 }
};

/* Media intrinsics that take just one argument.  */

static struct builtin_description bdesc_1arg[] =
{
  { CODE_FOR_mnot, "__MNOT", FRV_BUILTIN_MNOT, 0, 0 },
  { CODE_FOR_munpackh, "__MUNPACKH", FRV_BUILTIN_MUNPACKH, 0, 0 },
  { CODE_FOR_mbtoh, "__MBTOH", FRV_BUILTIN_MBTOH, 0, 0 },
  { CODE_FOR_mhtob, "__MHTOB", FRV_BUILTIN_MHTOB, 0, 0 },
  { CODE_FOR_mabshs, "__MABSHS", FRV_BUILTIN_MABSHS, 0, 0 },
  { CODE_FOR_scutss, "__SCUTSS", FRV_BUILTIN_SCUTSS, 0, 0 }
};

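/* Added note (commentary only): each entry in these tables ties an insn
   pattern (CODE_FOR_*) to the user-visible intrinsic that frv_init_builtins
   registers further down, so media code can call, for example,

       w = __MPACKH (hi, lo);     pack two halfwords into one word
       __MMACHS (0, a, b);        multiply-accumulate into accumulator 0

   in line with the argument types declared below.  The comparison and flag
   fields are unused (zero) for these tables.  */
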
/* Media intrinsics that take two arguments.  */

static struct builtin_description bdesc_2arg[] =
{
  { CODE_FOR_mand, "__MAND", FRV_BUILTIN_MAND, 0, 0 },
  { CODE_FOR_mor, "__MOR", FRV_BUILTIN_MOR, 0, 0 },
  { CODE_FOR_mxor, "__MXOR", FRV_BUILTIN_MXOR, 0, 0 },
  { CODE_FOR_maveh, "__MAVEH", FRV_BUILTIN_MAVEH, 0, 0 },
  { CODE_FOR_msaths, "__MSATHS", FRV_BUILTIN_MSATHS, 0, 0 },
  { CODE_FOR_msathu, "__MSATHU", FRV_BUILTIN_MSATHU, 0, 0 },
  { CODE_FOR_maddhss, "__MADDHSS", FRV_BUILTIN_MADDHSS, 0, 0 },
  { CODE_FOR_maddhus, "__MADDHUS", FRV_BUILTIN_MADDHUS, 0, 0 },
  { CODE_FOR_msubhss, "__MSUBHSS", FRV_BUILTIN_MSUBHSS, 0, 0 },
  { CODE_FOR_msubhus, "__MSUBHUS", FRV_BUILTIN_MSUBHUS, 0, 0 },
  { CODE_FOR_mqaddhss, "__MQADDHSS", FRV_BUILTIN_MQADDHSS, 0, 0 },
  { CODE_FOR_mqaddhus, "__MQADDHUS", FRV_BUILTIN_MQADDHUS, 0, 0 },
  { CODE_FOR_mqsubhss, "__MQSUBHSS", FRV_BUILTIN_MQSUBHSS, 0, 0 },
  { CODE_FOR_mqsubhus, "__MQSUBHUS", FRV_BUILTIN_MQSUBHUS, 0, 0 },
  { CODE_FOR_mpackh, "__MPACKH", FRV_BUILTIN_MPACKH, 0, 0 },
  { CODE_FOR_mcop1, "__Mcop1", FRV_BUILTIN_MCOP1, 0, 0 },
  { CODE_FOR_mcop2, "__Mcop2", FRV_BUILTIN_MCOP2, 0, 0 },
  { CODE_FOR_mwcut, "__MWCUT", FRV_BUILTIN_MWCUT, 0, 0 },
  { CODE_FOR_mqsaths, "__MQSATHS", FRV_BUILTIN_MQSATHS, 0, 0 },
  { CODE_FOR_mqlclrhs, "__MQLCLRHS", FRV_BUILTIN_MQLCLRHS, 0, 0 },
  { CODE_FOR_mqlmths, "__MQLMTHS", FRV_BUILTIN_MQLMTHS, 0, 0 },
  { CODE_FOR_smul, "__SMUL", FRV_BUILTIN_SMUL, 0, 0 },
  { CODE_FOR_umul, "__UMUL", FRV_BUILTIN_UMUL, 0, 0 },
  { CODE_FOR_addss, "__ADDSS", FRV_BUILTIN_ADDSS, 0, 0 },
  { CODE_FOR_subss, "__SUBSS", FRV_BUILTIN_SUBSS, 0, 0 },
  { CODE_FOR_slass, "__SLASS", FRV_BUILTIN_SLASS, 0, 0 },
  { CODE_FOR_scan, "__SCAN", FRV_BUILTIN_SCAN, 0, 0 }
};

/* Integer intrinsics that take two arguments and have no return value.  */

static struct builtin_description bdesc_int_void2arg[] =
{
  { CODE_FOR_smass, "__SMASS", FRV_BUILTIN_SMASS, 0, 0 },
  { CODE_FOR_smsss, "__SMSSS", FRV_BUILTIN_SMSSS, 0, 0 },
  { CODE_FOR_smu, "__SMU", FRV_BUILTIN_SMU, 0, 0 }
};

static struct builtin_description bdesc_prefetches[] =
{
  { CODE_FOR_frv_prefetch0, "__data_prefetch0", FRV_BUILTIN_PREFETCH0, 0, 0 },
  { CODE_FOR_frv_prefetch, "__data_prefetch", FRV_BUILTIN_PREFETCH, 0, 0 }
};

/* Media intrinsics that take two arguments, the first being an ACC number.  */

static struct builtin_description bdesc_cut[] =
{
  { CODE_FOR_mcut, "__MCUT", FRV_BUILTIN_MCUT, 0, 0 },
  { CODE_FOR_mcutss, "__MCUTSS", FRV_BUILTIN_MCUTSS, 0, 0 },
  { CODE_FOR_mdcutssi, "__MDCUTSSI", FRV_BUILTIN_MDCUTSSI, 0, 0 }
};

/* Two-argument media intrinsics with an immediate second argument.  */

static struct builtin_description bdesc_2argimm[] =
{
  { CODE_FOR_mrotli, "__MROTLI", FRV_BUILTIN_MROTLI, 0, 0 },
  { CODE_FOR_mrotri, "__MROTRI", FRV_BUILTIN_MROTRI, 0, 0 },
  { CODE_FOR_msllhi, "__MSLLHI", FRV_BUILTIN_MSLLHI, 0, 0 },
  { CODE_FOR_msrlhi, "__MSRLHI", FRV_BUILTIN_MSRLHI, 0, 0 },
  { CODE_FOR_msrahi, "__MSRAHI", FRV_BUILTIN_MSRAHI, 0, 0 },
  { CODE_FOR_mexpdhw, "__MEXPDHW", FRV_BUILTIN_MEXPDHW, 0, 0 },
  { CODE_FOR_mexpdhd, "__MEXPDHD", FRV_BUILTIN_MEXPDHD, 0, 0 },
  { CODE_FOR_mdrotli, "__MDROTLI", FRV_BUILTIN_MDROTLI, 0, 0 },
  { CODE_FOR_mcplhi, "__MCPLHI", FRV_BUILTIN_MCPLHI, 0, 0 },
  { CODE_FOR_mcpli, "__MCPLI", FRV_BUILTIN_MCPLI, 0, 0 },
  { CODE_FOR_mhsetlos, "__MHSETLOS", FRV_BUILTIN_MHSETLOS, 0, 0 },
  { CODE_FOR_mhsetloh, "__MHSETLOH", FRV_BUILTIN_MHSETLOH, 0, 0 },
  { CODE_FOR_mhsethis, "__MHSETHIS", FRV_BUILTIN_MHSETHIS, 0, 0 },
  { CODE_FOR_mhsethih, "__MHSETHIH", FRV_BUILTIN_MHSETHIH, 0, 0 },
  { CODE_FOR_mhdseth, "__MHDSETH", FRV_BUILTIN_MHDSETH, 0, 0 },
  { CODE_FOR_mqsllhi, "__MQSLLHI", FRV_BUILTIN_MQSLLHI, 0, 0 },
  { CODE_FOR_mqsrahi, "__MQSRAHI", FRV_BUILTIN_MQSRAHI, 0, 0 }
};

/* Media intrinsics that take two arguments and return void, the first argument
   being a pointer to 4 words in memory.  */

static struct builtin_description bdesc_void2arg[] =
{
  { CODE_FOR_mdunpackh, "__MDUNPACKH", FRV_BUILTIN_MDUNPACKH, 0, 0 },
  { CODE_FOR_mbtohe, "__MBTOHE", FRV_BUILTIN_MBTOHE, 0, 0 },
};

/* Media intrinsics that take three arguments, the first being a const_int that
   denotes an accumulator, and that return void.  */

static struct builtin_description bdesc_void3arg[] =
{
  { CODE_FOR_mcpxrs, "__MCPXRS", FRV_BUILTIN_MCPXRS, 0, 0 },
  { CODE_FOR_mcpxru, "__MCPXRU", FRV_BUILTIN_MCPXRU, 0, 0 },
  { CODE_FOR_mcpxis, "__MCPXIS", FRV_BUILTIN_MCPXIS, 0, 0 },
  { CODE_FOR_mcpxiu, "__MCPXIU", FRV_BUILTIN_MCPXIU, 0, 0 },
  { CODE_FOR_mmulhs, "__MMULHS", FRV_BUILTIN_MMULHS, 0, 0 },
  { CODE_FOR_mmulhu, "__MMULHU", FRV_BUILTIN_MMULHU, 0, 0 },
  { CODE_FOR_mmulxhs, "__MMULXHS", FRV_BUILTIN_MMULXHS, 0, 0 },
  { CODE_FOR_mmulxhu, "__MMULXHU", FRV_BUILTIN_MMULXHU, 0, 0 },
  { CODE_FOR_mmachs, "__MMACHS", FRV_BUILTIN_MMACHS, 0, 0 },
  { CODE_FOR_mmachu, "__MMACHU", FRV_BUILTIN_MMACHU, 0, 0 },
  { CODE_FOR_mmrdhs, "__MMRDHS", FRV_BUILTIN_MMRDHS, 0, 0 },
  { CODE_FOR_mmrdhu, "__MMRDHU", FRV_BUILTIN_MMRDHU, 0, 0 },
  { CODE_FOR_mqcpxrs, "__MQCPXRS", FRV_BUILTIN_MQCPXRS, 0, 0 },
  { CODE_FOR_mqcpxru, "__MQCPXRU", FRV_BUILTIN_MQCPXRU, 0, 0 },
  { CODE_FOR_mqcpxis, "__MQCPXIS", FRV_BUILTIN_MQCPXIS, 0, 0 },
  { CODE_FOR_mqcpxiu, "__MQCPXIU", FRV_BUILTIN_MQCPXIU, 0, 0 },
  { CODE_FOR_mqmulhs, "__MQMULHS", FRV_BUILTIN_MQMULHS, 0, 0 },
  { CODE_FOR_mqmulhu, "__MQMULHU", FRV_BUILTIN_MQMULHU, 0, 0 },
  { CODE_FOR_mqmulxhs, "__MQMULXHS", FRV_BUILTIN_MQMULXHS, 0, 0 },
  { CODE_FOR_mqmulxhu, "__MQMULXHU", FRV_BUILTIN_MQMULXHU, 0, 0 },
  { CODE_FOR_mqmachs, "__MQMACHS", FRV_BUILTIN_MQMACHS, 0, 0 },
  { CODE_FOR_mqmachu, "__MQMACHU", FRV_BUILTIN_MQMACHU, 0, 0 },
  { CODE_FOR_mqxmachs, "__MQXMACHS", FRV_BUILTIN_MQXMACHS, 0, 0 },
  { CODE_FOR_mqxmacxhs, "__MQXMACXHS", FRV_BUILTIN_MQXMACXHS, 0, 0 },
  { CODE_FOR_mqmacxhs, "__MQMACXHS", FRV_BUILTIN_MQMACXHS, 0, 0 }
};

/* Media intrinsics that take two accumulator numbers as argument and
   return void.  */

static struct builtin_description bdesc_voidacc[] =
{
  { CODE_FOR_maddaccs, "__MADDACCS", FRV_BUILTIN_MADDACCS, 0, 0 },
  { CODE_FOR_msubaccs, "__MSUBACCS", FRV_BUILTIN_MSUBACCS, 0, 0 },
  { CODE_FOR_masaccs, "__MASACCS", FRV_BUILTIN_MASACCS, 0, 0 },
  { CODE_FOR_mdaddaccs, "__MDADDACCS", FRV_BUILTIN_MDADDACCS, 0, 0 },
  { CODE_FOR_mdsubaccs, "__MDSUBACCS", FRV_BUILTIN_MDSUBACCS, 0, 0 },
  { CODE_FOR_mdasaccs, "__MDASACCS", FRV_BUILTIN_MDASACCS, 0, 0 }
};

/* Intrinsics that load a value and then issue a MEMBAR.  The load is
   a normal move and the ICODE is for the membar.  */

static struct builtin_description bdesc_loads[] =
{
  { CODE_FOR_optional_membar_qi, "__builtin_read8",
    FRV_BUILTIN_READ8, 0, 0 },
  { CODE_FOR_optional_membar_hi, "__builtin_read16",
    FRV_BUILTIN_READ16, 0, 0 },
  { CODE_FOR_optional_membar_si, "__builtin_read32",
    FRV_BUILTIN_READ32, 0, 0 },
  { CODE_FOR_optional_membar_di, "__builtin_read64",
    FRV_BUILTIN_READ64, 0, 0 }
};

/* Likewise stores.  */

static struct builtin_description bdesc_stores[] =
{
  { CODE_FOR_optional_membar_qi, "__builtin_write8",
    FRV_BUILTIN_WRITE8, 0, 0 },
  { CODE_FOR_optional_membar_hi, "__builtin_write16",
    FRV_BUILTIN_WRITE16, 0, 0 },
  { CODE_FOR_optional_membar_si, "__builtin_write32",
    FRV_BUILTIN_WRITE32, 0, 0 },
  { CODE_FOR_optional_membar_di, "__builtin_write64",
    FRV_BUILTIN_WRITE64, 0, 0 },
};

/* Initialize media builtins.  */

static void
frv_init_builtins (void)
{
  tree endlink = void_list_node;
  tree accumulator = integer_type_node;
  tree integer = integer_type_node;
  tree voidt = void_type_node;
  tree uhalf = short_unsigned_type_node;
  tree sword1 = long_integer_type_node;
  tree uword1 = long_unsigned_type_node;
  tree sword2 = long_long_integer_type_node;
  tree uword2 = long_long_unsigned_type_node;
  tree uword4 = build_pointer_type (uword1);
  tree vptr = build_pointer_type (build_type_variant (void_type_node, 0, 1));
  tree ubyte = unsigned_char_type_node;
  tree iacc = integer_type_node;

#define UNARY(RET, T1) \
  build_function_type (RET, tree_cons (NULL_TREE, T1, endlink))

#define BINARY(RET, T1, T2) \
  build_function_type (RET, tree_cons (NULL_TREE, T1, \
                       tree_cons (NULL_TREE, T2, endlink)))

#define TRINARY(RET, T1, T2, T3) \
  build_function_type (RET, tree_cons (NULL_TREE, T1, \
                       tree_cons (NULL_TREE, T2, \
                       tree_cons (NULL_TREE, T3, endlink))))

#define QUAD(RET, T1, T2, T3, T4) \
  build_function_type (RET, tree_cons (NULL_TREE, T1, \
                       tree_cons (NULL_TREE, T2, \
                       tree_cons (NULL_TREE, T3, \
                       tree_cons (NULL_TREE, T4, endlink)))))

  tree void_ftype_void = build_function_type (voidt, endlink);

  tree void_ftype_acc = UNARY (voidt, accumulator);
  tree void_ftype_uw4_uw1 = BINARY (voidt, uword4, uword1);
  tree void_ftype_uw4_uw2 = BINARY (voidt, uword4, uword2);
  tree void_ftype_acc_uw1 = BINARY (voidt, accumulator, uword1);
  tree void_ftype_acc_acc = BINARY (voidt, accumulator, accumulator);
  tree void_ftype_acc_uw1_uw1 = TRINARY (voidt, accumulator, uword1, uword1);
  tree void_ftype_acc_sw1_sw1 = TRINARY (voidt, accumulator, sword1, sword1);
  tree void_ftype_acc_uw2_uw2 = TRINARY (voidt, accumulator, uword2, uword2);
  tree void_ftype_acc_sw2_sw2 = TRINARY (voidt, accumulator, sword2, sword2);

  tree uw1_ftype_uw1 = UNARY (uword1, uword1);
  tree uw1_ftype_sw1 = UNARY (uword1, sword1);
  tree uw1_ftype_uw2 = UNARY (uword1, uword2);
  tree uw1_ftype_acc = UNARY (uword1, accumulator);
  tree uw1_ftype_uh_uh = BINARY (uword1, uhalf, uhalf);
  tree uw1_ftype_uw1_uw1 = BINARY (uword1, uword1, uword1);
  tree uw1_ftype_uw1_int = BINARY (uword1, uword1, integer);
  tree uw1_ftype_acc_uw1 = BINARY (uword1, accumulator, uword1);
  tree uw1_ftype_acc_sw1 = BINARY (uword1, accumulator, sword1);
  tree uw1_ftype_uw2_uw1 = BINARY (uword1, uword2, uword1);
  tree uw1_ftype_uw2_int = BINARY (uword1, uword2, integer);

  tree sw1_ftype_int = UNARY (sword1, integer);
  tree sw1_ftype_sw1_sw1 = BINARY (sword1, sword1, sword1);
  tree sw1_ftype_sw1_int = BINARY (sword1, sword1, integer);

  tree uw2_ftype_uw1 = UNARY (uword2, uword1);
  tree uw2_ftype_uw1_int = BINARY (uword2, uword1, integer);
  tree uw2_ftype_uw2_uw2 = BINARY (uword2, uword2, uword2);
  tree uw2_ftype_uw2_int = BINARY (uword2, uword2, integer);
  tree uw2_ftype_acc_int = BINARY (uword2, accumulator, integer);
  tree uw2_ftype_uh_uh_uh_uh = QUAD (uword2, uhalf, uhalf, uhalf, uhalf);

  tree sw2_ftype_sw2_sw2 = BINARY (sword2, sword2, sword2);
  tree sw2_ftype_sw2_int = BINARY (sword2, sword2, integer);
  tree uw2_ftype_uw1_uw1 = BINARY (uword2, uword1, uword1);
  tree sw2_ftype_sw1_sw1 = BINARY (sword2, sword1, sword1);
  tree void_ftype_sw1_sw1 = BINARY (voidt, sword1, sword1);
  tree void_ftype_iacc_sw2 = BINARY (voidt, iacc, sword2);
  tree void_ftype_iacc_sw1 = BINARY (voidt, iacc, sword1);
  tree sw1_ftype_sw1 = UNARY (sword1, sword1);
  tree sw2_ftype_iacc = UNARY (sword2, iacc);
  tree sw1_ftype_iacc = UNARY (sword1, iacc);
  tree void_ftype_ptr = UNARY (voidt, const_ptr_type_node);
  tree uw1_ftype_vptr = UNARY (uword1, vptr);
  tree uw2_ftype_vptr = UNARY (uword2, vptr);
  tree void_ftype_vptr_ub = BINARY (voidt, vptr, ubyte);
  tree void_ftype_vptr_uh = BINARY (voidt, vptr, uhalf);
  tree void_ftype_vptr_uw1 = BINARY (voidt, vptr, uword1);
  tree void_ftype_vptr_uw2 = BINARY (voidt, vptr, uword2);

  def_builtin ("__MAND", uw1_ftype_uw1_uw1, FRV_BUILTIN_MAND);
  def_builtin ("__MOR", uw1_ftype_uw1_uw1, FRV_BUILTIN_MOR);
  def_builtin ("__MXOR", uw1_ftype_uw1_uw1, FRV_BUILTIN_MXOR);
  def_builtin ("__MNOT", uw1_ftype_uw1, FRV_BUILTIN_MNOT);
  def_builtin ("__MROTLI", uw1_ftype_uw1_int, FRV_BUILTIN_MROTLI);
  def_builtin ("__MROTRI", uw1_ftype_uw1_int, FRV_BUILTIN_MROTRI);
  def_builtin ("__MWCUT", uw1_ftype_uw2_uw1, FRV_BUILTIN_MWCUT);
  def_builtin ("__MAVEH", uw1_ftype_uw1_uw1, FRV_BUILTIN_MAVEH);
  def_builtin ("__MSLLHI", uw1_ftype_uw1_int, FRV_BUILTIN_MSLLHI);
  def_builtin ("__MSRLHI", uw1_ftype_uw1_int, FRV_BUILTIN_MSRLHI);
  def_builtin ("__MSRAHI", sw1_ftype_sw1_int, FRV_BUILTIN_MSRAHI);
  def_builtin ("__MSATHS", sw1_ftype_sw1_sw1, FRV_BUILTIN_MSATHS);
  def_builtin ("__MSATHU", uw1_ftype_uw1_uw1, FRV_BUILTIN_MSATHU);
  def_builtin ("__MADDHSS", sw1_ftype_sw1_sw1, FRV_BUILTIN_MADDHSS);
  def_builtin ("__MADDHUS", uw1_ftype_uw1_uw1, FRV_BUILTIN_MADDHUS);
  def_builtin ("__MSUBHSS", sw1_ftype_sw1_sw1, FRV_BUILTIN_MSUBHSS);
  def_builtin ("__MSUBHUS", uw1_ftype_uw1_uw1, FRV_BUILTIN_MSUBHUS);
  def_builtin ("__MMULHS", void_ftype_acc_sw1_sw1, FRV_BUILTIN_MMULHS);
  def_builtin ("__MMULHU", void_ftype_acc_uw1_uw1, FRV_BUILTIN_MMULHU);
  def_builtin ("__MMULXHS", void_ftype_acc_sw1_sw1, FRV_BUILTIN_MMULXHS);
  def_builtin ("__MMULXHU", void_ftype_acc_uw1_uw1, FRV_BUILTIN_MMULXHU);
  def_builtin ("__MMACHS", void_ftype_acc_sw1_sw1, FRV_BUILTIN_MMACHS);
  def_builtin ("__MMACHU", void_ftype_acc_uw1_uw1, FRV_BUILTIN_MMACHU);
  def_builtin ("__MMRDHS", void_ftype_acc_sw1_sw1, FRV_BUILTIN_MMRDHS);
  def_builtin ("__MMRDHU", void_ftype_acc_uw1_uw1, FRV_BUILTIN_MMRDHU);
  def_builtin ("__MQADDHSS", sw2_ftype_sw2_sw2, FRV_BUILTIN_MQADDHSS);
  def_builtin ("__MQADDHUS", uw2_ftype_uw2_uw2, FRV_BUILTIN_MQADDHUS);
  def_builtin ("__MQSUBHSS", sw2_ftype_sw2_sw2, FRV_BUILTIN_MQSUBHSS);
  def_builtin ("__MQSUBHUS", uw2_ftype_uw2_uw2, FRV_BUILTIN_MQSUBHUS);
  def_builtin ("__MQMULHS", void_ftype_acc_sw2_sw2, FRV_BUILTIN_MQMULHS);
  def_builtin ("__MQMULHU", void_ftype_acc_uw2_uw2, FRV_BUILTIN_MQMULHU);
  def_builtin ("__MQMULXHS", void_ftype_acc_sw2_sw2, FRV_BUILTIN_MQMULXHS);
  def_builtin ("__MQMULXHU", void_ftype_acc_uw2_uw2, FRV_BUILTIN_MQMULXHU);
  def_builtin ("__MQMACHS", void_ftype_acc_sw2_sw2, FRV_BUILTIN_MQMACHS);
  def_builtin ("__MQMACHU", void_ftype_acc_uw2_uw2, FRV_BUILTIN_MQMACHU);
  def_builtin ("__MCPXRS", void_ftype_acc_sw1_sw1, FRV_BUILTIN_MCPXRS);
  def_builtin ("__MCPXRU", void_ftype_acc_uw1_uw1, FRV_BUILTIN_MCPXRU);
  def_builtin ("__MCPXIS", void_ftype_acc_sw1_sw1, FRV_BUILTIN_MCPXIS);
  def_builtin ("__MCPXIU", void_ftype_acc_uw1_uw1, FRV_BUILTIN_MCPXIU);
  def_builtin ("__MQCPXRS", void_ftype_acc_sw2_sw2, FRV_BUILTIN_MQCPXRS);
  def_builtin ("__MQCPXRU", void_ftype_acc_uw2_uw2, FRV_BUILTIN_MQCPXRU);
  def_builtin ("__MQCPXIS", void_ftype_acc_sw2_sw2, FRV_BUILTIN_MQCPXIS);
  def_builtin ("__MQCPXIU", void_ftype_acc_uw2_uw2, FRV_BUILTIN_MQCPXIU);
  def_builtin ("__MCUT", uw1_ftype_acc_uw1, FRV_BUILTIN_MCUT);
  def_builtin ("__MCUTSS", uw1_ftype_acc_sw1, FRV_BUILTIN_MCUTSS);
  def_builtin ("__MEXPDHW", uw1_ftype_uw1_int, FRV_BUILTIN_MEXPDHW);
  def_builtin ("__MEXPDHD", uw2_ftype_uw1_int, FRV_BUILTIN_MEXPDHD);
  def_builtin ("__MPACKH", uw1_ftype_uh_uh, FRV_BUILTIN_MPACKH);
  def_builtin ("__MUNPACKH", uw2_ftype_uw1, FRV_BUILTIN_MUNPACKH);
  def_builtin ("__MDPACKH", uw2_ftype_uh_uh_uh_uh, FRV_BUILTIN_MDPACKH);
  def_builtin ("__MDUNPACKH", void_ftype_uw4_uw2, FRV_BUILTIN_MDUNPACKH);
  def_builtin ("__MBTOH", uw2_ftype_uw1, FRV_BUILTIN_MBTOH);
  def_builtin ("__MHTOB", uw1_ftype_uw2, FRV_BUILTIN_MHTOB);
  def_builtin ("__MBTOHE", void_ftype_uw4_uw1, FRV_BUILTIN_MBTOHE);
  def_builtin ("__MCLRACC", void_ftype_acc, FRV_BUILTIN_MCLRACC);
  def_builtin ("__MCLRACCA", void_ftype_void, FRV_BUILTIN_MCLRACCA);
  def_builtin ("__MRDACC", uw1_ftype_acc, FRV_BUILTIN_MRDACC);
  def_builtin ("__MRDACCG", uw1_ftype_acc, FRV_BUILTIN_MRDACCG);
  def_builtin ("__MWTACC", void_ftype_acc_uw1, FRV_BUILTIN_MWTACC);
  def_builtin ("__MWTACCG", void_ftype_acc_uw1, FRV_BUILTIN_MWTACCG);
  def_builtin ("__Mcop1", uw1_ftype_uw1_uw1, FRV_BUILTIN_MCOP1);
  def_builtin ("__Mcop2", uw1_ftype_uw1_uw1, FRV_BUILTIN_MCOP2);
  def_builtin ("__MTRAP", void_ftype_void, FRV_BUILTIN_MTRAP);
  def_builtin ("__MQXMACHS", void_ftype_acc_sw2_sw2, FRV_BUILTIN_MQXMACHS);
  def_builtin ("__MQXMACXHS", void_ftype_acc_sw2_sw2, FRV_BUILTIN_MQXMACXHS);
  def_builtin ("__MQMACXHS", void_ftype_acc_sw2_sw2, FRV_BUILTIN_MQMACXHS);
  def_builtin ("__MADDACCS", void_ftype_acc_acc, FRV_BUILTIN_MADDACCS);
  def_builtin ("__MSUBACCS", void_ftype_acc_acc, FRV_BUILTIN_MSUBACCS);
  def_builtin ("__MASACCS", void_ftype_acc_acc, FRV_BUILTIN_MASACCS);
  def_builtin ("__MDADDACCS", void_ftype_acc_acc, FRV_BUILTIN_MDADDACCS);
  def_builtin ("__MDSUBACCS", void_ftype_acc_acc, FRV_BUILTIN_MDSUBACCS);
  def_builtin ("__MDASACCS", void_ftype_acc_acc, FRV_BUILTIN_MDASACCS);
  def_builtin ("__MABSHS", uw1_ftype_sw1, FRV_BUILTIN_MABSHS);
  def_builtin ("__MDROTLI", uw2_ftype_uw2_int, FRV_BUILTIN_MDROTLI);
  def_builtin ("__MCPLHI", uw1_ftype_uw2_int, FRV_BUILTIN_MCPLHI);
  def_builtin ("__MCPLI", uw1_ftype_uw2_int, FRV_BUILTIN_MCPLI);
  def_builtin ("__MDCUTSSI", uw2_ftype_acc_int, FRV_BUILTIN_MDCUTSSI);
  def_builtin ("__MQSATHS", sw2_ftype_sw2_sw2, FRV_BUILTIN_MQSATHS);
  def_builtin ("__MHSETLOS", sw1_ftype_sw1_int, FRV_BUILTIN_MHSETLOS);
  def_builtin ("__MHSETHIS", sw1_ftype_sw1_int, FRV_BUILTIN_MHSETHIS);
  def_builtin ("__MHDSETS", sw1_ftype_int, FRV_BUILTIN_MHDSETS);
  def_builtin ("__MHSETLOH", uw1_ftype_uw1_int, FRV_BUILTIN_MHSETLOH);
  def_builtin ("__MHSETHIH", uw1_ftype_uw1_int, FRV_BUILTIN_MHSETHIH);
  def_builtin ("__MHDSETH", uw1_ftype_uw1_int, FRV_BUILTIN_MHDSETH);
  def_builtin ("__MQLCLRHS", sw2_ftype_sw2_sw2, FRV_BUILTIN_MQLCLRHS);
  def_builtin ("__MQLMTHS", sw2_ftype_sw2_sw2, FRV_BUILTIN_MQLMTHS);
  def_builtin ("__MQSLLHI", uw2_ftype_uw2_int, FRV_BUILTIN_MQSLLHI);
  def_builtin ("__MQSRAHI", sw2_ftype_sw2_int, FRV_BUILTIN_MQSRAHI);
  def_builtin ("__SMUL", sw2_ftype_sw1_sw1, FRV_BUILTIN_SMUL);
  def_builtin ("__UMUL", uw2_ftype_uw1_uw1, FRV_BUILTIN_UMUL);
  def_builtin ("__SMASS", void_ftype_sw1_sw1, FRV_BUILTIN_SMASS);
  def_builtin ("__SMSSS", void_ftype_sw1_sw1, FRV_BUILTIN_SMSSS);
  def_builtin ("__SMU", void_ftype_sw1_sw1, FRV_BUILTIN_SMU);
  def_builtin ("__ADDSS", sw1_ftype_sw1_sw1, FRV_BUILTIN_ADDSS);
  def_builtin ("__SUBSS", sw1_ftype_sw1_sw1, FRV_BUILTIN_SUBSS);
  def_builtin ("__SLASS", sw1_ftype_sw1_sw1, FRV_BUILTIN_SLASS);
  def_builtin ("__SCAN", sw1_ftype_sw1_sw1, FRV_BUILTIN_SCAN);
  def_builtin ("__SCUTSS", sw1_ftype_sw1, FRV_BUILTIN_SCUTSS);
  def_builtin ("__IACCreadll", sw2_ftype_iacc, FRV_BUILTIN_IACCreadll);
  def_builtin ("__IACCreadl", sw1_ftype_iacc, FRV_BUILTIN_IACCreadl);
  def_builtin ("__IACCsetll", void_ftype_iacc_sw2, FRV_BUILTIN_IACCsetll);
  def_builtin ("__IACCsetl", void_ftype_iacc_sw1, FRV_BUILTIN_IACCsetl);
  def_builtin ("__data_prefetch0", void_ftype_ptr, FRV_BUILTIN_PREFETCH0);
  def_builtin ("__data_prefetch", void_ftype_ptr, FRV_BUILTIN_PREFETCH);
  def_builtin ("__builtin_read8", uw1_ftype_vptr, FRV_BUILTIN_READ8);
  def_builtin ("__builtin_read16", uw1_ftype_vptr, FRV_BUILTIN_READ16);
  def_builtin ("__builtin_read32", uw1_ftype_vptr, FRV_BUILTIN_READ32);
  def_builtin ("__builtin_read64", uw2_ftype_vptr, FRV_BUILTIN_READ64);

  def_builtin ("__builtin_write8", void_ftype_vptr_ub, FRV_BUILTIN_WRITE8);
  def_builtin ("__builtin_write16", void_ftype_vptr_uh, FRV_BUILTIN_WRITE16);
  def_builtin ("__builtin_write32", void_ftype_vptr_uw1, FRV_BUILTIN_WRITE32);
  def_builtin ("__builtin_write64", void_ftype_vptr_uw2, FRV_BUILTIN_WRITE64);

#undef UNARY
#undef BINARY
#undef TRINARY
#undef QUAD
}

/* Set the names for various arithmetic operations according to the
   FRV ABI.  */
static void
frv_init_libfuncs (void)
{
  set_optab_libfunc (smod_optab, SImode, "__modi");
  set_optab_libfunc (umod_optab, SImode, "__umodi");

  set_optab_libfunc (add_optab, DImode, "__addll");
  set_optab_libfunc (sub_optab, DImode, "__subll");
  set_optab_libfunc (smul_optab, DImode, "__mulll");
  set_optab_libfunc (sdiv_optab, DImode, "__divll");
  set_optab_libfunc (smod_optab, DImode, "__modll");
  set_optab_libfunc (umod_optab, DImode, "__umodll");
  set_optab_libfunc (and_optab, DImode, "__andll");
  set_optab_libfunc (ior_optab, DImode, "__orll");
  set_optab_libfunc (xor_optab, DImode, "__xorll");
  set_optab_libfunc (one_cmpl_optab, DImode, "__notll");

  set_optab_libfunc (add_optab, SFmode, "__addf");
  set_optab_libfunc (sub_optab, SFmode, "__subf");
  set_optab_libfunc (smul_optab, SFmode, "__mulf");
  set_optab_libfunc (sdiv_optab, SFmode, "__divf");

  set_optab_libfunc (add_optab, DFmode, "__addd");
  set_optab_libfunc (sub_optab, DFmode, "__subd");
  set_optab_libfunc (smul_optab, DFmode, "__muld");
  set_optab_libfunc (sdiv_optab, DFmode, "__divd");

  set_conv_libfunc (sext_optab, DFmode, SFmode, "__ftod");
  set_conv_libfunc (trunc_optab, SFmode, DFmode, "__dtof");

  set_conv_libfunc (sfix_optab, SImode, SFmode, "__ftoi");
  set_conv_libfunc (sfix_optab, DImode, SFmode, "__ftoll");
  set_conv_libfunc (sfix_optab, SImode, DFmode, "__dtoi");
  set_conv_libfunc (sfix_optab, DImode, DFmode, "__dtoll");

  set_conv_libfunc (ufix_optab, SImode, SFmode, "__ftoui");
  set_conv_libfunc (ufix_optab, DImode, SFmode, "__ftoull");
  set_conv_libfunc (ufix_optab, SImode, DFmode, "__dtoui");
  set_conv_libfunc (ufix_optab, DImode, DFmode, "__dtoull");

  set_conv_libfunc (sfloat_optab, SFmode, SImode, "__itof");
  set_conv_libfunc (sfloat_optab, SFmode, DImode, "__lltof");
  set_conv_libfunc (sfloat_optab, DFmode, SImode, "__itod");
  set_conv_libfunc (sfloat_optab, DFmode, DImode, "__lltod");
}

8602 |
|
|
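/* Illustrative note (added here, not from the original sources): with the
   table above, a 64-bit signed division such as `a / b' on DImode operands
   is emitted as a call to the ABI routine `__divll', and a signed
   SFmode-to-SImode conversion becomes a call to `__ftoi'.  */
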
/* Convert an integer constant to an accumulator register.  ICODE is the
   code of the target instruction, OPNUM is the number of the
   accumulator operand and OPVAL is the constant integer.  Try both
   ACC and ACCG registers; only report an error if neither fits the
   instruction.  */

static rtx
frv_int_to_acc (enum insn_code icode, int opnum, rtx opval)
{
  rtx reg;
  int i;

  /* ACCs and ACCGs are implicit global registers if media intrinsics
     are being used.  We set up this lazily to avoid creating lots of
     unnecessary call_insn rtl in non-media code.  */
  for (i = 0; i <= ACC_MASK; i++)
    if ((i & ACC_MASK) == i)
      global_regs[i + ACC_FIRST] = global_regs[i + ACCG_FIRST] = 1;

  if (GET_CODE (opval) != CONST_INT)
    {
      error ("accumulator is not a constant integer");
      return NULL_RTX;
    }
  if ((INTVAL (opval) & ~ACC_MASK) != 0)
    {
      error ("accumulator number is out of bounds");
      return NULL_RTX;
    }

  reg = gen_rtx_REG (insn_data[icode].operand[opnum].mode,
		     ACC_FIRST + INTVAL (opval));
  if (! (*insn_data[icode].operand[opnum].predicate) (reg, VOIDmode))
    REGNO (reg) = ACCG_FIRST + INTVAL (opval);

  if (! (*insn_data[icode].operand[opnum].predicate) (reg, VOIDmode))
    {
      error ("inappropriate accumulator for %qs", insn_data[icode].name);
      return NULL_RTX;
    }
  return reg;
}

/* If an ACC rtx has mode MODE, return the mode that the matching ACCG
   should have.  */

static enum machine_mode
frv_matching_accg_mode (enum machine_mode mode)
{
  switch (mode)
    {
    case V4SImode:
      return V4QImode;

    case DImode:
      return HImode;

    case SImode:
      return QImode;

    default:
      gcc_unreachable ();
    }
}

/* Given that a __builtin_read or __builtin_write function is accessing
   address ADDRESS, return the value that should be used as operand 1
   of the membar.  */

static rtx
frv_io_address_cookie (rtx address)
{
  return (GET_CODE (address) == CONST_INT
	  ? GEN_INT (INTVAL (address) / 8 * 8)
	  : const0_rtx);
}

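/* Worked example (added for illustration, not in the original sources):
   a constant address such as 0x1006 yields the cookie 0x1000 -- the
   address rounded down to the enclosing 8-byte doubleword -- while a
   non-constant address simply yields the cookie 0.  */
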
/* Return the accumulator guard that should be paired with accumulator
   register ACC.  The mode of the returned register is in the same
   class as ACC, but is four times smaller.  */

rtx
frv_matching_accg_for_acc (rtx acc)
{
  return gen_rtx_REG (frv_matching_accg_mode (GET_MODE (acc)),
		      REGNO (acc) - ACC_FIRST + ACCG_FIRST);
}

/* Read a value from the head of the tree list pointed to by ARGLISTPTR.
   Return the value as an rtx and replace *ARGLISTPTR with the tail of the
   list.  */

static rtx
frv_read_argument (tree *arglistptr)
{
  tree next = TREE_VALUE (*arglistptr);
  *arglistptr = TREE_CHAIN (*arglistptr);
  return expand_expr (next, NULL_RTX, VOIDmode, 0);
}

/* Like frv_read_argument, but interpret the argument as the number
   of an IACC register and return a (reg:MODE ...) rtx for it.  */

static rtx
frv_read_iacc_argument (enum machine_mode mode, tree *arglistptr)
{
  int i, regno;
  rtx op;

  op = frv_read_argument (arglistptr);
  if (GET_CODE (op) != CONST_INT
      || INTVAL (op) < 0
      || INTVAL (op) > IACC_LAST - IACC_FIRST
      || ((INTVAL (op) * 4) & (GET_MODE_SIZE (mode) - 1)) != 0)
    {
      error ("invalid IACC argument");
      op = const0_rtx;
    }

  /* IACCs are implicit global registers.  We set up this lazily to
     avoid creating lots of unnecessary call_insn rtl when IACCs aren't
     being used.  */
  regno = INTVAL (op) + IACC_FIRST;
  for (i = 0; i < HARD_REGNO_NREGS (regno, mode); i++)
    global_regs[regno + i] = 1;

  return gen_rtx_REG (mode, regno);
}

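/* Note added for illustration (not in the original sources): the argument
   must be a constant IACC number that is suitably aligned for MODE.  The
   check above scales the register number by 4 (the size of one IACC word),
   so a DImode access requires an even register number, while any in-range
   number is acceptable for SImode.  */
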
/* Return true if OPVAL can be used for operand OPNUM of instruction ICODE.
   The instruction should require a constant operand of some sort.  The
   function prints an error if OPVAL is not valid.  */

static int
frv_check_constant_argument (enum insn_code icode, int opnum, rtx opval)
{
  if (GET_CODE (opval) != CONST_INT)
    {
      error ("%qs expects a constant argument", insn_data[icode].name);
      return FALSE;
    }
  if (! (*insn_data[icode].operand[opnum].predicate) (opval, VOIDmode))
    {
      error ("constant argument out of range for %qs", insn_data[icode].name);
      return FALSE;
    }
  return TRUE;
}

/* Return a legitimate rtx for instruction ICODE's return value.  Use TARGET
   if it's not null, has the right mode, and satisfies operand 0's
   predicate.  */

static rtx
frv_legitimize_target (enum insn_code icode, rtx target)
{
  enum machine_mode mode = insn_data[icode].operand[0].mode;

  if (! target
      || GET_MODE (target) != mode
      || ! (*insn_data[icode].operand[0].predicate) (target, mode))
    return gen_reg_rtx (mode);
  else
    return target;
}

/* Given that ARG is being passed as operand OPNUM to instruction ICODE,
   check whether ARG satisfies the operand's constraints.  If it doesn't,
   copy ARG to a temporary register and return that.  Otherwise return ARG
   itself.  */

static rtx
frv_legitimize_argument (enum insn_code icode, int opnum, rtx arg)
{
  enum machine_mode mode = insn_data[icode].operand[opnum].mode;

  if ((*insn_data[icode].operand[opnum].predicate) (arg, mode))
    return arg;
  else
    return copy_to_mode_reg (mode, arg);
}

/* Return a volatile memory reference of mode MODE whose address is ARG.  */

static rtx
frv_volatile_memref (enum machine_mode mode, rtx arg)
{
  rtx mem;

  mem = gen_rtx_MEM (mode, memory_address (mode, arg));
  MEM_VOLATILE_P (mem) = 1;
  return mem;
}

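/* Note added for illustration (not in the original sources): setting
   MEM_VOLATILE_P on the reference returned above marks the access as
   volatile, so the optimizers will not delete or coalesce it; the
   __builtin_read/__builtin_write expanders rely on this together with an
   explicit membar.  */
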
/* Expand builtins that take a single, constant argument.  At the moment,
   only MHDSETS falls into this category.  */

static rtx
frv_expand_set_builtin (enum insn_code icode, tree arglist, rtx target)
{
  rtx pat;
  rtx op0 = frv_read_argument (&arglist);

  if (! frv_check_constant_argument (icode, 1, op0))
    return NULL_RTX;

  target = frv_legitimize_target (icode, target);
  pat = GEN_FCN (icode) (target, op0);
  if (! pat)
    return NULL_RTX;

  emit_insn (pat);
  return target;
}

/* Expand builtins that take one operand.  */

static rtx
frv_expand_unop_builtin (enum insn_code icode, tree arglist, rtx target)
{
  rtx pat;
  rtx op0 = frv_read_argument (&arglist);

  target = frv_legitimize_target (icode, target);
  op0 = frv_legitimize_argument (icode, 1, op0);
  pat = GEN_FCN (icode) (target, op0);
  if (! pat)
    return NULL_RTX;

  emit_insn (pat);
  return target;
}

/* Expand builtins that take two operands.  */

static rtx
frv_expand_binop_builtin (enum insn_code icode, tree arglist, rtx target)
{
  rtx pat;
  rtx op0 = frv_read_argument (&arglist);
  rtx op1 = frv_read_argument (&arglist);

  target = frv_legitimize_target (icode, target);
  op0 = frv_legitimize_argument (icode, 1, op0);
  op1 = frv_legitimize_argument (icode, 2, op1);
  pat = GEN_FCN (icode) (target, op0, op1);
  if (! pat)
    return NULL_RTX;

  emit_insn (pat);
  return target;
}

/* Expand cut-style builtins, which take two operands and an implicit ACCG
   one.  */

static rtx
frv_expand_cut_builtin (enum insn_code icode, tree arglist, rtx target)
{
  rtx pat;
  rtx op0 = frv_read_argument (&arglist);
  rtx op1 = frv_read_argument (&arglist);
  rtx op2;

  target = frv_legitimize_target (icode, target);
  op0 = frv_int_to_acc (icode, 1, op0);
  if (! op0)
    return NULL_RTX;

  if (icode == CODE_FOR_mdcutssi || GET_CODE (op1) == CONST_INT)
    {
      if (! frv_check_constant_argument (icode, 2, op1))
	return NULL_RTX;
    }
  else
    op1 = frv_legitimize_argument (icode, 2, op1);

  op2 = frv_matching_accg_for_acc (op0);
  pat = GEN_FCN (icode) (target, op0, op1, op2);
  if (! pat)
    return NULL_RTX;

  emit_insn (pat);
  return target;
}

/* Expand builtins that take two operands and the second is immediate.  */

static rtx
frv_expand_binopimm_builtin (enum insn_code icode, tree arglist, rtx target)
{
  rtx pat;
  rtx op0 = frv_read_argument (&arglist);
  rtx op1 = frv_read_argument (&arglist);

  if (! frv_check_constant_argument (icode, 2, op1))
    return NULL_RTX;

  target = frv_legitimize_target (icode, target);
  op0 = frv_legitimize_argument (icode, 1, op0);
  pat = GEN_FCN (icode) (target, op0, op1);
  if (! pat)
    return NULL_RTX;

  emit_insn (pat);
  return target;
}

/* Expand builtins that take two operands, the first being a pointer to
   ints, and that return void.  */

static rtx
frv_expand_voidbinop_builtin (enum insn_code icode, tree arglist)
{
  rtx pat;
  rtx op0 = frv_read_argument (&arglist);
  rtx op1 = frv_read_argument (&arglist);
  enum machine_mode mode0 = insn_data[icode].operand[0].mode;
  rtx addr;

  if (GET_CODE (op0) != MEM)
    {
      rtx reg = op0;

      if (! offsettable_address_p (0, mode0, op0))
	{
	  reg = gen_reg_rtx (Pmode);
	  emit_insn (gen_rtx_SET (VOIDmode, reg, op0));
	}

      op0 = gen_rtx_MEM (SImode, reg);
    }

  addr = XEXP (op0, 0);
  if (! offsettable_address_p (0, mode0, addr))
    addr = copy_to_mode_reg (Pmode, op0);

  op0 = change_address (op0, V4SImode, addr);
  op1 = frv_legitimize_argument (icode, 1, op1);
  pat = GEN_FCN (icode) (op0, op1);
  if (! pat)
    return 0;

  emit_insn (pat);
  return 0;
}

/* Expand builtins that take two long operands and return void.  */

static rtx
frv_expand_int_void2arg (enum insn_code icode, tree arglist)
{
  rtx pat;
  rtx op0 = frv_read_argument (&arglist);
  rtx op1 = frv_read_argument (&arglist);

  op0 = frv_legitimize_argument (icode, 1, op0);
  op1 = frv_legitimize_argument (icode, 1, op1);
  pat = GEN_FCN (icode) (op0, op1);
  if (! pat)
    return NULL_RTX;

  emit_insn (pat);
  return NULL_RTX;
}

/* Expand prefetch builtins.  These take a single address as argument.  */

static rtx
frv_expand_prefetches (enum insn_code icode, tree arglist)
{
  rtx pat;
  rtx op0 = frv_read_argument (&arglist);

  pat = GEN_FCN (icode) (force_reg (Pmode, op0));
  if (! pat)
    return 0;

  emit_insn (pat);
  return 0;
}

/* Expand builtins that take three operands and return void.  The first
   argument must be a constant that describes a pair or quad of accumulators.
   A fourth argument is created that is the accumulator guard register that
   corresponds to the accumulator.  */

static rtx
frv_expand_voidtriop_builtin (enum insn_code icode, tree arglist)
{
  rtx pat;
  rtx op0 = frv_read_argument (&arglist);
  rtx op1 = frv_read_argument (&arglist);
  rtx op2 = frv_read_argument (&arglist);
  rtx op3;

  op0 = frv_int_to_acc (icode, 0, op0);
  if (! op0)
    return NULL_RTX;

  op1 = frv_legitimize_argument (icode, 1, op1);
  op2 = frv_legitimize_argument (icode, 2, op2);
  op3 = frv_matching_accg_for_acc (op0);
  pat = GEN_FCN (icode) (op0, op1, op2, op3);
  if (! pat)
    return NULL_RTX;

  emit_insn (pat);
  return NULL_RTX;
}

/* Expand builtins that perform accumulator-to-accumulator operations.
   These builtins take two accumulator numbers as argument and return
   void.  */

static rtx
frv_expand_voidaccop_builtin (enum insn_code icode, tree arglist)
{
  rtx pat;
  rtx op0 = frv_read_argument (&arglist);
  rtx op1 = frv_read_argument (&arglist);
  rtx op2;
  rtx op3;

  op0 = frv_int_to_acc (icode, 0, op0);
  if (! op0)
    return NULL_RTX;

  op1 = frv_int_to_acc (icode, 1, op1);
  if (! op1)
    return NULL_RTX;

  op2 = frv_matching_accg_for_acc (op0);
  op3 = frv_matching_accg_for_acc (op1);
  pat = GEN_FCN (icode) (op0, op1, op2, op3);
  if (! pat)
    return NULL_RTX;

  emit_insn (pat);
  return NULL_RTX;
}

/* Expand a __builtin_read* function.  ICODE is the instruction code for the
   membar and TARGET_MODE is the mode that the loaded value should have.  */

static rtx
frv_expand_load_builtin (enum insn_code icode, enum machine_mode target_mode,
                         tree arglist, rtx target)
{
  rtx op0 = frv_read_argument (&arglist);
  rtx cookie = frv_io_address_cookie (op0);

  if (target == 0 || !REG_P (target))
    target = gen_reg_rtx (target_mode);
  op0 = frv_volatile_memref (insn_data[icode].operand[0].mode, op0);
  convert_move (target, op0, 1);
  emit_insn (GEN_FCN (icode) (copy_rtx (op0), cookie, GEN_INT (FRV_IO_READ)));
  cfun->machine->has_membar_p = 1;
  return target;
}

/* Likewise __builtin_write* functions.  */

static rtx
frv_expand_store_builtin (enum insn_code icode, tree arglist)
{
  rtx op0 = frv_read_argument (&arglist);
  rtx op1 = frv_read_argument (&arglist);
  rtx cookie = frv_io_address_cookie (op0);

  op0 = frv_volatile_memref (insn_data[icode].operand[0].mode, op0);
  convert_move (op0, force_reg (insn_data[icode].operand[0].mode, op1), 1);
  emit_insn (GEN_FCN (icode) (copy_rtx (op0), cookie, GEN_INT (FRV_IO_WRITE)));
  cfun->machine->has_membar_p = 1;
  return NULL_RTX;
}

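/* Note added for illustration (not in the original sources): both expanders
   above emit a volatile access followed by the membar pattern, whose
   operands are the access, the address cookie from frv_io_address_cookie
   and a direction flag (FRV_IO_READ or FRV_IO_WRITE); setting has_membar_p
   records that the current function contains such barriers.  */
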
/* Expand the MDPACKH builtin.  It takes four unsigned short arguments and
   each argument forms one word of the two double-word input registers.
   ARGLIST is a TREE_LIST of the arguments and TARGET, if nonnull,
   suggests a good place to put the return value.  */

static rtx
frv_expand_mdpackh_builtin (tree arglist, rtx target)
{
  enum insn_code icode = CODE_FOR_mdpackh;
  rtx pat, op0, op1;
  rtx arg1 = frv_read_argument (&arglist);
  rtx arg2 = frv_read_argument (&arglist);
  rtx arg3 = frv_read_argument (&arglist);
  rtx arg4 = frv_read_argument (&arglist);

  target = frv_legitimize_target (icode, target);
  op0 = gen_reg_rtx (DImode);
  op1 = gen_reg_rtx (DImode);

  /* The high half of each word is not explicitly initialized, so indicate
     that the input operands are not live before this point.  */
  emit_insn (gen_rtx_CLOBBER (DImode, op0));
  emit_insn (gen_rtx_CLOBBER (DImode, op1));

  /* Move each argument into the low half of its associated input word.  */
  emit_move_insn (simplify_gen_subreg (HImode, op0, DImode, 2), arg1);
  emit_move_insn (simplify_gen_subreg (HImode, op0, DImode, 6), arg2);
  emit_move_insn (simplify_gen_subreg (HImode, op1, DImode, 2), arg3);
  emit_move_insn (simplify_gen_subreg (HImode, op1, DImode, 6), arg4);

  pat = GEN_FCN (icode) (target, op0, op1);
  if (! pat)
    return NULL_RTX;

  emit_insn (pat);
  return target;
}

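/* Note added for illustration (not in the original sources): the subreg
   byte offsets 2 and 6 used above select the low halfword of each 32-bit
   word of a DImode register on this big-endian target, so the four HImode
   arguments end up in the low halves of the two words of each double-word
   input register.  */
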
/* Expand the MCLRACC builtin.  This builtin takes a single accumulator
   number as argument.  */

static rtx
frv_expand_mclracc_builtin (tree arglist)
{
  enum insn_code icode = CODE_FOR_mclracc;
  rtx pat;
  rtx op0 = frv_read_argument (&arglist);

  op0 = frv_int_to_acc (icode, 0, op0);
  if (! op0)
    return NULL_RTX;

  pat = GEN_FCN (icode) (op0);
  if (pat)
    emit_insn (pat);

  return NULL_RTX;
}

/* Expand builtins that take no arguments.  */

static rtx
frv_expand_noargs_builtin (enum insn_code icode)
{
  rtx pat = GEN_FCN (icode) (const0_rtx);
  if (pat)
    emit_insn (pat);

  return NULL_RTX;
}

/* Expand MRDACC and MRDACCG.  These builtins take a single accumulator
   number or accumulator guard number as argument and return an SI integer.  */

static rtx
frv_expand_mrdacc_builtin (enum insn_code icode, tree arglist)
{
  rtx pat;
  rtx target = gen_reg_rtx (SImode);
  rtx op0 = frv_read_argument (&arglist);

  op0 = frv_int_to_acc (icode, 1, op0);
  if (! op0)
    return NULL_RTX;

  pat = GEN_FCN (icode) (target, op0);
  if (! pat)
    return NULL_RTX;

  emit_insn (pat);
  return target;
}

/* Expand MWTACC and MWTACCG.  These builtins take an accumulator or
   accumulator guard as their first argument and an SImode value as their
   second.  */

static rtx
frv_expand_mwtacc_builtin (enum insn_code icode, tree arglist)
{
  rtx pat;
  rtx op0 = frv_read_argument (&arglist);
  rtx op1 = frv_read_argument (&arglist);

  op0 = frv_int_to_acc (icode, 0, op0);
  if (! op0)
    return NULL_RTX;

  op1 = frv_legitimize_argument (icode, 1, op1);
  pat = GEN_FCN (icode) (op0, op1);
  if (pat)
    emit_insn (pat);

  return NULL_RTX;
}

/* Emit a move from SRC to DEST in SImode chunks.  This can be used
   to move DImode values into and out of IACC0.  */

static void
frv_split_iacc_move (rtx dest, rtx src)
{
  enum machine_mode inner;
  int i;

  inner = GET_MODE (dest);
  for (i = 0; i < GET_MODE_SIZE (inner); i += GET_MODE_SIZE (SImode))
    emit_move_insn (simplify_gen_subreg (SImode, dest, inner, i),
		    simplify_gen_subreg (SImode, src, inner, i));
}

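/* Note added for illustration (not in the original sources): for a DImode
   DEST and SRC the loop above emits exactly two SImode moves, one for the
   word at byte offset 0 and one for the word at byte offset 4.  */
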
/* Expand builtins.  */

static rtx
frv_expand_builtin (tree exp,
		    rtx target,
		    rtx subtarget ATTRIBUTE_UNUSED,
		    enum machine_mode mode ATTRIBUTE_UNUSED,
		    int ignore ATTRIBUTE_UNUSED)
{
  tree arglist = TREE_OPERAND (exp, 1);
  tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
  unsigned fcode = (unsigned)DECL_FUNCTION_CODE (fndecl);
  unsigned i;
  struct builtin_description *d;

  if (fcode < FRV_BUILTIN_FIRST_NONMEDIA && !TARGET_MEDIA)
    {
      error ("media functions are not available unless -mmedia is used");
      return NULL_RTX;
    }

  switch (fcode)
    {
    case FRV_BUILTIN_MCOP1:
    case FRV_BUILTIN_MCOP2:
    case FRV_BUILTIN_MDUNPACKH:
    case FRV_BUILTIN_MBTOHE:
      if (! TARGET_MEDIA_REV1)
	{
	  error ("this media function is only available on the fr500");
	  return NULL_RTX;
	}
      break;

    case FRV_BUILTIN_MQXMACHS:
    case FRV_BUILTIN_MQXMACXHS:
    case FRV_BUILTIN_MQMACXHS:
    case FRV_BUILTIN_MADDACCS:
    case FRV_BUILTIN_MSUBACCS:
    case FRV_BUILTIN_MASACCS:
    case FRV_BUILTIN_MDADDACCS:
    case FRV_BUILTIN_MDSUBACCS:
    case FRV_BUILTIN_MDASACCS:
    case FRV_BUILTIN_MABSHS:
    case FRV_BUILTIN_MDROTLI:
    case FRV_BUILTIN_MCPLHI:
    case FRV_BUILTIN_MCPLI:
    case FRV_BUILTIN_MDCUTSSI:
    case FRV_BUILTIN_MQSATHS:
    case FRV_BUILTIN_MHSETLOS:
    case FRV_BUILTIN_MHSETLOH:
    case FRV_BUILTIN_MHSETHIS:
    case FRV_BUILTIN_MHSETHIH:
    case FRV_BUILTIN_MHDSETS:
    case FRV_BUILTIN_MHDSETH:
      if (! TARGET_MEDIA_REV2)
	{
	  error ("this media function is only available on the fr400"
		 " and fr550");
	  return NULL_RTX;
	}
      break;

    case FRV_BUILTIN_SMASS:
    case FRV_BUILTIN_SMSSS:
    case FRV_BUILTIN_SMU:
    case FRV_BUILTIN_ADDSS:
    case FRV_BUILTIN_SUBSS:
    case FRV_BUILTIN_SLASS:
    case FRV_BUILTIN_SCUTSS:
    case FRV_BUILTIN_IACCreadll:
    case FRV_BUILTIN_IACCreadl:
    case FRV_BUILTIN_IACCsetll:
    case FRV_BUILTIN_IACCsetl:
      if (!TARGET_FR405_BUILTINS)
	{
	  error ("this builtin function is only available"
		 " on the fr405 and fr450");
	  return NULL_RTX;
	}
      break;

    case FRV_BUILTIN_PREFETCH:
      if (!TARGET_FR500_FR550_BUILTINS)
	{
	  error ("this builtin function is only available on the fr500"
		 " and fr550");
	  return NULL_RTX;
	}
      break;

    case FRV_BUILTIN_MQLCLRHS:
    case FRV_BUILTIN_MQLMTHS:
    case FRV_BUILTIN_MQSLLHI:
    case FRV_BUILTIN_MQSRAHI:
      if (!TARGET_MEDIA_FR450)
	{
	  error ("this builtin function is only available on the fr450");
	  return NULL_RTX;
	}
      break;

    default:
      break;
    }

  /* Expand unique builtins.  */

  switch (fcode)
    {
    case FRV_BUILTIN_MTRAP:
      return frv_expand_noargs_builtin (CODE_FOR_mtrap);

    case FRV_BUILTIN_MCLRACC:
      return frv_expand_mclracc_builtin (arglist);

    case FRV_BUILTIN_MCLRACCA:
      if (TARGET_ACC_8)
	return frv_expand_noargs_builtin (CODE_FOR_mclracca8);
      else
	return frv_expand_noargs_builtin (CODE_FOR_mclracca4);

    case FRV_BUILTIN_MRDACC:
      return frv_expand_mrdacc_builtin (CODE_FOR_mrdacc, arglist);

    case FRV_BUILTIN_MRDACCG:
      return frv_expand_mrdacc_builtin (CODE_FOR_mrdaccg, arglist);

    case FRV_BUILTIN_MWTACC:
      return frv_expand_mwtacc_builtin (CODE_FOR_mwtacc, arglist);

    case FRV_BUILTIN_MWTACCG:
      return frv_expand_mwtacc_builtin (CODE_FOR_mwtaccg, arglist);

    case FRV_BUILTIN_MDPACKH:
      return frv_expand_mdpackh_builtin (arglist, target);

    case FRV_BUILTIN_IACCreadll:
      {
	rtx src = frv_read_iacc_argument (DImode, &arglist);
	if (target == 0 || !REG_P (target))
	  target = gen_reg_rtx (DImode);
	frv_split_iacc_move (target, src);
	return target;
      }

    case FRV_BUILTIN_IACCreadl:
      return frv_read_iacc_argument (SImode, &arglist);

    case FRV_BUILTIN_IACCsetll:
      {
	rtx dest = frv_read_iacc_argument (DImode, &arglist);
	rtx src = frv_read_argument (&arglist);
	frv_split_iacc_move (dest, force_reg (DImode, src));
	return 0;
      }

    case FRV_BUILTIN_IACCsetl:
      {
	rtx dest = frv_read_iacc_argument (SImode, &arglist);
	rtx src = frv_read_argument (&arglist);
	emit_move_insn (dest, force_reg (SImode, src));
	return 0;
      }

    default:
      break;
    }

  /* Expand groups of builtins.  */

  for (i = 0, d = bdesc_set; i < ARRAY_SIZE (bdesc_set); i++, d++)
    if (d->code == fcode)
      return frv_expand_set_builtin (d->icode, arglist, target);

  for (i = 0, d = bdesc_1arg; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
    if (d->code == fcode)
      return frv_expand_unop_builtin (d->icode, arglist, target);

  for (i = 0, d = bdesc_2arg; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
    if (d->code == fcode)
      return frv_expand_binop_builtin (d->icode, arglist, target);

  for (i = 0, d = bdesc_cut; i < ARRAY_SIZE (bdesc_cut); i++, d++)
    if (d->code == fcode)
      return frv_expand_cut_builtin (d->icode, arglist, target);

  for (i = 0, d = bdesc_2argimm; i < ARRAY_SIZE (bdesc_2argimm); i++, d++)
    if (d->code == fcode)
      return frv_expand_binopimm_builtin (d->icode, arglist, target);

  for (i = 0, d = bdesc_void2arg; i < ARRAY_SIZE (bdesc_void2arg); i++, d++)
    if (d->code == fcode)
      return frv_expand_voidbinop_builtin (d->icode, arglist);

  for (i = 0, d = bdesc_void3arg; i < ARRAY_SIZE (bdesc_void3arg); i++, d++)
    if (d->code == fcode)
      return frv_expand_voidtriop_builtin (d->icode, arglist);

  for (i = 0, d = bdesc_voidacc; i < ARRAY_SIZE (bdesc_voidacc); i++, d++)
    if (d->code == fcode)
      return frv_expand_voidaccop_builtin (d->icode, arglist);

  for (i = 0, d = bdesc_int_void2arg;
       i < ARRAY_SIZE (bdesc_int_void2arg); i++, d++)
    if (d->code == fcode)
      return frv_expand_int_void2arg (d->icode, arglist);

  for (i = 0, d = bdesc_prefetches;
       i < ARRAY_SIZE (bdesc_prefetches); i++, d++)
    if (d->code == fcode)
      return frv_expand_prefetches (d->icode, arglist);

  for (i = 0, d = bdesc_loads; i < ARRAY_SIZE (bdesc_loads); i++, d++)
    if (d->code == fcode)
      return frv_expand_load_builtin (d->icode, TYPE_MODE (TREE_TYPE (exp)),
				      arglist, target);

  for (i = 0, d = bdesc_stores; i < ARRAY_SIZE (bdesc_stores); i++, d++)
    if (d->code == fcode)
      return frv_expand_store_builtin (d->icode, arglist);

  return 0;
}

static bool
frv_in_small_data_p (tree decl)
{
  HOST_WIDE_INT size;
  tree section_name;

  /* Don't apply the -G flag to internal compiler structures.  We
     should leave such structures in the main data section, partly
     for efficiency and partly because the size of some of them
     (such as C++ typeinfos) is not known until later.  */
  if (TREE_CODE (decl) != VAR_DECL || DECL_ARTIFICIAL (decl))
    return false;

  /* If we already know which section the decl should be in, see if
     it's a small data section.  */
  section_name = DECL_SECTION_NAME (decl);
  if (section_name)
    {
      gcc_assert (TREE_CODE (section_name) == STRING_CST);
      if (frv_string_begins_with (section_name, ".sdata"))
	return true;
      if (frv_string_begins_with (section_name, ".sbss"))
	return true;
      return false;
    }

  size = int_size_in_bytes (TREE_TYPE (decl));
  if (size > 0 && (unsigned HOST_WIDE_INT) size <= g_switch_value)
    return true;

  return false;
}

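/* Note added for illustration (not in the original sources): the size test
   above compares against the -G threshold, so with -G 8 an 8-byte object
   is treated as small data while a 12-byte one is not, unless its section
   was already explicitly named .sdata or .sbss.  */
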
static bool
frv_rtx_costs (rtx x,
	       int code ATTRIBUTE_UNUSED,
	       int outer_code ATTRIBUTE_UNUSED,
	       int *total)
{
  if (outer_code == MEM)
    {
      /* Don't differentiate between memory addresses.  All the ones
	 we accept have equal cost.  */
      *total = COSTS_N_INSNS (0);
      return true;
    }

  switch (code)
    {
    case CONST_INT:
      /* Make 12 bit integers really cheap.  */
      if (IN_RANGE_P (INTVAL (x), -2048, 2047))
	{
	  *total = 0;
	  return true;
	}
      /* Fall through.  */

    case CONST:
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_DOUBLE:
      *total = COSTS_N_INSNS (2);
      return true;

    case PLUS:
    case MINUS:
    case AND:
    case IOR:
    case XOR:
    case ASHIFT:
    case ASHIFTRT:
    case LSHIFTRT:
    case NOT:
    case NEG:
    case COMPARE:
      if (GET_MODE (x) == SImode)
	*total = COSTS_N_INSNS (1);
      else if (GET_MODE (x) == DImode)
	*total = COSTS_N_INSNS (2);
      else
	*total = COSTS_N_INSNS (3);
      return true;

    case MULT:
      if (GET_MODE (x) == SImode)
	*total = COSTS_N_INSNS (2);
      else
	*total = COSTS_N_INSNS (6);	/* guess */
      return true;

    case DIV:
    case UDIV:
    case MOD:
    case UMOD:
      *total = COSTS_N_INSNS (18);
      return true;

    case MEM:
      *total = COSTS_N_INSNS (3);
      return true;

    default:
      return false;
    }
}

static void
frv_asm_out_constructor (rtx symbol, int priority ATTRIBUTE_UNUSED)
{
  ctors_section ();
  assemble_align (POINTER_SIZE);
  if (TARGET_FDPIC)
    {
      int ok = frv_assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, 1);

      gcc_assert (ok);
      return;
    }
  assemble_integer_with_op ("\t.picptr\t", symbol);
}

static void
frv_asm_out_destructor (rtx symbol, int priority ATTRIBUTE_UNUSED)
{
  dtors_section ();
  assemble_align (POINTER_SIZE);
  if (TARGET_FDPIC)
    {
      int ok = frv_assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, 1);

      gcc_assert (ok);
      return;
    }
  assemble_integer_with_op ("\t.picptr\t", symbol);
}

/* Worker function for TARGET_STRUCT_VALUE_RTX.  */

static rtx
frv_struct_value_rtx (tree fntype ATTRIBUTE_UNUSED,
		      int incoming ATTRIBUTE_UNUSED)
{
  return gen_rtx_REG (Pmode, FRV_STRUCT_VALUE_REGNUM);
}

#define TLS_BIAS (2048 - 16)

/* This is called from dwarf2out.c via TARGET_ASM_OUTPUT_DWARF_DTPREL.
   We need to emit DTP-relative relocations.  */

static void
frv_output_dwarf_dtprel (FILE *file, int size, rtx x)
{
  gcc_assert (size == 4);
  fputs ("\t.picptr\ttlsmoff(", file);
  /* We want the unbiased TLS offset, so add the bias to the
     expression, such that the implicit biasing cancels out.  */
  output_addr_const (file, plus_constant (x, TLS_BIAS));
  fputs (")", file);
}

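/* Worked example (added for illustration, not in the original sources):
   TLS_BIAS is 2048 - 16 = 2032, so for a symbol X the directive emitted is
   ".picptr tlsmoff(X + 2032)"; the implicit bias mentioned in the comment
   above then cancels, leaving the unbiased DTP-relative offset in the
   DWARF output.  */
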

#include "gt-frv.h"