1 |
282 |
jeremybenn |
/* Subroutines used for code generation on the Argonaut ARC cpu.
|
2 |
|
|
Copyright (C) 1994, 1995, 1997, 1998, 1999, 2000, 2001, 2002, 2003,
|
3 |
|
|
2004, 2005, 2006, 2007, 2008, 2009 Free Software Foundation, Inc.
|
4 |
|
|
|
5 |
|
|
This file is part of GCC.
|
6 |
|
|
|
7 |
|
|
GCC is free software; you can redistribute it and/or modify
|
8 |
|
|
it under the terms of the GNU General Public License as published by
|
9 |
|
|
the Free Software Foundation; either version 3, or (at your option)
|
10 |
|
|
any later version.
|
11 |
|
|
|
12 |
|
|
GCC is distributed in the hope that it will be useful,
|
13 |
|
|
but WITHOUT ANY WARRANTY; without even the implied warranty of
|
14 |
|
|
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
15 |
|
|
GNU General Public License for more details.
|
16 |
|
|
|
17 |
|
|
You should have received a copy of the GNU General Public License
|
18 |
|
|
along with GCC; see the file COPYING3. If not see
|
19 |
|
|
<http://www.gnu.org/licenses/>. */
|
20 |
|
|
|
21 |
|
|
/* ??? This is an old port, and is undoubtedly suffering from bit rot. */
|
22 |
|
|
|
23 |
|
|
#include "config.h"
|
24 |
|
|
#include "system.h"
|
25 |
|
|
#include "coretypes.h"
|
26 |
|
|
#include "tm.h"
|
27 |
|
|
#include "tree.h"
|
28 |
|
|
#include "rtl.h"
|
29 |
|
|
#include "regs.h"
|
30 |
|
|
#include "hard-reg-set.h"
|
31 |
|
|
#include "real.h"
|
32 |
|
|
#include "insn-config.h"
|
33 |
|
|
#include "conditions.h"
|
34 |
|
|
#include "output.h"
|
35 |
|
|
#include "insn-attr.h"
|
36 |
|
|
#include "flags.h"
|
37 |
|
|
#include "function.h"
|
38 |
|
|
#include "expr.h"
|
39 |
|
|
#include "recog.h"
|
40 |
|
|
#include "toplev.h"
|
41 |
|
|
#include "df.h"
|
42 |
|
|
#include "tm_p.h"
|
43 |
|
|
#include "target.h"
|
44 |
|
|
#include "target-def.h"
|
45 |
|
|
|
46 |
|
|
/* Which cpu we're compiling for. */
|
47 |
|
|
int arc_cpu_type;
|
48 |
|
|
|
49 |
|
|
/* Name of mangle string to add to symbols to separate code compiled for each
|
50 |
|
|
cpu (or NULL). */
|
51 |
|
|
const char *arc_mangle_cpu;
|
52 |
|
|
|
53 |
|
|
/* Name of text, data, and rodata sections used in varasm.c. */
|
54 |
|
|
const char *arc_text_section;
|
55 |
|
|
const char *arc_data_section;
|
56 |
|
|
const char *arc_rodata_section;
|
57 |
|
|
|
58 |
|
|
/* Array of valid operand punctuation characters. */
|
59 |
|
|
char arc_punct_chars[256];
|
60 |
|
|
|
61 |
|
|
/* Variables used by arc_final_prescan_insn to implement conditional
|
62 |
|
|
execution. */
|
63 |
|
|
static int arc_ccfsm_state;
|
64 |
|
|
static int arc_ccfsm_current_cc;
|
65 |
|
|
static rtx arc_ccfsm_target_insn;
|
66 |
|
|
static int arc_ccfsm_target_label;
|
67 |
|
|
|
68 |
|
|
/* The maximum number of insns skipped which will be conditionalised if
|
69 |
|
|
possible. */
|
70 |
|
|
#define MAX_INSNS_SKIPPED 3
|
71 |
|
|
|
72 |
|
|
/* A nop is needed between a 4 byte insn that sets the condition codes and
|
73 |
|
|
a branch that uses them (the same isn't true for an 8 byte insn that sets
|
74 |
|
|
the condition codes). Set by arc_final_prescan_insn. Used by
|
75 |
|
|
arc_print_operand. */
|
76 |
|
|
static int last_insn_set_cc_p;
|
77 |
|
|
static int current_insn_set_cc_p;
|
78 |
|
|
static bool arc_handle_option (size_t, const char *, int);
|
79 |
|
|
static void record_cc_ref (rtx);
|
80 |
|
|
static void arc_init_reg_tables (void);
|
81 |
|
|
static int get_arc_condition_code (rtx);
|
82 |
|
|
static tree arc_handle_interrupt_attribute (tree *, tree, tree, int, bool *);
|
83 |
|
|
static bool arc_assemble_integer (rtx, unsigned int, int);
|
84 |
|
|
static void arc_output_function_prologue (FILE *, HOST_WIDE_INT);
|
85 |
|
|
static void arc_output_function_epilogue (FILE *, HOST_WIDE_INT);
|
86 |
|
|
static void arc_file_start (void);
|
87 |
|
|
static void arc_internal_label (FILE *, const char *, unsigned long);
|
88 |
|
|
static void arc_va_start (tree, rtx);
|
89 |
|
|
static void arc_setup_incoming_varargs (CUMULATIVE_ARGS *, enum machine_mode,
|
90 |
|
|
tree, int *, int);
|
91 |
|
|
static bool arc_rtx_costs (rtx, int, int, int *, bool);
|
92 |
|
|
static int arc_address_cost (rtx, bool);
|
93 |
|
|
static void arc_external_libcall (rtx);
|
94 |
|
|
static bool arc_return_in_memory (const_tree, const_tree);
|
95 |
|
|
static bool arc_pass_by_reference (CUMULATIVE_ARGS *, enum machine_mode,
|
96 |
|
|
const_tree, bool);
|
97 |
|
|
static void arc_trampoline_init (rtx, tree, rtx);
|
98 |
|
|
|
99 |
|
|
|
100 |
|
|
/* ARC specific attributs. */
|
101 |
|
|
|
102 |
|
|
static const struct attribute_spec arc_attribute_table[] =
|
103 |
|
|
{
|
104 |
|
|
/* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
|
105 |
|
|
{ "interrupt", 1, 1, true, false, false, arc_handle_interrupt_attribute },
|
106 |
|
|
{ NULL, 0, 0, false, false, false, NULL }
|
107 |
|
|
};
|
108 |
|
|
|
109 |
|
|
/* Initialize the GCC target structure. */
|
110 |
|
|
#undef TARGET_ASM_ALIGNED_HI_OP
|
111 |
|
|
#define TARGET_ASM_ALIGNED_HI_OP "\t.hword\t"
|
112 |
|
|
#undef TARGET_ASM_ALIGNED_SI_OP
|
113 |
|
|
#define TARGET_ASM_ALIGNED_SI_OP "\t.word\t"
|
114 |
|
|
#undef TARGET_ASM_INTEGER
|
115 |
|
|
#define TARGET_ASM_INTEGER arc_assemble_integer
|
116 |
|
|
|
117 |
|
|
#undef TARGET_ASM_FUNCTION_PROLOGUE
|
118 |
|
|
#define TARGET_ASM_FUNCTION_PROLOGUE arc_output_function_prologue
|
119 |
|
|
#undef TARGET_ASM_FUNCTION_EPILOGUE
|
120 |
|
|
#define TARGET_ASM_FUNCTION_EPILOGUE arc_output_function_epilogue
|
121 |
|
|
#undef TARGET_ASM_FILE_START
|
122 |
|
|
#define TARGET_ASM_FILE_START arc_file_start
|
123 |
|
|
#undef TARGET_ATTRIBUTE_TABLE
|
124 |
|
|
#define TARGET_ATTRIBUTE_TABLE arc_attribute_table
|
125 |
|
|
#undef TARGET_ASM_INTERNAL_LABEL
|
126 |
|
|
#define TARGET_ASM_INTERNAL_LABEL arc_internal_label
|
127 |
|
|
#undef TARGET_ASM_EXTERNAL_LIBCALL
|
128 |
|
|
#define TARGET_ASM_EXTERNAL_LIBCALL arc_external_libcall
|
129 |
|
|
|
130 |
|
|
#undef TARGET_HANDLE_OPTION
|
131 |
|
|
#define TARGET_HANDLE_OPTION arc_handle_option
|
132 |
|
|
|
133 |
|
|
#undef TARGET_RTX_COSTS
|
134 |
|
|
#define TARGET_RTX_COSTS arc_rtx_costs
|
135 |
|
|
#undef TARGET_ADDRESS_COST
|
136 |
|
|
#define TARGET_ADDRESS_COST arc_address_cost
|
137 |
|
|
|
138 |
|
|
#undef TARGET_PROMOTE_FUNCTION_MODE
|
139 |
|
|
#define TARGET_PROMOTE_FUNCTION_MODE default_promote_function_mode_always_promote
|
140 |
|
|
#undef TARGET_PROMOTE_PROTOTYPES
|
141 |
|
|
#define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true
|
142 |
|
|
|
143 |
|
|
#undef TARGET_RETURN_IN_MEMORY
|
144 |
|
|
#define TARGET_RETURN_IN_MEMORY arc_return_in_memory
|
145 |
|
|
#undef TARGET_PASS_BY_REFERENCE
|
146 |
|
|
#define TARGET_PASS_BY_REFERENCE arc_pass_by_reference
|
147 |
|
|
#undef TARGET_CALLEE_COPIES
|
148 |
|
|
#define TARGET_CALLEE_COPIES hook_bool_CUMULATIVE_ARGS_mode_tree_bool_true
|
149 |
|
|
|
150 |
|
|
#undef TARGET_SETUP_INCOMING_VARARGS
|
151 |
|
|
#define TARGET_SETUP_INCOMING_VARARGS arc_setup_incoming_varargs
|
152 |
|
|
|
153 |
|
|
#undef TARGET_EXPAND_BUILTIN_VA_START
|
154 |
|
|
#define TARGET_EXPAND_BUILTIN_VA_START arc_va_start
|
155 |
|
|
|
156 |
|
|
#undef TARGET_TRAMPOLINE_INIT
|
157 |
|
|
#define TARGET_TRAMPOLINE_INIT arc_trampoline_init
|
158 |
|
|
|
159 |
|
|
struct gcc_target targetm = TARGET_INITIALIZER;
|
160 |
|
|
|
161 |
|
|
/* Implement TARGET_HANDLE_OPTION. */
|
162 |
|
|
|
163 |
|
|
static bool
|
164 |
|
|
arc_handle_option (size_t code, const char *arg, int value ATTRIBUTE_UNUSED)
|
165 |
|
|
{
|
166 |
|
|
switch (code)
|
167 |
|
|
{
|
168 |
|
|
case OPT_mcpu_:
|
169 |
|
|
return strcmp (arg, "base") == 0 || ARC_EXTENSION_CPU (arg);
|
170 |
|
|
|
171 |
|
|
default:
|
172 |
|
|
return true;
|
173 |
|
|
}
|
174 |
|
|
}
|
175 |
|
|
|
176 |
|
|
/* Called by OVERRIDE_OPTIONS to initialize various things. */
|
177 |
|
|
|
178 |
|
|
void
|
179 |
|
|
arc_init (void)
|
180 |
|
|
{
|
181 |
|
|
char *tmp;
|
182 |
|
|
|
183 |
|
|
/* Set the pseudo-ops for the various standard sections. */
|
184 |
|
|
arc_text_section = tmp = XNEWVEC (char, strlen (arc_text_string) + sizeof (ARC_SECTION_FORMAT) + 1);
|
185 |
|
|
sprintf (tmp, ARC_SECTION_FORMAT, arc_text_string);
|
186 |
|
|
arc_data_section = tmp = XNEWVEC (char, strlen (arc_data_string) + sizeof (ARC_SECTION_FORMAT) + 1);
|
187 |
|
|
sprintf (tmp, ARC_SECTION_FORMAT, arc_data_string);
|
188 |
|
|
arc_rodata_section = tmp = XNEWVEC (char, strlen (arc_rodata_string) + sizeof (ARC_SECTION_FORMAT) + 1);
|
189 |
|
|
sprintf (tmp, ARC_SECTION_FORMAT, arc_rodata_string);
|
190 |
|
|
|
191 |
|
|
arc_init_reg_tables ();
|
192 |
|
|
|
193 |
|
|
/* Initialize array for PRINT_OPERAND_PUNCT_VALID_P. */
|
194 |
|
|
memset (arc_punct_chars, 0, sizeof (arc_punct_chars));
|
195 |
|
|
arc_punct_chars['#'] = 1;
|
196 |
|
|
arc_punct_chars['*'] = 1;
|
197 |
|
|
arc_punct_chars['?'] = 1;
|
198 |
|
|
arc_punct_chars['!'] = 1;
|
199 |
|
|
arc_punct_chars['~'] = 1;
|
200 |
|
|
}
|
201 |
|
|
|
202 |
|
|
/* The condition codes of the ARC, and the inverse function. */
|
203 |
|
|
static const char *const arc_condition_codes[] =
|
204 |
|
|
{
|
205 |
|
|
"al", 0, "eq", "ne", "p", "n", "c", "nc", "v", "nv",
|
206 |
|
|
"gt", "le", "ge", "lt", "hi", "ls", "pnz", 0
|
207 |
|
|
};
|
208 |
|
|
|
209 |
|
|
#define ARC_INVERSE_CONDITION_CODE(X) ((X) ^ 1)
|
210 |
|
|
|
211 |
|
|
/* Returns the index of the ARC condition code string in
|
212 |
|
|
`arc_condition_codes'. COMPARISON should be an rtx like
|
213 |
|
|
`(eq (...) (...))'. */
|
214 |
|
|
|
215 |
|
|
static int
|
216 |
|
|
get_arc_condition_code (rtx comparison)
|
217 |
|
|
{
|
218 |
|
|
switch (GET_CODE (comparison))
|
219 |
|
|
{
|
220 |
|
|
case EQ : return 2;
|
221 |
|
|
case NE : return 3;
|
222 |
|
|
case GT : return 10;
|
223 |
|
|
case LE : return 11;
|
224 |
|
|
case GE : return 12;
|
225 |
|
|
case LT : return 13;
|
226 |
|
|
case GTU : return 14;
|
227 |
|
|
case LEU : return 15;
|
228 |
|
|
case LTU : return 6;
|
229 |
|
|
case GEU : return 7;
|
230 |
|
|
default : gcc_unreachable ();
|
231 |
|
|
}
|
232 |
|
|
/*NOTREACHED*/
|
233 |
|
|
return (42);
|
234 |
|
|
}
|
235 |
|
|
|
236 |
|
|
/* Given a comparison code (EQ, NE, etc.) and the first operand of a COMPARE,
|
237 |
|
|
return the mode to be used for the comparison. */
|
238 |
|
|
|
239 |
|
|
enum machine_mode
|
240 |
|
|
arc_select_cc_mode (enum rtx_code op,
|
241 |
|
|
rtx x ATTRIBUTE_UNUSED,
|
242 |
|
|
rtx y ATTRIBUTE_UNUSED)
|
243 |
|
|
{
|
244 |
|
|
switch (op)
|
245 |
|
|
{
|
246 |
|
|
case EQ :
|
247 |
|
|
case NE :
|
248 |
|
|
return CCZNmode;
|
249 |
|
|
default :
|
250 |
|
|
switch (GET_CODE (x))
|
251 |
|
|
{
|
252 |
|
|
case AND :
|
253 |
|
|
case IOR :
|
254 |
|
|
case XOR :
|
255 |
|
|
case SIGN_EXTEND :
|
256 |
|
|
case ZERO_EXTEND :
|
257 |
|
|
return CCZNmode;
|
258 |
|
|
case ASHIFT :
|
259 |
|
|
case ASHIFTRT :
|
260 |
|
|
case LSHIFTRT :
|
261 |
|
|
return CCZNCmode;
|
262 |
|
|
default:
|
263 |
|
|
break;
|
264 |
|
|
}
|
265 |
|
|
}
|
266 |
|
|
return CCmode;
|
267 |
|
|
}
|
268 |
|
|
|
269 |
|
|
/* Vectors to keep interesting information about registers where it can easily
|
270 |
|
|
be got. We use to use the actual mode value as the bit number, but there
|
271 |
|
|
is (or may be) more than 32 modes now. Instead we use two tables: one
|
272 |
|
|
indexed by hard register number, and one indexed by mode. */
|
273 |
|
|
|
274 |
|
|
/* The purpose of arc_mode_class is to shrink the range of modes so that
|
275 |
|
|
they all fit (as bit numbers) in a 32-bit word (again). Each real mode is
|
276 |
|
|
mapped into one arc_mode_class mode. */
|
277 |
|
|
|
278 |
|
|
enum arc_mode_class {
|
279 |
|
|
C_MODE,
|
280 |
|
|
S_MODE, D_MODE, T_MODE, O_MODE,
|
281 |
|
|
SF_MODE, DF_MODE, TF_MODE, OF_MODE
|
282 |
|
|
};
|
283 |
|
|
|
284 |
|
|
/* Modes for condition codes. */
|
285 |
|
|
#define C_MODES (1 << (int) C_MODE)
|
286 |
|
|
|
287 |
|
|
/* Modes for single-word and smaller quantities. */
|
288 |
|
|
#define S_MODES ((1 << (int) S_MODE) | (1 << (int) SF_MODE))
|
289 |
|
|
|
290 |
|
|
/* Modes for double-word and smaller quantities. */
|
291 |
|
|
#define D_MODES (S_MODES | (1 << (int) D_MODE) | (1 << DF_MODE))
|
292 |
|
|
|
293 |
|
|
/* Modes for quad-word and smaller quantities. */
|
294 |
|
|
#define T_MODES (D_MODES | (1 << (int) T_MODE) | (1 << (int) TF_MODE))
|
295 |
|
|
|
296 |
|
|
/* Value is 1 if register/mode pair is acceptable on arc. */
|
297 |
|
|
|
298 |
|
|
const unsigned int arc_hard_regno_mode_ok[] = {
|
299 |
|
|
T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES,
|
300 |
|
|
T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES,
|
301 |
|
|
T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, D_MODES,
|
302 |
|
|
D_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES,
|
303 |
|
|
|
304 |
|
|
/* ??? Leave these as S_MODES for now. */
|
305 |
|
|
S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES,
|
306 |
|
|
S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES,
|
307 |
|
|
S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES,
|
308 |
|
|
S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, C_MODES
|
309 |
|
|
};
|
310 |
|
|
|
311 |
|
|
unsigned int arc_mode_class [NUM_MACHINE_MODES];
|
312 |
|
|
|
313 |
|
|
enum reg_class arc_regno_reg_class[FIRST_PSEUDO_REGISTER];
|
314 |
|
|
|
315 |
|
|
static void
|
316 |
|
|
arc_init_reg_tables (void)
|
317 |
|
|
{
|
318 |
|
|
int i;
|
319 |
|
|
|
320 |
|
|
for (i = 0; i < NUM_MACHINE_MODES; i++)
|
321 |
|
|
{
|
322 |
|
|
switch (GET_MODE_CLASS (i))
|
323 |
|
|
{
|
324 |
|
|
case MODE_INT:
|
325 |
|
|
case MODE_PARTIAL_INT:
|
326 |
|
|
case MODE_COMPLEX_INT:
|
327 |
|
|
if (GET_MODE_SIZE (i) <= 4)
|
328 |
|
|
arc_mode_class[i] = 1 << (int) S_MODE;
|
329 |
|
|
else if (GET_MODE_SIZE (i) == 8)
|
330 |
|
|
arc_mode_class[i] = 1 << (int) D_MODE;
|
331 |
|
|
else if (GET_MODE_SIZE (i) == 16)
|
332 |
|
|
arc_mode_class[i] = 1 << (int) T_MODE;
|
333 |
|
|
else if (GET_MODE_SIZE (i) == 32)
|
334 |
|
|
arc_mode_class[i] = 1 << (int) O_MODE;
|
335 |
|
|
else
|
336 |
|
|
arc_mode_class[i] = 0;
|
337 |
|
|
break;
|
338 |
|
|
case MODE_FLOAT:
|
339 |
|
|
case MODE_COMPLEX_FLOAT:
|
340 |
|
|
if (GET_MODE_SIZE (i) <= 4)
|
341 |
|
|
arc_mode_class[i] = 1 << (int) SF_MODE;
|
342 |
|
|
else if (GET_MODE_SIZE (i) == 8)
|
343 |
|
|
arc_mode_class[i] = 1 << (int) DF_MODE;
|
344 |
|
|
else if (GET_MODE_SIZE (i) == 16)
|
345 |
|
|
arc_mode_class[i] = 1 << (int) TF_MODE;
|
346 |
|
|
else if (GET_MODE_SIZE (i) == 32)
|
347 |
|
|
arc_mode_class[i] = 1 << (int) OF_MODE;
|
348 |
|
|
else
|
349 |
|
|
arc_mode_class[i] = 0;
|
350 |
|
|
break;
|
351 |
|
|
case MODE_CC:
|
352 |
|
|
arc_mode_class[i] = 1 << (int) C_MODE;
|
353 |
|
|
break;
|
354 |
|
|
default:
|
355 |
|
|
arc_mode_class[i] = 0;
|
356 |
|
|
break;
|
357 |
|
|
}
|
358 |
|
|
}
|
359 |
|
|
|
360 |
|
|
for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
|
361 |
|
|
{
|
362 |
|
|
if (i < 60)
|
363 |
|
|
arc_regno_reg_class[i] = GENERAL_REGS;
|
364 |
|
|
else if (i == 60)
|
365 |
|
|
arc_regno_reg_class[i] = LPCOUNT_REG;
|
366 |
|
|
else if (i == 61)
|
367 |
|
|
arc_regno_reg_class[i] = NO_REGS /* CC_REG: must be NO_REGS */;
|
368 |
|
|
else
|
369 |
|
|
arc_regno_reg_class[i] = NO_REGS;
|
370 |
|
|
}
|
371 |
|
|
}
|
372 |
|
|
|
373 |
|
|
/* ARC specific attribute support.
|
374 |
|
|
|
375 |
|
|
The ARC has these attributes:
|
376 |
|
|
interrupt - for interrupt functions
|
377 |
|
|
*/
|
378 |
|
|
|
379 |
|
|
/* Handle an "interrupt" attribute; arguments as in
|
380 |
|
|
struct attribute_spec.handler. */
|
381 |
|
|
static tree
|
382 |
|
|
arc_handle_interrupt_attribute (tree *node ATTRIBUTE_UNUSED,
|
383 |
|
|
tree name,
|
384 |
|
|
tree args,
|
385 |
|
|
int flags ATTRIBUTE_UNUSED,
|
386 |
|
|
bool *no_add_attrs)
|
387 |
|
|
{
|
388 |
|
|
tree value = TREE_VALUE (args);
|
389 |
|
|
|
390 |
|
|
if (TREE_CODE (value) != STRING_CST)
|
391 |
|
|
{
|
392 |
|
|
warning (OPT_Wattributes,
|
393 |
|
|
"argument of %qE attribute is not a string constant",
|
394 |
|
|
name);
|
395 |
|
|
*no_add_attrs = true;
|
396 |
|
|
}
|
397 |
|
|
else if (strcmp (TREE_STRING_POINTER (value), "ilink1")
|
398 |
|
|
&& strcmp (TREE_STRING_POINTER (value), "ilink2"))
|
399 |
|
|
{
|
400 |
|
|
warning (OPT_Wattributes,
|
401 |
|
|
"argument of %qE attribute is not \"ilink1\" or \"ilink2\"",
|
402 |
|
|
name);
|
403 |
|
|
*no_add_attrs = true;
|
404 |
|
|
}
|
405 |
|
|
|
406 |
|
|
return NULL_TREE;
|
407 |
|
|
}
|
408 |
|
|
|
409 |
|
|
|
410 |
|
|
/* Acceptable arguments to the call insn. */
|
411 |
|
|
|
412 |
|
|
int
|
413 |
|
|
call_address_operand (rtx op, enum machine_mode mode)
|
414 |
|
|
{
|
415 |
|
|
return (symbolic_operand (op, mode)
|
416 |
|
|
|| (GET_CODE (op) == CONST_INT && LEGITIMATE_CONSTANT_P (op))
|
417 |
|
|
|| (GET_CODE (op) == REG));
|
418 |
|
|
}
|
419 |
|
|
|
420 |
|
|
int
|
421 |
|
|
call_operand (rtx op, enum machine_mode mode)
|
422 |
|
|
{
|
423 |
|
|
if (GET_CODE (op) != MEM)
|
424 |
|
|
return 0;
|
425 |
|
|
op = XEXP (op, 0);
|
426 |
|
|
return call_address_operand (op, mode);
|
427 |
|
|
}
|
428 |
|
|
|
429 |
|
|
/* Returns 1 if OP is a symbol reference. */
|
430 |
|
|
|
431 |
|
|
int
|
432 |
|
|
symbolic_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
|
433 |
|
|
{
|
434 |
|
|
switch (GET_CODE (op))
|
435 |
|
|
{
|
436 |
|
|
case SYMBOL_REF:
|
437 |
|
|
case LABEL_REF:
|
438 |
|
|
case CONST :
|
439 |
|
|
return 1;
|
440 |
|
|
default:
|
441 |
|
|
return 0;
|
442 |
|
|
}
|
443 |
|
|
}
|
444 |
|
|
|
445 |
|
|
/* Return truth value of statement that OP is a symbolic memory
|
446 |
|
|
operand of mode MODE. */
|
447 |
|
|
|
448 |
|
|
int
|
449 |
|
|
symbolic_memory_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
|
450 |
|
|
{
|
451 |
|
|
if (GET_CODE (op) == SUBREG)
|
452 |
|
|
op = SUBREG_REG (op);
|
453 |
|
|
if (GET_CODE (op) != MEM)
|
454 |
|
|
return 0;
|
455 |
|
|
op = XEXP (op, 0);
|
456 |
|
|
return (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == CONST
|
457 |
|
|
|| GET_CODE (op) == LABEL_REF);
|
458 |
|
|
}
|
459 |
|
|
|
460 |
|
|
/* Return true if OP is a short immediate (shimm) value. */
|
461 |
|
|
|
462 |
|
|
int
|
463 |
|
|
short_immediate_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
|
464 |
|
|
{
|
465 |
|
|
if (GET_CODE (op) != CONST_INT)
|
466 |
|
|
return 0;
|
467 |
|
|
return SMALL_INT (INTVAL (op));
|
468 |
|
|
}
|
469 |
|
|
|
470 |
|
|
/* Return true if OP will require a long immediate (limm) value.
|
471 |
|
|
This is currently only used when calculating length attributes. */
|
472 |
|
|
|
473 |
|
|
int
|
474 |
|
|
long_immediate_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
|
475 |
|
|
{
|
476 |
|
|
switch (GET_CODE (op))
|
477 |
|
|
{
|
478 |
|
|
case SYMBOL_REF :
|
479 |
|
|
case LABEL_REF :
|
480 |
|
|
case CONST :
|
481 |
|
|
return 1;
|
482 |
|
|
case CONST_INT :
|
483 |
|
|
return !SMALL_INT (INTVAL (op));
|
484 |
|
|
case CONST_DOUBLE :
|
485 |
|
|
/* These can happen because large unsigned 32-bit constants are
|
486 |
|
|
represented this way (the multiplication patterns can cause these
|
487 |
|
|
to be generated). They also occur for SFmode values. */
|
488 |
|
|
return 1;
|
489 |
|
|
default:
|
490 |
|
|
break;
|
491 |
|
|
}
|
492 |
|
|
return 0;
|
493 |
|
|
}
|
494 |
|
|
|
495 |
|
|
/* Return true if OP is a MEM that when used as a load or store address will
|
496 |
|
|
require an 8 byte insn.
|
497 |
|
|
Load and store instructions don't allow the same possibilities but they're
|
498 |
|
|
similar enough that this one function will do.
|
499 |
|
|
This is currently only used when calculating length attributes. */
|
500 |
|
|
|
501 |
|
|
int
|
502 |
|
|
long_immediate_loadstore_operand (rtx op,
|
503 |
|
|
enum machine_mode mode ATTRIBUTE_UNUSED)
|
504 |
|
|
{
|
505 |
|
|
if (GET_CODE (op) != MEM)
|
506 |
|
|
return 0;
|
507 |
|
|
|
508 |
|
|
op = XEXP (op, 0);
|
509 |
|
|
switch (GET_CODE (op))
|
510 |
|
|
{
|
511 |
|
|
case SYMBOL_REF :
|
512 |
|
|
case LABEL_REF :
|
513 |
|
|
case CONST :
|
514 |
|
|
return 1;
|
515 |
|
|
case CONST_INT :
|
516 |
|
|
/* This must be handled as "st c,[limm]". Ditto for load.
|
517 |
|
|
Technically, the assembler could translate some possibilities to
|
518 |
|
|
"st c,[limm/2 + limm/2]" if limm/2 will fit in a shimm, but we don't
|
519 |
|
|
assume that it does. */
|
520 |
|
|
return 1;
|
521 |
|
|
case CONST_DOUBLE :
|
522 |
|
|
/* These can happen because large unsigned 32-bit constants are
|
523 |
|
|
represented this way (the multiplication patterns can cause these
|
524 |
|
|
to be generated). They also occur for SFmode values. */
|
525 |
|
|
return 1;
|
526 |
|
|
case REG :
|
527 |
|
|
return 0;
|
528 |
|
|
case PLUS :
|
529 |
|
|
if (GET_CODE (XEXP (op, 1)) == CONST_INT
|
530 |
|
|
&& !SMALL_INT (INTVAL (XEXP (op, 1))))
|
531 |
|
|
return 1;
|
532 |
|
|
return 0;
|
533 |
|
|
default:
|
534 |
|
|
break;
|
535 |
|
|
}
|
536 |
|
|
return 0;
|
537 |
|
|
}
|
538 |
|
|
|
539 |
|
|
/* Return true if OP is an acceptable argument for a single word
|
540 |
|
|
move source. */
|
541 |
|
|
|
542 |
|
|
int
|
543 |
|
|
move_src_operand (rtx op, enum machine_mode mode)
|
544 |
|
|
{
|
545 |
|
|
switch (GET_CODE (op))
|
546 |
|
|
{
|
547 |
|
|
case SYMBOL_REF :
|
548 |
|
|
case LABEL_REF :
|
549 |
|
|
case CONST :
|
550 |
|
|
return 1;
|
551 |
|
|
case CONST_INT :
|
552 |
|
|
return (LARGE_INT (INTVAL (op)));
|
553 |
|
|
case CONST_DOUBLE :
|
554 |
|
|
/* We can handle DImode integer constants in SImode if the value
|
555 |
|
|
(signed or unsigned) will fit in 32 bits. This is needed because
|
556 |
|
|
large unsigned 32-bit constants are represented as CONST_DOUBLEs. */
|
557 |
|
|
if (mode == SImode)
|
558 |
|
|
return arc_double_limm_p (op);
|
559 |
|
|
/* We can handle 32-bit floating point constants. */
|
560 |
|
|
if (mode == SFmode)
|
561 |
|
|
return GET_MODE (op) == SFmode;
|
562 |
|
|
return 0;
|
563 |
|
|
case REG :
|
564 |
|
|
return register_operand (op, mode);
|
565 |
|
|
case SUBREG :
|
566 |
|
|
/* (subreg (mem ...) ...) can occur here if the inner part was once a
|
567 |
|
|
pseudo-reg and is now a stack slot. */
|
568 |
|
|
if (GET_CODE (SUBREG_REG (op)) == MEM)
|
569 |
|
|
return address_operand (XEXP (SUBREG_REG (op), 0), mode);
|
570 |
|
|
else
|
571 |
|
|
return register_operand (op, mode);
|
572 |
|
|
case MEM :
|
573 |
|
|
return address_operand (XEXP (op, 0), mode);
|
574 |
|
|
default :
|
575 |
|
|
return 0;
|
576 |
|
|
}
|
577 |
|
|
}
|
578 |
|
|
|
579 |
|
|
/* Return true if OP is an acceptable argument for a double word
|
580 |
|
|
move source. */
|
581 |
|
|
|
582 |
|
|
int
|
583 |
|
|
move_double_src_operand (rtx op, enum machine_mode mode)
|
584 |
|
|
{
|
585 |
|
|
switch (GET_CODE (op))
|
586 |
|
|
{
|
587 |
|
|
case REG :
|
588 |
|
|
return register_operand (op, mode);
|
589 |
|
|
case SUBREG :
|
590 |
|
|
/* (subreg (mem ...) ...) can occur here if the inner part was once a
|
591 |
|
|
pseudo-reg and is now a stack slot. */
|
592 |
|
|
if (GET_CODE (SUBREG_REG (op)) == MEM)
|
593 |
|
|
return move_double_src_operand (SUBREG_REG (op), mode);
|
594 |
|
|
else
|
595 |
|
|
return register_operand (op, mode);
|
596 |
|
|
case MEM :
|
597 |
|
|
/* Disallow auto inc/dec for now. */
|
598 |
|
|
if (GET_CODE (XEXP (op, 0)) == PRE_DEC
|
599 |
|
|
|| GET_CODE (XEXP (op, 0)) == PRE_INC)
|
600 |
|
|
return 0;
|
601 |
|
|
return address_operand (XEXP (op, 0), mode);
|
602 |
|
|
case CONST_INT :
|
603 |
|
|
case CONST_DOUBLE :
|
604 |
|
|
return 1;
|
605 |
|
|
default :
|
606 |
|
|
return 0;
|
607 |
|
|
}
|
608 |
|
|
}
|
609 |
|
|
|
610 |
|
|
/* Return true if OP is an acceptable argument for a move destination. */
|
611 |
|
|
|
612 |
|
|
int
|
613 |
|
|
move_dest_operand (rtx op, enum machine_mode mode)
|
614 |
|
|
{
|
615 |
|
|
switch (GET_CODE (op))
|
616 |
|
|
{
|
617 |
|
|
case REG :
|
618 |
|
|
return register_operand (op, mode);
|
619 |
|
|
case SUBREG :
|
620 |
|
|
/* (subreg (mem ...) ...) can occur here if the inner part was once a
|
621 |
|
|
pseudo-reg and is now a stack slot. */
|
622 |
|
|
if (GET_CODE (SUBREG_REG (op)) == MEM)
|
623 |
|
|
return address_operand (XEXP (SUBREG_REG (op), 0), mode);
|
624 |
|
|
else
|
625 |
|
|
return register_operand (op, mode);
|
626 |
|
|
case MEM :
|
627 |
|
|
return address_operand (XEXP (op, 0), mode);
|
628 |
|
|
default :
|
629 |
|
|
return 0;
|
630 |
|
|
}
|
631 |
|
|
}
|
632 |
|
|
|
633 |
|
|
/* Return true if OP is valid load with update operand. */
|
634 |
|
|
|
635 |
|
|
int
|
636 |
|
|
load_update_operand (rtx op, enum machine_mode mode)
|
637 |
|
|
{
|
638 |
|
|
if (GET_CODE (op) != MEM
|
639 |
|
|
|| GET_MODE (op) != mode)
|
640 |
|
|
return 0;
|
641 |
|
|
op = XEXP (op, 0);
|
642 |
|
|
if (GET_CODE (op) != PLUS
|
643 |
|
|
|| GET_MODE (op) != Pmode
|
644 |
|
|
|| !register_operand (XEXP (op, 0), Pmode)
|
645 |
|
|
|| !nonmemory_operand (XEXP (op, 1), Pmode))
|
646 |
|
|
return 0;
|
647 |
|
|
return 1;
|
648 |
|
|
}
|
649 |
|
|
|
650 |
|
|
/* Return true if OP is valid store with update operand. */
|
651 |
|
|
|
652 |
|
|
int
|
653 |
|
|
store_update_operand (rtx op, enum machine_mode mode)
|
654 |
|
|
{
|
655 |
|
|
if (GET_CODE (op) != MEM
|
656 |
|
|
|| GET_MODE (op) != mode)
|
657 |
|
|
return 0;
|
658 |
|
|
op = XEXP (op, 0);
|
659 |
|
|
if (GET_CODE (op) != PLUS
|
660 |
|
|
|| GET_MODE (op) != Pmode
|
661 |
|
|
|| !register_operand (XEXP (op, 0), Pmode)
|
662 |
|
|
|| !(GET_CODE (XEXP (op, 1)) == CONST_INT
|
663 |
|
|
&& SMALL_INT (INTVAL (XEXP (op, 1)))))
|
664 |
|
|
return 0;
|
665 |
|
|
return 1;
|
666 |
|
|
}
|
667 |
|
|
|
668 |
|
|
/* Return true if OP is a non-volatile non-immediate operand.
|
669 |
|
|
Volatile memory refs require a special "cache-bypass" instruction
|
670 |
|
|
and only the standard movXX patterns are set up to handle them. */
|
671 |
|
|
|
672 |
|
|
int
|
673 |
|
|
nonvol_nonimm_operand (rtx op, enum machine_mode mode)
|
674 |
|
|
{
|
675 |
|
|
if (GET_CODE (op) == MEM && MEM_VOLATILE_P (op))
|
676 |
|
|
return 0;
|
677 |
|
|
return nonimmediate_operand (op, mode);
|
678 |
|
|
}
|
679 |
|
|
|
680 |
|
|
/* Accept integer operands in the range -0x80000000..0x7fffffff. We have
|
681 |
|
|
to check the range carefully since this predicate is used in DImode
|
682 |
|
|
contexts. */
|
683 |
|
|
|
684 |
|
|
int
|
685 |
|
|
const_sint32_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
|
686 |
|
|
{
|
687 |
|
|
/* All allowed constants will fit a CONST_INT. */
|
688 |
|
|
return (GET_CODE (op) == CONST_INT
|
689 |
|
|
&& (INTVAL (op) >= (-0x7fffffff - 1) && INTVAL (op) <= 0x7fffffff));
|
690 |
|
|
}
|
691 |
|
|
|
692 |
|
|
/* Accept integer operands in the range 0..0xffffffff. We have to check the
|
693 |
|
|
range carefully since this predicate is used in DImode contexts. Also, we
|
694 |
|
|
need some extra crud to make it work when hosted on 64-bit machines. */
|
695 |
|
|
|
696 |
|
|
int
|
697 |
|
|
const_uint32_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
|
698 |
|
|
{
|
699 |
|
|
#if HOST_BITS_PER_WIDE_INT > 32
|
700 |
|
|
/* All allowed constants will fit a CONST_INT. */
|
701 |
|
|
return (GET_CODE (op) == CONST_INT
|
702 |
|
|
&& (INTVAL (op) >= 0 && INTVAL (op) <= 0xffffffffL));
|
703 |
|
|
#else
|
704 |
|
|
return ((GET_CODE (op) == CONST_INT && INTVAL (op) >= 0)
|
705 |
|
|
|| (GET_CODE (op) == CONST_DOUBLE && CONST_DOUBLE_HIGH (op) == 0));
|
706 |
|
|
#endif
|
707 |
|
|
}
|
708 |
|
|
|
709 |
|
|
/* Return 1 if OP is a comparison operator valid for the mode of CC.
|
710 |
|
|
This allows the use of MATCH_OPERATOR to recognize all the branch insns.
|
711 |
|
|
|
712 |
|
|
Some insns only set a few bits in the condition code. So only allow those
|
713 |
|
|
comparisons that use the bits that are valid. */
|
714 |
|
|
|
715 |
|
|
int
|
716 |
|
|
proper_comparison_operator (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
|
717 |
|
|
{
|
718 |
|
|
enum rtx_code code;
|
719 |
|
|
if (!COMPARISON_P (op))
|
720 |
|
|
return 0;
|
721 |
|
|
|
722 |
|
|
code = GET_CODE (op);
|
723 |
|
|
if (GET_MODE (XEXP (op, 0)) == CCZNmode)
|
724 |
|
|
return (code == EQ || code == NE);
|
725 |
|
|
if (GET_MODE (XEXP (op, 0)) == CCZNCmode)
|
726 |
|
|
return (code == EQ || code == NE
|
727 |
|
|
|| code == LTU || code == GEU || code == GTU || code == LEU);
|
728 |
|
|
return 1;
|
729 |
|
|
}
|
730 |
|
|
|
731 |
|
|
/* Misc. utilities. */
|
732 |
|
|
|
733 |
|
|
/* X and Y are two things to compare using CODE. Return the rtx
|
734 |
|
|
for the cc reg in the proper mode. */
|
735 |
|
|
|
736 |
|
|
rtx
|
737 |
|
|
gen_compare_reg (enum rtx_code code, rtx x, rtx y)
|
738 |
|
|
{
|
739 |
|
|
enum machine_mode mode = SELECT_CC_MODE (code, x, y);
|
740 |
|
|
return gen_rtx_REG (mode, 61);
|
741 |
|
|
}
|
742 |
|
|
|
743 |
|
|
/* Return 1 if VALUE, a const_double, will fit in a limm (4 byte number).
   We assume the value can be either signed or unsigned.  */

int
arc_double_limm_p (rtx value)
{
  HOST_WIDE_INT low, high;

  gcc_assert (GET_CODE (value) == CONST_DOUBLE);

  low = CONST_DOUBLE_LOW (value);
  high = CONST_DOUBLE_HIGH (value);

  if (low & 0x80000000)
    {
      /* Bit 31 of LOW is set.  The value fits a limm either when it is
	 an unsigned 32-bit quantity (HIGH is zero), or when it is the
	 sign-extension of a negative 32-bit quantity: all bits of LOW
	 from bit 31 upward replicate the sign and HIGH is all ones.  */
      return (((unsigned HOST_WIDE_INT) low <= 0xffffffff && high == 0)
	      || (((low & - (unsigned HOST_WIDE_INT) 0x80000000)
		   == - (unsigned HOST_WIDE_INT) 0x80000000)
		  && high == -1));
    }
  else
    {
      /* Bit 31 clear: any value up to 0x7fffffff with a zero HIGH word
	 fits, regardless of signedness.  */
      return (unsigned HOST_WIDE_INT) low <= 0x7fffffff && high == 0;
    }
}
|
768 |
|
|
|
769 |
|
|
/* Do any needed setup for a variadic function.  For the ARC, we must
   create a register parameter block, and then copy any anonymous arguments
   in registers to memory.

   CUM has not been updated for the last named argument which has type TYPE
   and mode MODE, and we rely on this fact.

   We do things a little weird here.  We're supposed to only allocate space
   for the anonymous arguments.  However we need to keep the stack eight byte
   aligned.  So we round the space up if necessary, and leave it to va_start
   to compensate.  */

static void
arc_setup_incoming_varargs (CUMULATIVE_ARGS *cum,
			    enum machine_mode mode,
			    tree type ATTRIBUTE_UNUSED,
			    int *pretend_size,
			    int no_rtl)
{
  int first_anon_arg;

  /* All BLKmode values are passed by reference.  */
  gcc_assert (mode != BLKmode);

  /* First parameter register holding an anonymous argument: *CUM words
     already consumed, plus the word-rounded size of the last named
     argument (not yet reflected in *CUM — see the comment above).  */
  first_anon_arg = *cum + ((GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1)
			   / UNITS_PER_WORD);

  if (first_anon_arg < MAX_ARC_PARM_REGS && !no_rtl)
    {
      /* Note that first_reg_offset < MAX_ARC_PARM_REGS.  */
      int first_reg_offset = first_anon_arg;
      /* Size in words to "pretend" allocate.  */
      int size = MAX_ARC_PARM_REGS - first_reg_offset;
      /* Extra slop to keep stack eight byte aligned.  */
      int align_slop = size & 1;
      rtx regblock;

      /* Memory block, just above the incoming args, into which the
	 remaining parameter registers are dumped.  */
      regblock = gen_rtx_MEM (BLKmode,
			      plus_constant (arg_pointer_rtx,
					     FIRST_PARM_OFFSET (0)
					     + align_slop * UNITS_PER_WORD));
      set_mem_alias_set (regblock, get_varargs_alias_set ());
      set_mem_align (regblock, BITS_PER_WORD);
      move_block_from_reg (first_reg_offset, regblock,
			   MAX_ARC_PARM_REGS - first_reg_offset);

      /* Report the bytes "pretended" (registers dumped plus alignment
	 slop) so the caller's frame bookkeeping matches.  */
      *pretend_size = ((MAX_ARC_PARM_REGS - first_reg_offset + align_slop)
		       * UNITS_PER_WORD);
    }
}
|
819 |
|
|
|
820 |
|
|
/* Cost functions. */
|
821 |
|
|
|
822 |
|
|
/* Compute a (partial) cost for rtx X.  Return true if the complete
   cost has been computed, and false if subexpressions should be
   scanned.  In either case, *TOTAL contains the cost result.  */

static bool
arc_rtx_costs (rtx x, int code, int outer_code ATTRIBUTE_UNUSED, int *total,
	       bool speed ATTRIBUTE_UNUSED)
{
  switch (code)
    {
      /* Small integers are as cheap as registers.  4 byte values can
	 be fetched as immediate constants - let's give that the cost
	 of an extra insn.  */
    case CONST_INT:
      if (SMALL_INT (INTVAL (x)))
	{
	  *total = 0;
	  return true;
	}
      /* FALLTHRU */

    case CONST:
    case LABEL_REF:
    case SYMBOL_REF:
      *total = COSTS_N_INSNS (1);
      return true;

    case CONST_DOUBLE:
      {
	/* Charge one insn for each half that does not fit a shimm.  */
	rtx high, low;
	split_double (x, &high, &low);
	*total = COSTS_N_INSNS (!SMALL_INT (INTVAL (high))
				+ !SMALL_INT (INTVAL (low)));
	return true;
      }

    /* Encourage synth_mult to find a synthetic multiply when reasonable.
       If we need more than 12 insns to do a multiply, then go out-of-line,
       since the call overhead will be < 10% of the cost of the multiply.  */
    case ASHIFT:
    case ASHIFTRT:
    case LSHIFTRT:
      if (TARGET_SHIFTER)
	/* Barrel shifter: any shift is a single insn.  */
	*total = COSTS_N_INSNS (1);
      else if (GET_CODE (XEXP (x, 1)) != CONST_INT)
	/* Variable count without a shifter: assume a worst-case loop.  */
	*total = COSTS_N_INSNS (16);
      else
	/* Constant count: one single-bit shift insn per bit.  */
	*total = COSTS_N_INSNS (INTVAL (XEXP ((x), 1)));
      return false;

    default:
      return false;
    }
}
|
876 |
|
|
|
877 |
|
|
|
878 |
|
|
/* Provide the costs of an addressing mode that contains ADDR.
|
879 |
|
|
If ADDR is not a valid address, its cost is irrelevant. */
|
880 |
|
|
|
881 |
|
|
static int
|
882 |
|
|
arc_address_cost (rtx addr, bool speed ATTRIBUTE_UNUSED)
|
883 |
|
|
{
|
884 |
|
|
switch (GET_CODE (addr))
|
885 |
|
|
{
|
886 |
|
|
case REG :
|
887 |
|
|
return 1;
|
888 |
|
|
|
889 |
|
|
case LABEL_REF :
|
890 |
|
|
case SYMBOL_REF :
|
891 |
|
|
case CONST :
|
892 |
|
|
return 2;
|
893 |
|
|
|
894 |
|
|
case PLUS :
|
895 |
|
|
{
|
896 |
|
|
register rtx plus0 = XEXP (addr, 0);
|
897 |
|
|
register rtx plus1 = XEXP (addr, 1);
|
898 |
|
|
|
899 |
|
|
if (GET_CODE (plus0) != REG)
|
900 |
|
|
break;
|
901 |
|
|
|
902 |
|
|
switch (GET_CODE (plus1))
|
903 |
|
|
{
|
904 |
|
|
case CONST_INT :
|
905 |
|
|
return SMALL_INT (INTVAL (plus1)) ? 1 : 2;
|
906 |
|
|
case CONST :
|
907 |
|
|
case SYMBOL_REF :
|
908 |
|
|
case LABEL_REF :
|
909 |
|
|
return 2;
|
910 |
|
|
default:
|
911 |
|
|
break;
|
912 |
|
|
}
|
913 |
|
|
break;
|
914 |
|
|
}
|
915 |
|
|
default:
|
916 |
|
|
break;
|
917 |
|
|
}
|
918 |
|
|
|
919 |
|
|
return 4;
|
920 |
|
|
}
|
921 |
|
|
|
922 |
|
|
/* Function prologue/epilogue handlers. */
|
923 |
|
|
|
924 |
|
|
/* ARC stack frames look like:
|
925 |
|
|
|
926 |
|
|
Before call After call
|
927 |
|
|
+-----------------------+ +-----------------------+
|
928 |
|
|
| | | |
|
929 |
|
|
high | local variables, | | local variables, |
|
930 |
|
|
mem | reg save area, etc. | | reg save area, etc. |
|
931 |
|
|
| | | |
|
932 |
|
|
+-----------------------+ +-----------------------+
|
933 |
|
|
| | | |
|
934 |
|
|
| arguments on stack. | | arguments on stack. |
|
935 |
|
|
| | | |
|
936 |
|
|
SP+16->+-----------------------+FP+48->+-----------------------+
|
937 |
|
|
| 4 word save area for | | reg parm save area, |
|
938 |
|
|
| return addr, prev %fp | | only created for |
|
939 |
|
|
SP+0->+-----------------------+ | variable argument |
|
940 |
|
|
| functions |
|
941 |
|
|
FP+16->+-----------------------+
|
942 |
|
|
| 4 word save area for |
|
943 |
|
|
| return addr, prev %fp |
|
944 |
|
|
FP+0->+-----------------------+
|
945 |
|
|
| |
|
946 |
|
|
| local variables |
|
947 |
|
|
| |
|
948 |
|
|
+-----------------------+
|
949 |
|
|
| |
|
950 |
|
|
| register save area |
|
951 |
|
|
| |
|
952 |
|
|
+-----------------------+
|
953 |
|
|
| |
|
954 |
|
|
| alloca allocations |
|
955 |
|
|
| |
|
956 |
|
|
+-----------------------+
|
957 |
|
|
| |
|
958 |
|
|
| arguments on stack |
|
959 |
|
|
| |
|
960 |
|
|
SP+16->+-----------------------+
|
961 |
|
|
low | 4 word save area for |
|
962 |
|
|
memory | return addr, prev %fp |
|
963 |
|
|
SP+0->+-----------------------+
|
964 |
|
|
|
965 |
|
|
Notes:
|
966 |
|
|
1) The "reg parm save area" does not exist for non variable argument fns.
|
967 |
|
|
The "reg parm save area" can be eliminated completely if we created our
|
968 |
|
|
own va-arc.h, but that has tradeoffs as well (so it's not done). */
|
969 |
|
|
|
970 |
|
|
/* Structure to be filled in by arc_compute_frame_size with register
|
971 |
|
|
save masks, and offsets for the current function. */
|
972 |
|
|
struct arc_frame_info
{
  unsigned int total_size;	/* # bytes that the entire frame takes up.  */
  unsigned int extra_size;	/* # bytes of extra stuff (fp/blink save area).  */
  unsigned int pretend_size;	/* # bytes we push and pretend caller did.  */
  unsigned int args_size;	/* # bytes that outgoing arguments take up.  */
  unsigned int reg_size;	/* # bytes needed to store regs.  */
  unsigned int var_size;	/* # bytes that variables take up.  */
  unsigned int reg_offset;	/* Offset from new sp to store regs.  */
  unsigned int gmask;		/* Mask of saved gp registers (bit N = reg N).  */
  int initialized;		/* Nonzero if frame size already calculated.  */
};

/* Current frame information calculated by arc_compute_frame_size.
   Reset to zero_frame_info at the end of each function's epilogue.  */
static struct arc_frame_info current_frame_info;

/* Zero structure to initialize current_frame_info.  */
static struct arc_frame_info zero_frame_info;
|
990 |
|
|
|
991 |
|
|
/* Type of function DECL.
|
992 |
|
|
|
993 |
|
|
The result is cached. To reset the cache at the end of a function,
|
994 |
|
|
call with DECL = NULL_TREE. */
|
995 |
|
|
|
996 |
|
|
enum arc_function_type
|
997 |
|
|
arc_compute_function_type (tree decl)
|
998 |
|
|
{
|
999 |
|
|
tree a;
|
1000 |
|
|
/* Cached value. */
|
1001 |
|
|
static enum arc_function_type fn_type = ARC_FUNCTION_UNKNOWN;
|
1002 |
|
|
/* Last function we were called for. */
|
1003 |
|
|
static tree last_fn = NULL_TREE;
|
1004 |
|
|
|
1005 |
|
|
/* Resetting the cached value? */
|
1006 |
|
|
if (decl == NULL_TREE)
|
1007 |
|
|
{
|
1008 |
|
|
fn_type = ARC_FUNCTION_UNKNOWN;
|
1009 |
|
|
last_fn = NULL_TREE;
|
1010 |
|
|
return fn_type;
|
1011 |
|
|
}
|
1012 |
|
|
|
1013 |
|
|
if (decl == last_fn && fn_type != ARC_FUNCTION_UNKNOWN)
|
1014 |
|
|
return fn_type;
|
1015 |
|
|
|
1016 |
|
|
/* Assume we have a normal function (not an interrupt handler). */
|
1017 |
|
|
fn_type = ARC_FUNCTION_NORMAL;
|
1018 |
|
|
|
1019 |
|
|
/* Now see if this is an interrupt handler. */
|
1020 |
|
|
for (a = DECL_ATTRIBUTES (current_function_decl);
|
1021 |
|
|
a;
|
1022 |
|
|
a = TREE_CHAIN (a))
|
1023 |
|
|
{
|
1024 |
|
|
tree name = TREE_PURPOSE (a), args = TREE_VALUE (a);
|
1025 |
|
|
|
1026 |
|
|
if (name == get_identifier ("__interrupt__")
|
1027 |
|
|
&& list_length (args) == 1
|
1028 |
|
|
&& TREE_CODE (TREE_VALUE (args)) == STRING_CST)
|
1029 |
|
|
{
|
1030 |
|
|
tree value = TREE_VALUE (args);
|
1031 |
|
|
|
1032 |
|
|
if (!strcmp (TREE_STRING_POINTER (value), "ilink1"))
|
1033 |
|
|
fn_type = ARC_FUNCTION_ILINK1;
|
1034 |
|
|
else if (!strcmp (TREE_STRING_POINTER (value), "ilink2"))
|
1035 |
|
|
fn_type = ARC_FUNCTION_ILINK2;
|
1036 |
|
|
else
|
1037 |
|
|
gcc_unreachable ();
|
1038 |
|
|
break;
|
1039 |
|
|
}
|
1040 |
|
|
}
|
1041 |
|
|
|
1042 |
|
|
last_fn = decl;
|
1043 |
|
|
return fn_type;
|
1044 |
|
|
}
|
1045 |
|
|
|
1046 |
|
|
/* Hard register numbers for the interrupt link registers and the
   return address (blink) register.  */
#define ILINK1_REGNUM 29
#define ILINK2_REGNUM 30
#define RETURN_ADDR_REGNUM 31

/* Bit masks (within arc_frame_info.gmask) for the frame pointer and
   return address registers.  */
#define FRAME_POINTER_MASK (1 << (FRAME_POINTER_REGNUM))
#define RETURN_ADDR_MASK (1 << (RETURN_ADDR_REGNUM))

/* Tell prologue and epilogue if register REGNO should be saved / restored.
   The return address and frame pointer are treated separately.
   Don't consider them here.  Call-used registers are also saved when
   INTERRUPT_P, since an interrupt may arrive at any point.  */
#define MUST_SAVE_REGISTER(regno, interrupt_p) \
((regno) != RETURN_ADDR_REGNUM && (regno) != FRAME_POINTER_REGNUM \
 && (df_regs_ever_live_p (regno) && (!call_used_regs[regno] || interrupt_p)))

/* Nonzero if the return address register must be saved in this function.  */
#define MUST_SAVE_RETURN_ADDR (df_regs_ever_live_p (RETURN_ADDR_REGNUM))
|
1060 |
|
|
|
1061 |
|
|
/* Return the bytes needed to compute the frame pointer from the current
|
1062 |
|
|
stack pointer.
|
1063 |
|
|
|
1064 |
|
|
SIZE is the size needed for local variables. */
|
1065 |
|
|
|
1066 |
|
|
unsigned int
|
1067 |
|
|
arc_compute_frame_size (int size /* # of var. bytes allocated. */)
|
1068 |
|
|
{
|
1069 |
|
|
int regno;
|
1070 |
|
|
unsigned int total_size, var_size, args_size, pretend_size, extra_size;
|
1071 |
|
|
unsigned int reg_size, reg_offset;
|
1072 |
|
|
unsigned int gmask;
|
1073 |
|
|
enum arc_function_type fn_type;
|
1074 |
|
|
int interrupt_p;
|
1075 |
|
|
|
1076 |
|
|
var_size = size;
|
1077 |
|
|
args_size = crtl->outgoing_args_size;
|
1078 |
|
|
pretend_size = crtl->args.pretend_args_size;
|
1079 |
|
|
extra_size = FIRST_PARM_OFFSET (0);
|
1080 |
|
|
total_size = extra_size + pretend_size + args_size + var_size;
|
1081 |
|
|
reg_offset = FIRST_PARM_OFFSET(0) + crtl->outgoing_args_size;
|
1082 |
|
|
reg_size = 0;
|
1083 |
|
|
gmask = 0;
|
1084 |
|
|
|
1085 |
|
|
/* See if this is an interrupt handler. Call used registers must be saved
|
1086 |
|
|
for them too. */
|
1087 |
|
|
fn_type = arc_compute_function_type (current_function_decl);
|
1088 |
|
|
interrupt_p = ARC_INTERRUPT_P (fn_type);
|
1089 |
|
|
|
1090 |
|
|
/* Calculate space needed for registers.
|
1091 |
|
|
??? We ignore the extension registers for now. */
|
1092 |
|
|
|
1093 |
|
|
for (regno = 0; regno <= 31; regno++)
|
1094 |
|
|
{
|
1095 |
|
|
if (MUST_SAVE_REGISTER (regno, interrupt_p))
|
1096 |
|
|
{
|
1097 |
|
|
reg_size += UNITS_PER_WORD;
|
1098 |
|
|
gmask |= 1 << regno;
|
1099 |
|
|
}
|
1100 |
|
|
}
|
1101 |
|
|
|
1102 |
|
|
total_size += reg_size;
|
1103 |
|
|
|
1104 |
|
|
/* If the only space to allocate is the fp/blink save area this is an
|
1105 |
|
|
empty frame. However, if we'll be making a function call we need to
|
1106 |
|
|
allocate a stack frame for our callee's fp/blink save area. */
|
1107 |
|
|
if (total_size == extra_size
|
1108 |
|
|
&& !MUST_SAVE_RETURN_ADDR)
|
1109 |
|
|
total_size = extra_size = 0;
|
1110 |
|
|
|
1111 |
|
|
total_size = ARC_STACK_ALIGN (total_size);
|
1112 |
|
|
|
1113 |
|
|
/* Save computed information. */
|
1114 |
|
|
current_frame_info.total_size = total_size;
|
1115 |
|
|
current_frame_info.extra_size = extra_size;
|
1116 |
|
|
current_frame_info.pretend_size = pretend_size;
|
1117 |
|
|
current_frame_info.var_size = var_size;
|
1118 |
|
|
current_frame_info.args_size = args_size;
|
1119 |
|
|
current_frame_info.reg_size = reg_size;
|
1120 |
|
|
current_frame_info.reg_offset = reg_offset;
|
1121 |
|
|
current_frame_info.gmask = gmask;
|
1122 |
|
|
current_frame_info.initialized = reload_completed;
|
1123 |
|
|
|
1124 |
|
|
/* Ok, we're done. */
|
1125 |
|
|
return total_size;
|
1126 |
|
|
}
|
1127 |
|
|
|
1128 |
|
|
/* Common code to save/restore registers. */
|
1129 |
|
|
|
1130 |
|
|
void
|
1131 |
|
|
arc_save_restore (FILE *file,
|
1132 |
|
|
const char *base_reg,
|
1133 |
|
|
unsigned int offset,
|
1134 |
|
|
unsigned int gmask,
|
1135 |
|
|
const char *op)
|
1136 |
|
|
{
|
1137 |
|
|
int regno;
|
1138 |
|
|
|
1139 |
|
|
if (gmask == 0)
|
1140 |
|
|
return;
|
1141 |
|
|
|
1142 |
|
|
for (regno = 0; regno <= 31; regno++)
|
1143 |
|
|
{
|
1144 |
|
|
if ((gmask & (1L << regno)) != 0)
|
1145 |
|
|
{
|
1146 |
|
|
fprintf (file, "\t%s %s,[%s,%d]\n",
|
1147 |
|
|
op, reg_names[regno], base_reg, offset);
|
1148 |
|
|
offset += UNITS_PER_WORD;
|
1149 |
|
|
}
|
1150 |
|
|
}
|
1151 |
|
|
}
|
1152 |
|
|
|
1153 |
|
|
/* Target hook to assemble an integer object. The ARC version needs to
|
1154 |
|
|
emit a special directive for references to labels and function
|
1155 |
|
|
symbols. */
|
1156 |
|
|
|
1157 |
|
|
static bool
|
1158 |
|
|
arc_assemble_integer (rtx x, unsigned int size, int aligned_p)
|
1159 |
|
|
{
|
1160 |
|
|
if (size == UNITS_PER_WORD && aligned_p
|
1161 |
|
|
&& ((GET_CODE (x) == SYMBOL_REF && SYMBOL_REF_FUNCTION_P (x))
|
1162 |
|
|
|| GET_CODE (x) == LABEL_REF))
|
1163 |
|
|
{
|
1164 |
|
|
fputs ("\t.word\t%st(", asm_out_file);
|
1165 |
|
|
output_addr_const (asm_out_file, x);
|
1166 |
|
|
fputs (")\n", asm_out_file);
|
1167 |
|
|
return true;
|
1168 |
|
|
}
|
1169 |
|
|
return default_assemble_integer (x, size, aligned_p);
|
1170 |
|
|
}
|
1171 |
|
|
|
1172 |
|
|
/* Set up the stack and frame pointer (if desired) for the function. */
|
1173 |
|
|
|
1174 |
|
|
static void
arc_output_function_prologue (FILE *file, HOST_WIDE_INT size)
{
  const char *sp_str = reg_names[STACK_POINTER_REGNUM];
  const char *fp_str = reg_names[FRAME_POINTER_REGNUM];
  unsigned int gmask = current_frame_info.gmask;
  enum arc_function_type fn_type = arc_compute_function_type (current_function_decl);

  /* If this is an interrupt handler, set up our stack frame.
     ??? Optimize later.  */
  if (ARC_INTERRUPT_P (fn_type))
    {
      fprintf (file, "\t%s interrupt handler\n",
	       ASM_COMMENT_START);
      /* Reserve the 4-word save area below sp for the handler.  */
      fprintf (file, "\tsub %s,%s,16\n", sp_str, sp_str);
    }

  /* This is only for the human reader.  */
  fprintf (file, "\t%s BEGIN PROLOGUE %s vars= %d, regs= %d, args= %d, extra= %d\n",
	   ASM_COMMENT_START, ASM_COMMENT_START,
	   current_frame_info.var_size,
	   current_frame_info.reg_size / 4,
	   current_frame_info.args_size,
	   current_frame_info.extra_size);

  /* Compute (or fetch the cached) total frame size.  */
  size = ARC_STACK_ALIGN (size);
  size = (! current_frame_info.initialized
	  ? arc_compute_frame_size (size)
	  : current_frame_info.total_size);

  /* These cases shouldn't happen.  Catch them now.  */
  gcc_assert (size || !gmask);

  /* Allocate space for register arguments if this is a variadic function.  */
  if (current_frame_info.pretend_size != 0)
    fprintf (file, "\tsub %s,%s,%d\n",
	     sp_str, sp_str, current_frame_info.pretend_size);

  /* The home-grown ABI says link register is saved first.  */
  if (MUST_SAVE_RETURN_ADDR)
    fprintf (file, "\tst %s,[%s,%d]\n",
	     reg_names[RETURN_ADDR_REGNUM], sp_str, UNITS_PER_WORD);

  /* Set up the previous frame pointer next (if we need to).  */
  if (frame_pointer_needed)
    {
      fprintf (file, "\tst %s,[%s]\n", fp_str, sp_str);
      fprintf (file, "\tmov %s,%s\n", fp_str, sp_str);
    }

  /* ??? We don't handle the case where the saved regs are more than 252
     bytes away from sp.  This can be handled by decrementing sp once, saving
     the regs, and then decrementing it again.  The epilogue doesn't have this
     problem as the `ld' insn takes reg+limm values (though it would be more
     efficient to avoid reg+limm).  */

  /* Allocate the stack frame.  The pretend area was already allocated
     above, so subtract it from the total.  */
  if (size - current_frame_info.pretend_size > 0)
    fprintf (file, "\tsub %s,%s," HOST_WIDE_INT_PRINT_DEC "\n",
	     sp_str, sp_str, size - current_frame_info.pretend_size);

  /* Save any needed call-saved regs (and call-used if this is an
     interrupt handler).  */
  arc_save_restore (file, sp_str, current_frame_info.reg_offset,
		    /* The zeroing of these two bits is unnecessary,
		       but leave this in for clarity.  */
		    gmask & ~(FRAME_POINTER_MASK | RETURN_ADDR_MASK),
		    "st");

  fprintf (file, "\t%s END PROLOGUE\n", ASM_COMMENT_START);
}
|
1245 |
|
|
|
1246 |
|
|
/* Do any necessary cleanup after a function to restore stack, frame,
|
1247 |
|
|
and regs. */
|
1248 |
|
|
|
1249 |
|
|
static void
arc_output_function_epilogue (FILE *file, HOST_WIDE_INT size)
{
  rtx epilogue_delay = crtl->epilogue_delay_list;
  int noepilogue = FALSE;
  enum arc_function_type fn_type = arc_compute_function_type (current_function_decl);

  /* This is only for the human reader.  */
  fprintf (file, "\t%s EPILOGUE\n", ASM_COMMENT_START);

  /* Compute (or fetch the cached) total frame size.  */
  size = ARC_STACK_ALIGN (size);
  size = (!current_frame_info.initialized
	  ? arc_compute_frame_size (size)
	  : current_frame_info.total_size);

  if (size == 0 && epilogue_delay == 0)
    {
      rtx insn = get_last_insn ();

      /* If the last insn was a BARRIER, we don't have to write any code
	 because a jump (aka return) was put there.  */
      if (GET_CODE (insn) == NOTE)
	insn = prev_nonnote_insn (insn);
      if (insn && GET_CODE (insn) == BARRIER)
	noepilogue = TRUE;
    }

  if (!noepilogue)
    {
      unsigned int pretend_size = current_frame_info.pretend_size;
      unsigned int frame_size = size - pretend_size;
      int restored, fp_restored_p;
      /* sp can't be used to address the save area if alloca moved it.  */
      int can_trust_sp_p = !cfun->calls_alloca;
      const char *sp_str = reg_names[STACK_POINTER_REGNUM];
      const char *fp_str = reg_names[FRAME_POINTER_REGNUM];

      /* ??? There are lots of optimizations that can be done here.
	 EG: Use fp to restore regs if it's closer.
	 Maybe in time we'll do them all.  For now, always restore regs from
	 sp, but don't restore sp if we don't have to.  */

      if (!can_trust_sp_p)
	{
	  /* Recompute sp from fp, which alloca cannot have moved.  */
	  gcc_assert (frame_pointer_needed);
	  fprintf (file,"\tsub %s,%s,%d\t\t%s sp not trusted here\n",
		   sp_str, fp_str, frame_size, ASM_COMMENT_START);
	}

      /* Restore any saved registers.  */
      arc_save_restore (file, sp_str, current_frame_info.reg_offset,
			/* The zeroing of these two bits is unnecessary,
			   but leave this in for clarity.  */
			current_frame_info.gmask & ~(FRAME_POINTER_MASK | RETURN_ADDR_MASK),
			"ld");

      /* Reload the return address (blink) from its save slot.  */
      if (MUST_SAVE_RETURN_ADDR)
	fprintf (file, "\tld %s,[%s,%d]\n",
		 reg_names[RETURN_ADDR_REGNUM],
		 frame_pointer_needed ? fp_str : sp_str,
		 UNITS_PER_WORD + (frame_pointer_needed ? 0 : frame_size));

      /* Keep track of how much of the stack pointer we've restored.
	 It makes the following a lot more readable.  */
      restored = 0;
      fp_restored_p = 0;

      /* We try to emit the epilogue delay slot insn right after the load
	 of the return address register so that it can execute with the
	 stack intact.  Secondly, loads are delayed.  */
      /* ??? If stack intactness is important, always emit now.  */
      if (MUST_SAVE_RETURN_ADDR && epilogue_delay != NULL_RTX)
	{
	  final_scan_insn (XEXP (epilogue_delay, 0), file, 1, 1, NULL);
	  epilogue_delay = NULL_RTX;
	}

      if (frame_pointer_needed)
	{
	  /* Try to restore the frame pointer in the delay slot.  We can't,
	     however, if any of these is true.  */
	  if (epilogue_delay != NULL_RTX
	      || !SMALL_INT (frame_size)
	      || pretend_size
	      || ARC_INTERRUPT_P (fn_type))
	    {
	      /* Note that we restore fp and sp here!  ld.a post-increments
		 sp by frame_size as a side effect.  */
	      fprintf (file, "\tld.a %s,[%s,%d]\n", fp_str, sp_str, frame_size);
	      restored += frame_size;
	      fp_restored_p = 1;
	    }
	}
      else if (!SMALL_INT (size /* frame_size + pretend_size */)
	       || ARC_INTERRUPT_P (fn_type))
	{
	  fprintf (file, "\tadd %s,%s,%d\n", sp_str, sp_str, frame_size);
	  restored += frame_size;
	}

      /* These must be done before the return insn because the delay slot
	 does the final stack restore.  */
      if (ARC_INTERRUPT_P (fn_type))
	{
	  if (epilogue_delay)
	    {
	      final_scan_insn (XEXP (epilogue_delay, 0), file, 1, 1, NULL);
	    }
	}

      /* Emit the return instruction.  */
      {
	/* Return register indexed by function type: normal functions
	   return through blink, interrupt handlers through ilink1/2.  */
	static const int regs[4] = {
	  0, RETURN_ADDR_REGNUM, ILINK1_REGNUM, ILINK2_REGNUM
	};

	/* Update the flags, if returning from an interrupt handler.  */
	if (ARC_INTERRUPT_P (fn_type))
	  fprintf (file, "\tj.d.f %s\n", reg_names[regs[fn_type]]);
	else
	  fprintf (file, "\tj.d %s\n", reg_names[regs[fn_type]]);
      }

      /* If the only register saved is the return address, we need a
	 nop, unless we have an instruction to put into it.  Otherwise
	 we don't since reloading multiple registers doesn't reference
	 the register being loaded.  */

      if (ARC_INTERRUPT_P (fn_type))
	/* Delay slot: pop the interrupt handler's 4-word save area.  */
	fprintf (file, "\tadd %s,%s,16\n", sp_str, sp_str);
      else if (epilogue_delay != NULL_RTX)
	{
	  gcc_assert (!frame_pointer_needed || fp_restored_p);
	  gcc_assert (restored >= size);
	  final_scan_insn (XEXP (epilogue_delay, 0), file, 1, 1, NULL);
	}
      else if (frame_pointer_needed && !fp_restored_p)
	{
	  gcc_assert (SMALL_INT (frame_size));
	  /* Note that we restore fp and sp here!  */
	  fprintf (file, "\tld.a %s,[%s,%d]\n", fp_str, sp_str, frame_size);
	}
      else if (restored < size)
	{
	  gcc_assert (SMALL_INT (size - restored));
	  fprintf (file, "\tadd %s,%s," HOST_WIDE_INT_PRINT_DEC "\n",
		   sp_str, sp_str, size - restored);
	}
      else
	fprintf (file, "\tnop\n");
    }

  /* Reset state info for each function.  */
  current_frame_info = zero_frame_info;
  arc_compute_function_type (NULL_TREE);
}
|
1403 |
|
|
|
1404 |
|
|
/* Define the number of delay slots needed for the function epilogue.
|
1405 |
|
|
|
1406 |
|
|
Interrupt handlers can't have any epilogue delay slots (it's always needed
|
1407 |
|
|
for something else, I think). For normal functions, we have to worry about
|
1408 |
|
|
using call-saved regs as they'll be restored before the delay slot insn.
|
1409 |
|
|
Functions with non-empty frames already have enough choices for the epilogue
|
1410 |
|
|
delay slot so for now we only consider functions with empty frames. */
|
1411 |
|
|
|
1412 |
|
|
int
|
1413 |
|
|
arc_delay_slots_for_epilogue (void)
|
1414 |
|
|
{
|
1415 |
|
|
if (arc_compute_function_type (current_function_decl) != ARC_FUNCTION_NORMAL)
|
1416 |
|
|
return 0;
|
1417 |
|
|
if (!current_frame_info.initialized)
|
1418 |
|
|
(void) arc_compute_frame_size (get_frame_size ());
|
1419 |
|
|
if (current_frame_info.total_size == 0)
|
1420 |
|
|
return 1;
|
1421 |
|
|
return 0;
|
1422 |
|
|
}
|
1423 |
|
|
|
1424 |
|
|
/* Return true if TRIAL is a valid insn for the epilogue delay slot.
|
1425 |
|
|
Any single length instruction which doesn't reference the stack or frame
|
1426 |
|
|
pointer or any call-saved register is OK. SLOT will always be 0. */
|
1427 |
|
|
|
1428 |
|
|
int
|
1429 |
|
|
arc_eligible_for_epilogue_delay (rtx trial, int slot)
|
1430 |
|
|
{
|
1431 |
|
|
gcc_assert (!slot);
|
1432 |
|
|
|
1433 |
|
|
if (get_attr_length (trial) == 1
|
1434 |
|
|
/* If registers where saved, presumably there's more than enough
|
1435 |
|
|
possibilities for the delay slot. The alternative is something
|
1436 |
|
|
more complicated (of course, if we expanded the epilogue as rtl
|
1437 |
|
|
this problem would go away). */
|
1438 |
|
|
/* ??? Note that this will always be true since only functions with
|
1439 |
|
|
empty frames have epilogue delay slots. See
|
1440 |
|
|
arc_delay_slots_for_epilogue. */
|
1441 |
|
|
&& current_frame_info.gmask == 0
|
1442 |
|
|
&& ! reg_mentioned_p (stack_pointer_rtx, PATTERN (trial))
|
1443 |
|
|
&& ! reg_mentioned_p (frame_pointer_rtx, PATTERN (trial)))
|
1444 |
|
|
return 1;
|
1445 |
|
|
return 0;
|
1446 |
|
|
}
|
1447 |
|
|
|
1448 |
|
|
/* Return true if OP is a shift operator. */
|
1449 |
|
|
|
1450 |
|
|
int
|
1451 |
|
|
shift_operator (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
|
1452 |
|
|
{
|
1453 |
|
|
switch (GET_CODE (op))
|
1454 |
|
|
{
|
1455 |
|
|
case ASHIFTRT:
|
1456 |
|
|
case LSHIFTRT:
|
1457 |
|
|
case ASHIFT:
|
1458 |
|
|
return 1;
|
1459 |
|
|
default:
|
1460 |
|
|
return 0;
|
1461 |
|
|
}
|
1462 |
|
|
}
|
1463 |
|
|
|
1464 |
|
|
/* Output the assembler code for doing a shift.
|
1465 |
|
|
We go to a bit of trouble to generate efficient code as the ARC only has
|
1466 |
|
|
single bit shifts. This is taken from the h8300 port. We only have one
|
1467 |
|
|
mode of shifting and can't access individual bytes like the h8300 can, so
|
1468 |
|
|
this is greatly simplified (at the expense of not generating hyper-
|
1469 |
|
|
efficient code).
|
1470 |
|
|
|
1471 |
|
|
This function is not used if the variable shift insns are present. */
|
1472 |
|
|
|
1473 |
|
|
/* ??? We assume the output operand is the same as operand 1.
|
1474 |
|
|
This can be optimized (deleted) in the case of 1 bit shifts. */
|
1475 |
|
|
/* ??? We use the loop register here. We don't use it elsewhere (yet) and
|
1476 |
|
|
using it here will give us a chance to play with it. */
|
1477 |
|
|
|
1478 |
|
|
const char *
output_shift (rtx *operands)
{
  rtx shift = operands[3];
  enum machine_mode mode = GET_MODE (shift);
  enum rtx_code code = GET_CODE (shift);
  /* Assembler template for a single one-bit shift of the chosen kind.  */
  const char *shift_one;

  gcc_assert (mode == SImode);

  switch (code)
    {
    case ASHIFT: shift_one = "asl %0,%0"; break;
    case ASHIFTRT: shift_one = "asr %0,%0"; break;
    case LSHIFTRT: shift_one = "lsr %0,%0"; break;
    default: gcc_unreachable ();
    }

  if (GET_CODE (operands[2]) != CONST_INT)
    {
      /* Variable shift count: load it into the loop counter and fall
	 through to the shared loop emission below.  */
      if (optimize)
	{
	  output_asm_insn ("sub.f 0,%2,0", operands);
	  output_asm_insn ("mov lp_count,%2", operands);
	  /* Zero count: skip the loop entirely.  */
	  output_asm_insn ("bz 2f", operands);
	}
      else
	output_asm_insn ("mov %4,%2", operands);
      goto shiftloop;
    }
  else
    {
      int n;

      /* If the count is negative, make it 0.  */
      n = INTVAL (operands[2]);
      if (n < 0)
	n = 0;
      /* If the count is too big, truncate it.
	 ANSI says shifts of GET_MODE_BITSIZE are undefined - we choose to
	 do the intuitive thing.  */
      else if (n > GET_MODE_BITSIZE (mode))
	n = GET_MODE_BITSIZE (mode);

      /* First see if we can do them inline.  */
      if (n <= 8)
	{
	  while (--n >= 0)
	    output_asm_insn (shift_one, operands);
	}
      /* See if we can use a rotate/and.  */
      else if (n == BITS_PER_WORD - 1)
	{
	  switch (code)
	    {
	    case ASHIFT :
	      /* Shifting left by 31: keep only bit 0, rotate it to bit 31.  */
	      output_asm_insn ("and %0,%0,1\n\tror %0,%0", operands);
	      break;
	    case ASHIFTRT :
	      /* The ARC doesn't have a rol insn.  Use something else.  */
	      output_asm_insn ("asl.f 0,%0\n\tsbc %0,0,0", operands);
	      break;
	    case LSHIFTRT :
	      /* The ARC doesn't have a rol insn.  Use something else.  */
	      output_asm_insn ("asl.f 0,%0\n\tadc %0,0,0", operands);
	      break;
	    default:
	      break;
	    }
	}
      /* Must loop.  */
      else
	{
	  char buf[100];

	  if (optimize)
	    output_asm_insn ("mov lp_count,%c2", operands);
	  else
	    output_asm_insn ("mov %4,%c2", operands);
	shiftloop:
	  /* When optimizing, use the ARC zero-overhead loop hardware
	     (lp_start/lp_end); otherwise emit an explicit counted loop.  */
	  if (optimize)
	    {
	      if (flag_pic)
		sprintf (buf, "lr %%4,[status]\n\tadd %%4,%%4,6\t%s single insn loop start",
			 ASM_COMMENT_START);
	      else
		sprintf (buf, "mov %%4,%%%%st(1f)\t%s (single insn loop start) >> 2",
			 ASM_COMMENT_START);
	      output_asm_insn (buf, operands);
	      output_asm_insn ("sr %4,[lp_start]", operands);
	      output_asm_insn ("add %4,%4,1", operands);
	      output_asm_insn ("sr %4,[lp_end]", operands);
	      output_asm_insn ("nop\n\tnop", operands);
	      if (flag_pic)
		fprintf (asm_out_file, "\t%s single insn loop\n",
			 ASM_COMMENT_START);
	      else
		fprintf (asm_out_file, "1:\t%s single insn loop\n",
			 ASM_COMMENT_START);
	      output_asm_insn (shift_one, operands);
	      fprintf (asm_out_file, "2:\t%s end single insn loop\n",
		       ASM_COMMENT_START);
	    }
	  else
	    {
	      fprintf (asm_out_file, "1:\t%s begin shift loop\n",
		       ASM_COMMENT_START);
	      output_asm_insn ("sub.f %4,%4,1", operands);
	      output_asm_insn ("nop", operands);
	      output_asm_insn ("bn.nd 2f", operands);
	      output_asm_insn (shift_one, operands);
	      output_asm_insn ("b.nd 1b", operands);
	      fprintf (asm_out_file, "2:\t%s end shift loop\n",
		       ASM_COMMENT_START);
	    }
	}
    }

  return "";
}
|
1598 |
|
|
|
1599 |
|
|
/* Nested function support. */
|
1600 |
|
|
|
1601 |
|
|
/* Emit RTL insns to initialize the variable parts of a trampoline.
|
1602 |
|
|
FNADDR is an RTX for the address of the function's pure code.
|
1603 |
|
|
CXT is an RTX for the static chain value for the function. */
|
1604 |
|
|
|
1605 |
|
|
void
arc_initialize_trampoline (rtx tramp ATTRIBUTE_UNUSED,
			   rtx fnaddr ATTRIBUTE_UNUSED,
			   rtx cxt ATTRIBUTE_UNUSED)
{
  /* Deliberately empty: this port does not implement trampolines for
     nested functions.  NOTE(review): taking the address of a nested
     function presumably doesn't work on this target -- confirm before
     relying on it.  */
}
|
1611 |
|
|
|
1612 |
|
|
/* Set the cpu type and print out other fancy things,
|
1613 |
|
|
at the top of the file. */
|
1614 |
|
|
|
1615 |
|
|
static void
arc_file_start (void)
{
  /* Emit the standard file prologue first, then record which ARC cpu
     variant this object was compiled for via a ".cpu" directive.  */
  default_file_start ();
  fprintf (asm_out_file, "\t.cpu %s\n", arc_cpu_string);
}
|
1621 |
|
|
|
1622 |
|
|
/* Print operand X (an rtx) in assembler syntax to file FILE.
|
1623 |
|
|
CODE is a letter or dot (`z' in `%z0') or 0 if no letter was specified.
|
1624 |
|
|
For `%' followed by punctuation, CODE is the punctuation and X is null. */
|
1625 |
|
|
|
1626 |
|
|
void
arc_print_operand (FILE *file, rtx x, int code)
{
  /* First handle the operand-modifier letter, if any.  Most cases emit
     their text and return; `case 0' and an unrecognized 'S' fall through
     to the generic operand printing below.  */
  switch (code)
    {
    case '#' :
      /* Conditional branches.  For now these are equivalent.  */
    case '*' :
      /* Unconditional branches.  Output the appropriate delay slot suffix.  */
      if (!final_sequence || XVECLEN (final_sequence, 0) == 1)
	{
	  /* There's nothing in the delay slot.  */
	  fputs (".nd", file);
	}
      else
	{
	  rtx jump = XVECEXP (final_sequence, 0, 0);
	  rtx delay = XVECEXP (final_sequence, 0, 1);
	  if (INSN_ANNULLED_BRANCH_P (jump))
	    fputs (INSN_FROM_TARGET_P (delay) ? ".jd" : ".nd", file);
	  else
	    fputs (".d", file);
	}
      return;
    case '?' : /* with leading "." */
    case '!' : /* without leading "." */
      /* This insn can be conditionally executed.  See if the ccfsm machinery
	 says it should be conditionalized.  */
      if (arc_ccfsm_state == 3 || arc_ccfsm_state == 4)
	{
	  /* Is this insn in a delay slot?  */
	  if (final_sequence && XVECLEN (final_sequence, 0) == 2)
	    {
	      rtx insn = XVECEXP (final_sequence, 0, 1);

	      /* If the insn is annulled and is from the target path, we need
		 to inverse the condition test.  */
	      if (INSN_ANNULLED_BRANCH_P (insn))
		{
		  if (INSN_FROM_TARGET_P (insn))
		    fprintf (file, "%s%s",
			     code == '?' ? "." : "",
			     arc_condition_codes[ARC_INVERSE_CONDITION_CODE (arc_ccfsm_current_cc)]);
		  else
		    fprintf (file, "%s%s",
			     code == '?' ? "." : "",
			     arc_condition_codes[arc_ccfsm_current_cc]);
		}
	      else
		{
		  /* This insn is executed for either path, so don't
		     conditionalize it at all.  */
		  ; /* nothing to do */
		}
	    }
	  else
	    {
	      /* This insn isn't in a delay slot.  */
	      fprintf (file, "%s%s",
		       code == '?' ? "." : "",
		       arc_condition_codes[arc_ccfsm_current_cc]);
	    }
	}
      return;
    case '~' :
      /* Output a nop if we're between a set of the condition codes,
	 and a conditional branch.  */
      if (last_insn_set_cc_p)
	fputs ("nop\n\t", file);
      return;
    case 'd' :
      /* Condition code of a comparison operator X.  */
      fputs (arc_condition_codes[get_arc_condition_code (x)], file);
      return;
    case 'D' :
      /* Inverted condition code of a comparison operator X.  */
      fputs (arc_condition_codes[ARC_INVERSE_CONDITION_CODE
				 (get_arc_condition_code (x))],
	     file);
      return;
    case 'R' :
      /* Write second word of DImode or DFmode reference,
	 register or memory.  */
      if (GET_CODE (x) == REG)
	fputs (reg_names[REGNO (x)+1], file);
      else if (GET_CODE (x) == MEM)
	{
	  fputc ('[', file);
	  /* Handle possible auto-increment.  Since it is pre-increment and
	     we have already done it, we can just use an offset of four.  */
	  /* ??? This is taken from rs6000.c I think.  I don't think it is
	     currently necessary, but keep it around.  */
	  if (GET_CODE (XEXP (x, 0)) == PRE_INC
	      || GET_CODE (XEXP (x, 0)) == PRE_DEC)
	    output_address (plus_constant (XEXP (XEXP (x, 0), 0), 4));
	  else
	    output_address (plus_constant (XEXP (x, 0), 4));
	  fputc (']', file);
	}
      else
	output_operand_lossage ("invalid operand to %%R code");
      return;
    case 'S' :
      /* Function or label reference: wrap in %st(...) so the assembler
	 applies the code-address right-shift relocation.  Other rtx codes
	 fall through to generic printing.  */
      if ((GET_CODE (x) == SYMBOL_REF && SYMBOL_REF_FUNCTION_P (x))
	  || GET_CODE (x) == LABEL_REF)
	{
	  fprintf (file, "%%st(");
	  output_addr_const (file, x);
	  fprintf (file, ")");
	  return;
	}
      break;
    case 'H' :
    case 'L' :
      if (GET_CODE (x) == REG)
	{
	  /* L = least significant word, H = most significant word.  The
	     endianness xor picks which register of the pair holds it.  */
	  if ((TARGET_BIG_ENDIAN != 0) ^ (code == 'L'))
	    fputs (reg_names[REGNO (x)], file);
	  else
	    fputs (reg_names[REGNO (x)+1], file);
	}
      else if (GET_CODE (x) == CONST_INT
	       || GET_CODE (x) == CONST_DOUBLE)
	{
	  rtx first, second;

	  split_double (x, &first, &second);
	  fprintf (file, "0x%08lx",
		   (long)(code == 'L' ? INTVAL (first) : INTVAL (second)));
	}
      else
	output_operand_lossage ("invalid operand to %%H/%%L code");
      return;
    case 'A' :
      /* Floating-point constant, printed in decimal.  */
      {
	char str[30];

	gcc_assert (GET_CODE (x) == CONST_DOUBLE
		    && GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT);

	real_to_decimal (str, CONST_DOUBLE_REAL_VALUE (x), sizeof (str), 0, 1);
	fprintf (file, "%s", str);
	return;
      }
    case 'U' :
      /* Output a load/store with update indicator if appropriate.  */
      if (GET_CODE (x) == MEM)
	{
	  if (GET_CODE (XEXP (x, 0)) == PRE_INC
	      || GET_CODE (XEXP (x, 0)) == PRE_DEC)
	    fputs (".a", file);
	}
      else
	output_operand_lossage ("invalid operand to %%U code");
      return;
    case 'V' :
      /* Output cache bypass indicator for a load/store insn.  Volatile memory
	 refs are defined to use the cache bypass mechanism.  */
      if (GET_CODE (x) == MEM)
	{
	  if (MEM_VOLATILE_P (x))
	    fputs (".di", file);
	}
      else
	output_operand_lossage ("invalid operand to %%V code");
      return;
    case 0 :
      /* Do nothing special.  */
      break;
    default :
      /* Unknown flag.  */
      output_operand_lossage ("invalid operand output code");
    }

  /* Generic operand printing, for `%0'-style operands and for modifier
     letters that fell through above.  */
  switch (GET_CODE (x))
    {
    case REG :
      fputs (reg_names[REGNO (x)], file);
      break;
    case MEM :
      fputc ('[', file);
      /* Pre-inc/dec addresses have already been performed; print the base
	 adjusted by the access size so the text reflects the final address.  */
      if (GET_CODE (XEXP (x, 0)) == PRE_INC)
	output_address (plus_constant (XEXP (XEXP (x, 0), 0),
				       GET_MODE_SIZE (GET_MODE (x))));
      else if (GET_CODE (XEXP (x, 0)) == PRE_DEC)
	output_address (plus_constant (XEXP (XEXP (x, 0), 0),
				       - GET_MODE_SIZE (GET_MODE (x))));
      else
	output_address (XEXP (x, 0));
      fputc (']', file);
      break;
    case CONST_DOUBLE :
      /* We handle SFmode constants here as output_addr_const doesn't.  */
      if (GET_MODE (x) == SFmode)
	{
	  REAL_VALUE_TYPE d;
	  long l;

	  REAL_VALUE_FROM_CONST_DOUBLE (d, x);
	  REAL_VALUE_TO_TARGET_SINGLE (d, l);
	  fprintf (file, "0x%08lx", l);
	  break;
	}
      /* Fall through.  Let output_addr_const deal with it.  */
    default :
      output_addr_const (file, x);
      break;
    }
}
|
1834 |
|
|
|
1835 |
|
|
/* Print a memory address as an operand to reference that memory location. */
|
1836 |
|
|
|
1837 |
|
|
void
arc_print_operand_address (FILE *file, rtx addr)
{
  register rtx base, index = 0;
  int offset = 0;

  switch (GET_CODE (addr))
    {
    case REG :
      fputs (reg_names[REGNO (addr)], file);
      break;
    case SYMBOL_REF :
      /* The %st() wrapper branch is currently disabled (condition is
	 `0 && ...'); all symbol refs print through output_addr_const.  */
      if (/*???*/ 0 && SYMBOL_REF_FUNCTION_P (addr))
	{
	  fprintf (file, "%%st(");
	  output_addr_const (file, addr);
	  fprintf (file, ")");
	}
      else
	output_addr_const (file, addr);
      break;
    case PLUS :
      /* Decompose into base + (constant offset | register/symbol index).  */
      if (GET_CODE (XEXP (addr, 0)) == CONST_INT)
	offset = INTVAL (XEXP (addr, 0)), base = XEXP (addr, 1);
      else if (GET_CODE (XEXP (addr, 1)) == CONST_INT)
	offset = INTVAL (XEXP (addr, 1)), base = XEXP (addr, 0);
      else
	base = XEXP (addr, 0), index = XEXP (addr, 1);
      gcc_assert (GET_CODE (base) == REG);
      fputs (reg_names[REGNO (base)], file);
      if (index == 0)
	{
	  if (offset != 0)
	    fprintf (file, ",%d", offset);
	}
      else
	{
	  switch (GET_CODE (index))
	    {
	    case REG:
	      fprintf (file, ",%s", reg_names[REGNO (index)]);
	      break;
	    case SYMBOL_REF:
	      fputc (',', file), output_addr_const (file, index);
	      break;
	    default:
	      gcc_unreachable ();
	    }
	}
      break;
    case PRE_INC :
    case PRE_DEC :
      /* We shouldn't get here as we've lost the mode of the memory object
	 (which says how much to inc/dec by).  */
      gcc_unreachable ();
      break;
    default :
      output_addr_const (file, addr);
      break;
    }
}
|
1898 |
|
|
|
1899 |
|
|
/* Update compare/branch separation marker. */
|
1900 |
|
|
|
1901 |
|
|
static void
|
1902 |
|
|
record_cc_ref (rtx insn)
|
1903 |
|
|
{
|
1904 |
|
|
last_insn_set_cc_p = current_insn_set_cc_p;
|
1905 |
|
|
|
1906 |
|
|
switch (get_attr_cond (insn))
|
1907 |
|
|
{
|
1908 |
|
|
case COND_SET :
|
1909 |
|
|
case COND_SET_ZN :
|
1910 |
|
|
case COND_SET_ZNC :
|
1911 |
|
|
if (get_attr_length (insn) == 1)
|
1912 |
|
|
current_insn_set_cc_p = 1;
|
1913 |
|
|
else
|
1914 |
|
|
current_insn_set_cc_p = 0;
|
1915 |
|
|
break;
|
1916 |
|
|
default :
|
1917 |
|
|
current_insn_set_cc_p = 0;
|
1918 |
|
|
break;
|
1919 |
|
|
}
|
1920 |
|
|
}
|
1921 |
|
|
|
1922 |
|
|
/* Conditional execution support.
|
1923 |
|
|
|
1924 |
|
|
This is based on the ARM port but for now is much simpler.
|
1925 |
|
|
|
1926 |
|
|
A finite state machine takes care of noticing whether or not instructions
|
1927 |
|
|
can be conditionally executed, and thus decrease execution time and code
|
1928 |
|
|
size by deleting branch instructions. The fsm is controlled by
|
1929 |
|
|
final_prescan_insn, and controls the actions of PRINT_OPERAND. The patterns
|
1930 |
|
|
in the .md file for the branch insns also have a hand in this. */
|
1931 |
|
|
|
1932 |
|
|
/* The state of the fsm controlling condition codes are:
|
1933 |
|
|
0: normal, do nothing special
|
1934 |
|
|
1: don't output this insn
|
1935 |
|
|
2: don't output this insn
|
1936 |
|
|
3: make insns conditional
|
1937 |
|
|
4: make insns conditional
|
1938 |
|
|
|
1939 |
|
|
State transitions (state->state by whom, under what condition):
|
1940 |
|
|
|
1941 |
|
|
|
1942 |
|
|
1 -> 3 branch patterns, after having not output the conditional branch
|
1943 |
|
|
2 -> 4 branch patterns, after having not output the conditional branch
|
1944 |
|
|
3 -> 0 (*targetm.asm_out.internal_label), if the `target' label is reached
|
1945 |
|
|
(the target label has CODE_LABEL_NUMBER equal to
|
1946 |
|
|
arc_ccfsm_target_label).
|
1947 |
|
|
4 -> 0 final_prescan_insn, if `target' unconditional branch is reached
|
1948 |
|
|
|
1949 |
|
|
If the jump clobbers the conditions then we use states 2 and 4.
|
1950 |
|
|
|
1951 |
|
|
A similar thing can be done with conditional return insns.
|
1952 |
|
|
|
1953 |
|
|
We also handle separating branches from sets of the condition code.
|
1954 |
|
|
This is done here because knowledge of the ccfsm state is required,
|
1955 |
|
|
we may not be outputting the branch. */
|
1956 |
|
|
|
1957 |
|
|
void
arc_final_prescan_insn (rtx insn,
			rtx *opvec ATTRIBUTE_UNUSED,
			int noperands ATTRIBUTE_UNUSED)
{
  /* BODY will hold the body of INSN.  */
  register rtx body = PATTERN (insn);

  /* This will be 1 if trying to repeat the trick (i.e.: do the `else' part of
     an if/then/else), and things need to be reversed.  */
  int reverse = 0;

  /* If we start with a return insn, we only succeed if we find another one.  */
  int seeking_return = 0;

  /* START_INSN will hold the insn from where we start looking.  This is the
     first insn after the following code_label if REVERSE is true.  */
  rtx start_insn = insn;

  /* Update compare/branch separation marker.  */
  record_cc_ref (insn);

  /* Allow -mdebug-ccfsm to turn this off so we can see how well it does.
     We can't do this in macro FINAL_PRESCAN_INSN because its called from
     final_scan_insn which has `optimize' as a local.  */
  if (optimize < 2 || TARGET_NO_COND_EXEC)
    return;

  /* If in state 4, check if the target branch is reached, in order to
     change back to state 0.  */
  if (arc_ccfsm_state == 4)
    {
      if (insn == arc_ccfsm_target_insn)
	{
	  arc_ccfsm_target_insn = NULL;
	  arc_ccfsm_state = 0;
	}
      return;
    }

  /* If in state 3, it is possible to repeat the trick, if this insn is an
     unconditional branch to a label, and immediately following this branch
     is the previous target label which is only used once, and the label this
     branch jumps to is not too far off.  Or in other words "we've done the
     `then' part, see if we can do the `else' part."  */
  if (arc_ccfsm_state == 3)
    {
      if (simplejump_p (insn))
	{
	  start_insn = next_nonnote_insn (start_insn);
	  if (GET_CODE (start_insn) == BARRIER)
	    {
	      /* ??? Isn't this always a barrier?  */
	      start_insn = next_nonnote_insn (start_insn);
	    }
	  if (GET_CODE (start_insn) == CODE_LABEL
	      && CODE_LABEL_NUMBER (start_insn) == arc_ccfsm_target_label
	      && LABEL_NUSES (start_insn) == 1)
	    reverse = TRUE;
	  else
	    return;
	}
      else if (GET_CODE (body) == RETURN)
	{
	  start_insn = next_nonnote_insn (start_insn);
	  if (GET_CODE (start_insn) == BARRIER)
	    start_insn = next_nonnote_insn (start_insn);
	  if (GET_CODE (start_insn) == CODE_LABEL
	      && CODE_LABEL_NUMBER (start_insn) == arc_ccfsm_target_label
	      && LABEL_NUSES (start_insn) == 1)
	    {
	      reverse = TRUE;
	      seeking_return = 1;
	    }
	  else
	    return;
	}
      else
	return;
    }

  if (GET_CODE (insn) != JUMP_INSN)
    return;

  /* This jump might be paralleled with a clobber of the condition codes,
     the jump should always come first.  */
  if (GET_CODE (body) == PARALLEL && XVECLEN (body, 0) > 0)
    body = XVECEXP (body, 0, 0);

  if (reverse
      || (GET_CODE (body) == SET && GET_CODE (SET_DEST (body)) == PC
	  && GET_CODE (SET_SRC (body)) == IF_THEN_ELSE))
    {
      int insns_skipped = 0, fail = FALSE, succeed = FALSE;
      /* Flag which part of the IF_THEN_ELSE is the LABEL_REF.  */
      int then_not_else = TRUE;
      /* Nonzero if next insn must be the target label.  */
      int next_must_be_target_label_p;
      rtx this_insn = start_insn, label = 0;

      /* Register the insn jumped to.  */
      if (reverse)
	{
	  if (!seeking_return)
	    label = XEXP (SET_SRC (body), 0);
	}
      else if (GET_CODE (XEXP (SET_SRC (body), 1)) == LABEL_REF)
	label = XEXP (XEXP (SET_SRC (body), 1), 0);
      else if (GET_CODE (XEXP (SET_SRC (body), 2)) == LABEL_REF)
	{
	  label = XEXP (XEXP (SET_SRC (body), 2), 0);
	  then_not_else = FALSE;
	}
      else if (GET_CODE (XEXP (SET_SRC (body), 1)) == RETURN)
	seeking_return = 1;
      else if (GET_CODE (XEXP (SET_SRC (body), 2)) == RETURN)
	{
	  seeking_return = 1;
	  then_not_else = FALSE;
	}
      else
	gcc_unreachable ();

      /* See how many insns this branch skips, and what kind of insns.  If all
	 insns are okay, and the label or unconditional branch to the same
	 label is not too far away, succeed.  */
      for (insns_skipped = 0, next_must_be_target_label_p = FALSE;
	   !fail && !succeed && insns_skipped < MAX_INSNS_SKIPPED;
	   insns_skipped++)
	{
	  rtx scanbody;

	  this_insn = next_nonnote_insn (this_insn);
	  if (!this_insn)
	    break;

	  if (next_must_be_target_label_p)
	    {
	      if (GET_CODE (this_insn) == BARRIER)
		continue;
	      if (GET_CODE (this_insn) == CODE_LABEL
		  && this_insn == label)
		{
		  arc_ccfsm_state = 1;
		  succeed = TRUE;
		}
	      else
		fail = TRUE;
	      break;
	    }

	  scanbody = PATTERN (this_insn);

	  switch (GET_CODE (this_insn))
	    {
	    case CODE_LABEL:
	      /* Succeed if it is the target label, otherwise fail since
		 control falls in from somewhere else.  */
	      if (this_insn == label)
		{
		  arc_ccfsm_state = 1;
		  succeed = TRUE;
		}
	      else
		fail = TRUE;
	      break;

	    case BARRIER:
	      /* Succeed if the following insn is the target label.
		 Otherwise fail.
		 If return insns are used then the last insn in a function
		 will be a barrier.  */
	      next_must_be_target_label_p = TRUE;
	      break;

	    case CALL_INSN:
	      /* Can handle a call insn if there are no insns after it.
		 IE: The next "insn" is the target label.  We don't have to
		 worry about delay slots as such insns are SEQUENCE's inside
		 INSN's.  ??? It is possible to handle such insns though.  */
	      if (get_attr_cond (this_insn) == COND_CANUSE)
		next_must_be_target_label_p = TRUE;
	      else
		fail = TRUE;
	      break;

	    case JUMP_INSN:
	      /* If this is an unconditional branch to the same label, succeed.
		 If it is to another label, do nothing.  If it is conditional,
		 fail.  */
	      /* ??? Probably, the test for the SET and the PC are unnecessary.  */

	      if (GET_CODE (scanbody) == SET
		  && GET_CODE (SET_DEST (scanbody)) == PC)
		{
		  if (GET_CODE (SET_SRC (scanbody)) == LABEL_REF
		      && XEXP (SET_SRC (scanbody), 0) == label && !reverse)
		    {
		      arc_ccfsm_state = 2;
		      succeed = TRUE;
		    }
		  else if (GET_CODE (SET_SRC (scanbody)) == IF_THEN_ELSE)
		    fail = TRUE;
		}
	      else if (GET_CODE (scanbody) == RETURN
		       && seeking_return)
		{
		  arc_ccfsm_state = 2;
		  succeed = TRUE;
		}
	      else if (GET_CODE (scanbody) == PARALLEL)
		{
		  if (get_attr_cond (this_insn) != COND_CANUSE)
		    fail = TRUE;
		}
	      break;

	    case INSN:
	      /* We can only do this with insns that can use the condition
		 codes (and don't set them).  */
	      if (GET_CODE (scanbody) == SET
		  || GET_CODE (scanbody) == PARALLEL)
		{
		  if (get_attr_cond (this_insn) != COND_CANUSE)
		    fail = TRUE;
		}
	      /* We can't handle other insns like sequences.  */
	      else
		fail = TRUE;
	      break;

	    default:
	      break;
	    }
	}

      if (succeed)
	{
	  if ((!seeking_return) && (arc_ccfsm_state == 1 || reverse))
	    arc_ccfsm_target_label = CODE_LABEL_NUMBER (label);
	  else
	    {
	      gcc_assert (seeking_return || arc_ccfsm_state == 2);
	      /* Skip trailing USE insns to find the real target insn.  */
	      while (this_insn && GET_CODE (PATTERN (this_insn)) == USE)
		{
		  this_insn = next_nonnote_insn (this_insn);
		  gcc_assert (!this_insn
			      || (GET_CODE (this_insn) != BARRIER
				  && GET_CODE (this_insn) != CODE_LABEL));
		}
	      if (!this_insn)
		{
		  /* Oh dear! we ran off the end, give up.  */
		  extract_insn_cached (insn);
		  arc_ccfsm_state = 0;
		  arc_ccfsm_target_insn = NULL;
		  return;
		}
	      arc_ccfsm_target_insn = this_insn;
	    }

	  /* If REVERSE is true, ARM_CURRENT_CC needs to be inverted from
	     what it was.  */
	  if (!reverse)
	    arc_ccfsm_current_cc = get_arc_condition_code (XEXP (SET_SRC (body),
								 0));

	  if (reverse || then_not_else)
	    arc_ccfsm_current_cc = ARC_INVERSE_CONDITION_CODE (arc_ccfsm_current_cc);
	}

      /* Restore recog_data.  Getting the attributes of other insns can
	 destroy this array, but final.c assumes that it remains intact
	 across this call.  */
      extract_insn_cached (insn);
    }
}
|
2234 |
|
|
|
2235 |
|
|
/* Record that we are currently outputting label NUM with prefix PREFIX.
   If it's the label we're looking for, reset the ccfsm machinery.

   Called from (*targetm.asm_out.internal_label).  */
|
2239 |
|
|
|
2240 |
|
|
void
|
2241 |
|
|
arc_ccfsm_at_label (const char *prefix, int num)
|
2242 |
|
|
{
|
2243 |
|
|
if (arc_ccfsm_state == 3 && arc_ccfsm_target_label == num
|
2244 |
|
|
&& !strcmp (prefix, "L"))
|
2245 |
|
|
{
|
2246 |
|
|
arc_ccfsm_state = 0;
|
2247 |
|
|
arc_ccfsm_target_insn = NULL_RTX;
|
2248 |
|
|
}
|
2249 |
|
|
}
|
2250 |
|
|
|
2251 |
|
|
/* See if the current insn, which is a conditional branch, is to be
|
2252 |
|
|
deleted. */
|
2253 |
|
|
|
2254 |
|
|
int
|
2255 |
|
|
arc_ccfsm_branch_deleted_p (void)
|
2256 |
|
|
{
|
2257 |
|
|
if (arc_ccfsm_state == 1 || arc_ccfsm_state == 2)
|
2258 |
|
|
return 1;
|
2259 |
|
|
return 0;
|
2260 |
|
|
}
|
2261 |
|
|
|
2262 |
|
|
/* Record a branch isn't output because subsequent insns can be
|
2263 |
|
|
conditionalized. */
|
2264 |
|
|
|
2265 |
|
|
void
arc_ccfsm_record_branch_deleted (void)
{
  /* Indicate we're conditionalizing insns now (state 1 -> 3, 2 -> 4).  */
  arc_ccfsm_state += 2;

  /* If the next insn is a subroutine call, we still need a nop between the
     cc setter and user.  We need to undo the effect of calling record_cc_ref
     for the just deleted branch.  */
  current_insn_set_cc_p = last_insn_set_cc_p;
}
|
2276 |
|
|
|
2277 |
|
|
static void
arc_va_start (tree valist, rtx nextarg)
{
  /* See arc_setup_incoming_varargs for reasons for this oddity.  */
  /* If fewer than 8 argument words were used and an odd number of
     registers has been consumed, skip a word so the va_list starts on
     the register-pair boundary.  */
  if (crtl->args.info < 8
      && (crtl->args.info & 1))
    nextarg = plus_constant (nextarg, UNITS_PER_WORD);

  std_expand_builtin_va_start (valist, nextarg);
}
|
2287 |
|
|
|
2288 |
|
|
/* This is how to output a definition of an internal numbered label where
|
2289 |
|
|
PREFIX is the class of label and NUM is the number within the class. */
|
2290 |
|
|
|
2291 |
|
|
static void
arc_internal_label (FILE *stream, const char *prefix, unsigned long labelno)
{
  /* Let the ccfsm machinery notice the label (it may reset conditional
     execution state) before emitting the label itself.  */
  arc_ccfsm_at_label (prefix, labelno);
  default_internal_label (stream, prefix, labelno);
}
|
2297 |
|
|
|
2298 |
|
|
/* Worker function for TARGET_ASM_EXTERNAL_LIBCALL. */
|
2299 |
|
|
|
2300 |
|
|
static void
arc_external_libcall (rtx fun ATTRIBUTE_UNUSED)
{
#if 0
  /* On the ARC we want to have libgcc's for multiple cpus in one binary.
     We can't use `assemble_name' here as that will call ASM_OUTPUT_LABELREF
     and we'll get another suffix added on if -mmangle-cpu.  */
  /* NOTE(review): this disabled code references FILE and SYMREF, which are
     not the parameters of this function; it would not compile as-is if
     re-enabled (presumably FILE -> asm_out_file and SYMREF -> fun).  */
  if (TARGET_MANGLE_CPU_LIBGCC)
    {
      fprintf (FILE, "\t.rename\t_%s, _%s%s\n",
	       XSTR (SYMREF, 0), XSTR (SYMREF, 0),
	       arc_mangle_suffix);
    }
#endif
}
|
2315 |
|
|
|
2316 |
|
|
/* Worker function for TARGET_RETURN_IN_MEMORY. */
|
2317 |
|
|
|
2318 |
|
|
static bool
|
2319 |
|
|
arc_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
|
2320 |
|
|
{
|
2321 |
|
|
if (AGGREGATE_TYPE_P (type))
|
2322 |
|
|
return true;
|
2323 |
|
|
else
|
2324 |
|
|
{
|
2325 |
|
|
HOST_WIDE_INT size = int_size_in_bytes (type);
|
2326 |
|
|
return (size == -1 || size > 8);
|
2327 |
|
|
}
|
2328 |
|
|
}
|
2329 |
|
|
|
2330 |
|
|
/* For ARC, All aggregates and arguments greater than 8 bytes are
|
2331 |
|
|
passed by reference. */
|
2332 |
|
|
|
2333 |
|
|
static bool
|
2334 |
|
|
arc_pass_by_reference (CUMULATIVE_ARGS *ca ATTRIBUTE_UNUSED,
|
2335 |
|
|
enum machine_mode mode, const_tree type,
|
2336 |
|
|
bool named ATTRIBUTE_UNUSED)
|
2337 |
|
|
{
|
2338 |
|
|
unsigned HOST_WIDE_INT size;
|
2339 |
|
|
|
2340 |
|
|
if (type)
|
2341 |
|
|
{
|
2342 |
|
|
if (AGGREGATE_TYPE_P (type))
|
2343 |
|
|
return true;
|
2344 |
|
|
size = int_size_in_bytes (type);
|
2345 |
|
|
}
|
2346 |
|
|
else
|
2347 |
|
|
size = GET_MODE_SIZE (mode);
|
2348 |
|
|
|
2349 |
|
|
return size > 8;
|
2350 |
|
|
}
|
2351 |
|
|
|
2352 |
|
|
/* Trampolines. */
|
2353 |
|
|
/* ??? This doesn't work yet because GCC will use as the address of a nested
|
2354 |
|
|
function the address of the trampoline. We need to use that address
|
2355 |
|
|
right shifted by 2. It looks like we'll need PSImode after all. :-(
|
2356 |
|
|
|
2357 |
|
|
??? The above comment sounds like it's doable via
|
2358 |
|
|
TARGET_TRAMPOLINE_ADJUST_ADDRESS; no PSImode needed.
|
2359 |
|
|
|
2360 |
|
|
On the ARC, the trampoline is quite simple as we have 32-bit immediate
|
2361 |
|
|
constants.
|
2362 |
|
|
|
2363 |
|
|
mov r24,STATIC
|
2364 |
|
|
j.nd FUNCTION
|
2365 |
|
|
*/
|
2366 |
|
|
|
2367 |
|
|
static void
arc_trampoline_init (rtx m_tramp, tree fndecl, rtx chain_value)
{
  rtx fnaddr = XEXP (DECL_RTL (fndecl), 0);
  rtx mem;

  /* Word 0: opcode for the static-chain load (per the comment above,
     "mov r24,STATIC") -- presumably 0x631f7c00 encodes it; the 32-bit
     immediate operand follows in word 1.  TODO confirm encoding.  */
  mem = adjust_address (m_tramp, SImode, 0);
  emit_move_insn (mem, GEN_INT (0x631f7c00));

  /* Word 1: the static chain value itself.  */
  mem = adjust_address (m_tramp, SImode, 4);
  emit_move_insn (mem, chain_value);

  /* Word 2: opcode for the jump ("j.nd FUNCTION") -- presumably
     0x381f0000; the target address follows in word 3.  */
  mem = adjust_address (m_tramp, SImode, 8);
  emit_move_insn (mem, GEN_INT (0x381f0000));

  /* Word 3: the function's entry address.  */
  mem = adjust_address (m_tramp, SImode, 12);
  emit_move_insn (mem, fnaddr);

  /* The trampoline is code: flush the icache over its address range.  */
  emit_insn (gen_flush_icache (m_tramp));
}
|